Many problems in machine learning (ML) can be cast as learning functions from
sets to graphs, or more generally to hypergraphs; in short, Set2Graph
functions. Examples include clustering, learning vertex and edge features on
graphs, and learning triplet data in a collection. Current neural network
models that approximate Set2Graph functions come from two main ML sub-fields:
equivariant learning, and similarity learning. Equivariant models would be in
general computationally challenging or even infeasible, while similarity
learning models can be shown to have limited expressive power. In this paper we
suggest a neural network model family for learning Set2Graph functions that is
both practical and of maximal expressive power (universal), that is, can
approximate arbitrary continuous Set2Graph functions over compact sets. Testing
our models on different machine learning tasks, including an application to
particle physics, we find them favorable to existing baselines.
%0 Journal Article
%1 serviansky2020set2graph
%A Serviansky, Hadar
%A Segol, Nimrod
%A Shlomi, Jonathan
%A Cranmer, Kyle
%A Gross, Eilam
%A Maron, Haggai
%A Lipman, Yaron
%D 2020
%K deep-learning graphs sets
%T Set2Graph: Learning Graphs From Sets
%U http://arxiv.org/abs/2002.08772
%X Many problems in machine learning (ML) can be cast as learning functions from
sets to graphs, or more generally to hypergraphs; in short, Set2Graph
functions. Examples include clustering, learning vertex and edge features on
graphs, and learning triplet data in a collection. Current neural network
models that approximate Set2Graph functions come from two main ML sub-fields:
equivariant learning, and similarity learning. Equivariant models would be in
general computationally challenging or even infeasible, while similarity
learning models can be shown to have limited expressive power. In this paper we
suggest a neural network model family for learning Set2Graph functions that is
both practical and of maximal expressive power (universal), that is, can
approximate arbitrary continuous Set2Graph functions over compact sets. Testing
our models on different machine learning tasks, including an application to
particle physics, we find them favorable to existing baselines.
Review note (text outside an @entry is ignored by BibTeX): this is an arXiv
preprint with no journal, so the entry type is @misc with explicit
eprint/archiveprefix fields rather than @article (which would warn about the
missing required `journal` field). "Set2Graph" in the title is braced so
sentence-casing styles preserve its capitalisation.
@misc{serviansky2020set2graph,
  abstract      = {Many problems in machine learning (ML) can be cast as learning functions from
sets to graphs, or more generally to hypergraphs; in short, Set2Graph
functions. Examples include clustering, learning vertex and edge features on
graphs, and learning triplet data in a collection. Current neural network
models that approximate Set2Graph functions come from two main ML sub-fields:
equivariant learning, and similarity learning. Equivariant models would be in
general computationally challenging or even infeasible, while similarity
learning models can be shown to have limited expressive power. In this paper we
suggest a neural network model family for learning Set2Graph functions that is
both practical and of maximal expressive power (universal), that is, can
approximate arbitrary continuous Set2Graph functions over compact sets. Testing
our models on different machine learning tasks, including an application to
particle physics, we find them favorable to existing baselines.},
  added-at      = {2020-02-24T22:01:29.000+0100},
  archiveprefix = {arXiv},
  author        = {Serviansky, Hadar and Segol, Nimrod and Shlomi, Jonathan and Cranmer, Kyle and Gross, Eilam and Maron, Haggai and Lipman, Yaron},
  biburl        = {https://www.bibsonomy.org/bibtex/2150ebd50f57e99681ff0d36390e653c9/kirk86},
  description   = {[2002.08772] Set2Graph: Learning Graphs From Sets},
  eprint        = {2002.08772},
  interhash     = {f81fcc51e59f775d08732043f34967d8},
  intrahash     = {150ebd50f57e99681ff0d36390e653c9},
  keywords      = {deep-learning graphs sets},
  note          = {cite arxiv:2002.08772},
  timestamp     = {2020-02-24T22:01:29.000+0100},
  title         = {{Set2Graph}: Learning Graphs From Sets},
  url           = {http://arxiv.org/abs/2002.08772},
  year          = {2020}
}