def __init__(self, model_id, name="MLPGraphNetwork"):
    """Graph network core whose edge/node/global models are MLPs.

    Layer counts and widths are read from class attributes of
    EncodeProcessDecode_v5_no_skip_batch_norm; `model_id` selects the MLP
    factory via get_model_from_config.
    """
    super(MLPGraphNetwork, self).__init__(name=name)

    def _edge_model():
        # Called lazily by GraphNetwork when it builds its edge block.
        return get_model_from_config(model_id, model_type="mlp")(
            n_neurons=EncodeProcessDecode_v5_no_skip_batch_norm.n_neurons_edges,
            n_layers=EncodeProcessDecode_v5_no_skip_batch_norm.n_layers_edges,
            output_size=None,
            typ="mlp_layer_norm",
            name="mlp_core_edge")

    def _node_model():
        # Node model skips the final activation (activation_final=False).
        return get_model_from_config(model_id, model_type="mlp")(
            n_neurons=EncodeProcessDecode_v5_no_skip_batch_norm.n_neurons_nodes,
            n_layers=EncodeProcessDecode_v5_no_skip_batch_norm.n_layers_nodes,
            output_size=None,
            typ="mlp_layer_norm",
            activation_final=False,
            name="mlp_core_node")

    def _global_model():
        return get_model_from_config(model_id, model_type="mlp")(
            n_neurons=EncodeProcessDecode_v5_no_skip_batch_norm.n_neurons_globals,
            n_layers=EncodeProcessDecode_v5_no_skip_batch_norm.n_layers_globals,
            output_size=None,
            typ="mlp_layer_norm",
            name="mlp_core_global")

    # Sonnet v1 idiom: create sub-modules inside this module's variable scope.
    with self._enter_variable_scope():
        self._network = modules.GraphNetwork(
            edge_model_fn=_edge_model,
            node_model_fn=_node_model,
            global_model_fn=_global_model)
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             EncodeProcessDecode_v2.make_mlp_model_edges,
             EncodeProcessDecode_v2.make_mlp_model,
             EncodeProcessDecode_v2.make_mlp_model)
# Esempio n. 3
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             edge_model_fn=make_mlp_model_small,
             node_model_fn=make_mlp_model,
             global_model_fn=make_mlp_model_small)
# Esempio n. 4
 def __init__(self, model_id, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             edge_model_fn=lambda: get_model_from_config(model_id,
                                                         model_type="mlp")
             (n_neurons=EncodeProcessDecode_v3_172_latent_dim2.
              n_neurons_edges,
              n_layers=EncodeProcessDecode_v3_172_latent_dim2.
              n_layers_edges,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_edge"),
             node_model_fn=lambda: get_model_from_config(model_id,
                                                         model_type="mlp")
             (n_neurons=EncodeProcessDecode_v3_172_latent_dim2.
              n_neurons_nodes_total_dim,
              n_layers=EncodeProcessDecode_v3_172_latent_dim2.
              n_layers_nodes,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_node"),
             global_model_fn=lambda: get_model_from_config(model_id,
                                                           model_type="mlp")
             (n_neurons=EncodeProcessDecode_v3_172_latent_dim2.
              n_neurons_globals,
              n_layers=EncodeProcessDecode_v3_172_latent_dim2.
              n_layers_globals,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_global"))
 def __init__(self, model_id, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             edge_model_fn=lambda: get_model_from_config(model_id,
                                                         model_type="mlp")
             (n_neurons=EncodeProcessDecode_v5_512_improve_shapes_exp3.
              n_neurons_edges,
              n_layers=EncodeProcessDecode_v5_512_improve_shapes_exp3.
              n_layers_edges,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_edge"),
             node_model_fn=lambda: get_model_from_config(model_id,
                                                         model_type="mlp")
             (
                 n_neurons=EncodeProcessDecode_v5_512_improve_shapes_exp3.
                 n_neurons_nodes,
                 n_layers=EncodeProcessDecode_v5_512_improve_shapes_exp3.
                 n_layers_nodes,
                 output_size=EncodeProcessDecode_v5_512_improve_shapes_exp3.
                 n_neurons_nodes_total_dim,
                 typ="mlp_transform",  # todo: was earlier "mlp_layer_norm"
                 activation_final=False,
                 name="mlp_core_node"),
             global_model_fn=lambda: get_model_from_config(model_id,
                                                           model_type="mlp")
             (n_neurons=EncodeProcessDecode_v5_512_improve_shapes_exp3.
              n_neurons_globals,
              n_layers=EncodeProcessDecode_v5_512_improve_shapes_exp3.
              n_layers_globals,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_global"))
# Esempio n. 6
    def __init__(self,
                 latent_sizes_edge,
                 latent_sizes_node,
                 latent_sizes_global,
                 name="MLPGraphNetwork"):
        """MLP GraphNetwork with LayerNorm on the edge and node outputs.

        Edge and node models are MLP -> LayerNorm (final layer activated);
        the global model is a plain MLP with no final activation. Block
        wiring comes from the module-level *_BLOCK_OPT dicts.
        """
        super(MLPGraphNetwork, self).__init__(name=name)

        def _normed_mlp(sizes):
            # Activated MLP followed by layer normalisation.
            return snt.Sequential(
                [snt.nets.MLP(sizes, activate_final=True),
                 snt.LayerNorm()])

        self.edge_fun = lambda: _normed_mlp(latent_sizes_edge)
        self.node_fun = lambda: _normed_mlp(latent_sizes_node)
        self.global_fun = lambda: snt.Sequential(
            [snt.nets.MLP(latent_sizes_global, activate_final=False)])

        with self._enter_variable_scope():
            self._network = modules.GraphNetwork(
                self.edge_fun,
                self.node_fun,
                self.global_fun,
                edge_block_opt=EDGE_BLOCK_OPT,
                node_block_opt=NODE_BLOCK_OPT,
                global_block_opt=GLOBAL_BLOCK_OPT)
# Esempio n. 7
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = \
             modules.GraphNetwork(make_mlp_model, make_mlp_model,
                 make_mlp_model,
                 global_block_opt={"use_edges":False,"use_nodes":False})
 def test_incompatible_higher_rank_partial_outputs_raises(self):
   """An error should be raised if partial outputs have incompatible shapes."""
   input_graph = self._get_shaped_input_graph()
   edge_model_fn, node_model_fn, global_model_fn = self._get_shaped_model_fns()
   # A strided Conv2D changes the spatial shape of its output relative to the
   # other models' outputs, so merging the partial results must fail.
   edge_model_fn_2 = functools.partial(
       snt.Conv2D, output_channels=10, kernel_shape=[3, 3], stride=[1, 2])
   graph_network = modules.GraphNetwork(
       edge_model_fn_2, node_model_fn, global_model_fn)
   with self.assertRaisesRegexp(ValueError, "in both shapes must be equal"):
     graph_network(input_graph)
   # Same check with the incompatible model on the node side.
   node_model_fn_2 = functools.partial(
       snt.Conv2D, output_channels=10, kernel_shape=[3, 3], stride=[1, 2])
   graph_network = modules.GraphNetwork(
       edge_model_fn, node_model_fn_2, global_model_fn)
   with self.assertRaisesRegexp(ValueError, "in both shapes must be equal"):
     graph_network(input_graph)
# Esempio n. 9
 def __init__(self, latent_size, num_layers, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             make_mlp_model(latent_size, num_layers),
             make_mlp_model(latent_size, num_layers),
             make_mlp_model(latent_size, num_layers))
# Esempio n. 10
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     self._network = modules.GraphNetwork(
         edge_model_fn=make_mlp_model,
         node_model_fn=make_mlp_model,
         global_model_fn=make_mlp_model
         )
# Esempio n. 11
 def __init__(self, node_layer_s, edge_layer_s, globals_layer_s, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(node_model_fn=make_mlp(node_layer_s),
                                        edge_model_fn=make_mlp(edge_layer_s),
                                        global_model_fn=make_mlp(globals_layer_s),
                                        reducer = tf.math.unsorted_segment_mean)
# Esempio n. 12
  def test_same_as_subblocks(self, reducer):
    """Compares the GraphNetwork output to explicit subblock outputs.

    Args:
      reducer: The reducer used in the `NodeBlock` and `GlobalBlock`.
    """
    input_graph = self._get_input_graph()

    graph_network = modules.GraphNetwork(
        edge_model_fn=functools.partial(snt.Linear, output_size=5),
        node_model_fn=functools.partial(snt.Linear, output_size=10),
        global_model_fn=functools.partial(snt.Linear, output_size=15),
        reducer=reducer)
    output_graph = graph_network(input_graph)

    # Rebuild the three blocks by hand, sharing the GraphNetwork's own models
    # so both pipelines use identical variables.
    edge_block = blocks.EdgeBlock(
        edge_model_fn=lambda: graph_network._edge_block._edge_model,
        use_edges=True,
        use_receiver_nodes=True,
        use_sender_nodes=True,
        use_globals=True)
    node_block = blocks.NodeBlock(
        node_model_fn=lambda: graph_network._node_block._node_model,
        use_received_edges=True,
        use_sent_edges=False,
        use_nodes=True,
        use_globals=True,
        received_edges_reducer=reducer)
    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: graph_network._global_block._global_model,
        use_edges=True,
        use_nodes=True,
        use_globals=True,
        edges_reducer=reducer,
        nodes_reducer=reducer)

    # Chain the blocks exactly as GraphNetwork does: edge -> node -> global.
    after_edges = edge_block(input_graph)
    after_nodes = node_block(after_edges)
    after_globals = global_block(after_nodes)

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (output_graph_out, expected_edges_out, expected_nodes_out,
       expected_globals_out) = sess.run(
           (output_graph, after_edges.edges, after_nodes.nodes,
            after_globals.globals))

    self._assert_all_none_or_all_close(expected_edges_out,
                                       output_graph_out.edges)
    self._assert_all_none_or_all_close(expected_nodes_out,
                                       output_graph_out.nodes)
    self._assert_all_none_or_all_close(expected_globals_out,
                                       output_graph_out.globals)
# Esempio n. 13
  def test_edge_block_options(self,
                              use_edges,
                              use_receiver_nodes,
                              use_sender_nodes,
                              use_globals):
    """Test for configuring the EdgeBlock options."""
    input_graph = self._get_input_graph()

    # Only the edge block does real work; node and global models are the
    # identity and their blocks consume only their own field.
    graph_network = modules.GraphNetwork(
        edge_model_fn=functools.partial(snt.Linear, output_size=10),
        edge_block_opt={"use_edges": use_edges,
                        "use_receiver_nodes": use_receiver_nodes,
                        "use_sender_nodes": use_sender_nodes,
                        "use_globals": use_globals},
        node_model_fn=lambda: tf.identity,
        node_block_opt={"use_received_edges": False,
                        "use_sent_edges": False,
                        "use_nodes": True,
                        "use_globals": False},
        global_model_fn=lambda: tf.identity,
        global_block_opt={"use_globals": True,
                          "use_nodes": False,
                          "use_edges": False},
        reducer=tf.math.unsorted_segment_sum)
    output_graph = graph_network(input_graph)

    # A standalone EdgeBlock sharing the same model must match exactly; the
    # identity node/global blocks leave nodes and globals untouched.
    edge_block = blocks.EdgeBlock(
        edge_model_fn=lambda: graph_network._edge_block._edge_model,
        use_edges=use_edges,
        use_receiver_nodes=use_receiver_nodes,
        use_sender_nodes=use_sender_nodes,
        use_globals=use_globals)
    expected_graph = edge_block(input_graph)

    with self.test_session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      (output_graph_out, expected_edges_out, expected_nodes_out,
       expected_globals_out) = sess.run(
           (output_graph, expected_graph.edges, expected_graph.nodes,
            expected_graph.globals))

    self.assertAllEqual(expected_edges_out, output_graph_out.edges)
    self.assertAllEqual(expected_nodes_out, output_graph_out.nodes)
    self.assertAllEqual(expected_globals_out, output_graph_out.globals)
# Esempio n. 14
 def _get_model(self):
   """Returns a GraphNetwork with Linear models of output sizes 5/10/15."""
   linear_fn = lambda size: functools.partial(snt.Linear, output_size=size)
   return modules.GraphNetwork(
       edge_model_fn=linear_fn(5),
       node_model_fn=linear_fn(10),
       global_model_fn=linear_fn(15))
# Esempio n. 15
 def test_incompatible_higher_rank_inputs_raises(self, field_to_reshape):
   """An exception should be raised if the inputs have incompatible shapes."""
   input_graph = self._get_shaped_input_graph()
   edge_model_fn, node_model_fn, global_model_fn = self._get_shaped_model_fns()
   # Transpose one field so its inner dimensions disagree with the others;
   # the GraphNetwork blocks must then fail when combining the fields.
   input_graph = input_graph.map(
       lambda v: tf.transpose(v, [0, 2, 1, 3]), [field_to_reshape])
   graph_network = modules.GraphNetwork(
       edge_model_fn, node_model_fn, global_model_fn)
   with self.assertRaisesRegexp(ValueError, "in both shapes must be equal"):
     graph_network(input_graph)
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     #with self._enter_variable_scope():
     if 2 > 1:
         self._network = modules.GraphNetwork(
             make_mlp_model('edge'),
             make_mlp_model('node'),
             make_mlp_model('global'),
             #name=name
         )
# Esempio n. 17
    def __init__(self,
                 latent_size=16,
                 num_layers=2,
                 global_block=True,
                 last_round=False,
                 name="MLPGraphNetwork"):
        """MLP GraphNetwork core, optionally without a real global block.

        Args:
          latent_size: width of each MLP layer.
          num_layers: number of layers per MLP model.
          global_block: if True, all blocks consume globals and a real global
            model is used; otherwise the global model is the identity and
            globals are ignored everywhere.
          last_round: if True, the edge model is built with
            last_round_edges=True.
          name: Sonnet module name.
        """
        super(MLPGraphNetwork, self).__init__(name=name)

        mlp_fn = partial(make_mlp_model,
                         latent_size=latent_size,
                         num_layers=num_layers,
                         last_round_edges=False)
        if last_round:
            edge_mlp_fn = partial(make_mlp_model,
                                  latent_size=latent_size,
                                  num_layers=num_layers,
                                  last_round_edges=True)
        else:
            edge_mlp_fn = mlp_fn

        with self.name_scope:
            if global_block:
                self._network = modules.GraphNetwork(
                    edge_mlp_fn,
                    mlp_fn,
                    mlp_fn,
                    edge_block_opt={"use_globals": True},
                    node_block_opt={"use_globals": True},
                    global_block_opt={
                        "use_globals": True,
                        # NOTE(review): TF1-style deprecated aliases kept
                        # as-is to preserve behavior.
                        "edges_reducer": tf.unsorted_segment_mean,
                        "nodes_reducer": tf.unsorted_segment_mean,
                    })
            else:
                self._network = modules.GraphNetwork(
                    edge_mlp_fn,
                    mlp_fn,
                    make_identity_model,
                    edge_block_opt={"use_globals": False},
                    node_block_opt={"use_globals": False},
                    global_block_opt={"use_globals": False})
# Esempio n. 18
  def __init__(self,
               n_recurrences,
               mlp_sizes,
               mlp_kwargs = None,
               name='Graph'):
    """Creates a new GraphBasedModel object.

    Args:
      n_recurrences: the number of message passing steps in the graph network.
      mlp_sizes: the number of neurons in each layer of the MLP.
      mlp_kwargs: additional keyword arguments passed to the MLP.
      name: the name of the Sonnet module.
    """
    super(GraphBasedModel, self).__init__(name=name)
    self._n_recurrences = n_recurrences

    if mlp_kwargs is None:
      mlp_kwargs = {}

    # Shared MLP factory for the encoder, the core, and the decoder edges.
    model_fn = functools.partial(
        snt.nets.MLP,
        output_sizes=mlp_sizes,
        activate_final=True,
        **mlp_kwargs)

    # Decoder node model: appends a single-unit layer with no final
    # activation, i.e. one scalar output per node.
    final_model_fn = functools.partial(
        snt.nets.MLP,
        output_sizes=mlp_sizes + (1,),
        activate_final=False,
        **mlp_kwargs)

    with self._enter_variable_scope():
      self._encoder = gn_modules.GraphIndependent(
          node_model_fn=model_fn,
          edge_model_fn=model_fn)

      # Message-passing core; only built when at least one step is requested.
      if self._n_recurrences > 0:
        self._propagation_network = gn_modules.GraphNetwork(
            node_model_fn=model_fn,
            edge_model_fn=model_fn,
            # We do not use globals, hence we just pass the identity function.
            global_model_fn=lambda: lambda x: x,
            reducer=tf.unsorted_segment_sum,
            edge_block_opt=dict(use_globals=False),
            node_block_opt=dict(use_globals=False),
            global_block_opt=dict(use_globals=False))

      self._decoder = gn_modules.GraphIndependent(
          node_model_fn=final_model_fn,
          edge_model_fn=model_fn)
# Esempio n. 19
 def _build(self, graphs):
     """Builds a linear GraphNetwork transformer and applies it to `graphs`."""
     with self._enter_variable_scope():
         # One dense (linear + activation) model per role, each configured
         # from the instance's dimension/activation attributes.
         edge_fn = self._build_linear(self.edge_dimension,
                                      self.edge_activation, "edge-dense")
         node_fn = self._build_linear(self.node_dimension,
                                      self.node_activation, "node-dense")
         global_fn = self._build_linear(self.global_dimension,
                                        self.global_activation, "global-dense")
         self.graph_transformer = modules.GraphNetwork(
             edge_model_fn=edge_fn,
             node_model_fn=node_fn,
             global_model_fn=global_fn,
             node_block_opt={"use_sent_edges": True},
             reducer=self.reducer,
             name="graph_transformer")
     return self.graph_transformer(graphs)
# Esempio n. 20
 def __init__(
     self,
     # for simplicity, all layers have the same size and the edge,
     # node and global models use the same structure
     latent_size,
     num_layers,
     name="MLPGraphNetwork",
 ):
     """GraphNetwork whose three models share a single MLP factory."""
     super(MLPGraphNetwork, self).__init__(name=name)
     # Bind the hyperparameters once; the same factory serves every role.
     shared_fn = partial(make_mlp_model,
                         latent_size=latent_size,
                         num_layers=num_layers)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(
             edge_model_fn=shared_fn,
             node_model_fn=shared_fn,
             global_model_fn=shared_fn,
         )
# Esempio n. 21
  def test_created_variables(self, name=None):
    """Verifies variable names and shapes created by a GraphNetwork."""
    name = name if name is not None else "graph_network"
    # One single-layer MLP per block; each weight's input size is written as
    # the sum of the concatenated feature sizes that block consumes.
    expected_var_shapes_dict = {
        name + "/edge_block/mlp/linear_0/b:0": [5],
        name + "/edge_block/mlp/linear_0/w:0": [4 + 4 + 3, 5],
        name + "/node_block/mlp/linear_0/b:0": [10],
        name + "/node_block/mlp/linear_0/w:0": [5 + 2 + 3, 10],
        name + "/global_block/mlp/linear_0/b:0": [15],
        name + "/global_block/mlp/linear_0/w:0": [10 + 5 + 3, 15],
    }
    input_graph = self._get_input_graph()

    mlp_fn = lambda sizes: functools.partial(snt.nets.MLP, output_sizes=sizes)
    extra_kwargs = {"name": name} if name else {}
    model = modules.GraphNetwork(
        edge_model_fn=mlp_fn([5]),
        node_model_fn=mlp_fn([10]),
        global_model_fn=mlp_fn([15]),
        **extra_kwargs)
    model(input_graph)

    variables = model.get_variables()
    actual_var_shapes = {
        var.name: var.get_shape().as_list() for var in variables}
    self.assertDictEqual(expected_var_shapes_dict, actual_var_shapes)
# Esempio n. 22
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     self._network = modules.GraphNetwork(make_mlp_model, make_mlp_model,
                                          make_mlp_model)
# Esempio n. 23
 def __init__(self, name="MLPGraphNetwork"):
     super(MLPGraphNetwork, self).__init__(name=name)
     with self._enter_variable_scope():
         self._network = modules.GraphNetwork(make_mlp_model,
                                              make_mlp_model,
                                              make_mlp_model)
# Esempio n. 24
    def __init__(self,
                 edge_output_size=None,
                 node_output_size=None,
                 global_output_size=None,
                 edge_layer_activation=tf.nn.relu,
                 node_layer_activation=tf.nn.relu,
                 global_layer_activation=tf.nn.relu,
                 last_edge_layer_activation=tf.nn.softmax,
                 last_node_layer_activation=tf.nn.softmax,
                 last_global_layer_activation=tf.keras.activations.linear,
                 edge_vocab_size=20,
                 edge_embed_dim=100,
                 node_vocab_size=1000,
                 node_embed_dim=100,
                 name="GraphAttention"):
        """Graph network intended for NLP problems.

        :param edge_output_size: per-edge output vector size; if None, edges
                                 get no output transform.
        :param node_output_size: per-node output vector size; if None, nodes
                                 get no output transform.
        :param global_output_size: global output vector size; if None, the
                                   globals get no output transform.
        :param edge_layer_activation: activation for edge layers. ReLU by
                                      default.
        :param node_layer_activation: activation for node layers. ReLU by
                                      default.
        :param global_layer_activation: activation for global layers. ReLU by
                                        default.
        :param last_edge_layer_activation: output-layer activation for edges.
                                           SoftMax by default.
        :param last_node_layer_activation: output-layer activation for nodes.
                                           SoftMax by default.
        :param last_global_layer_activation: output-layer activation for the
                                             global features. Linear by
                                             default.
        :param edge_vocab_size: edge vocabulary size, for a non-pretrained
                                embedding.
        :param edge_embed_dim: edge embedding dimension, for a non-pretrained
                               embedding.
        :param node_vocab_size: node vocabulary size, for a non-pretrained
                                embedding.
        :param node_embed_dim: node embedding dimension, for a non-pretrained
                               embedding.
        :param name: the name of the network.
        """
        super(SimpleGraphAttention, self).__init__(name=name)

        # Hyperparameters consumed by the *_model_fn members defined
        # elsewhere in this class.
        self.edge_layer_activation = edge_layer_activation
        self.node_layer_activation = node_layer_activation
        self.global_layer_activation = global_layer_activation
        self.edge_vocab_size = edge_vocab_size
        self.edge_embed_dim = edge_embed_dim
        self.node_vocab_size = node_vocab_size
        self.node_embed_dim = node_embed_dim

        self._encoder = Encoder()

        self._network = graph_net_modules.GraphNetwork(
            edge_model_fn=self.edge_model_fn,
            node_model_fn=self.node_model_fn,
            global_model_fn=self.global_model_fn,
            reducer=tf.unsorted_segment_sum)

        # Output heads transform each field to its requested size; a None
        # size disables that head entirely.
        def _head(output_size, activation):
            if output_size is None:
                return None
            return lambda: sonnet_nets.ActivatedLinear(output_size, activation)

        with self._enter_variable_scope():
            self._output_transform = graph_net_modules.GraphIndependent(
                _head(edge_output_size, last_edge_layer_activation),
                _head(node_output_size, last_node_layer_activation),
                _head(global_output_size, last_global_layer_activation))
# Esempio n. 25

# Inspect the constructed GraphsTuple and round-trip it back to data dicts.
print_graphs_tuple(graphs_tuple)

recovered_data_dict_list = utils_np.graphs_tuple_to_data_dicts(graphs_tuple)

#print(recovered_data_dict_list)

# TF1-style: clear the default graph before building the network ops.
tf.reset_default_graph()

graph_dicts = data_dict_list
# Output feature widths for each field of the transformed graph.
OUTPUT_EDGE_SIZE = 10
OUTPUT_NODE_SIZE = 11
OUTPUT_GLOBAL_SIZE = 12
# One linear layer per field; each lambda builds a fresh snt.Linear when the
# GraphNetwork instantiates its blocks.
graph_network = modules.GraphNetwork(
    edge_model_fn=lambda: snt.Linear(output_size=OUTPUT_EDGE_SIZE),
    node_model_fn=lambda: snt.Linear(output_size=OUTPUT_NODE_SIZE),
    global_model_fn=lambda: snt.Linear(output_size=OUTPUT_GLOBAL_SIZE))

input_graphs = utils_tf.data_dicts_to_graphs_tuple(graph_dicts)

print(len(graph_dicts))
print(graph_dicts)
print(input_graphs)
# NOTE(review): in TF1 graph mode output_graphs.globals is a symbolic tensor
# here — it is printed, not evaluated.
output_graphs = graph_network(input_graphs)
print(output_graphs.globals)

#print_graphs_tuple(output_graphs)


def zeros_graph(sample_graph, edge_size, node_size, global_size):
    """Returns a graph with the same topology as `sample_graph` but with all
    edge, node and global features replaced by zeros of the given sizes.

    Args:
      sample_graph: a `GraphsTuple` providing the connectivity to copy.
      edge_size: feature size for the zeroed edge features.
      node_size: feature size for the zeroed node features.
      global_size: feature size for the zeroed global features.

    Returns:
      A `GraphsTuple` with zero-valued features.
    """
    # Strip the existing features, then fill each field with zeros.
    # NOTE(review): the original body stopped after `replace` and returned
    # None; this completes the standard graph_nets zeros-graph helper.
    zeros_graphs = sample_graph.replace(nodes=None, edges=None, globals=None)
    zeros_graphs = utils_tf.set_zero_edge_features(zeros_graphs, edge_size)
    zeros_graphs = utils_tf.set_zero_node_features(zeros_graphs, node_size)
    zeros_graphs = utils_tf.set_zero_global_features(zeros_graphs, global_size)
    return zeros_graphs
Esempio n. 26
0
    def __init__(self,
                 num_processing_steps=None,
                 latent_size=None,
                 n_layers=None,
                 edge_output_size=None,
                 node_output_size=None,
                 global_output_size=None,
                 reducer=None,
                 out_init_scale=5.0,
                 name="AggregationNet"):
        """Builds encoder, message-passing core, decoder and output heads.

        Args:
          num_processing_steps: list of per-step hop counts; defaults to ten
            1-hop steps. The list length sets the number of processing steps.
          latent_size: width of every MLP layer (default 16).
          n_layers: number of layers per MLP (default 2).
          edge_output_size: if not None, size of a final linear edge head.
          node_output_size: if not None, size of a final linear node head.
          global_output_size: if not None, size of a final linear global head.
          reducer: None/'max', 'logsumexp', 'softmax' or 'sum' — selects the
            segment-reduction used to aggregate messages.
          out_init_scale: scale of the orthogonal init for the output heads.
          name: module name.

        Raises:
          ValueError: if `reducer` is not one of the recognised options.
        """
        super(AggregationDiffNet, self).__init__(name=name)

        if num_processing_steps is None:
            self._proc_hops = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
        else:
            self._proc_hops = num_processing_steps

        # Map the reducer spec onto the actual segment-reduction op.
        if reducer is None or reducer == 'max':
            reducer = unsorted_segment_max_or_zero
        elif reducer == 'logsumexp':
            reducer = segment_logsumexp
        elif reducer == 'softmax':
            reducer = segment_transformer
        elif reducer == 'sum':
            reducer = tf.math.unsorted_segment_sum
        else:
            # Fixed typo ("Unkown") for consistency with the sibling class.
            raise ValueError('Unknown reducer!')

        if latent_size is None:
            latent_size = 16

        if n_layers is None:
            n_layers = 2

        self._num_processing_steps = len(self._proc_hops)
        # Width of the per-step latents when stacked across all steps.
        self._n_stacked = latent_size * self._num_processing_steps

        def make_mlp():
            # Shared constructor for every MLP used by the modules below.
            return snt.nets.MLP([latent_size] * n_layers, activate_final=True)

        # Core message-passing network; globals and sent edges are not
        # consumed by the edge/node blocks.
        self._core = modules.GraphNetwork(
            edge_model_fn=make_mlp,
            node_model_fn=make_mlp,
            global_model_fn=make_mlp,
            edge_block_opt={'use_globals': False},
            node_block_opt={
                'use_globals': False,
                'use_sent_edges': False
            },
            name="graph_net",
            reducer=reducer)

        self._encoder = modules.GraphIndependent(make_mlp,
                                                 make_mlp,
                                                 make_mlp,
                                                 name="encoder")
        self._decoder = modules.GraphIndependent(make_mlp,
                                                 make_mlp,
                                                 make_mlp,
                                                 name="decoder")

        inits = {
            'w': ortho_init(out_init_scale),
            'b': tf.constant_initializer(0.0)
        }

        # Transforms the outputs into the appropriate shapes; a head is
        # omitted (None) when its output size was not requested.
        edge_fn = None if edge_output_size is None else lambda: snt.Linear(
            edge_output_size, initializers=inits, name="edge_output")
        node_fn = None if node_output_size is None else lambda: snt.Linear(
            node_output_size, initializers=inits, name="node_output")
        global_fn = None if global_output_size is None else lambda: snt.Linear(
            global_output_size, initializers=inits, name="global_output")

        with self._enter_variable_scope():
            self._output_transform = modules.GraphIndependent(edge_fn,
                                                              node_fn,
                                                              global_fn,
                                                              name="output")
Esempio n. 27
0
graph_dicts = [
    graph_3_nodes_4_edges, graph_5_nodes_8_edges, graph_7_nodes_13_edges,
    graph_9_nodes_25_edges
]

##########
# Connecting a GraphNetwork recurrently
input_graphs = utils_tf.data_dicts_to_graphs_tuple(graph_dicts)

# BUG FIX: GraphNetwork expects zero-argument callables (module
# constructors) for each *_model_fn — it invokes `model_fn()` internally to
# build the block's model. Passing an already-constructed snt.Linear
# instance would call the Linear with no inputs and fail, so wrap each
# constructor in a lambda.
graph_network = modules.GraphNetwork(
    edge_model_fn=lambda: snt.Linear(output_size=EDGE_SIZE),
    node_model_fn=lambda: snt.Linear(output_size=NODE_SIZE),
    global_model_fn=lambda: snt.Linear(output_size=GLOBAL_SIZE))

# Apply the same network repeatedly: the output GraphsTuple of one pass is
# the input of the next, sharing variables across passes.
num_recurrent_passes = 3
previous_graphs = input_graphs
for unused_pass in range(num_recurrent_passes):
    previous_graphs = graph_network(previous_graphs)
    print(previous_graphs.nodes[0])
output_graphs = previous_graphs

tvars = graph_network.trainable_variables
print('')

###############
# broadcast
Esempio n. 28
0
    def __init__(self,
                 num_processing_steps=None,
                 latent_size=None,
                 n_layers=None,
                 edge_output_size=None,
                 node_output_size=None,
                 global_output_size=None,
                 reducer=None,
                 out_init_scale=5.0,
                 name="AggregationNet"):
        """Encode/process/decode graph network with MLP blocks.

        Builds an independent encoder and decoder, an optional
        message-passing core (skipped when the step count is zero), and
        optional linear output heads for edges, nodes and globals.
        """
        super(NonLinearGraphNet, self).__init__(name=name)

        # Resolve the hyper-parameters that were left unspecified.
        self._num_processing_steps = (
            5 if num_processing_steps is None else num_processing_steps)
        latent_size = 16 if latent_size is None else latent_size
        n_layers = 2 if n_layers is None else n_layers

        # Map the reducer spec onto the concrete segment-reduction op.
        if reducer in (None, 'max'):
            reducer = unsorted_segment_max_or_zero
        elif reducer == 'mean':
            reducer = tf.math.unsorted_segment_mean
        elif reducer == 'sum':
            reducer = tf.math.unsorted_segment_sum
        else:
            raise ValueError('Unknown reducer!')

        def build_mlp():
            # Shared constructor for every MLP used by the modules below.
            return snt.nets.MLP([latent_size] * n_layers, activate_final=False)

        if self._num_processing_steps > 0:
            # Message-passing core; the edge/node blocks do not consume
            # globals, and the node block ignores sent edges.
            self._core = modules.GraphNetwork(
                edge_model_fn=build_mlp,
                node_model_fn=build_mlp,
                global_model_fn=build_mlp,
                edge_block_opt={'use_globals': False},
                node_block_opt={
                    'use_globals': False,
                    'use_sent_edges': False
                },
                name="graph_net",
                reducer=reducer)

        self._encoder = modules.GraphIndependent(
            build_mlp, build_mlp, build_mlp, name="encoder")
        self._decoder = modules.GraphIndependent(
            build_mlp, build_mlp, build_mlp, name="decoder")

        output_inits = {
            'w': ortho_init(out_init_scale),
            'b': tf.constant_initializer(0.0)
        }

        def make_head(size, label):
            # Optional linear head reshaping one graph field; None disables it.
            if size is None:
                return None
            return lambda: snt.Linear(
                size, initializers=output_inits, name=label)

        edge_fn = make_head(edge_output_size, "edge_output")
        node_fn = make_head(node_output_size, "node_output")
        global_fn = make_head(global_output_size, "global_output")
        with self._enter_variable_scope():
            self._output_transform = modules.GraphIndependent(
                edge_fn, node_fn, global_fn, name="output")
Esempio n. 29
0
 def test_higher_rank_outputs(self):
   """Checks that a graph net builds and runs with higher-rank features."""
   graph = self._get_shaped_input_graph()
   net = modules.GraphNetwork(*self._get_shaped_model_fns())
   self._assert_build_and_run(net, graph)
Esempio n. 30
0
  def test_global_block_options(self,
                                use_edges,
                                use_nodes,
                                use_globals,
                                edges_reducer,
                                nodes_reducer):
    """Test for configuring the NodeBlock options."""
    # NOTE(review): despite the docstring, this exercises the *global* block
    # options of GraphNetwork — edge and node blocks are forced to identity.
    input_graph = self._get_input_graph()

    # A reducer is only meaningful when the corresponding field is consumed;
    # default to unsorted_segment_sum in that case.
    if use_edges:
      edges_reducer = edges_reducer or tf.unsorted_segment_sum
    if use_nodes:
      nodes_reducer = nodes_reducer or tf.unsorted_segment_sum

    # Identity edge model.
    edge_model_fn = lambda: tf.identity
    edge_block_opt = {"use_edges": True,
                      "use_receiver_nodes": False,
                      "use_sender_nodes": False,
                      "use_globals": False}
    # Identity node model
    node_model_fn = lambda: tf.identity
    node_block_opt = {"use_received_edges": False,
                      "use_sent_edges": False,
                      "use_nodes": True,
                      "use_globals": False}
    # Only the global block carries trainable parameters and the options
    # under test.
    global_model_fn = functools.partial(snt.Linear, output_size=10)
    global_block_opt = {"use_globals": use_globals,
                        "use_nodes": use_nodes,
                        "use_edges": use_edges,
                        "edges_reducer": edges_reducer,
                        "nodes_reducer": nodes_reducer}

    graph_network = modules.GraphNetwork(
        edge_model_fn=edge_model_fn,
        edge_block_opt=edge_block_opt,
        node_model_fn=node_model_fn,
        node_block_opt=node_block_opt,
        global_model_fn=global_model_fn,
        global_block_opt=global_block_opt)

    output_graph = graph_network(input_graph)

    # Reference: a standalone GlobalBlock that *shares* the network's global
    # model variables, so both paths must produce identical outputs.
    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: graph_network._global_block._global_model,
        use_edges=use_edges,
        use_nodes=use_nodes,
        use_globals=use_globals,
        edges_reducer=edges_reducer,
        nodes_reducer=nodes_reducer)

    # With identity edge/node models, edges and nodes pass through unchanged;
    # only the globals are transformed.
    expected_output_edge_block = input_graph
    expected_output_node_block = expected_output_edge_block
    expected_output_global_block = global_block(expected_output_node_block)
    expected_edges = expected_output_edge_block.edges
    expected_nodes = expected_output_node_block.nodes
    expected_globals = expected_output_global_block.globals

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (output_graph_out,
       expected_edges_out, expected_nodes_out, expected_globals_out) = sess.run(
           (output_graph, expected_edges, expected_nodes, expected_globals))

    self._assert_all_none_or_all_close(expected_edges_out,
                                       output_graph_out.edges)
    self._assert_all_none_or_all_close(expected_nodes_out,
                                       output_graph_out.nodes)
    self._assert_all_none_or_all_close(expected_globals_out,
                                       output_graph_out.globals)