Example #1
    def __init__(self, name="DecaySimulator"):
        super(DecaySimulator, self).__init__(name=name)

        self._node_linear = make_mlp_model()
        self._node_rnn = snt.GRU(hidden_size=LATENT_SIZE, name='node_rnn')
        self._node_proper = snt.nets.MLP([4], activate_final=False)

        self._edge_block = blocks.EdgeBlock(edge_model_fn=make_mlp_model,
                                            use_edges=False,
                                            use_receiver_nodes=True,
                                            use_sender_nodes=True,
                                            use_globals=False,
                                            name='edge_encoder_block')
        self._node_encoder_block = blocks.NodeBlock(
            node_model_fn=make_mlp_model,
            use_received_edges=False,
            use_sent_edges=False,
            use_nodes=True,
            use_globals=False,
            name='node_encoder_block')

        self._global_encoder_block = blocks.GlobalBlock(
            global_model_fn=make_mlp_model,
            use_edges=True,
            use_nodes=True,
            use_globals=False,
            nodes_reducer=tf.math.unsorted_segment_sum,
            edges_reducer=tf.math.unsorted_segment_sum,
            name='global_encoder_block')

        self._core = MLPGraphNetwork()

        # self._core = InteractionNetwork(
        #     edge_model_fn=make_mlp_model,
        #     node_model_fn=make_mlp_model,
        #     reducer=tf.math.unsorted_segment_sum
        # )

        # Transforms the outputs into appropriate shapes.
        node_output_size = 64
        node_fn = lambda: snt.Sequential([
            snt.nets.MLP(
                [node_output_size],
                activation=tf.nn.relu,  # default is relu
                name='node_output')
        ])

        global_output_size = 1
        global_fn = lambda: snt.Sequential([
            snt.nets.MLP(
                [global_output_size],
                activation=tf.nn.relu,  # default is relu
                name='global_output'),
            tf.sigmoid
        ])

        self._output_transform = modules.GraphIndependent(
            edge_model_fn=None,
            node_model_fn=node_fn,
            global_model_fn=global_fn)
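
A minimal, self-contained sketch of the same wiring (encode edges, summarize into globals, then map the globals through a GraphIndependent output transform). This assumes the TF2 release of graph_nets with Sonnet 2; all layer sizes here are illustrative, not the values used above.

import tensorflow as tf
import sonnet as snt
from graph_nets import blocks, modules, utils_tf

# A toy GraphsTuple with 3 nodes, 2 edges and a single graph.
graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]],
    "edges": [[1.0], [2.0]],
    "senders": [0, 1],
    "receivers": [1, 2],
    "globals": [0.0],
}])

edge_encoder = blocks.EdgeBlock(
    edge_model_fn=lambda: snt.nets.MLP([16]),
    use_edges=False, use_receiver_nodes=True,
    use_sender_nodes=True, use_globals=False)
global_encoder = blocks.GlobalBlock(
    global_model_fn=lambda: snt.nets.MLP([16]),
    use_edges=True, use_nodes=True, use_globals=False)
output_transform = modules.GraphIndependent(
    global_model_fn=lambda: snt.Sequential([snt.nets.MLP([1]), tf.sigmoid]))

latent = global_encoder(edge_encoder(graph))
print(output_transform(latent).globals)  # shape [1, 1], values in (0, 1)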
Example #2
    def __init__(self, node_model_fn, global_model_fn, name="rwrd_gnn"):
        # aggregator_fn = tf.math.unsorted_segment_sum,
        """Initializes the reward model"""

        super(GraphNeuralNetwork_reward, self).__init__(name=name)

        with self._enter_variable_scope():
            # self._edge_block = blocks.EdgeBlock(
            #     edge_model_fn=edge_model_fn,
            #     use_edges=False,
            #     use_receiver_nodes=True,
            #     use_sender_nodes=True,
            #     use_globals=True,
            #     name='edge_block')

            self._node_block = blocks.NodeBlock(
                node_model_fn=node_model_fn,
                use_received_edges=False,
                use_sent_edges=False,
                use_nodes=True,
                use_globals=True,
                received_edges_reducer=tf.math.unsorted_segment_sum,
                sent_edges_reducer=tf.math.unsorted_segment_sum,
                name="node_block")

            self._global_block = blocks.GlobalBlock(
                global_model_fn=global_model_fn,
                use_edges=False,
                use_nodes=True,
                use_globals=True,
                nodes_reducer=tf.math.unsorted_segment_sum,
                edges_reducer=tf.math.unsorted_segment_sum,
                name="global_block")
Example #3
    def __init__(self, name, edge_model_fn, node_model_fn, global_model_fn):
        """
        description: initializes the model

        :param edge_model_fn: Function passed to the edge block; in this paper it is an MLP
        :param node_model_fn: Function passed to the node block; in this paper, for the bAbI task, it is an LSTM over timesteps
        :param global_model_fn: Function passed to the global block; in this paper it is an MLP
        """

        super(RRN, self).__init__(name=name)

        self._edge_block = blocks.EdgeBlock(edge_model_fn=edge_model_fn,
                                            use_edges=False,
                                            use_receiver_nodes=True,
                                            use_sender_nodes=True,
                                            use_globals=False)

        self._global_block = blocks.GlobalBlock(
            global_model_fn=global_model_fn,
            use_edges=False,
            use_nodes=True,
            use_globals=False,
            nodes_reducer=tf.unsorted_segment_sum)

        self._node_model_fn = node_model_fn
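
The RRN keeps node_model_fn aside instead of wrapping it in a NodeBlock, which suggests a recurrent per-node update driven by the edge messages. A hedged, self-contained sketch of one such step (assuming the TF2 release of graph_nets with Sonnet 2; a GRU stands in for the paper's LSTM and all sizes are illustrative):

import tensorflow as tf
import sonnet as snt
from graph_nets import blocks, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]],
    "edges": [[0.0], [0.0], [0.0]],
    "senders": [0, 1, 2],
    "receivers": [1, 2, 0],
    "globals": [0.0],
}])

# Messages built from sender/receiver node pairs, as in the EdgeBlock above.
edge_block = blocks.EdgeBlock(edge_model_fn=lambda: snt.nets.MLP([4]),
                              use_edges=False,
                              use_receiver_nodes=True,
                              use_sender_nodes=True,
                              use_globals=False)
messages = blocks.ReceivedEdgesToNodesAggregator(
    tf.math.unsorted_segment_sum)(edge_block(graph))

gru = snt.GRU(hidden_size=4)
state = graph.nodes                      # previous per-node hidden state
new_nodes, state = gru(messages, state)  # one recurrent node update
graph = graph.replace(nodes=new_nodes)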
Example #4
    def __init__(self, name="FourTopPredictor"):
        super(FourTopPredictor, self).__init__(name=name)

        self._edge_block = blocks.EdgeBlock(edge_model_fn=make_mlp_model,
                                            use_edges=False,
                                            use_receiver_nodes=True,
                                            use_sender_nodes=True,
                                            use_globals=False,
                                            name='edge_encoder_block')
        self._node_encoder_block = blocks.NodeBlock(
            node_model_fn=make_mlp_model,
            use_received_edges=False,
            use_sent_edges=False,
            use_nodes=True,
            use_globals=False,
            name='node_encoder_block')

        self._global_block = blocks.GlobalBlock(
            global_model_fn=make_mlp_model,
            use_edges=True,
            use_nodes=True,
            use_globals=False,
        )

        self._core = MLPGraphNetwork()

        # Transforms the outputs into appropriate shapes.
        global_output_size = n_target_node_features * n_max_tops
        self._global_nn = snt.nets.MLP(
            [128, 128, global_output_size],
            activation=tf.nn.leaky_relu,  # default is relu, tanh
            dropout_rate=0.30,
            name='global_output')
Example #5
 def test_compatible_higher_rank_no_raise(self):
   """No exception should occur with higher ranks tensors."""
   input_graph = self._get_shaped_input_graph()
   input_graph = input_graph.map(lambda v: tf.transpose(v, [0, 2, 1, 3]))
   network = blocks.GlobalBlock(
       functools.partial(snt.Conv2D, output_channels=10, kernel_shape=[3, 3]))
   self._assert_build_and_run(network, input_graph)
Example #6
  def test_same_as_subblocks(self, reducer):
    """Compares the output to explicit subblocks output.

    Args:
      reducer: The reducer used in the `NodeBlock` and `GlobalBlock`.
    """
    input_graph = self._get_input_graph()

    edge_model_fn = functools.partial(snt.Linear, output_size=5)
    node_model_fn = functools.partial(snt.Linear, output_size=10)
    global_model_fn = functools.partial(snt.Linear, output_size=15)

    graph_network = modules.GraphNetwork(
        edge_model_fn=edge_model_fn,
        node_model_fn=node_model_fn,
        global_model_fn=global_model_fn,
        reducer=reducer)

    output_graph = graph_network(input_graph)

    edge_block = blocks.EdgeBlock(
        edge_model_fn=lambda: graph_network._edge_block._edge_model,
        use_sender_nodes=True,
        use_edges=True,
        use_receiver_nodes=True,
        use_globals=True)
    node_block = blocks.NodeBlock(
        node_model_fn=lambda: graph_network._node_block._node_model,
        use_nodes=True,
        use_sent_edges=False,
        use_received_edges=True,
        use_globals=True,
        received_edges_reducer=reducer)
    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: graph_network._global_block._global_model,
        use_nodes=True,
        use_edges=True,
        use_globals=True,
        edges_reducer=reducer,
        nodes_reducer=reducer)

    expected_output_edge_block = edge_block(input_graph)
    expected_output_node_block = node_block(expected_output_edge_block)
    expected_output_global_block = global_block(expected_output_node_block)
    expected_edges = expected_output_edge_block.edges
    expected_nodes = expected_output_node_block.nodes
    expected_globals = expected_output_global_block.globals

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (output_graph_out,
       expected_edges_out, expected_nodes_out, expected_globals_out) = sess.run(
           (output_graph, expected_edges, expected_nodes, expected_globals))

    self._assert_all_none_or_all_close(expected_edges_out,
                                       output_graph_out.edges)
    self._assert_all_none_or_all_close(expected_nodes_out,
                                       output_graph_out.nodes)
    self._assert_all_none_or_all_close(expected_globals_out,
                                       output_graph_out.globals)
Example #7
    def __init__(self, name="GlobalClassifierNoEdgeInfo"):
        super(GlobalClassifierNoEdgeInfo, self).__init__(name=name)

        self._edge_block = blocks.EdgeBlock(edge_model_fn=make_mlp_model,
                                            use_edges=False,
                                            use_receiver_nodes=True,
                                            use_sender_nodes=True,
                                            use_globals=False,
                                            name='edge_encoder_block')

        self._node_encoder_block = blocks.NodeBlock(
            node_model_fn=make_mlp_model,
            use_received_edges=False,
            use_sent_edges=False,
            use_nodes=True,
            use_globals=False,
            name='node_encoder_block')

        self._global_block = blocks.GlobalBlock(
            global_model_fn=make_mlp_model,
            use_edges=True,
            use_nodes=True,
            use_globals=False,
        )

        self._core = MLPGraphNetwork()
        # Transforms the outputs into appropriate shapes.
        global_output_size = 1
        global_fn = lambda: snt.Sequential([
            snt.nets.MLP([LATENT_SIZE, global_output_size],
                         name='global_output'), tf.sigmoid
        ])

        self._output_transform = modules.GraphIndependent(
            None, None, global_fn)
Example #8
  def test_output_values(
      self, use_edges, use_nodes, use_globals, edges_reducer, nodes_reducer):
    """Compares the output of a GlobalBlock to an explicit computation."""
    input_graph = self._get_input_graph()
    global_block = blocks.GlobalBlock(
        global_model_fn=self._global_model_fn,
        use_edges=use_edges,
        use_nodes=use_nodes,
        use_globals=use_globals,
        edges_reducer=edges_reducer,
        nodes_reducer=nodes_reducer)
    output_graph = global_block(input_graph)

    model_inputs = []
    if use_edges:
      model_inputs.append(
          blocks.EdgesToGlobalsAggregator(edges_reducer)(input_graph))
    if use_nodes:
      model_inputs.append(
          blocks.NodesToGlobalsAggregator(nodes_reducer)(input_graph))
    if use_globals:
      model_inputs.append(input_graph.globals)

    model_inputs = tf.concat(model_inputs, axis=-1)
    self.assertEqual(input_graph.edges, output_graph.edges)
    self.assertEqual(input_graph.nodes, output_graph.nodes)

    with self.test_session() as sess:
      output_graph_out, model_inputs_out = sess.run(
          (output_graph, model_inputs))

    expected_output_globals = model_inputs_out * self._scale
    self.assertNDArrayNear(
        expected_output_globals, output_graph_out.globals, err=1e-4)
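
The same equivalence the test asserts can be reproduced outside the test harness. A minimal eager-mode sketch (assuming the TF2 release of graph_nets) with a deterministic scaling model in place of self._global_model_fn:

import tensorflow as tf
from graph_nets import blocks, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0, 2.0], [3.0, 4.0]],
    "edges": [[5.0], [6.0], [7.0]],
    "senders": [0, 0, 1],
    "receivers": [1, 1, 0],
    "globals": [10.0],
}])

scale = 2.0
global_block = blocks.GlobalBlock(
    global_model_fn=lambda: (lambda features: features * scale),
    use_edges=True, use_nodes=True, use_globals=True,
    edges_reducer=tf.math.unsorted_segment_sum,
    nodes_reducer=tf.math.unsorted_segment_sum)

# Explicit computation: aggregate edges and nodes, concatenate with globals, apply the model.
manual = tf.concat([
    blocks.EdgesToGlobalsAggregator(tf.math.unsorted_segment_sum)(graph),
    blocks.NodesToGlobalsAggregator(tf.math.unsorted_segment_sum)(graph),
    graph.globals,
], axis=-1) * scale

tf.debugging.assert_near(global_block(graph).globals, manual)  # passes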
Example #9
    def __init__(self,
                 node_model_fn,
                 global_model_fn,
                 reducer=tf.unsorted_segment_sum,
                 name="deep_sets"):
        """Initializes the DeepSets module.

    Args:
      node_model_fn: A callable to be passed to NodeBlock. The callable must
        return a Sonnet module (or equivalent; see NodeBlock for details). The
        shape of this module's output must equal the shape of the input graph's
        global features, but for the first and last axis.
      global_model_fn: A callable to be passed to GlobalBlock. The callable must
        return a Sonnet module (or equivalent; see GlobalBlock for details).
      reducer: Reduction to be used when aggregating the nodes in the globals.
        This should be a callable whose signature matches
        tf.unsorted_segment_sum.
      name: The module name.
    """
        super(DeepSets, self).__init__(name=name)

        with self._enter_variable_scope():
            self._node_block = blocks.NodeBlock(node_model_fn=node_model_fn,
                                                use_received_edges=False,
                                                use_sent_edges=False,
                                                use_nodes=True,
                                                use_globals=True)
            self._global_block = blocks.GlobalBlock(
                global_model_fn=global_model_fn,
                use_edges=False,
                use_nodes=True,
                use_globals=False,
                nodes_reducer=reducer)
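
This matches the constructor of the packaged modules.DeepSets, which can be used directly. A short usage sketch (assuming the TF2 release of graph_nets with Sonnet 2; layer sizes are illustrative):

import tensorflow as tf
import sonnet as snt
from graph_nets import modules, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]],
    "edges": [[0.0]],
    "senders": [0],
    "receivers": [1],
    "globals": [0.0, 0.0],
}])

deep_sets = modules.DeepSets(
    node_model_fn=lambda: snt.nets.MLP([8, 8]),
    global_model_fn=lambda: snt.nets.MLP([8, 4]),
    reducer=tf.math.unsorted_segment_sum)

out = deep_sets(graph)
print(out.nodes.shape, out.globals.shape)  # (3, 8) (1, 4); edges are left untouched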
Example #10
  def test_unused_field_can_be_none(
      self, use_edges, use_nodes, use_globals, none_field):
    """Checks that computation can handle non-necessary fields left None."""
    input_graph = self._get_input_graph([none_field])
    global_block = blocks.GlobalBlock(
        global_model_fn=self._global_model_fn,
        use_edges=use_edges,
        use_nodes=use_nodes,
        use_globals=use_globals)
    output_graph = global_block(input_graph)

    model_inputs = []
    if use_edges:
      model_inputs.append(
          blocks.EdgesToGlobalsAggregator(tf.unsorted_segment_sum)(input_graph))
    if use_nodes:
      model_inputs.append(
          blocks.NodesToGlobalsAggregator(tf.unsorted_segment_sum)(input_graph))
    if use_globals:
      model_inputs.append(input_graph.globals)

    model_inputs = tf.concat(model_inputs, axis=-1)
    self.assertEqual(input_graph.edges, output_graph.edges)
    self.assertEqual(input_graph.nodes, output_graph.nodes)

    with self.test_session() as sess:
      actual_globals, model_inputs_out = sess.run(
          (output_graph.globals, model_inputs))

    expected_output_globals = model_inputs_out * self._scale
    self.assertNDArrayNear(expected_output_globals, actual_globals, err=1e-4)
Example #11
    def test_created_variables(self, use_edges, use_nodes, use_globals,
                               expected_first_dim_w):
        """Verifies the variable names and shapes created by a GlobalBlock."""
        output_size = 10
        expected_var_shapes_dict = {
            "global_block/mlp/linear_0/b:0": [output_size],
            "global_block/mlp/linear_0/w:0":
            [expected_first_dim_w, output_size]
        }

        input_graph = self._get_input_graph()

        global_block = blocks.GlobalBlock(global_model_fn=functools.partial(
            snt.nets.MLP, output_sizes=[output_size]),
                                          use_edges=use_edges,
                                          use_nodes=use_nodes,
                                          use_globals=use_globals)

        global_block(input_graph)

        variables = global_block.get_variables()
        var_shapes_dict = {
            var.name: var.get_shape().as_list()
            for var in variables
        }
        self.assertDictEqual(expected_var_shapes_dict, var_shapes_dict)
Example #12
 def test_no_input_raises_exception(self):
     """Checks that receiving no input raises an exception."""
     with self.assertRaisesRegexp(ValueError, "At least one of "):
         blocks.GlobalBlock(global_model_fn=self._global_model_fn,
                            use_edges=False,
                            use_nodes=False,
                            use_globals=False)
Example #13
    def __init__(self,
                 edge_model_fn,
                 global_model_fn,
                 reducer=tf.unsorted_segment_sum,
                 name="relation_network"):
        """Initializes the RelationNetwork module.

    Args:
      edge_model_fn: A callable that will be passed to EdgeBlock to perform
        per-edge computations. The callable must return a Sonnet module (or
        equivalent; see EdgeBlock for details).
      global_model_fn: A callable that will be passed to GlobalBlock to perform
        per-global computations. The callable must return a Sonnet module (or
        equivalent; see GlobalBlock for details).
      reducer: Reducer to be used by GlobalBlock to aggregate edges. Defaults to
        tf.unsorted_segment_sum.
      name: The module name.
    """
        super(RelationNetwork, self).__init__(name=name)

        with self._enter_variable_scope():
            self._edge_block = blocks.EdgeBlock(edge_model_fn=edge_model_fn,
                                                use_edges=False,
                                                use_receiver_nodes=True,
                                                use_sender_nodes=True,
                                                use_globals=False)

            self._global_block = blocks.GlobalBlock(
                global_model_fn=global_model_fn,
                use_edges=True,
                use_nodes=False,
                use_globals=False,
                edges_reducer=reducer)
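
This matches the constructor of the packaged modules.RelationNetwork: the edge model runs on every (sender, receiver) pair and the results are reduced into a per-graph global. A usage sketch (assuming the TF2 release of graph_nets with Sonnet 2; layer sizes are illustrative):

import tensorflow as tf
import sonnet as snt
from graph_nets import modules, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]],
    "edges": [[0.0], [0.0], [0.0]],
    "senders": [0, 1, 2],
    "receivers": [1, 2, 0],
    "globals": [0.0],
}])

relation_net = modules.RelationNetwork(
    edge_model_fn=lambda: snt.nets.MLP([16, 16]),
    global_model_fn=lambda: snt.nets.MLP([8, 1]),
    reducer=tf.math.unsorted_segment_sum)

print(relation_net(graph).globals.shape)  # (1, 1): one output per input graph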
Example #14
    def __init__(self,
                 attention_node_projection_model,
                 attention_edge_projection_model,
                 query_key_product_model,
                 node_model_fn,
                 edge_model_fn,
                 global_model_fn,
                 num_heads,
                 key_size,
                 value_size,
                 edge_block_opt=None,
                 global_block_opt=None,
                 name="GAT"):
        """
      Args:
        attention_node_projection_model: Model used for projection to get
          query, key and values
          Final layer dim should be key_size * num_heads
        attention_edge_projection_model: Model used for projection to get
          query, key and values
          Final layer dim should be (key_size + value_size) * num_heads
        query_key_product_model: Model used to find "dot product" between
          queries and keys.
          Final layer dim should be 1.
        node_model_fn: Model applied to the node embeddings at the end.
        edge_model_fn: Model applied to the edge embeddings at the end.
        global_model_fn: Model passed to the GlobalBlock.
        num_heads: Number of attention heads
        key_size: Key dimension
        value_size: value dimension
        edge_block_opt: Additional options to be passed to the EdgeBlock. Can
        contain keys `use_edges`, `use_receiver_nodes`, `use_sender_nodes`,
        `use_globals`. By default, these are all True.
        global_block_opt: Additional options to be passed to the GlobalBlock. Can
          contain the keys `use_edges`, `use_nodes`, `use_globals` (all set to
          True by default), and `edges_reducer`, `nodes_reducer` (defaults to
          `reducer`).
        name: The module name.
    """
        super().__init__(name=name)
        self._attention_node_projection_model = attention_node_projection_model
        self._attention_edge_projection_model = attention_edge_projection_model
        self._query_key_product_model = query_key_product_model
        self.num_heads = num_heads
        self.key_size = key_size
        self.value_size = value_size

        edge_block_opt = _make_default_edge_block_opt(edge_block_opt)
        global_block_opt = _make_default_global_block_opt(
            global_block_opt, tf.unsorted_segment_sum)
        # does not make sense without using sender nodes.
        assert edge_block_opt['use_sender_nodes']
        with self._enter_variable_scope():
            self._node_model = node_model_fn()
            self._edge_block = blocks.EdgeBlock(edge_model_fn=edge_model_fn,
                                                **edge_block_opt)
            self._global_block = blocks.GlobalBlock(
                global_model_fn=global_model_fn, **global_block_opt)
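
The projection-size requirements from the docstring, written out as code. These model definitions are only an illustration of the stated dimension constraints; the hidden widths and the head/key/value sizes are assumptions, not values from the original code.

import sonnet as snt

num_heads, key_size, value_size = 4, 8, 8

# Passed to the GAT constructor above as ready-made modules.
attention_node_projection_model = snt.nets.MLP(
    [32, key_size * num_heads])                 # final dim: key_size * num_heads
attention_edge_projection_model = snt.nets.MLP(
    [32, (key_size + value_size) * num_heads])  # final dim: (key_size + value_size) * num_heads
query_key_product_model = snt.nets.MLP([16, 1])  # final dim: 1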
Example #15
def tinet(input_graph):
    embedding = blocks.NodeBlock(
        node_model_fn=lambda: tf.keras.layers.Embedding(800, 32),
        use_received_edges=False,
        use_sent_edges=False,
        use_nodes=True,
        use_globals=False)
    graph_network_layer1 = blocks.NodeBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        node_model_fn=lambda: tf.layers.Dense(32, activation=tf.nn.relu))
    # global_model_fn = lambda: tf.layers.Dense(8, activation=tf.nn.relu))
    graph_network_layer2 = blocks.NodeBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        node_model_fn=lambda: tf.layers.Dense(32, activation=tf.nn.relu))
    # global_model_fn = lambda: tf.layers.Dense(8, activation=tf.nn.relu))
    graph_network_layer3 = blocks.NodeBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        node_model_fn=lambda: tf.layers.Dense(32, activation=tf.nn.relu))
    # global_model_fn = lambda: tf.layers.Dense(8, activation=tf.nn.relu))
    graph_network_layer4 = blocks.NodeBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        node_model_fn=lambda: tf.layers.Dense(32, activation=tf.nn.relu))
    # global_model_fn = lambda: tf.layers.Dense(8, activation=tf.nn.relu))
    graph_network_layer5 = blocks.GlobalBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        # node_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        global_model_fn=lambda: tf.layers.Dense(40, activation=tf.nn.relu))
    graph_network_layer6 = blocks.GlobalBlock(
        # edge_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        # node_model_fn = lambda: tf.layers.Dense(16, activation=tf.nn.relu),
        global_model_fn=lambda: tf.layers.Dense(40, activation=tf.nn.relu))

    h0 = embedding(input_graph)
    h1 = graph_network_layer1(h0)
    h2 = graph_network_layer2(h1)
    h3 = graph_network_layer3(h2)
    h4 = graph_network_layer4(h3)
    h5 = graph_network_layer5(h4)
    h6 = graph_network_layer6(h5)

    out = h6.globals

    return tf.layers.dense(out, 4, activation=None)
Example #16
 def test_missing_field_raises_exception(
     self, use_edges, use_nodes, use_globals, none_field):
   """Checks that missing a required field raises an exception."""
   input_graph = self._get_input_graph([none_field])
   global_block = blocks.GlobalBlock(
       global_model_fn=self._global_model_fn,
       use_edges=use_edges,
       use_nodes=use_nodes,
       use_globals=use_globals)
   with self.assertRaisesRegexp(ValueError, "field cannot be None"):
     global_block(input_graph)
Example #17
 def test_missing_aggregation_raises_exception(self, use_edges, use_nodes,
                                               edges_reducer,
                                               nodes_reducer):
     """Checks that missing a required aggregation argument raises an error."""
     with self.assertRaisesRegexp(ValueError, "should not be None"):
         blocks.GlobalBlock(global_model_fn=self._global_model_fn,
                            use_edges=use_edges,
                            use_nodes=use_nodes,
                            use_globals=False,
                            edges_reducer=edges_reducer,
                            nodes_reducer=nodes_reducer)
Example #18
  def test_optional_arguments(self, scale, offset):
    """Assesses the correctness of the GlobalBlock using arguments."""
    input_graph = self._get_input_graph()
    global_block = blocks.GlobalBlock(
        global_model_fn=self._global_model_args_fn)
    output_graph_out = global_block(
        input_graph, global_model_kwargs=dict(scale=scale, offset=offset))

    fixed_scale = scale
    fixed_offset = offset
    model_fn = lambda: lambda features: features * fixed_scale + fixed_offset
    hardcoded_global_block = blocks.GlobalBlock(global_model_fn=model_fn)
    expected_graph_out = hardcoded_global_block(input_graph)

    self.assertIs(expected_graph_out.edges, output_graph_out.edges)
    self.assertIs(expected_graph_out.nodes, output_graph_out.nodes)
    self.assertNDArrayNear(
        expected_graph_out.globals.numpy(),
        output_graph_out.globals.numpy(),
        err=1e-4)
Example #19
    def __init__(self,
                 edge_model_fn,
                 node_model_fn,
                 global_model_fn,
                 reducer=tf.math.unsorted_segment_sum,
                 edge_block_opt=None,
                 node_block_opt=None,
                 global_block_opt=None,
                 name="graph_network"):
        """Initializes the GraphNetwork module.

    Args:
      edge_model_fn: A callable that will be passed to EdgeBlock to perform
        per-edge computations. The callable must return a Sonnet module (or
        equivalent; see EdgeBlock for details).
      node_model_fn: A callable that will be passed to NodeBlock to perform
        per-node computations. The callable must return a Sonnet module (or
        equivalent; see NodeBlock for details).
      global_model_fn: A callable that will be passed to GlobalBlock to perform
        per-global computations. The callable must return a Sonnet module (or
        equivalent; see GlobalBlock for details).
      reducer: Reducer to be used by NodeBlock and GlobalBlock to aggregate
        nodes and edges. Defaults to tf.unsorted_segment_sum. This will be
        overridden by the reducers specified in `node_block_opt` and
        `global_block_opt`, if any.
      edge_block_opt: Additional options to be passed to the EdgeBlock. Can
        contain keys `use_edges`, `use_receiver_nodes`, `use_sender_nodes`,
        `use_globals`. By default, these are all True.
      node_block_opt: Additional options to be passed to the NodeBlock. Can
        contain the keys `use_received_edges`, `use_sent_edges`, `use_nodes`,
        `use_globals` (all set to True by default), and
        `received_edges_reducer`, `sent_edges_reducer` (default to `reducer`).
      global_block_opt: Additional options to be passed to the GlobalBlock. Can
        contain the keys `use_edges`, `use_nodes`, `use_globals` (all set to
        True by default), and `edges_reducer`, `nodes_reducer` (defaults to
        `reducer`).
      name: The module name.
    """
        super(GraphNetwork, self).__init__(name=name)
        edge_block_opt = _make_default_edge_block_opt(edge_block_opt)
        node_block_opt = _make_default_node_block_opt(node_block_opt, reducer)
        global_block_opt = _make_default_global_block_opt(
            global_block_opt, reducer)

        # with self._enter_variable_scope():
        self._edge_block = blocks.EdgeBlock(edge_model_fn=edge_model_fn,
                                            **edge_block_opt)
        self._node_block = blocks.NodeBlock(node_model_fn=node_model_fn,
                                            **node_block_opt)
        self._global_block = blocks.GlobalBlock(
            global_model_fn=global_model_fn, **global_block_opt)
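
A usage sketch of the *_block_opt overrides described in the docstring, using the packaged modules.GraphNetwork (assuming the TF2 release of graph_nets with Sonnet 2; sizes and reducers are illustrative). Here the GlobalBlock is restricted to node information and switched to a mean reducer:

import tensorflow as tf
import sonnet as snt
from graph_nets import modules, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0], [2.0], [3.0]],
    "edges": [[0.1], [0.2]],
    "senders": [0, 1],
    "receivers": [1, 2],
    "globals": [0.0],
}])

graph_net = modules.GraphNetwork(
    edge_model_fn=lambda: snt.nets.MLP([16]),
    node_model_fn=lambda: snt.nets.MLP([16]),
    global_model_fn=lambda: snt.nets.MLP([16]),
    reducer=tf.math.unsorted_segment_sum,
    global_block_opt={"use_edges": False,
                      "use_nodes": True,
                      "nodes_reducer": tf.math.unsorted_segment_mean})

out = graph_net(graph)
print(out.edges.shape, out.nodes.shape, out.globals.shape)  # (2, 16) (3, 16) (1, 16)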
Example #20
 def test_incompatible_higher_rank_inputs_no_raise(self, use_edges,
                                                   use_nodes, use_globals,
                                                   field):
     """No exception should occur if a differently shapped field is not used."""
     input_graph = self._get_shaped_input_graph()
     input_graph = input_graph.replace(
         **{field: tf.transpose(getattr(input_graph, field), [0, 2, 1, 3])})
     network = blocks.GlobalBlock(functools.partial(snt.Conv2D,
                                                    output_channels=10,
                                                    kernel_shape=[3, 3]),
                                  use_edges=use_edges,
                                  use_nodes=use_nodes,
                                  use_globals=use_globals)
     self._assert_build_and_run(network, input_graph)
Example #21
 def test_incompatible_higher_rank_inputs_raises(self, use_edges, use_nodes,
                                                 use_globals, field):
     """A exception should be raised if the inputs have incompatible shapes."""
     input_graph = self._get_shaped_input_graph()
     input_graph = input_graph.replace(
         **{field: tf.transpose(getattr(input_graph, field), [0, 2, 1, 3])})
     network = blocks.GlobalBlock(functools.partial(snt.Conv2D,
                                                    output_channels=10,
                                                    kernel_shape=[3, 3]),
                                  use_edges=use_edges,
                                  use_nodes=use_nodes,
                                  use_globals=use_globals)
     with self.assertRaisesRegexp(ValueError,
                                  "in both shapes must be equal"):
         network(input_graph)
Example #22
  def test_same_as_subblocks(self, reducer, none_fields):
    """Compares the output to explicit subblocks output.

    Args:
      reducer: The reducer used in the NodeBlock.
      none_fields: (list of strings) The corresponding fields are removed from
        the input graph.
    """
    input_graph = self._get_input_graph()
    input_graph = input_graph.map(lambda _: None, none_fields)

    deep_sets = self._get_model(reducer)

    output_graph = deep_sets(input_graph)
    output_nodes = output_graph.nodes
    output_globals = output_graph.globals

    node_block = blocks.NodeBlock(
        node_model_fn=lambda: deep_sets._node_block._node_model,
        use_received_edges=False,
        use_sent_edges=False,
        use_nodes=True,
        use_globals=True)
    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: deep_sets._global_block._global_model,
        use_edges=False,
        use_nodes=True,
        use_globals=False,
        nodes_reducer=reducer)

    node_block_out = node_block(input_graph)
    expected_nodes = node_block_out.nodes
    expected_globals = global_block(node_block_out).globals

    self.assertAllEqual(input_graph.edges, output_graph.edges)
    self.assertAllEqual(input_graph.receivers, output_graph.receivers)
    self.assertAllEqual(input_graph.senders, output_graph.senders)

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (output_nodes_, output_globals_, expected_nodes_,
       expected_globals_) = sess.run(
           [output_nodes, output_globals, expected_nodes, expected_globals])

    self._assert_all_none_or_all_close(expected_nodes_, output_nodes_)
    self._assert_all_none_or_all_close(expected_globals_, output_globals_)
Example #23
    def __init__(self, model_id, name="MLPGraphNetwork"):
        super(MLPGraphNetwork, self).__init__(name=name)

        node_block_options = {
            "use_received_edges": False,
            "use_sent_edges": False,
            "use_nodes": True,
            "use_globals": True,
        }

        global_block_options = {
            "use_edges": False,
            "use_nodes": True,
            "use_globals": True,
        }

        node_block_opt = modules._make_default_node_block_opt(
            node_block_options, tf.unsorted_segment_sum)
        global_block_opt = modules._make_default_global_block_opt(
            global_block_options, tf.unsorted_segment_sum)

        with self._enter_variable_scope():
            node_model_fn = lambda: get_model_from_config(
                model_id, model_type="mlp"
            )(n_neurons=EncodeProcessDecode_v4_172_no_edges.n_neurons_nodes,
              n_layers=EncodeProcessDecode_v4_172_no_edges.n_layers_nodes,
              output_size=EncodeProcessDecode_v4_172_no_edges.
              n_neurons_nodes_total_dim,
              typ="mlp_transform",
              activation_final=False,
              name="mlp_core_node")

            global_model_fn = lambda: get_model_from_config(
                model_id, model_type="mlp"
            )(n_neurons=EncodeProcessDecode_v4_172_no_edges.n_neurons_globals,
              n_layers=EncodeProcessDecode_v4_172_no_edges.n_layers_globals,
              output_size=None,
              typ="mlp_layer_norm",
              name="mlp_core_global")

            self._node_block = blocks.NodeBlock(node_model_fn=node_model_fn,
                                                **node_block_opt)
            self._global_block = blocks.GlobalBlock(
                global_model_fn=global_model_fn, **global_block_opt)
Example #24
    def __init__(self,
                 num_components,
                 reducer=tf.math.unsorted_segment_mean,
                 properties_size=11,
                 name=None):
        super(DecoderNetwork, self).__init__(name=name)

        self.num_components = num_components
        self.components_dim = properties_size * 10

        edge_enc_size = 32

        node_model_fn = lambda: snt.nets.MLP([self.components_dim],
                                             activate_final=True,
                                             activation=tf.nn.sigmoid)
        edge_model_fn = lambda: snt.nets.MLP(
            [edge_enc_size], activate_final=True, activation=tf.nn.sigmoid)
        global_model_fn = lambda: snt.nets.MLP([self.components_dim],
                                               activate_final=True,
                                               activation=tf.nn.sigmoid)

        self.node_block = blocks.NodeBlock(node_model_fn,
                                           use_received_edges=False,
                                           use_sent_edges=False,
                                           use_nodes=True,
                                           use_globals=False)

        self.edge_block = blocks.EdgeBlock(edge_model_fn,
                                           use_edges=False,
                                           use_receiver_nodes=True,
                                           use_sender_nodes=True,
                                           use_globals=False)

        self.global_block = blocks.GlobalBlock(global_model_fn,
                                               use_edges=True,
                                               use_nodes=False,
                                               use_globals=False,
                                               edges_reducer=reducer)
Example #25
  def test_same_as_subblocks(self, reducer, none_field=None):
    """Compares the output to explicit subblocks output.

    Args:
      reducer: The reducer used in the `GlobalBlock`.
      none_field: (string, default=None) If not None, the corresponding field
        is removed from the input graph.
    """
    input_graph = self._get_input_graph(none_field)
    relation_network = self._get_model(reducer)
    output_graph = relation_network(input_graph)

    edge_block = blocks.EdgeBlock(
        edge_model_fn=lambda: relation_network._edge_block._edge_model,
        use_edges=False,
        use_receiver_nodes=True,
        use_sender_nodes=True,
        use_globals=False)
    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: relation_network._global_block._global_model,
        use_edges=True,
        use_nodes=False,
        use_globals=False,
        edges_reducer=reducer,
        nodes_reducer=reducer)

    expected_output_edge_block = edge_block(input_graph)
    expected_output_global_block = global_block(expected_output_edge_block)

    self.assertEqual(input_graph.edges, output_graph.edges)
    self.assertEqual(input_graph.nodes, output_graph.nodes)

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (actual_globals_out, expected_globals_out) = sess.run(
          (output_graph.globals, expected_output_global_block.globals))

    self._assert_all_none_or_all_close(expected_globals_out, actual_globals_out)
Example #26
  def test_global_block_options(self,
                                use_edges,
                                use_nodes,
                                use_globals,
                                edges_reducer,
                                nodes_reducer):
    """Test for configuring the NodeBlock options."""
    input_graph = self._get_input_graph()

    if use_edges:
      edges_reducer = edges_reducer or tf.unsorted_segment_sum
    if use_nodes:
      nodes_reducer = nodes_reducer or tf.unsorted_segment_sum

    # Identity edge model.
    edge_model_fn = lambda: tf.identity
    edge_block_opt = {"use_edges": True,
                      "use_receiver_nodes": False,
                      "use_sender_nodes": False,
                      "use_globals": False}
    # Identity node model
    node_model_fn = lambda: tf.identity
    node_block_opt = {"use_received_edges": False,
                      "use_sent_edges": False,
                      "use_nodes": True,
                      "use_globals": False}
    global_model_fn = functools.partial(snt.Linear, output_size=10)
    global_block_opt = {"use_globals": use_globals,
                        "use_nodes": use_nodes,
                        "use_edges": use_edges,
                        "edges_reducer": edges_reducer,
                        "nodes_reducer": nodes_reducer}

    graph_network = modules.GraphNetwork(
        edge_model_fn=edge_model_fn,
        edge_block_opt=edge_block_opt,
        node_model_fn=node_model_fn,
        node_block_opt=node_block_opt,
        global_model_fn=global_model_fn,
        global_block_opt=global_block_opt)

    output_graph = graph_network(input_graph)

    global_block = blocks.GlobalBlock(
        global_model_fn=lambda: graph_network._global_block._global_model,
        use_edges=use_edges,
        use_nodes=use_nodes,
        use_globals=use_globals,
        edges_reducer=edges_reducer,
        nodes_reducer=nodes_reducer)

    expected_output_edge_block = input_graph
    expected_output_node_block = expected_output_edge_block
    expected_output_global_block = global_block(expected_output_node_block)
    expected_edges = expected_output_edge_block.edges
    expected_nodes = expected_output_node_block.nodes
    expected_globals = expected_output_global_block.globals

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      (output_graph_out,
       expected_edges_out, expected_nodes_out, expected_globals_out) = sess.run(
           (output_graph, expected_edges, expected_nodes, expected_globals))

    self._assert_all_none_or_all_close(expected_edges_out,
                                       output_graph_out.edges)
    self._assert_all_none_or_all_close(expected_nodes_out,
                                       output_graph_out.nodes)
    self._assert_all_none_or_all_close(expected_globals_out,
                                       output_graph_out.globals)
Example #27
    def __init__(self,
                 num_output: int,
                 output_size: int,
                 node_size: int = 4,
                 edge_size: int = 4,
                 starting_global_size: int = 10,
                 inter_graph_connect_prob: float = 0.01,
                 crossing_steps: int = 4,
                 reducer=tf.math.unsorted_segment_mean,
                 properties_size=10,
                 name=None):
        super(GraphMappingNetwork, self).__init__(name=name)
        self.num_output = num_output
        self.output_size = output_size
        self.crossing_steps = crossing_steps
        self.empty_node_variable = tf.Variable(initial_value=tf.random.truncated_normal((node_size,)),
                                               name='empty_token_node')

        # values for different kinds of edges in the graph, which will be learned
        self.intra_graph_edge_variable = tf.Variable(initial_value=tf.random.truncated_normal((edge_size,)),
                                                     name='intra_graph_edge_var')
        self.intra_token_graph_edge_variable = tf.Variable(initial_value=tf.random.truncated_normal((edge_size,)),
                                                           name='intra_token_graph_edge_var')
        self.inter_graph_edge_variable = tf.Variable(initial_value=tf.random.truncated_normal((edge_size,)),
                                                     name='inter_graph_edge_var')
        self.starting_global_variable = tf.Variable(initial_value=tf.random.truncated_normal((starting_global_size,)),
                                                    name='starting_global_var')

        self.inter_graph_connect_prob = inter_graph_connect_prob

        self.projection_node_block = blocks.NodeBlock(lambda: snt.Linear(node_size, name='project'),
                                                      use_received_edges=False,
                                                      use_sent_edges=False,
                                                      use_nodes=True,
                                                      use_globals=False)

        node_model_fn = lambda: snt.nets.MLP([node_size, node_size], activate_final=True, activation=tf.nn.leaky_relu)
        edge_model_fn = lambda: snt.nets.MLP([edge_size, edge_size], activate_final=True, activation=tf.nn.leaky_relu)
        global_model_fn = lambda: snt.nets.MLP([starting_global_size, starting_global_size], activate_final=True,
                                               activation=tf.nn.leaky_relu)

        self.edge_block = blocks.EdgeBlock(edge_model_fn,
                                           use_edges=True,
                                           use_receiver_nodes=True,
                                           use_sender_nodes=True,
                                           use_globals=True)

        self.node_block = blocks.NodeBlock(node_model_fn,
                                           use_received_edges=True,
                                           use_sent_edges=True,
                                           use_nodes=True,
                                           use_globals=True,
                                           received_edges_reducer=reducer,
                                           sent_edges_reducer=reducer)

        self.global_block = blocks.GlobalBlock(global_model_fn,
                                               use_edges=True,
                                               use_nodes=True,
                                               use_globals=True,
                                               edges_reducer=reducer,
                                               nodes_reducer=reducer)

        self.output_projection_node_block = blocks.NodeBlock(lambda: snt.Linear(self.output_size, name='project'),
                                                             use_received_edges=False,
                                                             use_sent_edges=False,
                                                             use_nodes=True,
                                                             use_globals=False)
Example #28
    def __init__(self, scope: str, model: GraphModel, reg_param):
        # Process parameters
        self.scope = scope
        self.model = model
        self.n_out = self.model.get_global_output_size()

        # Configure regularization
        self.regularizer = tf.contrib.layers.l2_regularizer(scale=reg_param)
        self.reg_linear = {"w": self.regularizer, "b": self.regularizer}
        #        self.reg_embed = {}
        self.reg_embed = {"embeddings": self.regularizer}

        # Set up input tensors
        with tf.variable_scope(self.scope + "/state_input"):
            self.input_graphs = utils_tf.placeholders_from_data_dicts(
                [self.model.placeholder_graph()],
                force_dynamic_num_graphs=True,
                name="local_state")

        with tf.variable_scope(self.scope + "/ground_truth"):
            # Reinforcement learning inputs
            self.true_action = tf.placeholder(tf.int32,
                                              shape=(None, ),
                                              name="action")
            self.n_objects = tf.placeholder(tf.int32,
                                            shape=(None, ),
                                            name="n_objects")
            self.target_q = tf.placeholder(tf.float32,
                                           shape=(None, ),
                                           name="target_q")
            self.target_value = tf.placeholder(tf.float32,
                                               shape=(None, ),
                                               name="target_value")

        with tf.variable_scope(self.scope):
            # Separately embed different categorical variables as dense vectors
            self.encoder_module = GraphEncoder(model,
                                               name="encoder",
                                               regularizers=self.reg_embed)
            self.embedded_graphs = self.encoder_module(self.input_graphs)

            # Apply an intermediate transformation to pass information between neighboring nodes
            self.intermediate_graphs = DenseGraphTransform(
                model.hidden_edge_dimension,
                model.hidden_node_dimension,
                model.hidden_global_dimension,
                name="intermediate",
                regularizer=self.regularizer)(self.embedded_graphs)

            # Then apply a final transformation to produce a global output and node-level evaluations
            self.output_graphs = DenseGraphTransform(
                model.action_dimension,
                1,
                1,
                node_activation=None,
                global_activation=None,
                name="output",
                regularizer=self.regularizer)(self.intermediate_graphs)

        with tf.variable_scope(self.scope + "/outputs"):
            # If given a true action, get the corresponding output
            self.graph_indices = tf.math.cumsum(self.output_graphs.n_node,
                                                exclusive=True,
                                                name="starting_node_index")
            self.true_indices = self.graph_indices + self.true_action
            self.chosen_node_outputs = tf.reshape(
                tf.gather(self.output_graphs.nodes,
                          self.true_indices,
                          name="chosen_action_outputs"), [-1])

            # In case we need a policy output, build the following tensors:
            # 1) a learned stochastic policy for all possible actions,
            # 2) the individual probability of the chosen action,
            # 3) the log of that individual probability.
            # First, get each node's index
            node_indices = tf.range(tf.shape(self.output_graphs.nodes)[0])
            # Then, get the index of each graphs' first action
            first_action_indices = self.graph_indices + self.n_objects
            # broadcast action indices to nodes and compare to node indices
            first_action_broadcast = blocks.broadcast_globals_to_nodes(
                self.output_graphs.replace(
                    globals=tf.reshape(first_action_indices, [-1, 1])))
            action_mask = tf.greater_equal(
                node_indices, tf.reshape(first_action_broadcast, [-1]))
            # Zero out the objects and apply softmax to the actions (treat action-nodes as logits)
            exp_or_zero = self.output_graphs.replace(nodes=tf.where(
                action_mask, tf.math.exp(self.output_graphs.nodes),
                tf.zeros_like(self.output_graphs.nodes)))
            # Sum the node values so that the global for each graph is the softmax denominator
            sum_nodes = blocks.GlobalBlock(lambda: tf.identity,
                                           use_edges=False,
                                           use_globals=False)
            softmax_graph = sum_nodes(exp_or_zero)

            # Then divide each node's value by that denominator, or set to 1 where denominator is 0
            def node_value_to_prob(node_inputs):
                p = tf.div_no_nan(node_inputs[:, 0], node_inputs[:, 1])
                return tf.where(p > 0, p, tf.ones_like(p))

            policy_graph = blocks.NodeBlock(
                lambda: node_value_to_prob,
                use_received_edges=False,
                use_sent_edges=False)(softmax_graph)
            self.policy = policy_graph.nodes
            self.p_chosen = tf.gather(self.policy,
                                      self.true_indices,
                                      name="p_true_action")
            self.log_p_chosen = tf.log(self.p_chosen, name="logp_true_action")

        # Configure metrics for training and display
        self.TRAIN_METRIC_OPS = self.scope + "/TRAIN_METRIC_OPS"
        self.VAL_METRIC_OPS = self.scope + "/VAL_METRIC_OPS"
        self.reg_term = tf.reduce_sum(
            tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
        tf.summary.scalar('reg_loss', self.reg_term)
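
The policy section above computes a softmax over the action nodes by summing the exponentiated node values into the globals with a GlobalBlock and broadcasting that denominator back to the nodes. A minimal standalone sketch of that pattern (assuming the TF2 release of graph_nets; the action masking and regularization are omitted):

import tensorflow as tf
from graph_nets import blocks, utils_tf

graph = utils_tf.data_dicts_to_graphs_tuple([{
    "nodes": [[1.0], [2.0], [3.0]],
    "edges": [[0.0]],
    "senders": [0],
    "receivers": [1],
    "globals": [0.0],
}])

# Exponentiate node values (the softmax numerators).
exp_graph = graph.replace(nodes=tf.math.exp(graph.nodes))

# Sum the nodes into the globals so each graph's global holds the softmax denominator.
sum_nodes = blocks.GlobalBlock(lambda: tf.identity,
                               use_edges=False,
                               use_globals=False)
with_denominator = sum_nodes(exp_graph)

# Broadcast the denominator back to the nodes and divide.
denominator = blocks.broadcast_globals_to_nodes(with_denominator)
softmax_nodes = with_denominator.nodes / denominator
print(tf.reduce_sum(softmax_nodes))  # ~1.0 for the single graph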