Example #1
    def call(self, inputs, training: bool, seq_enc_output=None):
        # Pack input data from keys back into a tuple:
        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types)
        )

        # Start the model computations:
        initial_node_features = self.compute_initial_node_features(inputs, training)
        if tf.is_tensor(seq_enc_output):
            # Split the batched node features back into per-graph chunks:
            node_features = tf.split(initial_node_features, inputs["graph_to_num_nodes"])
            n_tokens = seq_enc_output.shape[1]
            # Per graph: take the sequence encoder's encodings of the first source_len
            # tokens (skipping index 0) and append the remaining graph node features.
            node_features = [
                tf.concat([source_features[1:source_len + 1],
                           graph_features[min(source_len, n_tokens - 1):, :]], axis=0)
                for graph_features, source_features, source_len
                in zip(node_features, seq_enc_output, inputs["source_len"])
            ]
            initial_node_features = tf.concat(node_features, axis=0)

        gnn_input = GNNInput(
            node_features=initial_node_features,
            adjacency_lists=adjacency_lists,
            node_to_graph_map=inputs["node_to_graph_map"],
            num_graphs=inputs["num_graphs_in_batch"],
        )
        final_node_representations = self._gnn(gnn_input, training)
        return self.compute_task_output(inputs, final_node_representations, training), final_node_representations
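The subtle part of Example #1 is the per-graph merge of sequence-encoder token encodings with graph node features. The following self-contained sketch reproduces that split-and-recombine pattern; the tensor sizes, graph_to_num_nodes and source_len values are invented for illustration, not taken from the original model:

import tensorflow as tf

# Two graphs with 4 and 3 nodes, 8-dimensional node features (invented sizes).
node_features = tf.random.normal((7, 8))
graph_to_num_nodes = tf.constant([4, 3])

# Sequence encoder output: 2 sequences of 5 tokens, same feature width (invented).
seq_enc_output = tf.random.normal((2, 5, 8))
source_len = tf.constant([2, 3])
n_tokens = seq_enc_output.shape[1]

# Split the flat node-feature matrix back into per-graph chunks ...
per_graph = tf.split(node_features, graph_to_num_nodes)

# ... and, per graph, replace the first source_len rows with the corresponding
# token encodings before re-flattening.
merged = [
    tf.concat([tokens[1:s + 1], graph[min(int(s), n_tokens - 1):, :]], axis=0)
    for graph, tokens, s in zip(per_graph, seq_enc_output, source_len)
]
node_features = tf.concat(merged, axis=0)
print(node_features.shape)  # (7, 8): per-graph node counts are preserved

As long as each source_len is at most n_tokens - 1 and at most the graph's node count, the number of rows per graph is unchanged, so node_to_graph_map and the adjacency lists remain valid.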
Example #2
    def call(self, inputs, training: bool = False):
        node_labels_embedded = self._embedding_layer(inputs["node_features"],
                                                     training=training)

        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types))

        # Run the GNN to obtain the final node representations:
        gnn_input = GNNInput(node_features=node_labels_embedded,
                             num_graphs=inputs['num_graphs_in_batch'],
                             node_to_graph_map=inputs['node_to_graph_map'],
                             adjacency_lists=adjacency_lists)
        final_node_representations = self._gnn(gnn_input, training=training)
        # The "* 1" appears to be a workaround that keeps the gather gradient
        # dense (avoiding a sparse IndexedSlices gradient):
        argument_representations = tf.gather(
            params=final_node_representations * 1,
            indices=inputs["node_argument"])
        return self.compute_task_output(inputs, argument_representations,
                                        training)
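Example #2 then reads off the rows of the node-representation matrix that correspond to argument nodes with tf.gather. A minimal sketch of that selection step, with invented sizes and indices:

import tensorflow as tf

# Pretend GNN output: 6 nodes with 4-dimensional representations (invented).
final_node_representations = tf.random.normal((6, 4))

# Indices of the argument nodes we want predictions for (invented).
node_argument = tf.constant([1, 4, 5])

argument_representations = tf.gather(final_node_representations, node_argument)
print(argument_representations.shape)  # (3, 4)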
Example #3
    def build(self, input_shapes: Dict[str, Any]):
        # Collect the "gnn_"-prefixed hyperparameters and strip the prefix:
        graph_params = {
            name[4:]: value
            for name, value in self._params.items() if name.startswith("gnn_")
        }
        self._gnn = GNN(graph_params)
        self._gnn.build(
            GNNInput(
                node_features=self.get_initial_node_feature_shape(
                    input_shapes),
                adjacency_lists=tuple(
                    input_shapes[f"adjacency_list_{edge_type_idx}"]
                    for edge_type_idx in range(self._num_edge_types)),
                node_to_graph_map=tf.TensorShape((None, )),
                num_graphs=tf.TensorShape(()),
            ))

        super().build([])

        if not self._disable_tf_function_build:
            # Compile the run step into a tf.function with a fixed input
            # signature so that it is not retraced for every new batch shape:
            setattr(
                self,
                "_fast_run_step",
                tf.function(input_signature=(
                    self._batch_feature_spec,
                    self._batch_label_spec,
                    tf.TensorSpec(shape=(), dtype=tf.bool),
                ))(self._fast_run_step),
            )
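The graph_params comprehension used in Examples #3, #7 and #9 forwards only the gnn_-prefixed hyperparameters to the GNN constructor, with the prefix stripped via name[4:]. A standalone illustration with made-up parameter names and values:

# Made-up hyperparameter dictionary, for illustration only.
params = {
    "gnn_hidden_dim": 64,    # forwarded to the GNN as "hidden_dim"
    "gnn_num_layers": 4,     # forwarded to the GNN as "num_layers"
    "learning_rate": 1e-3,   # no "gnn_" prefix, so not forwarded
}

graph_params = {
    name[4:]: value
    for name, value in params.items() if name.startswith("gnn_")
}
assert graph_params == {"hidden_dim": 64, "num_layers": 4}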
Example #4
    def build(self, input_shapes):
        # Build the node embedding layer:
        with tf.name_scope("Node_embedding_layer"):
            self._embedding_layer.build(tf.TensorShape((None, )))
        # Build the GNN layers:
        self._gnn.build(
            GNNInput(
                node_features=tf.TensorShape(
                    (None, self._params["node_label_embedding_size"])),
                adjacency_lists=tuple(
                    input_shapes[f"adjacency_list_{edge_type_idx}"]
                    for edge_type_idx in range(self._num_edge_types)),
                node_to_graph_map=tf.TensorShape((None, )),
                num_graphs=tf.TensorShape(()),
            ))
        # Build the task-specific layers:
        with tf.name_scope("Argument_repr_to_regression_layer"):
            self._argument_repr_to_regression_layer.build(
                tf.TensorShape((None, self._params["hidden_dim"])))
        with tf.name_scope("regression_layer_1"):
            self._regression_layer_1.build(
                tf.TensorShape(
                    (None, self._params["regression_hidden_layer_size"][0])))
        with tf.name_scope("Argument_regression_layer"):
            self._argument_output_layer.build(
                tf.TensorShape(
                    (None, self._params["regression_hidden_layer_size"][1])))
        # Bypass GraphTaskModel.build(), which would construct a second GNN layer:
        super().build_horn_graph_gnn()
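Example #4 builds every Keras layer explicitly with a known input shape instead of letting the first call trigger the build. A minimal sketch of that pattern, with an invented layer width and input size:

import tensorflow as tf

regression_layer = tf.keras.layers.Dense(32, activation="relu")

# Create the layer's weights up front for inputs of shape (batch, 64),
# rather than deferring weight creation to the first call:
with tf.name_scope("regression_layer_1"):
    regression_layer.build(tf.TensorShape((None, 64)))

print([w.shape for w in regression_layer.weights])  # kernel (64, 32) and bias (32,)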
Example #5
    def call(self, inputs, training: bool = False):
        node_labels_embedded = self._embedding_layer(inputs["node_features"],
                                                     training=training)

        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types))

        # Run the GNN to obtain the final node representations:
        gnn_input = GNNInput(node_features=node_labels_embedded,
                             num_graphs=inputs['num_graphs_in_batch'],
                             node_to_graph_map=inputs['node_to_graph_map'],
                             adjacency_lists=adjacency_lists)
        final_node_representations = self._gnn(gnn_input, training=training)
        label_type = self._params["label_type"]
        if label_type in ("argument_identify", "control_location_identify"):
            return self.compute_task_output(inputs, final_node_representations,
                                            training)
        elif label_type == "argument_identify_no_batchs":
            # Gather only the representations of the nodes we predict for:
            current_node_representations = tf.gather(
                params=final_node_representations * 1,
                indices=inputs["current_node_index"])
            return self.compute_task_output(inputs,
                                            current_node_representations,
                                            training)
Example #6
    def call(self, inputs, inputs2, inputs3, training: bool):
        # Pack input data from keys back into a tuple:
        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types)
        )

        # Start the model computations:
        initial_node_features = self.compute_initial_node_features(inputs, training)
        gnn_input = GNNInput(
            node_features=initial_node_features,
            adjacency_lists=adjacency_lists,
            node_to_graph_map=inputs["node_to_graph_map"],
            num_graphs=inputs["num_graphs_in_batch"],
        )

        gnn_output_1 = self._gnn(
            gnn_input,
            training=training,
            return_all_representations=self._use_intermediate_gnn_results
        )
        # Second graph batch: run the same (weight-sharing) GNN on inputs2.
        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs2[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types)
        )

        # Start the model computations:
        initial_node_features = self.compute_initial_node_features(inputs2, training)
        gnn_input = GNNInput(
            node_features=initial_node_features,
            adjacency_lists=adjacency_lists,
            node_to_graph_map=inputs2["node_to_graph_map"],
            num_graphs=inputs2["num_graphs_in_batch"],
        )

        gnn_output_2 = self._gnn(
            gnn_input,
            training=training,
            return_all_representations=self._use_intermediate_gnn_results
        )
        return self.compute_task_output_new(
            inputs, gnn_output_1, inputs2, gnn_output_2, inputs3, training)
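Example #6 runs self._gnn on two different graph batches; because both passes go through the same layer object, they share a single set of weights (a siamese-style setup). The sketch below demonstrates the same sharing effect with a plain Dense layer standing in for the GNN:

import tensorflow as tf

shared = tf.keras.layers.Dense(8)

x1 = tf.random.normal((4, 16))
x2 = tf.random.normal((6, 16))

out1 = shared(x1)  # first call creates the kernel and bias
out2 = shared(x2)  # second call reuses the same variables

assert len(shared.trainable_weights) == 2  # one kernel + one bias, not two sets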
Example #7
    def build(self, input_shapes: Dict[str, Any]):
        graph_params = {
            name[4:]: value
            for name, value in self._params.items() if name.startswith("gnn_")
        }
        self._gnn = GNN(graph_params)
        self._gnn.build(
            GNNInput(
                node_features=self.get_initial_node_feature_shape(
                    input_shapes),
                adjacency_lists=tuple(
                    input_shapes[f"adjacency_list_{edge_type_idx}"]
                    for edge_type_idx in range(self._num_edge_types)),
                node_to_graph_map=tf.TensorShape((None, )),
                num_graphs=tf.TensorShape(()),
            ))
        super().build([])
Example #8
    def call(self, inputs, training: bool):
        # Pack input data from keys back into a tuple:
        adjacency_lists: Tuple[tf.Tensor, ...] = tuple(
            inputs[f"adjacency_list_{edge_type_idx}"]
            for edge_type_idx in range(self._num_edge_types))

        # Start the model computations:
        initial_node_features = self.compute_initial_node_features(
            inputs, training)
        gnn_input = GNNInput(
            node_features=initial_node_features,
            adjacency_lists=adjacency_lists,
            node_to_graph_map=inputs["node_to_graph_map"],
            num_graphs=inputs["num_graphs_in_batch"],
        )
        final_node_representations = self._gnn(gnn_input, training)
        return self.compute_task_output(inputs, final_node_representations,
                                        training)
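All of the call methods above consume a flat dictionary of batched tensors. The sketch below shows roughly what such a batch could look like for a single edge type; the keys mirror those used in the snippets, while the shapes, dtypes and values are assumptions chosen for illustration:

import tensorflow as tf

# A toy minibatch of two graphs (3 nodes + 2 nodes) with one edge type.
inputs = {
    "node_features": tf.random.normal((5, 16)),
    # Edges as (source, target) node-index pairs for edge type 0:
    "adjacency_list_0": tf.constant([[0, 1], [1, 2], [3, 4]], dtype=tf.int32),
    # Graph id of each node:
    "node_to_graph_map": tf.constant([0, 0, 0, 1, 1], dtype=tf.int32),
    "num_graphs_in_batch": tf.constant(2, dtype=tf.int32),
}

Here node_features is shown as a dense float matrix; in the embedding-based variants (Examples #2 and #5) it would instead hold integer node-label ids that the embedding layer looks up.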
Example #9
    def build(self, input_shapes: Dict[str, Any]):
        graph_params = {
            name[4:]: value for name, value in self._params.items() if name.startswith("gnn_")
        }
        self.embedding = tf.keras.layers.Embedding(self.vocab_size, self._params["token_embedding_size"])
        self._gnn = GNN(graph_params)
        self._gnn.build(
            GNNInput(
                node_features=self.get_initial_node_feature_shape(input_shapes),
                adjacency_lists=tuple(
                    input_shapes[f"adjacency_list_{edge_type_idx}"]
                    for edge_type_idx in range(self._num_edge_types)
                ),
                node_to_graph_map=tf.TensorShape((None,)),
                num_graphs=tf.TensorShape(()),
            )
        )

        with tf.name_scope(self._name):
            self._node_to_graph_repr_layer = WeightedSumGraphRepresentation(
                graph_representation_size=self._params["graph_aggregation_size"],
                num_heads=self._params["graph_aggregation_num_heads"],
                scoring_mlp_layers=self._params["graph_aggregation_hidden_layers"],
                scoring_mlp_dropout_rate=self._params["graph_aggregation_dropout_rate"],
                transformation_mlp_layers=self._params["graph_aggregation_hidden_layers"],
                transformation_mlp_dropout_rate=self._params["graph_aggregation_dropout_rate"],
            )
            self._node_to_graph_repr_layer.build(
                NodesToGraphRepresentationInput(
                    # Width = initial node-feature size + GNN hidden size:
                    node_embeddings=tf.TensorShape(
                        (None, input_shapes["node_features"][-1] + self._params["gnn_hidden_dim"])
                    ),
                    node_to_graph_map=tf.TensorShape((None,)),
                    num_graphs=tf.TensorShape(()),
                )
            )

            self._graph_repr_layer = tf.keras.layers.Dense(
                self._params["graph_encoding_size"], use_bias=True
            )
            self._graph_repr_layer.build(
                tf.TensorShape((None, self._params["graph_aggregation_size"]))
            )
        super().build([])
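One detail worth noting in these build methods is the shape notation: tf.TensorShape((None,)) describes a rank-1 shape of unknown length, whereas tf.TensorShape((None)) collapses to tf.TensorShape(None), i.e. a shape of completely unknown rank, because (None) is just None in Python. A quick check:

import tensorflow as tf

rank_one = tf.TensorShape((None,))  # rank 1, unknown length
unknown = tf.TensorShape((None))    # (None) == None, so the rank is unknown

print(rank_one.rank)  # 1
print(unknown.rank)   # None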