def get_data():
  """Builds zero-padded train and generalization batches.

  Each batch is produced by `create_data`, then has its edge and global
  features (inputs) and global features (targets) replaced with size-1
  zero features via `utils_tf`.

  Returns:
    A 6-tuple of (inputs_tr, targets_tr, sort_indices_tr,
    inputs_ge, targets_ge, sort_indices_ge).
  """

  def _prepare(batch_size, num_elements_min_max):
    # Shared preparation for one batch: create data, then zero-pad the
    # feature fields the model does not consume as raw inputs.
    inputs, targets, sort_indices, _ = create_data(
        batch_size, num_elements_min_max)
    inputs = utils_tf.set_zero_edge_features(inputs, 1)
    inputs = utils_tf.set_zero_global_features(inputs, 1)
    targets = utils_tf.set_zero_global_features(targets, 1)
    return inputs, targets, sort_indices

  # Training batch.
  inputs_tr, targets_tr, sort_indices_tr = _prepare(
      batch_size_tr, num_elements_min_max_tr)
  # Test/generalization batch.
  inputs_ge, targets_ge, sort_indices_ge = _prepare(
      batch_size_ge, num_elements_min_max_ge)
  return (inputs_tr, targets_tr, sort_indices_tr,
          inputs_ge, targets_ge, sort_indices_ge)
def test_fill_edge_state_with_missing_fields_raises(self):
  """Edge field cannot be filled if receivers or senders are missing.

  Strips connectivity ("receivers", "senders") and "edges" from every
  input dict, then checks that `set_zero_edge_features` raises a
  `ValueError` mentioning "receivers".
  """
  for g in self.graphs_dicts_in:
    g.pop("receivers")
    g.pop("senders")
    g.pop("edges")
  graphs_tuple = utils_tf.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
  # Fix: `assertRaisesRegexp` is a deprecated alias (removed in
  # Python 3.12); use the canonical `assertRaisesRegex` instead.
  with self.assertRaisesRegex(ValueError, "receivers"):
    utils_tf.set_zero_edge_features(graphs_tuple, edge_size=1)
def test_fill_edge_state(self, edge_size):
  """Tests that zero edge features of the requested size are created."""
  # Remove existing edge features so they can be (re)filled.
  for graph_dict in self.graphs_dicts_in:
    graph_dict.pop("edges")
  graphs_tuple = utils_tf.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
  graphs_tuple = utils_tf.set_zero_edge_features(graphs_tuple, edge_size)
  expected_edge_count = np.sum(self.reference_graph.n_edge)
  self.assertAllEqual(
      (expected_edge_count, edge_size),
      graphs_tuple.edges.get_shape().as_list())
def test_fill_state_user_specified_types(self, dtype):
  """Tests that filled features use the user-provided dtype."""
  # Drop all feature fields so each can be re-created with `dtype`.
  for graph_dict in self.graphs_dicts_in:
    for field in ("nodes", "globals", "edges"):
      graph_dict.pop(field)
  graphs_tuple = utils_tf.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
  graphs_tuple = utils_tf.set_zero_edge_features(graphs_tuple, 1, dtype)
  graphs_tuple = utils_tf.set_zero_node_features(graphs_tuple, 1, dtype)
  graphs_tuple = utils_tf.set_zero_global_features(graphs_tuple, 1, dtype)
  for filled in (graphs_tuple.edges, graphs_tuple.nodes,
                 graphs_tuple.globals):
    self.assertEqual(dtype, filled.dtype)
def test_fill_edge_state_dynamic(self, edge_size):
  """Tests zero edge features when `n_edge` is only known dynamically."""
  # Remove existing edge features so they can be (re)filled.
  for graph_dict in self.graphs_dicts_in:
    graph_dict.pop("edges")
  graphs_tuple = utils_tf.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
  # Re-wrap `n_edge` in a tf.constant so the edge count comes from a
  # tensor rather than a static value.
  dynamic_n_edge = tf.constant(
      graphs_tuple.n_edge, shape=graphs_tuple.n_edge.get_shape())
  graphs_tuple = graphs_tuple._replace(n_edge=dynamic_n_edge)
  graphs_tuple = utils_tf.set_zero_edge_features(graphs_tuple, edge_size)
  expected_edge_count = np.sum(self.reference_graph.n_edge)
  self.assertNDArrayNear(
      np.zeros((expected_edge_count, edge_size)),
      graphs_tuple.edges,
      err=1e-4)
def test_fill_state_default_types(self):
  """Tests that filled features default to tf.float32."""
  # Drop all feature fields so each can be re-created with default dtype.
  for graph_dict in self.graphs_dicts_in:
    for field in ("nodes", "globals", "edges"):
      graph_dict.pop(field)
  graphs_tuple = utils_tf.data_dicts_to_graphs_tuple(self.graphs_dicts_in)
  graphs_tuple = utils_tf.set_zero_edge_features(graphs_tuple, edge_size=1)
  graphs_tuple = utils_tf.set_zero_node_features(graphs_tuple, node_size=1)
  graphs_tuple = utils_tf.set_zero_global_features(
      graphs_tuple, global_size=1)
  for filled in (graphs_tuple.edges, graphs_tuple.nodes,
                 graphs_tuple.globals):
    self.assertEqual(tf.float32, filled.dtype)
def _build(self, v, num_processing_steps, is_training):
  """Encodes keypoints as a fully-connected graph, runs message passing,
  and decodes the final latent graph.

  Args:
    v: Keypoint input; converted to a graphs tuple by
      `rl_loss.get_graph_tuple` (exact structure defined by that helper —
      TODO confirm).
    num_processing_steps: Number of interaction-core rounds to run.
    is_training: Whether sub-modules run in training mode.

  Returns:
    The decoder's output for the last latent graph.
  """
  # simply use kpts as nodes in the graph (no top-down attn)
  input_graphs = rl_loss.get_graph_tuple(v)
  # pre-process graphs-tuple data: connect every node pair (including
  # self-edges) and give edges zero features sized to the latent width.
  input_graphs = utils_tf.fully_connect_graph_static(
      input_graphs, exclude_self_edges=False)
  input_graphs = utils_tf.set_zero_edge_features(
      input_graphs, edge_size=self._latent_size)
  # encode input graphs
  latent = self._encoder(input_graphs, is_training)
  delta_latent = latent
  # measure interaction-effects with keypoints as nodes
  # NOTE(review): every iteration feeds `latent` (the encoder output) into
  # the interaction core, not `delta_latent`, so each round recomputes
  # from the same input — confirm whether the core was meant to consume
  # `delta_latent` for true multi-step message passing.
  for _ in range(num_processing_steps):
    delta_latent = self._interaction_core(
        latent,
        edge_model_kwargs={"is_training": is_training},
        node_model_kwargs={"is_training": is_training})
  # decode last round latent graph
  output = self._decoder(delta_latent, is_training)
  return output
def zeros_graph(sample_graph, edge_size, node_size, global_size):
  """Returns a graph shaped like `sample_graph` with all-zero features.

  Args:
    sample_graph: Graph whose structure (connectivity and counts) is kept.
    edge_size: Feature size for the zeroed edge features.
    node_size: Feature size for the zeroed node features.
    global_size: Feature size for the zeroed global features.

  Returns:
    A copy of `sample_graph` whose edge, node, and global features are
    zero tensors of the requested sizes.
  """
  # Clear existing features, then fill each field with zeros of the
  # requested size.
  blank = sample_graph.replace(nodes=None, edges=None, globals=None)
  blank = utils_tf.set_zero_edge_features(blank, edge_size)
  blank = utils_tf.set_zero_node_features(blank, node_size)
  blank = utils_tf.set_zero_global_features(blank, global_size)
  return blank