  def test_shapes(self):
    """Simple test that Graph topology placeholders have correct shapes."""
    n_atoms = 5
    n_feat = 10
    batch_size = 3
    max_deg = 6
    min_deg = 0
    topology = GraphTopology(n_feat)

    # Degrees from 1 to max_deg inclusive 
    # TODO(rbharath): Should this be 0 to max_deg inclusive?
    deg_adj_lists_placeholders = topology.get_deg_adjacency_lists_placeholders()
    assert len(deg_adj_lists_placeholders) == max_deg 
    for ind, deg_adj_list in enumerate(deg_adj_lists_placeholders):
      deg = ind + 1
      # Should have shape (?, deg)
      assert deg_adj_list.get_shape()[1] == deg

    # Shape of atom_features should be (?, n_feat)
    atom_features = topology.get_atom_features_placeholder()
    assert atom_features.get_shape()[1] == n_feat

    # Shape of deg_slice placeholder should be (max_deg+1-min_deg, 2)
    deg_slice = topology.get_deg_slice_placeholder()
    print("deg_slice.get_shape()")
    print(deg_slice.get_shape())
    assert deg_slice.get_shape() == (max_deg+1-min_deg, 2)
    def test_graph_pool(self):
        """Tests that GraphPool transforms shapes correctly."""
        n_atoms = 5
        n_feat = 10
        batch_size = 3
        nb_filter = 7
        with self.test_session() as sess:
            graph_topology = GraphTopology(n_feat)
            graph_pool_layer = GraphPool()

            X = graph_topology.get_input_placeholders()
            out = graph_pool_layer(X)
            # GraphPool max-pools each atom's features with its neighbors',
            # preserving the feature dimension, so output should be (?, n_feat)
            assert out.get_shape()[1] == n_feat
Example #4
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    #self.graph_topology = GraphTopology(n_atoms, n_feat)
    self.graph_topology = GraphTopology(n_feat)
    self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []
Example #5
class SequentialGraphModel(object):
    """An analog of the Keras Sequential class for Graph data.

    Like the Sequential class from Keras, but automatically passes topology
    placeholders from GraphTopology to each graph layer (from keras_layers) added
    to the network. Non-graph layers don't get the extra placeholders.
    """

    def __init__(self, n_feat):
        """
        Parameters
        ----------
        n_feat: int
          Number of features per atom.
        """
        #self.graph_topology = GraphTopology(n_atoms, n_feat)
        self.graph_topology = GraphTopology(n_feat)
        self.output = self.graph_topology.get_atom_features_placeholder()
        # Keep track of the layers
        self.layers = []

    def add(self, layer):
        """Adds a new layer to the model."""
        # For graphical layers, add connectivity placeholders
        if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
            if (len(self.layers) > 0 and hasattr(self.layers[-1], "__name__")):
                assert self.layers[-1].__name__ != "GraphGather", \
                    'Cannot use GraphConv or GraphGather layers after a GraphGather'

            self.output = layer(
                [self.output] +
                self.graph_topology.get_topology_placeholders())
        else:
            self.output = layer(self.output)

        # Add layer to the layer list
        self.layers.append(layer)

    def get_graph_topology(self):
        return self.graph_topology

    def get_num_output_features(self):
        """Gets the output shape of the featurization layers of the network."""
        return self.layers[-1].output_shape[1]

    def return_outputs(self):
        return self.output

    def return_inputs(self):
        return self.graph_topology.get_input_placeholders()

    def get_layer(self, layer_id):
        return self.layers[layer_id]
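A minimal usage sketch for the class above, assuming the GraphConv, GraphPool, and GraphGather layers shown in the other examples and a Keras-1-era Dense import; the layer sizes and batch size are illustrative, not taken from the original source.

from keras.layers import Dense

n_feat = 75       # illustrative number of per-atom features
batch_size = 32   # illustrative batch size

model = SequentialGraphModel(n_feat)
model.add(GraphConv(64))                  # graph layer: receives [output] + topology placeholders
model.add(GraphPool())                    # graph layer: max-pool over each atom's neighbors
model.add(Dense(128, activation='relu'))  # non-graph layer: called on self.output only
model.add(GraphGather(batch_size))        # per-molecule readout; keep last among graph layers

inputs = model.return_inputs()            # atom feature + topology placeholders to feed
outputs = model.return_outputs()          # final symbolic output tensor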
    def test_graph_convolution(self):
        """Tests that Graph Convolution transforms shapes correctly."""
        n_atoms = 5
        n_feat = 10
        nb_filter = 7
        with self.test_session() as sess:
            graph_topology = GraphTopology(n_feat)
            graph_conv_layer = GraphConv(nb_filter)

            X = graph_topology.get_input_placeholders()
            out = graph_conv_layer(X)
            # Output should be of shape (?, nb_filter)
            assert out.get_shape()[1] == nb_filter
Example #7
class SequentialGraph(object):
  """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from keras_layers) added
  to the network. Non graph layers don't get the extra placeholders. 
  """
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    #self.graph_topology = GraphTopology(n_atoms, n_feat)
    self.graph_topology = GraphTopology(n_feat)
    self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []  

  def add(self, layer):
    """Adds a new layer to model."""
    # For graphical layers, add connectivity placeholders 
    if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
      if (len(self.layers) > 0 and hasattr(self.layers[-1], "__name__")):
        assert self.layers[-1].__name__ != "GraphGather", \
                'Cannot use GraphConv or GraphGather layers after a GraphGather'
          
      self.output = layer(
          [self.output] + self.graph_topology.get_topology_placeholders())
    else:
      self.output = layer(self.output)

    # Add layer to the layer list
    self.layers.append(layer)

  def get_graph_topology(self):
    return self.graph_topology

  def get_num_output_features(self):
    """Gets the output shape of the featurization layers of the network"""
    return self.layers[-1].output_shape[1]
  
  def return_outputs(self):
    return self.output

  def return_inputs(self):
    return self.graph_topology.get_input_placeholders()

  def get_layer(self, layer_id):
    return self.layers[layer_id]
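The add method above expects GraphGather to be the last graph layer added, since it collapses per-atom rows into one row per molecule; a short illustrative sketch of that ordering, with made-up sizes:

n_feat = 75
batch_size = 32

g = SequentialGraph(n_feat)
g.add(GraphConv(64))             # per-atom convolution over the degree adjacency lists
g.add(GraphPool())               # per-atom max pool over neighbors
g.add(GraphGather(batch_size))   # per-molecule readout: roughly (batch_size, 64) for these sizes
# Adding another GraphConv or GraphPool at this point is exactly the ordering
# that the assert inside add() is meant to reject.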
    def test_graph_gather(self):
        """Tests that GraphGather transforms shapes correctly."""
        n_atoms = 5
        n_feat = 10
        batch_size = 3
        nb_filter = 7
        with self.test_session() as sess:
            graph_topology = GraphTopology(n_feat)
            graph_gather_layer = GraphGather(batch_size)

            X = graph_topology.get_input_placeholders()
            out = graph_gather_layer(X)
            # Output should be of shape (batch_size, n_feat)
            assert out.get_shape() == (batch_size, n_feat)
Example #9
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    # Create graph topology and x
    self.test_graph_topology = GraphTopology(n_feat, name='test')
    self.support_graph_topology = GraphTopology(n_feat, name='support')
    self.test = self.test_graph_topology.get_atom_features_placeholder()
    self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []  
    # True until a GraphGather layer has been added to the model
    self.bool_pre_gather = True
Example #10
    def test_attn_lstm_embedding(self):
        """Test that attention LSTM computation works properly."""
        max_depth = 5
        n_test = 5
        n_support = 11
        n_feat = 10
        nb_filter = 7
        with self.test_session() as sess:
            graph_topology_test = GraphTopology(n_feat)
            graph_topology_support = GraphTopology(n_feat)

            test = graph_topology_test.get_input_placeholders()[0]
            support = graph_topology_support.get_input_placeholders()[0]

            attn_embedding_layer = AttnLSTMEmbedding(n_test, n_support,
                                                     max_depth)
            # Build zero-valued feeds for the test and support placeholders
            feed_dict = {
                test: np.zeros((n_test, n_feat)),
                support: np.zeros((n_support, n_feat))
            }
            # Pass the two placeholder sets through the attention LSTM embedding
            test_out, support_out = attn_embedding_layer([test, support])
            assert test_out.get_shape() == (n_test, n_feat)
            assert support_out.get_shape()[1] == n_feat
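            # Hedged sketch, not part of the original snippet: the feed_dict above
            # is never actually run. Under the TF1-style session API assumed by
            # these tests it could be evaluated roughly as follows (the variable
            # initializer name differs across old TensorFlow releases):
            sess.run(tf.global_variables_initializer())
            test_vals, support_vals = sess.run(
                [test_out, support_out], feed_dict=feed_dict)
            assert test_vals.shape == (n_test, n_feat)
            assert support_vals.shape == (n_support, n_feat)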
Example #11
class SequentialSupportGraph(object):
  """An analog of Keras Sequential model for test/support models."""
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    # Create graph topology and x
    self.test_graph_topology = GraphTopology(n_feat, name='test')
    self.support_graph_topology = GraphTopology(n_feat, name='support')
    self.test = self.test_graph_topology.get_atom_features_placeholder()
    self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []  
    # True until a GraphGather layer has been added to the model
    self.bool_pre_gather = True

  def add(self, layer):
    """Adds a layer to both test/support stacks.

    Note that the layer transformation is performed independently on the
    test/support tensors.
    """
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
      assert self.bool_pre_gather, "Cannot apply graphical layers after gather."
          
      self.test = layer([self.test] + self.test_graph_topology.topology)
      self.support = layer([self.support] + self.support_graph_topology.topology)
    else:
      self.test = layer(self.test)
      self.support = layer(self.support)

    if type(layer).__name__ == 'GraphGather':
      self.bool_pre_gather = False  # Set flag to stop adding topology

  def add_test(self, layer):
    """Adds a layer to test."""
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
      self.test = layer([self.test] + self.test_graph_topology.topology)
    else:
      self.test = layer(self.test)

  def add_support(self, layer):
    """Adds a layer to support."""
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
      self.support = layer([self.support] + self.support_graph_topology.topology)
    else:
      self.support = layer(self.support)

  def join(self, layer):
    """Joins test and support to a two input two output layer"""
    self.layers.append(layer)
    self.test, self.support = layer([self.test, self.support])

  def get_test_output(self):
    return self.test

  def get_support_output(self):
    return self.support
  
  def return_outputs(self):
    return [self.test] + [self.support]

  def return_inputs(self):
    return (self.test_graph_topology.get_inputs()
            + self.support_graph_topology.get_inputs())
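A minimal usage sketch for the support/test model above, assuming the GraphConv, GraphPool, GraphGather, and AttnLSTMEmbedding layers that appear in the other examples; all sizes are illustrative only.

n_feat = 75
test_batch_size = 10
support_batch_size = 10
max_depth = 3

support_model = SequentialSupportGraph(n_feat)
# Shared featurization, applied independently to the test and support streams
support_model.add(GraphConv(64))
support_model.add(GraphPool())
# Per-molecule readout on each stream
support_model.add_test(GraphGather(test_batch_size))
support_model.add_support(GraphGather(support_batch_size))
# Joint two-input, two-output layer that attends between the two streams
support_model.join(AttnLSTMEmbedding(test_batch_size, support_batch_size, max_depth))

test_out = support_model.get_test_output()
support_out = support_model.get_support_output()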