def test_shapes(self):
    """Simple test that Graph topology placeholders have correct shapes."""
    n_atoms = 5
    n_feat = 10
    batch_size = 3
    max_deg = 10
    min_deg = 0
    topology = GraphTopology(n_feat)

    # Degrees from 1 to max_deg inclusive 
    # TODO(rbharath): Should this be 0 to max_deg inclusive?
    deg_adj_lists_placeholders = topology.get_deg_adjacency_lists_placeholders()
    assert len(deg_adj_lists_placeholders) == max_deg 
    for ind, deg_adj_list in enumerate(deg_adj_lists_placeholders):
      deg = ind + 1
      # Should have shape (?, deg)
      assert deg_adj_list.get_shape()[1] == deg

    # Shape of atom_features should be (?, n_feat)
    atom_features = topology.get_atom_features_placeholder()
    assert atom_features.get_shape()[1] == n_feat

    # Shape of deg_slice placeholder should be (max_deg+1-min_deg, 2)
    deg_slice = topology.get_deg_slice_placeholder()
    print("deg_slice.get_shape()")
    print(deg_slice.get_shape())
    assert deg_slice.get_shape() == (max_deg+1-min_deg, 2)
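
For context, a small sketch of the arrays these placeholders are meant to hold. The [start, size] reading of deg_slice follows DeepChem's degree-sorted atom convention, and the toy values are made up:

import numpy as np

n_feat, min_deg, max_deg = 10, 0, 10
# Toy molecule: 5 atoms sorted by degree -- one of degree 1, three of
# degree 2, one of degree 3.
atom_features = np.random.rand(5, n_feat)  # feeds the (?, n_feat) placeholder
deg_slice = np.zeros((max_deg + 1 - min_deg, 2), dtype=np.int32)
deg_slice[1] = [0, 1]  # degree-1 atoms: start at row 0, count 1
deg_slice[2] = [1, 3]  # degree-2 atoms: start at row 1, count 3
deg_slice[3] = [4, 1]  # degree-3 atoms: start at row 4, count 1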
Example #2
class SequentialGraph(object):
    """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non graph layers don't get the extra placeholders. 
  """
    def __init__(self, n_feat):
        """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
        warnings.warn(
            "SequentialGraph is deprecated. "
            "Will be removed in DeepChem 1.4.", DeprecationWarning)
        self.graph = tf.Graph()
        with self.graph.as_default():
            self.graph_topology = GraphTopology(n_feat)
            self.output = self.graph_topology.get_atom_features_placeholder()
        # Keep track of the layers
        self.layers = []

    def add(self, layer):
        """Adds a new layer to model."""
        with self.graph.as_default():
            # For graphical layers, add connectivity placeholders
            if type(layer).__name__ in [
                    'GraphConv', 'GraphGather', 'GraphPool'
            ]:
                if (len(self.layers) > 0
                        and hasattr(self.layers[-1], "__name__")):
                    assert self.layers[-1].__name__ != "GraphGather", \
                            'Cannot use GraphConv or GraphGather layers after a GraphGather'

                self.output = layer(
                    [self.output] +
                    self.graph_topology.get_topology_placeholders())
            else:
                self.output = layer(self.output)

            # Add layer to the layer list
            self.layers.append(layer)

    def get_graph_topology(self):
        return self.graph_topology

    def get_num_output_features(self):
        """Gets the output shape of the featurization layers of the network"""
        return self.layers[-1].output_shape[1]

    def return_outputs(self):
        return self.output

    def return_inputs(self):
        return self.graph_topology.get_input_placeholders()

    def get_layer(self, layer_id):
        return self.layers[layer_id]

Example #3
    def test_shapes(self):
        """Simple test that Graph topology placeholders have correct shapes."""
        n_atoms = 5
        n_feat = 10
        batch_size = 3
        max_deg = 10
        min_deg = 0
        topology = GraphTopology(n_feat)

        # Degrees from 1 to max_deg inclusive
        # TODO(rbharath): Should this be 0 to max_deg inclusive?
        deg_adj_lists_placeholders = topology.get_deg_adjacency_lists_placeholders()
        assert len(deg_adj_lists_placeholders) == max_deg
        for ind, deg_adj_list in enumerate(deg_adj_lists_placeholders):
            deg = ind + 1
            # Should have shape (?, deg)
            assert deg_adj_list.get_shape()[1] == deg

        # Shape of atom_features should be (?, n_feat)
        atom_features = topology.get_atom_features_placeholder()
        assert atom_features.get_shape()[1] == n_feat

        # Shape of deg_slice placeholder should be (max_deg+1-min_deg, 2)
        deg_slice = topology.get_deg_slice_placeholder()
        print("deg_slice.get_shape()")
        print(deg_slice.get_shape())
        assert deg_slice.get_shape() == (max_deg + 1 - min_deg, 2)
Example #4
class SequentialGraph(object):
  """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non-graph layers don't get the extra placeholders.
  """

  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    warnings.warn("SequentialGraph is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.graph = tf.Graph()
    with self.graph.as_default():
      self.graph_topology = GraphTopology(n_feat)
      self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []

  def add(self, layer):
    """Adds a new layer to model."""
    with self.graph.as_default():
      # For graphical layers, add connectivity placeholders
      if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
        if (len(self.layers) > 0 and hasattr(self.layers[-1], "__name__")):
          assert self.layers[-1].__name__ != "GraphGather", \
                  'Cannot use GraphConv or GraphGather layers after a GraphGather'

        self.output = layer([self.output] +
                            self.graph_topology.get_topology_placeholders())
      else:
        self.output = layer(self.output)

      # Add layer to the layer list
      self.layers.append(layer)

  def get_graph_topology(self):
    return self.graph_topology

  def get_num_output_features(self):
    """Gets the output shape of the featurization layers of the network"""
    return self.layers[-1].output_shape[1]

  def return_outputs(self):
    return self.output

  def return_inputs(self):
    return self.graph_topology.get_input_placeholders()

  def get_layer(self, layer_id):
    return self.layers[layer_id]
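
A minimal usage sketch for this class. The layer constructors and their arguments follow the legacy deepchem.nn API and are illustrative assumptions, not part of the code above:

from deepchem.nn import GraphConv, GraphPool, GraphGather

n_feat = 75
batch_size = 50
model = SequentialGraph(n_feat)
model.add(GraphConv(64, n_feat, activation='relu'))    # graph layer: receives topology placeholders
model.add(GraphPool())                                 # graph layer
model.add(GraphGather(batch_size, activation='tanh'))  # gathers atoms into per-molecule outputs
outputs = model.return_outputs()  # final symbolic output tensor
inputs = model.return_inputs()    # every placeholder the model must be fed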
Example #5
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    #self.graph_topology = GraphTopology(n_atoms, n_feat)
    self.graph_topology = GraphTopology(n_feat)
    self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []
Example #6
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    warnings.warn("SequentialGraph is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.graph = tf.Graph()
    with self.graph.as_default():
      self.graph_topology = GraphTopology(n_feat)
      self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []
Example #7
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    # Create graph topology and x
    self.test_graph_topology = GraphTopology(n_feat, name='test')
    self.support_graph_topology = GraphTopology(n_feat, name='support')
    self.test = self.test_graph_topology.get_atom_features_placeholder()
    self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []  
    # Whether or not we have used the GraphGather layer yet
    self.bool_pre_gather = True  
Example #8
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    self.graph = tf.Graph()
    #with self.graph.as_default():
    if 1:  # graph context disabled above; 'if 1:' keeps the block's indentation
      self.graph_topology = GraphTopology(n_feat)
      self.output = self.graph_topology.get_atom_features_placeholder()
      self.training = tf.placeholder(dtype='float32',
                                     shape=(),
                                     name='ops_training')
    # Keep track of the layers
    self.layers = []
    self.add_time = 0
Example #9
    def __init__(self, n_feat):
        """
        Parameters
        ----------
        n_feat: int
          Number of atomic features.
        """
        # Create graph topology and x
        self.test_graph_topology = GraphTopology(n_feat, name='test')
        self.support_graph_topology = GraphTopology(n_feat, name='support')
        self.test = self.test_graph_topology.get_atom_features_placeholder()
        self.support = self.support_graph_topology.get_atom_features_placeholder()

        # Keep track of the layers
        self.layers = []
        # Whether or not we have used the GraphGather layer yet
        self.bool_pre_gather = True
Example #10
 def __init__(self, n_feat):
   """
   Parameters
   ----------
   n_feat: int
     Number of features per atom.
   """
   #self.graph_topology = GraphTopology(n_atoms, n_feat)
   self.graph_topology = GraphTopology(n_feat)
   self.output = self.graph_topology.get_atom_features_placeholder()
   # Keep track of the layers
   self.layers = []  
Example #11
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    warnings.warn("SequentialSupportWeaveGraph is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.graph = tf.Graph()
    with self.graph.as_default():
      # Create graph topology and x
      self.test_graph_topology = GraphTopology(n_feat, name='test')
      self.support_graph_topology = GraphTopology(n_feat, name='support')
      self.test = self.test_graph_topology.get_atom_features_placeholder()
      self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []
    # Whether or not we have used the GraphGather layer yet
    self.bool_pre_gather = True
Example #12
    def __init__(self, n_feat):
        """
        Parameters
        ----------
        n_feat: int
          Number of atomic features.
        """
        warnings.warn(
            "SequentialSupportWeaveGraph is deprecated. "
            "Will be removed in DeepChem 1.4.", DeprecationWarning)
        self.graph = tf.Graph()
        with self.graph.as_default():
            # Create graph topology and x
            self.test_graph_topology = GraphTopology(n_feat, name='test')
            self.support_graph_topology = GraphTopology(n_feat, name='support')
            self.test = self.test_graph_topology.get_atom_features_placeholder()
            self.support = self.support_graph_topology.get_atom_features_placeholder()

        # Keep track of the layers
        self.layers = []
        # Whether or not we have used the GraphGather layer yet
        self.bool_pre_gather = True
Example #13
 def __init__(self, n_feat):
   """
   Parameters
   ----------
   n_feat: int
     Number of features per atom.
   """
   warnings.warn("SequentialGraph is deprecated. "
                 "Will be removed in DeepChem 1.4.", DeprecationWarning)
   self.graph = tf.Graph()
   with self.graph.as_default():
     self.graph_topology = GraphTopology(n_feat)
     self.output = self.graph_topology.get_atom_features_placeholder()
   # Keep track of the layers
   self.layers = []
Example #14
        predic = one_hot(np.array(predic), n_class)
        precision, recall, thresholds = metrics.precision_recall_curve(
            y_test.ravel(), predic.ravel())
        # sklearn's metrics.auc takes the x-axis first; for PR-AUC that is
        # recall, not precision.
        micro_auprc = metrics.auc(recall, precision)
    except Exception:
        micro_auprc = 0.
    # tp = np.sum(np.float32(np.logical_and(np.equal(predic, 1), np.equal(auc_y_set, 1))))
    # fp = np.sum(np.float32(np.logical_and(np.equal(predic, 1), np.equal(auc_y_set, 0))))
    # tn = np.sum(np.float32(np.logical_and(np.equal(predic, 0), np.equal(auc_y_set, 0))))
    # fn = np.sum(np.float32(np.logical_and(np.equal(predic, 0), np.equal(auc_y_set, 1))))
    # accuracy = scores(tp, fp, tn, fn)
    # print(auc_y_set)
    return accuracy_1, (accuracy, macro_recall, micro_recall, macro_precision,
                        micro_precision, macro_f1, micro_f1, micro_auprc)
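
A toy illustration of the micro-averaged PR-AUC computed above (plain sklearn; the arrays are made-up data):

import numpy as np
from sklearn import metrics

y_true = np.array([[1, 0], [0, 1], [1, 0]])               # one-hot labels
y_score = np.array([[0.9, 0.1], [0.2, 0.8], [0.4, 0.6]])  # predicted scores
precision, recall, _ = metrics.precision_recall_curve(y_true.ravel(),
                                                      y_score.ravel())
print(metrics.auc(recall, precision))  # micro AUPRC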


with tf.Graph().as_default():
    graphA_topology = GraphTopology(n_features, name='topology_A')
    graphB_topology = GraphTopology(n_features, name='topology_B')
    with tf.name_scope('input'):
        outputA = graphA_topology.get_atom_features_placeholder()
        outputB = graphB_topology.get_atom_features_placeholder()
        label_gold = tf.placeholder(dtype=tf.int32, shape=[None], name="label_placeholder")
        training = tf.placeholder(dtype='float32', shape=(), name='ops_training')
    add_time = 0

    # Two LSTM cells used later in the network (Dense_size and batch_size
    # are defined earlier in this script); the zero tensors are their
    # initial states.
    lstm = tf.contrib.rnn.BasicLSTMCell(Dense_size)
    hidden_state = tf.zeros([batch_size, lstm.state_size[0]])
    current_state = tf.zeros([batch_size, lstm.state_size[0]])

    lstm_1 = tf.contrib.rnn.BasicLSTMCell(Dense_size * 2)
    Hidden_state = tf.zeros([batch_size, lstm_1.state_size[0]])
    Current_state = tf.zeros([batch_size, lstm_1.state_size[0]])
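
A sketch of how the first cell and its zero states would be stepped once, continuing inside the with block above. input_features is a hypothetical input tensor, and pairing current_state with the LSTM cell state c (and hidden_state with h) is an assumption; the snippet itself never shows the pairing:

    input_features = tf.zeros([batch_size, Dense_size])
    state = tf.contrib.rnn.LSTMStateTuple(current_state, hidden_state)
    output, state = lstm(input_features, state)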
Example #15
class SequentialGraph(object):
    """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non graph layers don't get the extra placeholders. 
  """
    def __init__(self, n_feat):
        """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
        self.graph = tf.Graph()
        #with self.graph.as_default():
        if 1:
            self.graph_topology = GraphTopology(n_feat)
            self.output = self.graph_topology.get_atom_features_placeholder()
            self.training = tf.placeholder(dtype='float32',
                                           shape=(),
                                           name='ops_training')
        # Keep track of the layers
        self.layers = []
        self.add_time = 0

    def get_training_state(self):
        return self.training

    def add(self, layer):
        """Adds a new layer to model."""
        #with self.graph.as_default():
        if 1:  # see note in __init__; graph context disabled, indentation kept
            # For graphical layers, add connectivity placeholders
            if type(layer).__name__ in [
                    'GraphResBlock', 'GraphConv', 'GraphGather', 'GraphPool'
            ]:
                if (len(self.layers) > 0
                        and hasattr(self.layers[-1], "__name__")):
                    assert self.layers[-1].__name__ != "GraphGather", \
                            'Cannot use GraphConv or GraphGather layers after a GraphGather'

                self.output = layer(
                    [self.output] +
                    self.graph_topology.get_topology_placeholders() +
                    [self.training] + [self.add_time])
            else:
                self.output = layer(self.output)

            # Add layer to the layer list
            self.layers.append(layer)
            self.add_time = self.add_time + 1

    def get_graph_topology(self):
        return self.graph_topology

    def get_num_output_features(self):
        """Gets the output shape of the featurization layers of the network"""
        return self.layers[-1].output_shape[1]

    def return_outputs(self):
        return self.output

    def return_inputs(self):
        return self.graph_topology.get_input_placeholders()

    def get_layer(self, layer_id):
        return self.layers[layer_id]

Example #16
class SequentialSupportGraph(object):
  """An analog of Keras Sequential model for test/support models."""

  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    self.graph = tf.Graph()
    with self.graph.as_default():
      # Create graph topology and x
      self.test_graph_topology = GraphTopology(n_feat, name='test')
      self.support_graph_topology = GraphTopology(n_feat, name='support')
      self.test = self.test_graph_topology.get_atom_features_placeholder()
      self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []
    # Whether or not we have used the GraphGather layer yet
    self.bool_pre_gather = True

  def add(self, layer):
    """Adds a layer to both test/support stacks.

    Note that the layer transformation is performed independently on the
    test/support tensors.
    """
    with self.graph.as_default():
      self.layers.append(layer)

      # Update new value of x
      if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
        assert self.bool_pre_gather, "Cannot apply graphical layers after gather."

        self.test = layer([self.test] + self.test_graph_topology.topology)
        self.support = layer([self.support] +
                             self.support_graph_topology.topology)
      else:
        self.test = layer(self.test)
        self.support = layer(self.support)

      if type(layer).__name__ == 'GraphGather':
        self.bool_pre_gather = False  # Set flag to stop adding topology

  def add_test(self, layer):
    """Adds a layer to test."""
    with self.graph.as_default():
      self.layers.append(layer)

      # Update new value of x
      if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
        self.test = layer([self.test] + self.test_graph_topology.topology)
      else:
        self.test = layer(self.test)

  def add_support(self, layer):
    """Adds a layer to support."""
    with self.graph.as_default():
      self.layers.append(layer)

      # Update new value of x
      if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
        self.support = layer([self.support] +
                             self.support_graph_topology.topology)
      else:
        self.support = layer(self.support)

  def join(self, layer):
    """Joins test and support to a two input two output layer"""
    with self.graph.as_default():
      self.layers.append(layer)
      self.test, self.support = layer([self.test, self.support])

  def get_test_output(self):
    return self.test

  def get_support_output(self):
    return self.support

  def return_outputs(self):
    return [self.test] + [self.support]

  def return_inputs(self):
    return (self.test_graph_topology.get_inputs() +
            self.support_graph_topology.get_inputs())
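
A minimal usage sketch for the test/support pattern. The layer constructors come from the legacy deepchem.nn API and are illustrative assumptions, as are test_batch_size and support_batch_size:

from deepchem.nn import GraphConv, GraphPool, GraphGather

model = SequentialSupportGraph(n_feat=75)
model.add(GraphConv(64, 75, activation='relu'))     # applied to both stacks
model.add(GraphPool())
model.add_test(GraphGather(test_batch_size))        # test stack only
model.add_support(GraphGather(support_batch_size))  # support stack only
test_out = model.get_test_output()
support_out = model.get_support_output()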
Example #17
class SequentialSupportGraph(object):
  """An analog of Keras Sequential model for test/support models."""
  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of atomic features.
    """
    # Create graph topology and x
    self.test_graph_topology = GraphTopology(n_feat, name='test')
    self.support_graph_topology = GraphTopology(n_feat, name='support')
    self.test = self.test_graph_topology.get_atom_features_placeholder()
    self.support = self.support_graph_topology.get_atom_features_placeholder()

    # Keep track of the layers
    self.layers = []  
    # Whether or not we have used the GraphGather layer yet
    self.bool_pre_gather = True  

  def add(self, layer):
    """Adds a layer to both test/support stacks.

    Note that the layer transformation is performed independently on the
    test/support tensors.
    """
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
      assert self.bool_pre_gather, "Cannot apply graphical layers after gather."
          
      self.test = layer([self.test] + self.test_graph_topology.topology)
      self.support = layer([self.support] + self.support_graph_topology.topology)
    else:
      self.test = layer(self.test)
      self.support = layer(self.support)

    if type(layer).__name__ == 'GraphGather':
      self.bool_pre_gather = False  # Set flag to stop adding topology

  def add_test(self, layer):
    """Adds a layer to test."""
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
      self.test = layer([self.test] + self.test_graph_topology.topology)
    else:
      self.test = layer(self.test)

  def add_support(self, layer):
    """Adds a layer to support."""
    self.layers.append(layer)

    # Update new value of x
    if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
      self.support = layer([self.support] + self.support_graph_topology.topology)
    else:
      self.support = layer(self.support)

  def join(self, layer):
    """Joins test and support to a two input two output layer"""
    self.layers.append(layer)
    self.test, self.support = layer([self.test, self.support])

  def get_test_output(self):
    return self.test

  def get_support_output(self):
    return self.support
  
  def return_outputs(self):
    return [self.test] + [self.support]

  def return_inputs(self):
    return (self.test_graph_topology.get_inputs()
            + self.support_graph_topology.get_inputs())
Example #18
    fp = np.sum(
        np.float32(np.logical_and(np.equal(predic, 1), np.equal(auc_y_set, 0))))
    tn = np.sum(
        np.float32(np.logical_and(np.equal(predic, 0), np.equal(auc_y_set, 0))))
    fn = np.sum(
        np.float32(np.logical_and(np.equal(predic, 0), np.equal(auc_y_set, 1))))
    accuracy = scores(tp, fp, tn, fn)
    # print(auc_y_set)
    return accuracy_1, auc, accuracy


with tf.Graph().as_default():
    graphA_topology = GraphTopology(n_features, name='topology_A')
    graphB_topology = GraphTopology(n_features, name='topology_B')
    with tf.name_scope('input'):
        outputA = graphA_topology.get_atom_features_placeholder()
        outputB = graphB_topology.get_atom_features_placeholder()
        label_gold = tf.placeholder(dtype=tf.int32,
                                    shape=[None],
                                    name="label_placeholder")
        training = tf.placeholder(dtype='float32',
                                  shape=(),
                                  name='ops_training')
    add_time = 0

    lstm = tf.contrib.rnn.BasicLSTMCell(Dense_size)
    hidden_state = tf.zeros([batch_size, lstm.state_size[0]])
    current_state = tf.zeros([batch_size, lstm.state_size[0]])