class SequentialGraph(object):
  """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non graph layers don't get the extra placeholders.
  """

  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    warnings.warn(
        "SequentialGraph is deprecated. "
        "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.graph = tf.Graph()
    with self.graph.as_default():
      self.graph_topology = GraphTopology(n_feat)
      self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []

  def add(self, layer):
    """Adds a new layer to model.

    Graph layers (GraphConv/GraphGather/GraphPool) receive the topology
    placeholders appended to their input list; any other layer is called
    on the running output tensor alone.
    """
    with self.graph.as_default():
      # For graphical layers, add connectivity placeholders
      if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
        # BUG FIX: the previous guard was
        #   hasattr(self.layers[-1], "__name__")
        # which is False for ordinary layer instances (__name__ lives on
        # the class, not the instance), so the assertion below could never
        # fire. Check the type's name instead so stacking a graph layer
        # after a GraphGather is actually rejected.
        if len(self.layers) > 0:
          assert type(self.layers[-1]).__name__ != "GraphGather", \
              'Cannot use GraphConv or GraphGather layers after a GraphGather'
        self.output = layer(
            [self.output] + self.graph_topology.get_topology_placeholders())
      else:
        self.output = layer(self.output)
    # Add layer to the layer list
    self.layers.append(layer)

  def get_graph_topology(self):
    """Returns the GraphTopology that owns this network's placeholders."""
    return self.graph_topology

  def get_num_output_features(self):
    """Gets the output shape of the featurization layers of the network"""
    return self.layers[-1].output_shape[1]

  def return_outputs(self):
    """Returns the current output tensor of the layer stack."""
    return self.output

  def return_inputs(self):
    """Returns the input placeholders of the underlying GraphTopology."""
    return self.graph_topology.get_input_placeholders()

  def get_layer(self, layer_id):
    """Returns the layer added at position layer_id (insertion order)."""
    return self.layers[layer_id]
class SequentialGraph(object):
  """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non graph layers don't get the extra placeholders.
  """

  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    warnings.warn("SequentialGraph is deprecated. "
                  "Will be removed in DeepChem 1.4.", DeprecationWarning)
    self.graph = tf.Graph()
    with self.graph.as_default():
      self.graph_topology = GraphTopology(n_feat)
      self.output = self.graph_topology.get_atom_features_placeholder()
    # Keep track of the layers
    self.layers = []

  def add(self, layer):
    """Adds a new layer to model.

    Graph layers get the topology placeholders appended to their inputs;
    all other layers are applied to the running output tensor directly.
    """
    with self.graph.as_default():
      # For graphical layers, add connectivity placeholders
      if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
        # BUG FIX: hasattr(self.layers[-1], "__name__") is False for
        # ordinary instances (__name__ is a class attribute), so the old
        # guard made this assertion unreachable. Inspect the type's name
        # so the no-graph-layer-after-GraphGather rule is enforced.
        if len(self.layers) > 0:
          assert type(self.layers[-1]).__name__ != "GraphGather", \
              'Cannot use GraphConv or GraphGather layers after a GraphGather'
        self.output = layer([self.output] +
                            self.graph_topology.get_topology_placeholders())
      else:
        self.output = layer(self.output)
    # Add layer to the layer list
    self.layers.append(layer)

  def get_graph_topology(self):
    """Returns the GraphTopology that owns this network's placeholders."""
    return self.graph_topology

  def get_num_output_features(self):
    """Gets the output shape of the featurization layers of the network"""
    return self.layers[-1].output_shape[1]

  def return_outputs(self):
    """Returns the current output tensor of the layer stack."""
    return self.output

  def return_inputs(self):
    """Returns the input placeholders of the underlying GraphTopology."""
    return self.graph_topology.get_input_placeholders()

  def get_layer(self, layer_id):
    """Returns the layer added at position layer_id (insertion order)."""
    return self.layers[layer_id]
# LSTM initial state tensors (zeros, one per batch element).
# NOTE(review): Hidden_state/Current_state (capitalized) are assigned here,
# but stateA/stateB below read lowercase hidden_state/current_state, which
# are NOT defined in this chunk — presumably bound earlier in the file, or
# this is a casing typo; confirm against the full script.
Hidden_state = tf.zeros([batch_size, lstm_1.state_size[0]])
Current_state = tf.zeros([batch_size, lstm_1.state_size[0]])
stateA = hidden_state, current_state
stateB = hidden_state, current_state
State = Hidden_state, Current_state

# layer1 :
# Graph convolution that also emits a gathered (per-molecule) output.
lay1_conv = mylayers.GraphConv_and_gather(conv_size, n_features, batch_size, activation='relu', dropout=dropout0)
# lay1_norm = dc.nn.BatchNormalization(epsilon=1e-5, mode=1)
lay1_pool = dc.nn.GraphPool()
lay1_dense = mylayers.Dense(Dense_size, conv_size, activation='relu')

# Branch A: conv -> pool over graph A's atom features; the gathered
# features pass through a dense layer and one LSTM step.
outputA, gatherA, _ = lay1_conv(
    [outputA] + graphA_topology.get_topology_placeholders() + [training] + [add_time])
# outputA = lay1_norm(outputA)
outputA = lay1_pool([outputA] + graphA_topology.get_topology_placeholders() + [training] + [add_time])
gatherA = lay1_dense(gatherA)
# gatherA = lay1_norm(gatherA)
h_A_1, stateA = lstm(gatherA, stateA)

# Branch B: the SAME layer objects are applied to graph B's placeholders
# (siamese-style twin of branch A — presumably to share parameters between
# the two branches; confirm the layer implementations reuse weights on a
# second call).
outputB, gatherB, _ = lay1_conv(
    [outputB] + graphB_topology.get_topology_placeholders() + [training] + [add_time])
# outputB = lay1_norm(outputB)
outputB = lay1_pool([outputB] + graphB_topology.get_topology_placeholders() + [training] + [add_time])
gatherB = lay1_dense(gatherB)
# gatherB = lay1_norm(gatherB)
h_B_1, stateB = lstm(gatherB, stateB)
class SequentialGraph(object):
  """An analog of Keras Sequential class for Graph data.

  Like the Sequential class from Keras, but automatically passes topology
  placeholders from GraphTopology to each graph layer (from layers) added
  to the network. Non graph layers don't get the extra placeholders. This
  variant additionally feeds every graph layer a training-mode placeholder
  and the layer's insertion index (add_time).
  """

  def __init__(self, n_feat):
    """
    Parameters
    ----------
    n_feat: int
      Number of features per atom.
    """
    self.graph = tf.Graph()
    # NOTE(review): the original code deliberately commented out
    # `with self.graph.as_default():` and kept indentation with a dead
    # `if 1:`, so all ops are built in TensorFlow's *default* graph, not
    # in self.graph. The dead `if 1:` is removed here; the (intentional)
    # default-graph behavior is preserved.
    self.graph_topology = GraphTopology(n_feat)
    self.output = self.graph_topology.get_atom_features_placeholder()
    # Scalar placeholder toggling training-mode behavior in graph layers.
    self.training = tf.placeholder(dtype='float32', shape=(), name='ops_training')
    # Keep track of the layers
    self.layers = []
    # Insertion counter handed to each graph layer; bumped once per add().
    self.add_time = 0

  def get_training_state(self):
    """Returns the scalar training-mode placeholder."""
    return self.training

  def add(self, layer):
    """Adds a new layer to model.

    Graph layers receive the topology placeholders plus the training
    placeholder and the current add_time counter; other layers are called
    on the running output tensor alone.
    """
    # For graphical layers, add connectivity placeholders
    if type(layer).__name__ in [
        'GraphResBlock', 'GraphConv', 'GraphGather', 'GraphPool'
    ]:
      # BUG FIX: the old guard hasattr(self.layers[-1], "__name__") is
      # False for ordinary instances (__name__ is a class attribute), so
      # the assertion below could never fire. Check the type's name.
      if len(self.layers) > 0:
        assert type(self.layers[-1]).__name__ != "GraphGather", \
            'Cannot use GraphConv or GraphGather layers after a GraphGather'
      self.output = layer(
          [self.output] + self.graph_topology.get_topology_placeholders() +
          [self.training] + [self.add_time])
    else:
      self.output = layer(self.output)
    # Add layer to the layer list
    self.layers.append(layer)
    self.add_time += 1

  def get_graph_topology(self):
    """Returns the GraphTopology that owns this network's placeholders."""
    return self.graph_topology

  def get_num_output_features(self):
    """Gets the output shape of the featurization layers of the network"""
    return self.layers[-1].output_shape[1]

  def return_outputs(self):
    """Returns the current output tensor of the layer stack."""
    return self.output

  def return_inputs(self):
    """Returns the input placeholders of the underlying GraphTopology."""
    return self.graph_topology.get_input_placeholders()

  def get_layer(self, layer_id):
    """Returns the layer added at position layer_id (insertion order)."""
    return self.layers[layer_id]
# NOTE(review): this chunk starts mid-script — hidden_state/current_state
# and Hidden_state/Current_state read below are not defined in the visible
# span; presumably bound earlier in the file (the mixed casing may be a
# typo — confirm against the full script).
stateB = hidden_state, current_state
State = Hidden_state, Current_state

# layer1 :
# Graph convolution that also emits a gathered (per-molecule) output.
lay1_conv = mylayers.GraphConv_and_gather(conv_size, n_features, batch_size, activation='relu', dropout=dropout0)
# lay1_norm = dc.nn.BatchNormalization(epsilon=1e-5, mode=1)
lay1_pool = dc.nn.GraphPool()
lay1_dense = mylayers.Dense(Dense_size, conv_size, activation='relu')

# Branch A: conv -> pool over graph A's atom features; gathered features
# go through a dense layer and one LSTM step.
outputA, gatherA, _ = lay1_conv(
    [outputA] + graphA_topology.get_topology_placeholders() + [training] + [add_time])
# outputA = lay1_norm(outputA)
outputA = lay1_pool([outputA] + graphA_topology.get_topology_placeholders() + [training] + [add_time])
gatherA = lay1_dense(gatherA)
# gatherA = lay1_norm(gatherA)
h_A_1, stateA = lstm(gatherA, stateA)

# Branch B: the same layer objects applied to graph B's placeholders
# (siamese twin of branch A). The final pool call is truncated here and
# continues past this view.
outputB, gatherB, _ = lay1_conv(
    [outputB] + graphB_topology.get_topology_placeholders() + [training] + [add_time])
# outputB = lay1_norm(outputB)
outputB = lay1_pool([outputB] +