def build_graph(self): """ Building graph structures: """ self.m1_features = Feature(shape=(None, self.n_features)) self.m2_features = Feature(shape=(None, self.n_features)) prev_layer1 = self.m1_features prev_layer2 = self.m2_features for layer_size in self.layer_sizes: prev_layer1 = Dense( out_channels=layer_size, in_layers=[prev_layer1], activation_fn=tf.nn.relu) prev_layer2 = prev_layer1.shared([prev_layer2]) if self.dropout > 0.0: prev_layer1 = Dropout(self.dropout, in_layers=prev_layer1) prev_layer2 = Dropout(self.dropout, in_layers=prev_layer2) readout_m1 = Dense( out_channels=1, in_layers=[prev_layer1], activation_fn=None) readout_m2 = readout_m1.shared([prev_layer2]) self.add_output(Sigmoid(readout_m1) * 4 + 1) self.add_output(Sigmoid(readout_m2) * 4 + 1) self.difference = readout_m1 - readout_m2 label = Label(shape=(None, 1)) loss = HingeLoss(in_layers=[label, self.difference]) self.my_task_weights = Weights(shape=(None, 1)) loss = WeightedError(in_layers=[loss, self.my_task_weights]) self.set_loss(loss)
def build_graph(self): """Constructs the graph architecture of IRV as described in: https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2750043/ """ self.mol_features = Feature(shape=(None, self.n_features)) self._labels = Label(shape=(None, self.n_tasks)) self._weights = Weights(shape=(None, self.n_tasks)) predictions = IRVLayer(self.n_tasks, self.K, in_layers=[self.mol_features]) costs = [] outputs = [] for task in range(self.n_tasks): task_output = Slice(task, 1, in_layers=[predictions]) sigmoid = Sigmoid(in_layers=[task_output]) outputs.append(sigmoid) label = Slice(task, axis=1, in_layers=[self._labels]) cost = SigmoidCrossEntropy(in_layers=[label, task_output]) costs.append(cost) all_cost = Concat(in_layers=costs, axis=1) loss = WeightedError(in_layers=[all_cost, self._weights]) + \ IRVRegularize(predictions, self.penalty, in_layers=[predictions]) self.set_loss(loss) outputs = Stack(axis=1, in_layers=outputs) outputs = Concat(axis=2, in_layers=[1 - outputs, outputs]) self.add_output(outputs)
def test_Sigmoid_pickle():
  tg = TensorGraph()
  feature = Feature(shape=(tg.batch_size, 1))
  layer = Sigmoid(in_layers=feature)
  tg.add_output(layer)
  tg.set_loss(layer)
  tg.build()
  tg.save()
def test_sigmoid(self):
  """Test that Sigmoid can be invoked."""
  batch_size = 10
  n_features = 5
  in_tensor = np.random.rand(batch_size, n_features)
  with self.session() as sess:
    in_tensor = tf.convert_to_tensor(in_tensor, dtype=tf.float32)
    out_tensor = Sigmoid()(in_tensor)
    out_tensor = out_tensor.eval()
    assert out_tensor.shape == (batch_size, n_features)
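# Sketch of a value-level check the shape-only test above could be extended
# with: compare the output against the closed form 1 / (1 + exp(-x)). This
# uses tf.nn.sigmoid directly rather than the Sigmoid layer, in the same
# TF 1.x session style as the tests above.
import numpy as np
import tensorflow as tf

x = np.random.rand(10, 5).astype(np.float32)
with tf.Session() as sess:
  out = sess.run(tf.nn.sigmoid(tf.convert_to_tensor(x)))
expected = 1.0 / (1.0 + np.exp(-x))
assert np.allclose(out, expected, atol=1e-6)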