Example #1
 def test_squeeze_inputs(self):
   """Test that layers can automatically reshape inconsistent inputs."""
   value1 = np.random.uniform(size=(2, 1)).astype(np.float32)
   with self.session() as sess:
     out_tensor = Squeeze(squeeze_dims=1)(tf.constant(value1))
     result = out_tensor.eval()
     assert result.shape == (2,)
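For reference, deepchem's Squeeze layer mirrors np.squeeze/tf.squeeze: it drops the size-1 axis named by squeeze_dims. A minimal standalone sketch of the same shape transformation in plain NumPy (no deepchem required):

import numpy as np

value = np.random.uniform(size=(2, 1)).astype(np.float32)
squeezed = np.squeeze(value, axis=1)  # drop the size-1 axis: (2, 1) -> (2,)
assert squeezed.shape == (2,)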
Example #2
  def create_loss(self, layer, label, weight):
    # Labels and weights arrive as (batch, 1); squeeze both to rank 1, and
    # cast the labels to the integer class ids that sparse softmax
    # cross-entropy expects.
    task_label = Squeeze(squeeze_dims=1, in_layers=[label])
    task_label = Cast(dtype=tf.int32, in_layers=[task_label])
    task_weight = Squeeze(squeeze_dims=1, in_layers=[weight])

    # Per-sample cross-entropy against the logits, weighted per sample.
    loss = SparseSoftMaxCrossEntropy(in_layers=[task_label, layer])
    weighted_loss = WeightedError(in_layers=[loss, task_weight])
    return weighted_loss
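For readers mapping this onto raw TensorFlow 1.x: the squeeze-and-cast prepares labels for sparse softmax cross-entropy, which expects integer class ids of shape (batch,) against logits of shape (batch, n_classes). A rough equivalent sketch (tensor names and sizes are illustrative, not part of the example above):

import numpy as np
import tensorflow as tf  # TF 1.x graph-mode API, as in these examples

labels = tf.constant(np.array([[1.0], [0.0]], dtype=np.float32))  # (batch, 1) float labels
logits = tf.constant(np.random.randn(2, 3).astype(np.float32))    # (batch, n_classes)
task_label = tf.cast(tf.squeeze(labels, axis=[1]), tf.int32)      # (batch,) int class ids
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=task_label, logits=logits)                             # per-sample loss, (batch,)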
Example #3
 def create_layers(self, state, **kwargs):
   d1 = Flatten(in_layers=state)
   d2 = Dense(
       in_layers=[d1],
       activation_fn=tf.nn.relu,
       normalizer_fn=tf.nn.l2_normalize,
       normalizer_params={"dim": 1},
       out_channels=64)
   d3 = Dense(
       in_layers=[d2],
       activation_fn=tf.nn.relu,
       normalizer_fn=tf.nn.l2_normalize,
       normalizer_params={"dim": 1},
       out_channels=32)
   d4 = Dense(
       in_layers=[d3],
       activation_fn=tf.nn.relu,
       normalizer_fn=tf.nn.l2_normalize,
       normalizer_params={"dim": 1},
       out_channels=16)
   d4 = BatchNorm(in_layers=[d4])
   d5 = Dense(in_layers=[d4], activation_fn=None, out_channels=9)  # policy logits, one per action
   value = Dense(in_layers=[d4], activation_fn=None, out_channels=1)  # value head: (batch, 1)
   value = Squeeze(squeeze_dims=1, in_layers=[value])  # -> (batch,) scalar value per sample
   probs = SoftMax(in_layers=[d5])
   return {'action_prob': probs, 'value': value}
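The squeeze here enforces the actor-critic output contract: the value head's Dense emits (batch, 1), and Squeeze(squeeze_dims=1) flattens it to one scalar per sample. A rough sketch of the two heads in raw TF 1.x (using tf.layers.dense instead of deepchem's Dense; the sizes are illustrative):

import numpy as np
import tensorflow as tf  # TF 1.x

hidden = tf.constant(np.random.randn(4, 16).astype(np.float32))  # stand-in for d4
logits = tf.layers.dense(hidden, 9)  # policy head: (batch, 9)
probs = tf.nn.softmax(logits)        # action probabilities
value = tf.layers.dense(hidden, 1)   # value head: (batch, 1)
value = tf.squeeze(value, axis=1)    # -> (batch,) scalar value per sample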
Example #4
from deepchem.models.tensorgraph.layers import Feature, Squeeze
from deepchem.models.tensorgraph.tensor_graph import TensorGraph  # deepchem 2.x paths


def test_Squeeze_pickle():
  tg = TensorGraph()
  feature = Feature(shape=(tg.batch_size, 1))
  layer = Squeeze(in_layers=feature)
  tg.add_output(layer)
  tg.set_loss(layer)  # placeholder loss; the test only exercises serialization
  tg.build()
  tg.save()
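The test stops at tg.save(), which is enough to exercise pickling. If you also want to verify the round trip, deepchem 2.x's TensorGraph exposes a load_from_dir entry point; this is an assumption about the 2.x API, so check it against your installed version:

from deepchem.models.tensorgraph.tensor_graph import TensorGraph

# Assumes tg.save() above wrote to tg.model_dir (deepchem 2.x behavior;
# load_from_dir is hedged -- verify the exact reload API for your version).
tg2 = TensorGraph.load_from_dir(tg.model_dir)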
Example #5
    def build_graph(self):
        self.smiles_seqs = Feature(shape=(None, self.seq_length),
                                   dtype=tf.int32)
        # Character embedding
        self.Embedding = DTNNEmbedding(
            n_embedding=self.n_embedding,
            periodic_table_length=len(self.char_dict.keys()) + 1,
            in_layers=[self.smiles_seqs])
        self.pooled_outputs = []
        self.conv_layers = []
        for filter_size, num_filter in zip(self.kernel_sizes,
                                           self.num_filters):
            # Multiple convolutional layers with different filter widths
            self.conv_layers.append(
                Conv1D(kernel_size=filter_size,
                       filters=num_filter,
                       padding='valid',
                       in_layers=[self.Embedding]))
            # Max-over-time pooling
            self.pooled_outputs.append(
                MaxPool1D(window_shape=self.seq_length - filter_size + 1,
                          strides=1,
                          padding='VALID',
                          in_layers=[self.conv_layers[-1]]))
        # Concat features from all filters (one feature per filter)
        concat_outputs = Concat(axis=2, in_layers=self.pooled_outputs)
        outputs = Squeeze(squeeze_dims=1, in_layers=concat_outputs)
        dropout = Dropout(dropout_prob=self.dropout, in_layers=[outputs])
        dense = Dense(out_channels=200,
                      activation_fn=tf.nn.relu,
                      in_layers=[dropout])
        # Highway layer from https://arxiv.org/pdf/1505.00387.pdf
        self.gather = Highway(in_layers=[dense])

        costs = []
        self.labels_fd = []
        for task in range(self.n_tasks):
            if self.mode == "classification":
                classification = Dense(out_channels=2,
                                       activation_fn=None,
                                       in_layers=[self.gather])
                softmax = SoftMax(in_layers=[classification])
                self.add_output(softmax)

                label = Label(shape=(None, 2))
                self.labels_fd.append(label)
                cost = SoftMaxCrossEntropy(in_layers=[label, classification])
                costs.append(cost)
            if self.mode == "regression":
                regression = Dense(out_channels=1,
                                   activation_fn=None,
                                   in_layers=[self.gather])
                self.add_output(regression)

                label = Label(shape=(None, 1))
                self.labels_fd.append(label)
                cost = L2Loss(in_layers=[label, regression])
                costs.append(cost)
        if self.mode == "classification":
            all_cost = Concat(in_layers=costs, axis=1)
        elif self.mode == "regression":
            all_cost = Stack(in_layers=costs, axis=1)
        self.weights = Weights(shape=(None, self.n_tasks))
        loss = WeightedError(in_layers=[all_cost, self.weights])
        self.set_loss(loss)
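The Squeeze(squeeze_dims=1) near the top of this graph is pure shape bookkeeping: a 'valid' Conv1D of width k over a length-L sequence yields L - k + 1 positions, and a max pool whose window is exactly L - k + 1 collapses that to a single position. A small sketch of the arithmetic (the kernel sizes and filter counts are illustrative stand-ins for self.kernel_sizes and self.num_filters, not the model's defaults):

seq_length = 100            # stand-in for self.seq_length
kernel_sizes = [3, 4, 5]    # stand-ins for self.kernel_sizes
num_filters = [64, 64, 64]  # stand-ins for self.num_filters

pooled_widths = []
for k, n in zip(kernel_sizes, num_filters):
  conv_positions = seq_length - k + 1  # 'valid' Conv1D output length
  window = seq_length - k + 1          # MaxPool1D window used above
  positions_left = conv_positions - window + 1
  assert positions_left == 1           # max-over-time leaves (batch, 1, n)
  pooled_widths.append(n)

# Concat(axis=2) stacks the filter dims: (batch, 1, sum(num_filters));
# Squeeze(squeeze_dims=1) then gives (batch, sum(num_filters)) for the Dense layer.
feature_width = sum(pooled_widths)
assert feature_width == 192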