    def make_embedding(self, x):
        with self.graph.as_default():
            # Two 1-D convolutions over the time axis: the first downsamples
            # with stride 2, the second maps the features to the embedding width.
            x = players.Convolution1D(128,
                                      5,
                                      border_mode='valid',
                                      subsample_length=2,
                                      activation='relu')(x)
            x = players.Convolution1D(self.embedding_size,
                                      5,
                                      border_mode='valid',
                                      subsample_length=1,
                                      activation='relu')(x)
            # Global max over the time axis gives one fixed-size vector per
            # example: (batch, embedding_size).
            x = tf.reduce_max(x, reduction_indices=1)
        return x
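
    # With border_mode 'valid', a width-k, stride-s 1-D convolution maps a
    # length-L time axis to ceil((L - k + 1) / s) steps, so the two-layer stack
    # above needs at least 13 input timesteps before the max-pool sees a
    # non-empty time axis (13 -> 5 -> 1). A hypothetical helper, not part of
    # this module, spelling out that formula:
    @staticmethod
    def _valid_conv_length(length, kernel, stride):
        """Time-axis length produced by a 'valid' 1-D convolution."""
        return max(0, length - kernel + stride) // stride
    # e.g. _valid_conv_length(_valid_conv_length(13, 5, 2), 5, 1) == 1
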
    def make_embedding(self, x):
        with self.graph.as_default():
            # Pad the time axis up to 29 steps: with the three 'valid'
            # convolutions below (width 5, strides 2, 2 and 1), 29 is the
            # minimum length that leaves one output timestep (29 -> 13 -> 5 -> 1).
            x = model_utils.pad_up_to(x, size=29, axis=1)
            # Three 1-D convolutions over the time axis; the last one maps the
            # features to the embedding width.
            x = players.Convolution1D(128,
                                      5,
                                      border_mode='valid',
                                      subsample_length=2,
                                      activation='relu')(x)
            x = players.Convolution1D(512,
                                      5,
                                      border_mode='valid',
                                      subsample_length=2,
                                      activation='relu')(x)
            x = players.Convolution1D(self.embedding_size,
                                      5,
                                      border_mode='valid',
                                      activation='relu')(x)

            # Run the convolutional features through a single LSTM layer whose
            # hidden size matches the embedding size.
            initializer = tf.truncated_normal_initializer(stddev=0.01,
                                                          seed=1337)
            cell = tf.contrib.rnn.LSTMCell(self.embedding_size,
                                           use_peepholes=False,
                                           initializer=initializer,
                                           num_proj=None,
                                           num_unit_shards=1,
                                           num_proj_shards=1,
                                           forget_bias=1.0,
                                           state_is_tuple=False)
            # sequence_length=None: every (padded) sequence is unrolled for the
            # full number of timesteps.
            x, _ = tf.nn.dynamic_rnn(cell,
                                     x,
                                     sequence_length=None,
                                     initial_state=None,
                                     dtype='float32',
                                     parallel_iterations=32,
                                     swap_memory=False)
            # Take the LSTM output at the final timestep as the embedding:
            # begin=[0, T-1, 0] with size=[-1, 1, -1] keeps all batch rows,
            # one timestep and all channels -> (batch, 1, embedding_size).
            last_timestep = tf.shape(x)[1]
            indices = tf.stack([0, last_timestep - 1, 0])
            embedded = tf.slice(x, indices, [-1, 1, -1])
            embedded = tf.squeeze(embedded, [1])
            embedded.set_shape((None, self.embedding_size))
        return embedded
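
    # `model_utils.pad_up_to` is a project helper whose source is not shown
    # here. A rough sketch, assuming it zero-pads the given axis up to `size`
    # and leaves longer inputs untouched (hypothetical, not the actual helper):
    @staticmethod
    def _pad_up_to_sketch(t, size, axis):
        """Zero-pad `t` along `axis` so that dimension is at least `size`."""
        shortfall = tf.maximum(0, size - tf.shape(t)[axis])
        paddings = [[0, 0]] * t.get_shape().ndims
        paddings[axis] = [0, shortfall]  # pad only at the end of `axis`
        return tf.pad(t, paddings)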