def siamese_layer(self, sequence_len, model_cfg):
    hidden_size = model_cfg['PARAMS'].getint('hidden_size')
    cell_type = model_cfg['PARAMS'].get('cell_type')

    # Encode both sentences with the same unidirectional RNN; reuse=True
    # on the second call shares the encoder weights between the two towers.
    outputs_sen1 = rnn_layer(
        embedded_x=self.embedded_x1,
        hidden_size=hidden_size,
        bidirectional=False,
        cell_type=cell_type,
    )
    outputs_sen2 = rnn_layer(
        embedded_x=self.embedded_x2,
        hidden_size=hidden_size,
        bidirectional=False,
        cell_type=cell_type,
        reuse=True,
    )

    # Mean-pool the per-timestep outputs into fixed-size sentence embeddings.
    out1 = tf.reduce_mean(outputs_sen1, axis=1)
    out2 = tf.reduce_mean(outputs_sen2, axis=1)

    return manhattan_similarity(out1, out2)
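# manhattan_similarity is not defined in this file. A minimal sketch,
# assuming the standard siamese-recurrent formulation
# exp(-||out1 - out2||_1) of Mueller & Thyagarajan (2016), which maps the
# L1 distance between the two sentence embeddings into (0, 1]:
def manhattan_similarity(x1, x2):
    # Hypothetical helper, not taken from the source repository.
    return tf.exp(-tf.reduce_sum(tf.abs(x1 - x2), axis=1, keepdims=True))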
def testRNNBidirectionalNetwork(self):
    with self.test_session():
        embedded_x = tf.random_normal(
            [1, 2, 5])  # batch x sentence length x embedding size
        hidden_size = 10
        rnn_output = rnn_layer(embedded_x, hidden_size, bidirectional=True)
        actual_output = rnn_output.get_shape().as_list()[-1]
        self.assertEqual(actual_output, 2 * hidden_size)
def siamese_layer(self, sequence_len, model_cfg):
    hidden_size = model_cfg['PARAMS'].getint('hidden_size')
    cell_type = model_cfg['PARAMS'].get('cell_type')

    outputs_sen = rnn_layer(self.embedded_x, hidden_size, cell_type)

    with tf.name_scope('classifier'):
        # Hidden layer with dropout that is only active during training.
        L1 = tf.layers.dropout(
            tf.layers.dense(outputs_sen, 100, activation=tf.nn.relu,
                            name='L1'),
            rate=self.dropout,
            training=self.is_training,
        )
        # A softmax over a single unit is constant 1.0; a sigmoid yields
        # the intended scalar probability.
        y = tf.layers.dense(L1, 1, activation=tf.nn.sigmoid, name='y')
    return y
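# The scalar probability produced above pairs naturally with a binary
# cross-entropy objective. A hedged sketch of the training step; the
# helper name, `labels` placeholder, and optimizer settings are
# assumptions, not from the source:
def classifier_train_op(y):
    # y: sigmoid output of siamese_layer, shape [batch, 1].
    labels = tf.placeholder(tf.float32, shape=[None, 1], name='labels')
    loss = tf.losses.log_loss(labels=labels, predictions=y)
    return tf.train.AdamOptimizer(learning_rate=1e-3).minimize(loss), labels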
def testRNNUnidirectionalNetwork(self):
    embedded_x = tf.random_normal(
        [1, 2, 5])  # batch x sentence length x embedding size
    hidden_size = 10
    rnn_output = rnn_layer(
        embedded_x=embedded_x,
        hidden_size=hidden_size,
        bidirectional=False,
        cell_type='GRU',
    )
    actual_output = rnn_output.get_shape().as_list()[-1]
    self.assertEqual(actual_output, hidden_size)
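# The two tests above only pin down rnn_layer's output shape:
# 2 * hidden_size when bidirectional, hidden_size otherwise. A minimal
# sketch consistent with both, using the TF 1.x dynamic-RNN API; the
# cell-selection logic and the default arguments are assumptions:
def rnn_layer(embedded_x, hidden_size, cell_type='GRU',
              bidirectional=False, reuse=None):
    def make_cell():
        if cell_type == 'LSTM':
            return tf.nn.rnn_cell.LSTMCell(hidden_size)
        return tf.nn.rnn_cell.GRUCell(hidden_size)

    with tf.variable_scope('rnn_layer', reuse=reuse):
        if bidirectional:
            # Forward and backward outputs are concatenated on the last
            # axis, doubling it to 2 * hidden_size.
            outputs, _ = tf.nn.bidirectional_dynamic_rnn(
                make_cell(), make_cell(), embedded_x, dtype=tf.float32)
            return tf.concat(outputs, axis=2)
        outputs, _ = tf.nn.dynamic_rnn(make_cell(), embedded_x,
                                       dtype=tf.float32)
        return outputs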