def testBLSTM(self):
    lstm = rnn.LSTM(3, 12, dropout=0.5, bidirectional=True)
    inputs = tf.random.uniform([4, 5, 5])
    outputs, states = lstm(inputs, training=True)
    # The two directions are concatenated, so the output depth is 2 * 12 = 24.
    self.assertListEqual(outputs.shape.as_list(), [4, 5, 24])
    self.assertIsInstance(states, tuple)
    self.assertEqual(len(states), 3)
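The 24-unit output above comes from the default ConcatReducer joining the forward and backward outputs. As a sketch, and assuming a SumReducer is available alongside ConcatReducer, summing the two directions instead would keep the depth at num_units (the reducer argument itself is grounded in the encoder constructor below):

# Sketch: merge the forward and backward outputs by summation instead of
# concatenation. SumReducer is an assumption not shown in this excerpt.
lstm = rnn.LSTM(3, 12, dropout=0.5, bidirectional=True, reducer=SumReducer())
outputs, states = lstm(tf.random.uniform([4, 5, 5]), training=True)
assert outputs.shape.as_list() == [4, 5, 12]  # depth is not doubled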
def testLSTM(self):
    lstm = rnn.LSTM(3, 12)
    inputs = tf.random.uniform([4, 5, 5])
    outputs, states = lstm(inputs, training=True)
    self.assertListEqual(outputs.shape.as_list(), [4, 5, 12])
    self.assertIsInstance(states, tuple)
    # One state entry per layer.
    self.assertEqual(len(states), 3)
def __init__(
    self,
    num_layers,
    num_units,
    bidirectional=False,
    residual_connections=False,
    dropout=0.3,
    reducer=ConcatReducer(),
    **kwargs
):
    """Initializes the parameters of the encoder.

    Args:
      num_layers: The number of layers.
      num_units: The number of units in each layer output.
      bidirectional: Make each LSTM layer bidirectional.
      residual_connections: If ``True``, each layer input will be added to its
        output.
      dropout: The probability to drop units in each layer output.
      reducer: A :class:`opennmt.layers.Reducer` instance to merge
        bidirectional state and outputs.
      **kwargs: Additional layer arguments.
    """
    lstm_layer = rnn.LSTM(
        num_layers,
        num_units,
        bidirectional=bidirectional,
        reducer=reducer,
        dropout=dropout,
        residual_connections=residual_connections,
    )
    super().__init__(lstm_layer, **kwargs)
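For orientation, a minimal usage sketch of an encoder built through this constructor. The class name opennmt.encoders.LSTMEncoder and the (outputs, state, sequence_length) return convention follow OpenNMT-tf's public API and are assumptions not shown in this excerpt:

import tensorflow as tf
import opennmt

# Assumed entry point: a 2-layer bidirectional LSTM encoder, 64 units per
# direction, merged by the default ConcatReducer (output depth 2 * 64 = 128).
encoder = opennmt.encoders.LSTMEncoder(2, 64, bidirectional=True)
inputs = tf.random.uniform([8, 10, 32])  # batch x time x input depth
lengths = tf.fill([8], 10)
outputs, state, lengths = encoder(inputs, sequence_length=lengths, training=True)
print(outputs.shape)  # (8, 10, 128)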
def testLSTMWithMask(self):
    lstm = rnn.LSTM(3, 12)
    inputs = tf.random.uniform([3, 4, 5])
    lengths = [4, 2, 3]
    mask = tf.sequence_mask(lengths)
    outputs, states = lstm(inputs, mask=mask, training=True)
    # With a mask, the returned hidden state of the last layer should match
    # the output at each sequence's last valid (unmasked) timestep.
    last_hidden = states[-1][0]
    for i, length in enumerate(lengths):
        self.assertAllClose(last_hidden[i], outputs[i, length - 1])
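The indexing states[-1][0] above relies on the state layout: a tuple holding one (hidden, cell) pair per layer. A small sketch spelling that out, with the pair ordering inferred from this test rather than stated elsewhere in the excerpt:

# Assumed layout: states is a tuple of per-layer (hidden, cell) pairs, so
# states[-1][0] is the final hidden state of the top layer.
lstm = rnn.LSTM(3, 12)
outputs, states = lstm(tf.random.uniform([3, 4, 5]), training=True)
assert len(states) == 3                    # one pair per layer
hidden, cell = states[-1]                  # top layer's (h, c)
assert hidden.shape.as_list() == [3, 12]   # batch x num_units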