Example #1
  def testRNNCell(self):
    cell = rnn.make_rnn_cell(3, 10, dropout=0.1, residual_connections=True)
    inputs = tf.random.uniform([4, 5])
    states = cell.get_initial_state(inputs=inputs)
    outputs, states = cell(inputs, states, training=True)
    self.assertEqual(len(states), 3)
    self.assertListEqual(outputs.shape.as_list(), [4, 10])
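The snippets on this page are fragments of the OpenNMT-tf test suite and omit their imports. A minimal standalone sketch, assuming the OpenNMT-tf 2.x layout where these helpers live under opennmt.layers:

import tensorflow as tf

from opennmt.layers import rnn

# Stack 3 RNN layers of 10 units each, with dropout and residual connections.
cell = rnn.make_rnn_cell(3, 10, dropout=0.1, residual_connections=True)
inputs = tf.random.uniform([4, 5])  # batch of 4, input depth of 5
states = cell.get_initial_state(inputs=inputs)
outputs, states = cell(inputs, states, training=True)
print(outputs.shape)  # (4, 10): one vector of num_units per batch entry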
Example #2
  def __init__(self,
               num_layers,
               num_units,
               bridge_class=None,
               cell_class=None,
               dropout=0.3,
               residual_connections=False,
               **kwargs):
    """Initializes the decoder parameters.

    Args:
      num_layers: The number of layers.
      num_units: The number of units in each layer.
      bridge_class: A :class:`opennmt.layers.Bridge` class to pass the
        encoder state to the decoder. Defaults to
        :class:`opennmt.layers.ZeroBridge`.
      cell_class: The inner cell class or a callable taking :obj:`num_units` as
        argument and returning a cell. Defaults to an LSTM cell.
      dropout: The probability to drop units in each layer output.
      residual_connections: If ``True``, each layer input will be added to its
        output.
      **kwargs: Additional layer arguments.
    """
    super(RNNDecoder, self).__init__(**kwargs)
    self.dropout = dropout
    self.cell = rnn.make_rnn_cell(
        num_layers,
        num_units,
        dropout=dropout,
        residual_connections=residual_connections,
        cell_class=cell_class)
    if bridge_class is None:
      bridge_class = bridge.ZeroBridge
    self.bridge = bridge_class()
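A quick instantiation sketch for reference (hypothetical sizes; assumes the RNNDecoder class above and a `from opennmt.layers import bridge` import to match the snippet):

decoder = RNNDecoder(2, 512, dropout=0.3, residual_connections=True)
# bridge_class was left as None, so encoder state is dropped by a ZeroBridge.
assert isinstance(decoder.bridge, bridge.ZeroBridge)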
Example #3
  def __init__(self,
               num_layers,
               num_units,
               bidirectional=False,
               residual_connections=False,
               dropout=0.3,
               reducer=ConcatReducer(),
               cell_class=None,
               **kwargs):
    """Initializes the parameters of the encoder.

    Args:
      num_layers: The number of layers.
      num_units: The number of units in each layer.
      bidirectional: Use a bidirectional RNN.
      residual_connections: If ``True``, each layer input will be added to its
        output.
      dropout: The probability to drop units in each layer output.
      reducer: A :class:`opennmt.layers.reducer.Reducer` instance to merge
        bidirectional state and outputs.
      cell_class: The inner cell class or a callable taking :obj:`num_units` as
        argument and returning a cell. Defaults to an LSTM cell.
      **kwargs: Additional layer arguments.
    """
    super(RNNEncoderV2, self).__init__(**kwargs)
    cell = rnn.make_rnn_cell(num_layers,
                             num_units,
                             dropout=dropout,
                             residual_connections=residual_connections,
                             cell_class=cell_class)
    self.rnn = rnn.RNN(cell, bidirectional=bidirectional, reducer=reducer)
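A hedged usage sketch; the module path and the call signature (returning outputs, state, and sequence length, as in the OpenNMT-tf 2.x encoder interface) are assumptions:

import tensorflow as tf

from opennmt.encoders.rnn_encoder import RNNEncoderV2  # assumed module path

encoder = RNNEncoderV2(2, 256, bidirectional=True)
inputs = tf.random.uniform([4, 5, 8])  # batch of 4, 5 timesteps, depth 8
lengths = tf.constant([5, 4, 5, 3])
outputs, state, lengths = encoder(inputs, sequence_length=lengths, training=True)
# With the default ConcatReducer, outputs have depth 2 * num_units = 512.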
Example #4
  def __init__(self,
               num_layers,
               num_units,
               cell_class=None,
               dropout=0.3,
               residual_connections=False,
               **kwargs):
    """Initializes the parameters of the encoder.

    Args:
      num_layers: The number of layers.
      num_units: The number of units in each layer.
      cell_class: The inner cell class or a callable taking :obj:`num_units` as
        argument and returning a cell. Defaults to an LSTM cell.
      dropout: The probability to drop units in each layer output.
      residual_connections: If ``True``, each layer input will be added to its
        output.
      **kwargs: Additional layer arguments.
    """
    super(UnidirectionalRNNEncoderV2, self).__init__(**kwargs)
    cell = rnn.make_rnn_cell(
        num_layers,
        num_units,
        dropout=dropout,
        residual_connections=residual_connections,
        cell_class=cell_class)
    self.rnn = rnn.RNN(cell)
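This is the unidirectional counterpart of Example #3: the same stacked cell from rnn.make_rnn_cell, wrapped in rnn.RNN without the bidirectional option or a reducer.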
Example #5
  def testRNN(self):
    cell = rnn.make_rnn_cell(3, 10, dropout=0.1, residual_connections=True)
    rnn_layer = rnn.RNN(cell)
    inputs = tf.random.uniform([4, 5, 5])
    outputs, states = rnn_layer(inputs, training=True)
    self.assertListEqual(outputs.shape.as_list(), [4, 5, 10])
    self.assertIsInstance(states, tuple)
    self.assertEqual(len(states), 3)
Example #6
  def testBRNN(self):
    cell = rnn.make_rnn_cell(3, 10, dropout=0.1, residual_connections=True)
    rnn_layer = rnn.RNN(cell,
                        bidirectional=True,
                        reducer=reducer.ConcatReducer())
    inputs = tf.random.uniform([4, 5, 5])
    outputs, states = rnn_layer(inputs, training=True)
    self.assertListEqual(outputs.shape.as_list(), [4, 5, 20])
    self.assertIsInstance(states, tuple)
    self.assertEqual(len(states), 3)
    self.assertEqual(len(states[0]), 2)
    self.assertListEqual(states[0][0].shape.as_list(), [4, 20])
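Note how the ConcatReducer shows up in the assertions: forward and backward outputs are concatenated on the depth axis, doubling the output depth from 10 to 20, and each per-layer state (an (h, c) pair for the default LSTM cell) is likewise concatenated across directions to depth 20.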
Example #7
  def __init__(self,
               num_layers,
               num_units,
               reducer=SumReducer(),
               cell_class=None,
               dropout=0.3,
               residual_connections=False,
               **kwargs):
    """Initializes the parameters of the encoder.

    Args:
      num_layers: The number of layers.
      num_units: The number of units in each layer.
      reducer: A :class:`opennmt.layers.reducer.Reducer` instance to merge
        bidirectional state and outputs.
      cell_class: The inner cell class or a callable taking :obj:`num_units` as
        argument and returning a cell. Defaults to an LSTM cell.
      dropout: The probability to drop units in each layer output.
      residual_connections: If ``True``, each layer input will be added to its
        output.
      **kwargs: Additional layer arguments.

    Raises:
      ValueError: when using :class:`opennmt.layers.reducer.ConcatReducer` and
        :obj:`num_units` is not divisible by 2.
    """
    if isinstance(reducer, ConcatReducer):
      if num_units % 2 != 0:
        raise ValueError("num_units must be divisible by 2 to use the ConcatReducer.")
      # Use integer division so num_units stays an int; each direction then
      # runs with half the units and the concatenation restores num_units.
      num_units //= 2
    super(BidirectionalRNNEncoderV2, self).__init__(**kwargs)
    cell = rnn.make_rnn_cell(
        num_layers,
        num_units,
        dropout=dropout,
        residual_connections=residual_connections,
        cell_class=cell_class)
    self.rnn = rnn.RNN(cell, bidirectional=True, reducer=reducer)
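For contrast with Example #3, a hedged sketch of the reducer trade-off (class names and imports assumed as above):

# Default SumReducer: forward and backward outputs are summed, so the output
# depth stays at num_units.
encoder = BidirectionalRNNEncoderV2(2, 256)
# ConcatReducer: the constructor halves num_units per direction, so the
# concatenated output is still num_units (here 2 x 128 = 256) wide.
encoder = BidirectionalRNNEncoderV2(2, 256, reducer=ConcatReducer())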