Example #1
    def test_get_rnn_cell(self):
        """Tests :func:`texar.tf.core.layers.get_rnn_cell`.
        """
        emb_dim = 4
        num_units = 64

        # Given instance
        hparams = {"type": rnn.LSTMCell(num_units)}
        cell = layers.get_rnn_cell(hparams)
        self.assertTrue(isinstance(cell, rnn.LSTMCell))

        # Given class
        hparams = {"type": rnn.LSTMCell, "kwargs": {"num_units": 10}}
        cell = layers.get_rnn_cell(hparams)
        self.assertTrue(isinstance(cell, rnn.LSTMCell))

        # Given string, and complex hyperparameters
        keep_prob_x = tf.placeholder(name='keep_prob',
                                     shape=[],
                                     dtype=tf.float32)
        hparams = {
            "type": "tensorflow.contrib.rnn.GRUCell",
            "kwargs": {
                "num_units": num_units
            },
            "num_layers": 2,
            "dropout": {
                "input_keep_prob": 0.8,
                "state_keep_prob": keep_prob_x,
                "variational_recurrent": True,
                "input_size": [emb_dim, num_units]
            },
            "residual": True,
            "highway": True
        }

        hparams_ = HParams(hparams, layers.default_rnn_cell_hparams())
        cell = layers.get_rnn_cell(hparams_)

        batch_size = 16
        inputs = tf.zeros([batch_size, emb_dim], dtype=tf.float32)
        output, state = cell(inputs,
                             cell.zero_state(batch_size, dtype=tf.float32))
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())

            feed_dict = {
                keep_prob_x: 1.0,
                context.global_mode(): tf.estimator.ModeKeys.TRAIN
            }
            output_, state_ = sess.run([output, state], feed_dict=feed_dict)

            self.assertEqual(output_.shape[0], batch_size)
            if isinstance(state_, (list, tuple)):
                self.assertEqual(state_[0].shape[0], batch_size)
                self.assertEqual(state_[0].shape[1], hparams_.kwargs.num_units)
            else:
                self.assertEqual(state_.shape[0], batch_size)
                self.assertEqual(state_.shape[1], hparams_.kwargs.num_units)
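For reference, the keys used in the hparams dict above follow the schema of layers.default_rnn_cell_hparams(), which HParams merges the user dict into. A rough sketch of that default structure is below; the concrete default values (in particular the default cell type and num_units) are assumptions and may differ between Texar versions.

# Approximate shape of layers.default_rnn_cell_hparams(); values are
# illustrative -- check the installed Texar version for the real defaults.
default_cell_hparams = {
    "type": "LSTMBlockCell",          # cell class name, module path, or instance
    "kwargs": {"num_units": 256},     # constructor kwargs for the cell
    "num_layers": 1,                  # >1 stacks cells into a MultiRNNCell
    "dropout": {                      # applied via a DropoutWrapper
        "input_keep_prob": 1.0,
        "output_keep_prob": 1.0,
        "state_keep_prob": 1.0,
        "variational_recurrent": False,
        "input_size": [],
    },
    "residual": False,                # wrap each layer in a ResidualWrapper
    "highway": False,                 # wrap each layer in a HighwayWrapper
}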
Example #2
    def __init__(self,
                 cell=None,
                 vocab_size=None,
                 output_layer=None,
                 cell_dropout_mode=None,
                 hparams=None):
        ModuleBase.__init__(self, hparams)

        self._helper = None
        self._initial_state = None

        # Make rnn cell
        with tf.variable_scope(self.variable_scope):
            if cell is not None:
                self._cell = cell
            else:
                self._cell = layers.get_rnn_cell(self._hparams.rnn_cell,
                                                 cell_dropout_mode)
        self._beam_search_cell = None

        # Make the output layer
        self._output_layer, self._vocab_size = _make_output_layer(
            output_layer, vocab_size, self._hparams.output_layer_bias,
            self.variable_scope)

        self.max_decoding_length = None
Example #3
    def test_switch_dropout(self):
        """Tests dropout mode.
        """
        emb_dim = 4
        num_units = 64
        hparams = {
            "kwargs": {
                "num_units": num_units
            },
            "num_layers": 2,
            "dropout": {
                "input_keep_prob": 0.8,
            },
        }
        mode = tf.placeholder(tf.string)
        hparams_ = HParams(hparams, layers.default_rnn_cell_hparams())
        cell = layers.get_rnn_cell(hparams_, mode)

        batch_size = 16
        inputs = tf.zeros([batch_size, emb_dim], dtype=tf.float32)
        output, state = cell(inputs,
                             cell.zero_state(batch_size, dtype=tf.float32))
        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            output_train, _ = sess.run(
                [output, state], feed_dict={mode: tf.estimator.ModeKeys.TRAIN})
            self.assertEqual(output_train.shape[0], batch_size)
            output_test, _ = sess.run(
                [output, state], feed_dict={mode: tf.estimator.ModeKeys.EVAL})
            self.assertEqual(output_test.shape[0], batch_size)
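A minimal sketch tying Example #3 back to Example #1 (same imports as the tests above; the fallback behavior is assumed from how Example #1 feeds context.global_mode()): when no explicit mode tensor is passed to get_rnn_cell, the global mode placeholder is used, so dropout can still be switched purely through the feed_dict.

hparams_ = HParams({"dropout": {"input_keep_prob": 0.8}},
                   layers.default_rnn_cell_hparams())
cell = layers.get_rnn_cell(hparams_)   # no explicit `mode` argument

inputs = tf.zeros([16, 4], dtype=tf.float32)
output, state = cell(inputs, cell.zero_state(16, dtype=tf.float32))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Input dropout (keep_prob=0.8) should only be active in TRAIN mode.
    sess.run(output, feed_dict={
        context.global_mode(): tf.estimator.ModeKeys.TRAIN})
    # In EVAL mode the keep probabilities are switched back to 1.0.
    sess.run(output, feed_dict={
        context.global_mode(): tf.estimator.ModeKeys.EVAL})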
Example #4
    def __init__(self,
                 cell_fw=None,
                 cell_bw=None,
                 cell_dropout_mode=None,
                 output_layer_fw=None,
                 output_layer_bw=None,
                 hparams=None):
        RNNEncoderBase.__init__(self, hparams)

        # Make RNN cells
        with tf.variable_scope(self.variable_scope):
            if cell_fw is not None:
                self._cell_fw = cell_fw
            else:
                self._cell_fw = layers.get_rnn_cell(self._hparams.rnn_cell_fw,
                                                    cell_dropout_mode)

            if cell_bw is not None:
                self._cell_bw = cell_bw
            elif self._hparams.rnn_cell_share_config:
                self._cell_bw = layers.get_rnn_cell(self._hparams.rnn_cell_fw,
                                                    cell_dropout_mode)
            else:
                self._cell_bw = layers.get_rnn_cell(self._hparams.rnn_cell_bw,
                                                    cell_dropout_mode)

        # Make output layers
        with tf.variable_scope(self.variable_scope):
            if output_layer_fw is not None:
                self._output_layer_fw = output_layer_fw
                self._output_layer_hparams_fw = None
            else:
                self._output_layer_fw = _build_dense_output_layer(
                    self._hparams.output_layer_fw)
                self._output_layer_hparams_fw = self._hparams.output_layer_fw

            if output_layer_bw is not None:
                self._output_layer_bw = output_layer_bw
                self._output_layer_hparams_bw = None
            elif self._hparams.output_layer_share_config:
                self._output_layer_bw = _build_dense_output_layer(
                    self._hparams.output_layer_fw)
                self._output_layer_hparams_bw = self._hparams.output_layer_fw
            else:
                self._output_layer_bw = _build_dense_output_layer(
                    self._hparams.output_layer_bw)
                self._output_layer_hparams_bw = self._hparams.output_layer_bw
Example #5
    def __init__(self,
                 cell=None,
                 cell_dropout_mode=None,
                 output_layer=None,
                 hparams=None):
        RNNEncoderBase.__init__(self, hparams)

        # Make RNN cell
        with tf.variable_scope(self.variable_scope):
            if cell is not None:
                self._cell = cell
            else:
                self._cell = layers.get_rnn_cell(self._hparams.rnn_cell,
                                                 cell_dropout_mode)

        # Make output layer
        with tf.variable_scope(self.variable_scope):
            if output_layer is not None:
                self._output_layer = output_layer
                self._output_layer_hparams = None
            else:
                self._output_layer = _build_dense_output_layer(
                    self._hparams.output_layer)
                self._output_layer_hparams = self._hparams.output_layer
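A minimal usage sketch for the constructor above, assuming it belongs to texar.tf.modules.UnidirectionalRNNEncoder (the class name is not shown in the snippet); the "rnn_cell" hyperparameters follow the same schema as layers.default_rnn_cell_hparams().

import tensorflow as tf
import texar.tf as tx

# Build the encoder from hparams instead of passing a ready-made cell.
encoder = tx.modules.UnidirectionalRNNEncoder(
    hparams={"rnn_cell": {"kwargs": {"num_units": 64}}})

inputs = tf.zeros([16, 20, 4], dtype=tf.float32)   # [batch, time, emb_dim]
outputs, final_state = encoder(inputs)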
Example #6
    def setUp(self):
        tf.test.TestCase.setUp(self)
        self._batch_size = 100

        # Build the decoder cell from the default cell hyperparameters
        self._decoder_cell = layers.get_rnn_cell(
            layers.default_rnn_cell_hparams())