Example #1
  def testBasicLSTMCellWithStateTuple(self):
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 2])
        c0 = array_ops.zeros([1, 2])
        h0 = array_ops.zeros([1, 2])
        state0 = core_rnn_cell_impl.LSTMStateTuple(c0, h0)
        c1 = array_ops.zeros([1, 2])
        h1 = array_ops.zeros([1, 2])
        state1 = core_rnn_cell_impl.LSTMStateTuple(c1, h1)
        cell = rnn_cell.LayerNormBasicLSTMCell(2)
        cell = core_rnn_cell_impl.MultiRNNCell([cell] * 2)
        h, (s0, s1) = cell(x, (state0, state1))
        sess.run([variables.global_variables_initializer()])
        res = sess.run([h, s0, s1], {
            x.name: np.array([[1., 1.]]),
            c0.name: 0.1 * np.asarray([[0, 1]]),
            h0.name: 0.1 * np.asarray([[2, 3]]),
            c1.name: 0.1 * np.asarray([[4, 5]]),
            h1.name: 0.1 * np.asarray([[6, 7]]),
        })

        expected_h = np.array([[-0.38079708, 0.38079708]])
        expected_h0 = np.array([[-0.38079708, 0.38079708]])
        expected_c0 = np.array([[-1.0, 1.0]])
        expected_h1 = np.array([[-0.38079708, 0.38079708]])
        expected_c1 = np.array([[-1.0, 1.0]])

        self.assertEqual(len(res), 3)
        self.assertAllClose(res[0], expected_h, 1e-5)
        self.assertAllClose(res[1].c, expected_c0, 1e-5)
        self.assertAllClose(res[1].h, expected_h0, 1e-5)
        self.assertAllClose(res[2].c, expected_c1, 1e-5)
        self.assertAllClose(res[2].h, expected_h1, 1e-5)
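
A minimal sketch of the pattern exercised above, assuming the same TF 1.x contrib imports as the test (array_ops, core_rnn_cell_impl, rnn_cell); shapes and the two-layer depth are illustrative.

# Two LayerNormBasicLSTMCell layers stacked with MultiRNNCell, fed a tuple of
# LSTMStateTuples (one per layer).
num_units = 2
x = array_ops.zeros([1, num_units])
zero_state = core_rnn_cell_impl.LSTMStateTuple(
    array_ops.zeros([1, num_units]), array_ops.zeros([1, num_units]))
stacked = core_rnn_cell_impl.MultiRNNCell(
    [rnn_cell.LayerNormBasicLSTMCell(num_units) for _ in range(2)])
# A single step returns the top-layer output and one LSTMStateTuple per layer.
output, (state_layer0, state_layer1) = stacked(x, (zero_state, zero_state))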
Example #2
 def _testDropoutWrapper(self, batch_size=None, time_steps=None,
                         parallel_iterations=None, **kwargs):
   with self.test_session() as sess:
     with variable_scope.variable_scope(
         "root", initializer=init_ops.constant_initializer(0.5)):
       if batch_size is None and time_steps is None:
         # 2 time steps, batch size 1, depth 3
         batch_size = 1
         time_steps = 2
         x = constant_op.constant(
             [[[2., 2., 2.]], [[1., 1., 1.]]], dtype=dtypes.float32)
         m = core_rnn_cell_impl.LSTMStateTuple(
             *[constant_op.constant([[0.1, 0.1, 0.1]],
                                    dtype=dtypes.float32)] * 2)
       else:
         x = constant_op.constant(
             np.random.randn(time_steps, batch_size, 3).astype(np.float32))
         m = core_rnn_cell_impl.LSTMStateTuple(
             *[constant_op.constant([[0.1, 0.1, 0.1]] * batch_size,
                                    dtype=dtypes.float32)] * 2)
       outputs, final_state = rnn.dynamic_rnn(
           cell=core_rnn_cell_impl.DropoutWrapper(
               core_rnn_cell_impl.LSTMCell(3),
               dtype=x.dtype,
               **kwargs),
           time_major=True,
           parallel_iterations=parallel_iterations,
           inputs=x, initial_state=m)
       sess.run([variables_lib.global_variables_initializer()])
       res = sess.run([outputs, final_state])
       self.assertEqual(res[0].shape, (time_steps, batch_size, 3))
       self.assertEqual(res[1].c.shape, (batch_size, 3))
       self.assertEqual(res[1].h.shape, (batch_size, 3))
       return res
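
The helper above forwards **kwargs to DropoutWrapper, so concrete tests call it with whatever keep-probability keywords they want to exercise. The calls below are hypothetical invocations, assuming the standard DropoutWrapper keyword arguments input_keep_prob, output_keep_prob, and seed.

# Hypothetical invocations (inside another test method of the same class).
res_keep_all = self._testDropoutWrapper(
    input_keep_prob=1.0, output_keep_prob=1.0)
res_input_dropout = self._testDropoutWrapper(
    batch_size=2, time_steps=3, parallel_iterations=32,
    input_keep_prob=0.5, seed=42)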
Example #3
    def testStateTupleDictConversion(self):
        """Test `state_tuple_to_dict` and `dict_to_state_tuple`."""
        cell_sizes = [5, 3, 7]
        # A MultiRNNCell of LSTMCells is both a common choice and an interesting
        # test case, because it has two levels of nesting, with an inner class that
        # is not a plain tuple.
        cell = core_rnn_cell_impl.MultiRNNCell(
            [core_rnn_cell_impl.LSTMCell(i) for i in cell_sizes])
        state_dict = {
            dynamic_rnn_estimator._get_state_name(i):
            array_ops.expand_dims(math_ops.range(cell_size), 0)
            for i, cell_size in enumerate([5, 5, 3, 3, 7, 7])
        }
        expected_state = (core_rnn_cell_impl.LSTMStateTuple(
            np.reshape(np.arange(5), [1, -1]),
            np.reshape(np.arange(5), [1, -1])),
                          core_rnn_cell_impl.LSTMStateTuple(
                              np.reshape(np.arange(3), [1, -1]),
                              np.reshape(np.arange(3), [1, -1])),
                          core_rnn_cell_impl.LSTMStateTuple(
                              np.reshape(np.arange(7), [1, -1]),
                              np.reshape(np.arange(7), [1, -1])))
        actual_state = dynamic_rnn_estimator.dict_to_state_tuple(
            state_dict, cell)
        flattened_state = dynamic_rnn_estimator.state_tuple_to_dict(
            actual_state)

        with self.test_session() as sess:
            (state_dict_val, actual_state_val, flattened_state_val) = sess.run(
                [state_dict, actual_state, flattened_state])

        def _recursive_assert_equal(x, y):
            self.assertEqual(type(x), type(y))
            if isinstance(x, (list, tuple)):
                self.assertEqual(len(x), len(y))
                for i, _ in enumerate(x):
                    _recursive_assert_equal(x[i], y[i])
            elif isinstance(x, np.ndarray):
                np.testing.assert_array_equal(x, y)
            else:
                self.fail('Unexpected type: {}'.format(type(x)))

        for k in state_dict_val.keys():
            np.testing.assert_array_almost_equal(
                state_dict_val[k],
                flattened_state_val[k],
                err_msg='Wrong value for state component {}.'.format(k))
        _recursive_assert_equal(expected_state, actual_state_val)
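
A condensed sketch of the round trip the test verifies, using only the calls that appear above; the two-layer cell and its sizes are illustrative, and _get_state_name is assumed to enumerate one entry each for c and h of every layer, in order, as in the test.

# Flat dict of state tensors -> nested LSTMStateTuples -> flat dict again.
cell = core_rnn_cell_impl.MultiRNNCell(
    [core_rnn_cell_impl.LSTMCell(n) for n in [4, 6]])
state_dict = {
    dynamic_rnn_estimator._get_state_name(i):
        array_ops.expand_dims(math_ops.range(size), 0)
    for i, size in enumerate([4, 4, 6, 6])  # c and h for each of the 2 layers
}
nested_state = dynamic_rnn_estimator.dict_to_state_tuple(state_dict, cell)
flat_again = dynamic_rnn_estimator.state_tuple_to_dict(nested_state)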
Example #4
  def testBasicLSTMCellWithDropout(self):

    def _is_close(x, y, digits=4):
      # True when |x - y| is smaller than 10**(-digits).
      return abs(x - y) < 10**(-digits)

    def _is_close_in(x, items, digits=4):
      for i in items:
        if _is_close(x, i, digits):
          return True
      return False

    keep_prob = 0.5
    c_high = 2.9998924946
    c_low = 0.999983298578
    h_low = 0.761552567265
    h_high = 0.995008519604
    num_units = 5
    allowed_low = [2, 3]

    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "other", initializer=init_ops.constant_initializer(1)):
        x = array_ops.zeros([1, 5])
        c = array_ops.zeros([1, 5])
        h = array_ops.zeros([1, 5])
        state = core_rnn_cell_impl.LSTMStateTuple(c, h)
        cell = rnn_cell.LayerNormBasicLSTMCell(
            num_units, layer_norm=False, dropout_keep_prob=keep_prob)

        g, s = cell(x, state)
        sess.run([variables.global_variables_initializer()])
        res = sess.run([g, s], {
            x.name: np.ones([1, 5]),
            c.name: np.ones([1, 5]),
            h.name: np.ones([1, 5]),
        })

        # The returned tensors have shape [1, n], so take the first
        # (and only) row of each.
        actual_h = res[0][0]
        actual_state_c = res[1].c[0]
        actual_state_h = res[1].h[0]

        # For each item in `c` (the cell's inner state), check that it is
        # close to one of the allowed values: `c_high` (not dropped out) or
        # `c_low` (dropped out), and verify that the corresponding item in
        # `h` (the cell activation) matches. Count the dropped activations
        # and check that their number is consistent with the dropout
        # probability.
        dropped_count = 0
        self.assertTrue((actual_h == actual_state_h).all())
        for citem, hitem in zip(actual_state_c, actual_state_h):
          self.assertTrue(_is_close_in(citem, [c_low, c_high]))
          if _is_close(citem, c_low):
            self.assertTrue(_is_close(hitem, h_low))
            dropped_count += 1
          elif _is_close(citem, c_high):
            self.assertTrue(_is_close(hitem, h_high))
        self.assertIn(dropped_count, allowed_low)
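
Distilled from the test, a minimal sketch of the configuration being exercised: layer normalization disabled and recurrent dropout enabled through dropout_keep_prob; shapes match the test.

# LayerNormBasicLSTMCell with layer_norm off and dropout_keep_prob < 1.0, so
# roughly half of the candidate values are dropped at each step.
num_units = 5
cell = rnn_cell.LayerNormBasicLSTMCell(
    num_units, layer_norm=False, dropout_keep_prob=0.5)
x = array_ops.zeros([1, num_units])
state = core_rnn_cell_impl.LSTMStateTuple(
    array_ops.zeros([1, num_units]), array_ops.zeros([1, num_units]))
output, new_state = cell(x, state)  # new_state is an LSTMStateTuple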
Example #5
 def testUsingSecondCellInScopeWithExistingVariablesFails(self):
     # This test should go away when this behavior is no longer an
     # error (Approx. May 2017)
     cell1 = core_rnn_cell_impl.LSTMCell(3)
     cell2 = core_rnn_cell_impl.LSTMCell(3)
     x = array_ops.zeros([1, 3])
     m = core_rnn_cell_impl.LSTMStateTuple(*[array_ops.zeros([1, 3])] * 2)
     cell1(x, m)
     with self.assertRaisesRegexp(ValueError,
                                  r"LSTMCell\(..., reuse=True\)"):
         cell2(x, m)
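
As the expected error message suggests, the second cell can instead be declared with reuse=True so that it shares the first cell's variables; a sketch under that assumption (the reuse constructor argument is what the error text itself recommends).

# Sketch of the fix hinted at by the error message above.
cell1 = core_rnn_cell_impl.LSTMCell(3)
x = array_ops.zeros([1, 3])
m = core_rnn_cell_impl.LSTMStateTuple(*[array_ops.zeros([1, 3])] * 2)
cell1(x, m)                                         # creates the variables
cell2 = core_rnn_cell_impl.LSTMCell(3, reuse=True)  # shares cell1's variables
cell2(x, m)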
Example #6
 def testUsingCellInDifferentScopeFromFirstCallFails(self):
     # This test should go away when this behavior is no longer an
     # error (Approx. May 2017)
     cell = core_rnn_cell_impl.LSTMCell(3)
     x = array_ops.zeros([1, 3])
     m = core_rnn_cell_impl.LSTMStateTuple(*[array_ops.zeros([1, 3])] * 2)
     with variable_scope.variable_scope("scope1"):
         cell(x, m)
     with variable_scope.variable_scope("scope2"):
         with self.assertRaisesRegexp(ValueError,
                                      r"Attempt to reuse RNNCell"):
             cell(x, m)
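
One way to avoid this error is to create a separate cell instance per scope, so each instance owns its own variables; a minimal sketch of that pattern.

# A fresh cell per variable scope: each instance gets its own weights.
x = array_ops.zeros([1, 3])
m = core_rnn_cell_impl.LSTMStateTuple(*[array_ops.zeros([1, 3])] * 2)
with variable_scope.variable_scope("scope1"):
  core_rnn_cell_impl.LSTMCell(3)(x, m)
with variable_scope.variable_scope("scope2"):
  core_rnn_cell_impl.LSTMCell(3)(x, m)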
Example #7
  def testBasicLSTMCell(self):
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros([1, 2])
        c0 = array_ops.zeros([1, 2])
        h0 = array_ops.zeros([1, 2])
        state0 = core_rnn_cell_impl.LSTMStateTuple(c0, h0)
        c1 = array_ops.zeros([1, 2])
        h1 = array_ops.zeros([1, 2])
        state1 = core_rnn_cell_impl.LSTMStateTuple(c1, h1)
        state = (state0, state1)
        single_cell = lambda: rnn_cell.LayerNormBasicLSTMCell(2)
        cell = core_rnn_cell_impl.MultiRNNCell(
            [single_cell() for _ in range(2)])
        g, out_m = cell(x, state)
        sess.run([variables.global_variables_initializer()])
        res = sess.run([g, out_m], {
            x.name: np.array([[1., 1.]]),
            c0.name: 0.1 * np.asarray([[0, 1]]),
            h0.name: 0.1 * np.asarray([[2, 3]]),
            c1.name: 0.1 * np.asarray([[4, 5]]),
            h1.name: 0.1 * np.asarray([[6, 7]]),
        })

        expected_h = np.array([[-0.38079708, 0.38079708]])
        expected_state0_c = np.array([[-1.0, 1.0]])
        expected_state0_h = np.array([[-0.38079708, 0.38079708]])
        expected_state1_c = np.array([[-1.0, 1.0]])
        expected_state1_h = np.array([[-0.38079708, 0.38079708]])

        actual_h = res[0]
        actual_state0_c = res[1][0].c
        actual_state0_h = res[1][0].h
        actual_state1_c = res[1][1].c
        actual_state1_h = res[1][1].h

        self.assertAllClose(actual_h, expected_h, 1e-5)
        self.assertAllClose(expected_state0_c, actual_state0_c, 1e-5)
        self.assertAllClose(expected_state0_h, actual_state0_h, 1e-5)
        self.assertAllClose(expected_state1_c, actual_state1_c, 1e-5)
        self.assertAllClose(expected_state1_h, actual_state1_h, 1e-5)

      with variable_scope.variable_scope(
          "other", initializer=init_ops.constant_initializer(0.5)):
        x = array_ops.zeros(
            [1, 3])  # Test LayerNormBasicLSTMCell with input_size != num_units.
        c = array_ops.zeros([1, 2])
        h = array_ops.zeros([1, 2])
        state = core_rnn_cell_impl.LSTMStateTuple(c, h)
        cell = rnn_cell.LayerNormBasicLSTMCell(2)
        g, out_m = cell(x, state)
        sess.run([variables.global_variables_initializer()])
        res = sess.run([g, out_m], {
            x.name: np.array([[1., 1., 1.]]),
            c.name: 0.1 * np.asarray([[0, 1]]),
            h.name: 0.1 * np.asarray([[2, 3]]),
        })

        expected_h = np.array([[-0.38079708, 0.38079708]])
        expected_c = np.array([[-1.0, 1.0]])
        self.assertEqual(len(res), 2)
        self.assertAllClose(res[0], expected_h, 1e-5)
        self.assertAllClose(res[1].c, expected_c, 1e-5)
        self.assertAllClose(res[1].h, expected_h, 1e-5)
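
The second block of the test covers input_size != num_units. A standalone sketch of just that configuration, with shapes as in the test; the cell's internal linear layer maps the concatenated [input, h] onto the four gates, so the input depth need not equal num_units.

# LayerNormBasicLSTMCell with input depth 3 and num_units 2.
cell = rnn_cell.LayerNormBasicLSTMCell(2)
x = array_ops.zeros([1, 3])  # input_size = 3
state = core_rnn_cell_impl.LSTMStateTuple(
    array_ops.zeros([1, 2]), array_ops.zeros([1, 2]))
output, new_state = cell(x, state)  # output has shape [1, 2]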