Exemplo n.º 1
0
  def test_timeseries_classification_sequential_tf_rnn(self):
    """Trains a Sequential model built from TF-1 RNN cells on toy sequences.

    Verifies that TF-1 `rnn_cell` cells wrapped in `keras.layers.RNN` inside
    `keras_style_scope` train, evaluate and predict like native Keras layers.
    """
    np.random.seed(1337)
    (train_x, train_y), _ = testing_utils.get_test_data(
        train_samples=100,
        test_samples=0,
        input_shape=(4, 10),
        num_classes=2)
    train_y = np_utils.to_categorical(train_y)

    with base_layer.keras_style_scope():
      model = keras.models.Sequential()
      model.add(
          keras.layers.RNN(
              rnn_cell.LSTMCell(5),
              return_sequences=True,
              input_shape=train_x.shape[1:]))
      # Final GRU cell emits one softmax unit per class.
      model.add(
          keras.layers.RNN(
              rnn_cell.GRUCell(
                  train_y.shape[-1],
                  activation='softmax',
                  dtype=dtypes.float32)))
      model.compile(
          loss='categorical_crossentropy',
          optimizer=keras.optimizer_v2.adam.Adam(0.005),
          metrics=['acc'],
          run_eagerly=testing_utils.should_run_eagerly(),
          experimental_run_tf_function=testing_utils.should_run_tf_function())

    # Train on the same data used for validation; accuracy should clear a
    # loose threshold on this easy, seeded problem.
    history = model.fit(
        train_x,
        train_y,
        epochs=15,
        batch_size=10,
        validation_data=(train_x, train_y),
        verbose=2)
    self.assertGreater(history.history['val_acc'][-1], 0.7)
    # evaluate() must agree with the last validation metric reported by fit().
    _, eval_acc = model.evaluate(train_x, train_y)
    self.assertAlmostEqual(history.history['val_acc'][-1], eval_acc)
    preds = model.predict(train_x)
    self.assertEqual(preds.shape, (train_x.shape[0], 2))
  def testWrapperV2Caller(self, wrapper):
    """Tests that wrapper V2 is using the LayerRNNCell's caller."""
    with base_layer.keras_style_scope():
      stacked = rnn_cell_impl.MultiRNNCell(
          [rnn_cell_impl.BasicRNNCell(1) for _ in range(2)])
    wrapped = wrapper(stacked)
    x = ops.convert_to_tensor([[1]], dtype=dtypes.float32)
    s = ops.convert_to_tensor([[1]], dtype=dtypes.float32)
    _ = wrapped(x, [s, s])
    # Variables created via the V2 wrapper's caller should carry a
    # "_wrapper" name suffix on both weights of the first inner cell.
    inner_weights = stacked._cells[0].weights
    self.assertLen(inner_weights, expected_len=2)
    self.assertTrue(all("_wrapper" in v.name for v in inner_weights))
Exemplo n.º 3
0
 def _rnn_input(apply_wrapper):
   """Builds an RNN layer, optionally dropout-wrapped, and calls it once.

   Returns the first inner cell of the MultiRNNCell so tests can inspect
   the variables it created during the call.
   """
   with base_layer.keras_style_scope():
     stack = rnn_cell_impl.MultiRNNCell(
         [rnn_cell_impl.BasicRNNCell(1) for _ in range(2)])
   cell = rnn_cell_impl.DropoutWrapperV2(stack) if apply_wrapper else stack
   layer = keras_layers.RNN(cell)
   # One call on a trivial (1, 1, 1) input is enough to build the cell.
   _ = layer(ops.convert_to_tensor([[[1]]], dtype=dtypes.float32))
   return stack._cells[0]
Exemplo n.º 4
0
  def testRNNCellActsLikeKerasRNNCellInProperScope(self):
    """Checks that both network flavors scope their variables by net name."""
    with base_layers.keras_style_scope():
      net_tf = KerasNetworkTFRNNs(name="kn1")
      net_keras = KerasNetworkKerasRNNs(name="kn2")

    zeros = array_ops.zeros((2, 3))

    net_tf(zeros)
    net_keras(zeros)

    # pylint: disable=protected-access
    self.assertTrue(all("kn1" in v.name for v in net_tf._cell.variables))
    self.assertTrue(all("kn2" in v.name for v in net_keras._cell.variables))

    with base_layers.keras_style_scope():
      net_tf_2 = KerasNetworkTFRNNs(name="kn1_new")
      net_keras_2 = KerasNetworkKerasRNNs(name="kn2_new")

    # Call order is reversed on purpose relative to the first pair.
    net_keras_2(zeros)
    # Most importantly, this doesn't fail due to variable scope reuse issues.
    net_tf_2(zeros)

    self.assertTrue(
        all("kn1_new" in v.name for v in net_tf_2._cell.variables))
    self.assertTrue(
        all("kn2_new" in v.name for v in net_keras_2._cell.variables))
Exemplo n.º 5
0
  def testRNNCellActsLikeKerasRNNCellInProperScope(self):
    """Variables of TF- and Keras-style networks stay under their own name."""
    with base_layers.keras_style_scope():
      first = KerasNetworkTFRNNs(name="kn1")
      second = KerasNetworkKerasRNNs(name="kn2")

    inp = array_ops.zeros((2, 3))

    first(inp)
    second(inp)

    # pylint: disable=protected-access
    for net, tag in ((first, "kn1"), (second, "kn2")):
      self.assertTrue(all(tag in v.name for v in net._cell.variables))

    with base_layers.keras_style_scope():
      first_again = KerasNetworkTFRNNs(name="kn1_new")
      second_again = KerasNetworkKerasRNNs(name="kn2_new")

    second_again(inp)
    # Most importantly, this doesn't fail due to variable scope reuse issues.
    first_again(inp)

    for net, tag in ((first_again, "kn1_new"), (second_again, "kn2_new")):
      self.assertTrue(all(tag in v.name for v in net._cell.variables))
Exemplo n.º 6
0
  def testKerasStyleAddWeight(self):
    """Checks how add_variable names variables under different layer styles."""
    # A real Keras layer: the variable lands under the surrounding name scope.
    keras_layer = keras_base_layer.Layer(name='keras_layer')
    with ops.name_scope('foo', skip_on_eager=False):
      v_keras = keras_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(v_keras.name, 'foo/my_var:0')

    # A legacy layer outside keras_style_scope: the layer's own name wins and
    # the surrounding name scope is ignored.
    with ops.name_scope('baz', skip_on_eager=False):
      legacy_layer = base_layers.Layer(name='my_layer')
      v_legacy = legacy_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(v_legacy.name, 'my_layer/my_var:0')

    # A legacy layer created inside keras_style_scope behaves like Keras:
    # the surrounding name scope takes precedence again.
    with base_layers.keras_style_scope():
      styled_layer = base_layers.Layer(name='my_layer')
    with ops.name_scope('bar', skip_on_eager=False):
      v_styled = styled_layer.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(v_styled.name, 'bar/my_var:0')
Exemplo n.º 7
0
  def testKerasStyleAddWeight(self):
    """keras_style_scope makes legacy Layer.add_variable follow name scopes."""
    new_style = keras_base_layer.Layer(name='keras_layer')
    with ops.name_scope('foo'):
      var_a = new_style.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    # Keras layers honor the active name scope.
    self.assertEqual(var_a.name, 'foo/my_var:0')

    with ops.name_scope('baz'):
      old_style = base_layers.Layer(name='my_layer')
      # Without keras style the layer's own scope takes precedence.
      var_b = old_style.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(var_b.name, 'my_layer/my_var:0')

    # Creating the legacy layer inside keras_style_scope flips it to the
    # Keras behavior: the surrounding name scope wins.
    with base_layers.keras_style_scope():
      styled = base_layers.Layer(name='my_layer')
    with ops.name_scope('bar'):
      var_c = styled.add_variable(
          'my_var', [2, 2], initializer=init_ops.zeros_initializer())
    self.assertEqual(var_c.name, 'bar/my_var:0')