def _testKerasLayer(self, layer_class):
    """Smoke-tests `layer_class` through Keras' generic `layer_test` harness.

    The kernel posterior is a trivial standard-normal Independent distribution
    so the layer can be instantiated without any prior/bias machinery.

    Args:
      layer_class: the TFP Keras layer class under test.
    """
    # TODO(scottzhu): reenable the test when the repo switch change reach
    # the TF PIP package.
    # Skip up front: previously the skip sat after kwargs construction and
    # scope entry, making all of that setup dead code on every run.
    self.skipTest('Skip the test until the TF and Keras has a new PIP.')

    def kernel_posterior_fn(dtype, shape, name, trainable, add_variable_fn):
      """Set trivially. The function is required to instantiate layer."""
      del name, trainable, add_variable_fn  # unused
      # Deserialized Keras objects do not perform lexical scoping. Any modules
      # that the function requires must be imported within the function.
      import tensorflow.compat.v2 as tf  # pylint: disable=g-import-not-at-top,redefined-outer-name,reimported
      import tensorflow_probability as tfp  # pylint: disable=g-import-not-at-top,redefined-outer-name,reimported
      tfd = tfp.distributions  # pylint: disable=redefined-outer-name

      dist = tfd.Normal(loc=tf.zeros(shape, dtype), scale=tf.ones(shape, dtype))
      batch_ndims = tf.size(dist.batch_shape_tensor())
      return tfd.Independent(dist, reinterpreted_batch_ndims=batch_ndims)

    kwargs = {'units': 3,
              'kernel_posterior_fn': kernel_posterior_fn,
              'kernel_prior_fn': None,
              'bias_posterior_fn': None,
              'bias_prior_fn': None}
    # Register the class so (de)serialization inside layer_test can resolve it.
    with tf.keras.utils.CustomObjectScope({layer_class.__name__: layer_class}):
      with self.cached_session():
        testing_utils.layer_test(
            layer_class,
            kwargs=kwargs,
            input_shape=(3, 2))
        testing_utils.layer_test(
            layer_class,
            kwargs=kwargs,
            input_shape=(None, None, 2))
Beispiel #2
0
 def test_basic_layernorm(self):
     """Exercises LayerNormalization across several configurations."""
     # (kwargs, input_shape) pairs: regularizers, custom initializers,
     # disabled scale/center, multi-axis normalization, and a zero-size dim.
     cases = [
         ({'gamma_regularizer': keras.regularizers.l2(0.01),
           'beta_regularizer': keras.regularizers.l2(0.01)}, (3, 4, 2)),
         ({'gamma_initializer': 'ones', 'beta_initializer': 'ones'}, (3, 4, 2)),
         ({'scale': False, 'center': False}, (3, 3)),
         ({'axis': (-3, -2, -1)}, (2, 8, 8, 3)),
         ({}, (1, 0, 10)),
     ]
     for layer_kwargs, shape in cases:
         testing_utils.layer_test(keras.layers.LayerNormalization,
                                  kwargs=layer_kwargs,
                                  input_shape=shape)
Beispiel #3
0
 def test_basics(self):
     """UnitNormalization over a single axis and over multiple axes."""
     for axis, shape in ((-1, (2, 3)), ((1, 2), (1, 3, 3))):
         testing_utils.layer_test(keras.layers.UnitNormalization,
                                  kwargs={'axis': axis},
                                  input_shape=shape)
Beispiel #4
0
    def test_locallyconnected_1d(self, data_format, padding, implementation):
        """LocallyConnected1D either builds and runs or rejects bad configs."""
        with self.cached_session():
            batch = 2
            steps = 8
            channels = 5

            for stride in [1]:
                # 'same' padding is only exercised with unit stride.
                if padding == 'same' and stride != 1:
                    continue
                layer_kwargs = {
                    'filters': 4,
                    'kernel_size': 3,
                    'padding': padding,
                    'strides': stride,
                    'data_format': data_format,
                    'implementation': implementation
                }

                # implementation 1 with 'same' padding is unsupported: the
                # constructor itself must raise.
                if padding == 'same' and implementation == 1:
                    self.assertRaises(ValueError,
                                      keras.layers.LocallyConnected1D,
                                      **layer_kwargs)
                else:
                    testing_utils.layer_test(
                        keras.layers.LocallyConnected1D,
                        kwargs=layer_kwargs,
                        input_shape=(batch, steps, channels))
Beispiel #5
0
  def test_lambda(self):
    """Covers Lambda layers: plain function, extra arguments, serialization."""
    # Simple elementwise function.
    testing_utils.layer_test(
        keras.layers.Lambda,
        kwargs={'function': lambda x: x + 1},
        input_shape=(3, 2))

    # Extra keyword arguments bound through the `arguments` dict.
    testing_utils.layer_test(
        keras.layers.Lambda,
        kwargs={
            'function': lambda x, a, b: x * a + b,
            'arguments': {
                'a': 0.6,
                'b': 0.4
            }
        },
        input_shape=(3, 2))

    # test serialization with function
    def f(x):
      return x + 1

    ld = keras.layers.Lambda(f)
    config = ld.get_config()
    # Round-trip through deserialize: the restored layer must wrap an
    # equivalent callable (f(3) == 4).
    ld = keras.layers.deserialize({'class_name': 'Lambda', 'config': config})
    self.assertEqual(ld.function(3), 4)

    # test with lambda
    ld = keras.layers.Lambda(
        lambda x: keras.backend.concatenate([tf.square(x), x]))
    config = ld.get_config()
    # from_config must reconstruct the (marshalled) lambda as well.
    ld = keras.layers.Lambda.from_config(config)
    self.assertAllEqual(self.evaluate(ld.function([3])), [9, 3])
Beispiel #6
0
  def test_upsampling_2d_bilinear(self):
    """UpSampling2D with bilinear interpolation, both data formats."""
    num_samples = 2
    stack_size = 2
    input_num_row = 11
    input_num_col = 12
    for data_format in ['channels_first', 'channels_last']:
      # Channel axis position depends on data_format.
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_num_row,
                                input_num_col)
      else:
        inputs = np.random.rand(num_samples, input_num_row, input_num_col,
                                stack_size)

      # Generic layer harness with bilinear interpolation enabled.
      testing_utils.layer_test(keras.layers.UpSampling2D,
                               kwargs={'size': (2, 2),
                                       'data_format': data_format,
                                       'interpolation': 'bilinear'},
                               input_shape=inputs.shape)

      # Graph mode only: spatial dims of the output must scale by the
      # upsampling factors for several (row, col) size combinations.
      if not tf.executing_eagerly():
        for length_row in [2]:
          for length_col in [2, 3]:
            layer = keras.layers.UpSampling2D(
                size=(length_row, length_col),
                data_format=data_format)
            layer.build(inputs.shape)
            outputs = layer(keras.backend.variable(inputs))
            np_output = keras.backend.eval(outputs)
            if data_format == 'channels_first':
              self.assertEqual(np_output.shape[2], length_row * input_num_row)
              self.assertEqual(np_output.shape[3], length_col * input_num_col)
            else:
              self.assertEqual(np_output.shape[1], length_row * input_num_row)
              self.assertEqual(np_output.shape[2], length_col * input_num_col)
Beispiel #7
0
  def test_conv2d_transpose_dilation(self):
    """Conv2DTranspose with dilation: shape check plus an exact-value check."""
    # Shape/serialization smoke test with dilation_rate=(2, 2).
    testing_utils.layer_test(keras.layers.Conv2DTranspose,
                             kwargs={'filters': 2,
                                     'kernel_size': 3,
                                     'padding': 'same',
                                     'data_format': 'channels_last',
                                     'dilation_rate': (2, 2)},
                             input_shape=(2, 5, 6, 3))

    input_data = np.arange(48).reshape((1, 4, 4, 3)).astype(np.float32)
    # With an all-ones kernel, the dilated transpose output is a 2x2 value
    # pattern tiled across the 4x4 spatial grid.
    # pylint: disable=too-many-function-args
    expected_output = np.tile(
        np.float32([[192, 228],
                    [336, 372]]), (2, 2)).reshape((1, 4, 4, 1))
    testing_utils.layer_test(keras.layers.Conv2DTranspose,
                             input_data=input_data,
                             kwargs={'filters': 1,
                                     'kernel_size': 3,
                                     'padding': 'same',
                                     'data_format': 'channels_last',
                                     'dilation_rate': (2, 2),
                                     'kernel_initializer': 'ones'},
                             expected_output=expected_output)
 def test_basic_batchnorm_v2(self):
     """BatchNormalization smoke test with fused forced on and auto-chosen."""
     for fused, shape in ((True, (3, 3, 3, 3)), (None, (3, 3, 3))):
         testing_utils.layer_test(batch_normalization.BatchNormalization,
                                  kwargs={'fused': fused},
                                  input_shape=shape)
Beispiel #9
0
    def test_locallyconnected_2d(self, data_format, padding, implementation):
        """LocallyConnected2D builds/runs, or rejects unsupported configs."""
        with self.cached_session():
            batch = 8
            rows, cols, channels = 6, 10, 4

            for stride in ((1, 1), (2, 2)):
                # 'same' padding only supports unit strides.
                if padding == 'same' and stride != (1, 1):
                    continue

                layer_kwargs = {
                    'filters': 3,
                    'kernel_size': 3,
                    'padding': padding,
                    'kernel_regularizer': 'l2',
                    'bias_regularizer': 'l2',
                    'strides': stride,
                    'data_format': data_format,
                    'implementation': implementation
                }

                # implementation 1 with 'same' padding is unsupported: the
                # constructor itself must raise.
                if padding == 'same' and implementation == 1:
                    self.assertRaises(ValueError,
                                      keras.layers.LocallyConnected2D,
                                      **layer_kwargs)
                else:
                    testing_utils.layer_test(
                        keras.layers.LocallyConnected2D,
                        kwargs=layer_kwargs,
                        input_shape=(batch, rows, cols, channels))
 def test_relu_with_invalid_max_value(self):
     """A negative max_value must be rejected at construction time."""
     expected_msg = ('max_value of a ReLU layer cannot be a negative '
                     'value. Received: -10')
     with self.assertRaisesRegex(ValueError, expected_msg):
         testing_utils.layer_test(keras.layers.ReLU,
                                  kwargs={'max_value': -10},
                                  input_shape=(2, 3, 4),
                                  supports_masking=True)
 def test_leaky_elu_with_invalid_alpha(self):
     """ELU(alpha=None) must raise. Regression test for GitHub issue 46993."""
     expected_msg = 'alpha of ELU layer cannot be None'
     with self.assertRaisesRegex(ValueError, expected_msg):
         testing_utils.layer_test(keras.layers.ELU,
                                  kwargs={'alpha': None},
                                  input_shape=(2, 3, 4),
                                  supports_masking=True)
    def test_conv_lstm(self, data_format, return_sequences):
        """ConvLSTM3D: stateful return_state behavior plus output shapes."""
        num_height = 3
        num_width = 3
        num_depth = 3
        filters = 3
        num_samples = 1
        input_channel = 2
        input_height = 5
        input_width = 5
        input_depth = 5
        sequence_len = 2
        # Channel axis position depends on data_format.
        if data_format == 'channels_first':
            inputs = np.random.rand(num_samples, sequence_len, input_channel,
                                    input_height, input_width, input_depth)
        else:
            inputs = np.random.rand(num_samples, sequence_len, input_height,
                                    input_width, input_depth, input_channel)

        # test for return state:
        x = keras.Input(batch_shape=inputs.shape)
        kwargs = {
            'data_format': data_format,
            'return_sequences': return_sequences,
            'return_state': True,
            'stateful': True,
            'filters': filters,
            'kernel_size': (num_height, num_width, num_depth),
            'padding': 'same'
        }
        layer = keras.layers.ConvLSTM3D(**kwargs)
        layer.build(inputs.shape)
        outputs = layer(x)
        # With return_state=True the call returns the output followed by the
        # recurrent states; an LSTM carries exactly two (h and c).
        _, states = outputs[0], outputs[1:]
        self.assertEqual(len(states), 2)
        model = keras.models.Model(x, states[0])

        state = model.predict(inputs)

        # stateful=True: after predict, the layer's stored state must match
        # the state the model just produced.
        self.assertAllClose(keras.backend.eval(layer.states[0]),
                            state,
                            atol=1e-4)

        # test for output shape:
        testing_utils.layer_test(keras.layers.ConvLSTM3D,
                                 kwargs={
                                     'data_format':
                                     data_format,
                                     'return_sequences':
                                     return_sequences,
                                     'filters':
                                     filters,
                                     'kernel_size':
                                     (num_height, num_width, num_depth),
                                     'padding':
                                     'valid'
                                 },
                                 input_shape=inputs.shape)
 def test_leaky_relu_with_invalid_alpha(self):
     """LeakyReLU(alpha=None) must raise. Regression for GitHub issue 46993."""
     expected_msg = ('The alpha value of a Leaky ReLU layer '
                     'cannot be None, needs a float. Got None')
     with self.assertRaisesRegex(ValueError, expected_msg):
         testing_utils.layer_test(keras.layers.LeakyReLU,
                                  kwargs={'alpha': None},
                                  input_shape=(2, 3, 4),
                                  supports_masking=True)
Beispiel #14
0
  def test_dropout(self):
    """Dropout smoke test, with and without an explicit noise_shape."""
    for layer_kwargs in ({'rate': 0.5},
                         {'rate': 0.5, 'noise_shape': [3, 1]}):
      testing_utils.layer_test(
          keras.layers.Dropout, kwargs=layer_kwargs, input_shape=(3, 2))
Beispiel #15
0
 def test_cudnn_rnn_return_sequence(self, layer_class, return_sequences):
   """Parameterized check that cuDNN RNN layers honor return_sequences."""
   # 32 samples, 6 timesteps, input size 10, 2 units.
   testing_utils.layer_test(
       layer_class,
       kwargs={'units': 2, 'return_sequences': return_sequences},
       input_shape=(32, 6, 10))
Beispiel #16
0
    def test_activation(self):
        """Activation accepts both a string name and a callable."""
        for activation in ('relu', keras.backend.relu):
            testing_utils.layer_test(keras.layers.Activation,
                                     kwargs={'activation': activation},
                                     input_shape=(3, 2))
Beispiel #17
0
 def test_maxpooling_2d(self):
     """MaxPooling2D with a 3x3 window at two different strides."""
     for stride in ((1, 1), (2, 2)):
         testing_utils.layer_test(keras.layers.MaxPooling2D,
                                  kwargs={'strides': stride,
                                          'padding': 'valid',
                                          'pool_size': (3, 3)},
                                  input_shape=(3, 5, 6, 4))
    def _run_test(self, kwargs):
        """Runs SeparableConv1D through layer_test on a fixed (2, 7, 3) input."""
        with self.cached_session():
            testing_utils.layer_test(keras.layers.SeparableConv1D,
                                     kwargs=kwargs,
                                     input_shape=(2, 7, 3))
Beispiel #19
0
  def test_spatial_dropout_3d(self):
    """SpatialDropout3D in the default and channels_first data formats."""
    for layer_kwargs in ({'rate': 0.5},
                         {'rate': 0.5, 'data_format': 'channels_first'}):
      testing_utils.layer_test(
          keras.layers.SpatialDropout3D,
          kwargs=layer_kwargs,
          input_shape=(2, 3, 4, 4, 5))
Beispiel #20
0
 def test_cudnn_rnn_go_backward(self, layer_class, go_backwards):
   """Parameterized check that cuDNN RNN layers honor go_backwards."""
   # 32 samples, 6 timesteps, input size 10, 2 units.
   testing_utils.layer_test(
       layer_class,
       kwargs={'units': 2, 'go_backwards': go_backwards},
       input_shape=(32, 6, 10))
Beispiel #21
0
 def test_implementation_mode_LSTM(self, implementation_mode):
   """LSTM smoke test for each implementation mode."""
   # 2 samples, 3 timesteps, embedding dim 4, 2 units.
   testing_utils.layer_test(
       keras.layers.LSTM,
       kwargs={'units': 2, 'implementation': implementation_mode},
       input_shape=(2, 3, 4))
Beispiel #22
0
 def test_return_sequences_LSTM(self):
   """LSTM with return_sequences=True yields per-timestep output."""
   # 2 samples, 3 timesteps, embedding dim 4, 2 units.
   testing_utils.layer_test(
       keras.layers.LSTM,
       kwargs={'units': 2, 'return_sequences': True},
       input_shape=(2, 3, 4))
Beispiel #23
0
  def test_upsampling_3d(self):
    """UpSampling3D: harness smoke test plus exact-value check vs numpy."""
    num_samples = 2
    stack_size = 2
    input_len_dim1 = 10
    input_len_dim2 = 11
    input_len_dim3 = 12

    for data_format in ['channels_first', 'channels_last']:
      # Channel axis position depends on data_format.
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                                input_len_dim2, input_len_dim3)
      else:
        inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                                input_len_dim3, stack_size)

      # basic test
      with self.cached_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.UpSampling3D,
            kwargs={'size': (2, 2, 2),
                    'data_format': data_format},
            input_shape=inputs.shape)

        # Exact-value check: each spatial dim must be repeated by its factor.
        for length_dim1 in [2, 3]:
          for length_dim2 in [2]:
            for length_dim3 in [3]:
              layer = keras.layers.UpSampling3D(
                  size=(length_dim1, length_dim2, length_dim3),
                  data_format=data_format)
              layer.build(inputs.shape)
              output = layer(keras.backend.variable(inputs))
              # Eager tensors expose .numpy(); graph tensors need eval().
              if tf.executing_eagerly():
                np_output = output.numpy()
              else:
                np_output = keras.backend.eval(output)
              if data_format == 'channels_first':
                assert np_output.shape[2] == length_dim1 * input_len_dim1
                assert np_output.shape[3] == length_dim2 * input_len_dim2
                assert np_output.shape[4] == length_dim3 * input_len_dim3
              else:  # tf
                assert np_output.shape[1] == length_dim1 * input_len_dim1
                assert np_output.shape[2] == length_dim2 * input_len_dim2
                assert np_output.shape[3] == length_dim3 * input_len_dim3

              # compare with numpy
              if data_format == 'channels_first':
                expected_out = np.repeat(inputs, length_dim1, axis=2)
                expected_out = np.repeat(expected_out, length_dim2, axis=3)
                expected_out = np.repeat(expected_out, length_dim3, axis=4)
              else:  # tf
                expected_out = np.repeat(inputs, length_dim1, axis=1)
                expected_out = np.repeat(expected_out, length_dim2, axis=2)
                expected_out = np.repeat(expected_out, length_dim3, axis=3)

              np.testing.assert_allclose(np_output, expected_out)
Beispiel #24
0
  def _run_test(self, kwargs, expected_output_shape=None):
    """Runs DepthwiseConv1D through layer_test on a fixed (2, 7, 3) input."""
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.DepthwiseConv1D,
          kwargs=kwargs,
          input_shape=(2, 7, 3),
          expected_output_shape=expected_output_shape)
Beispiel #25
0
  def _run_test(self, kwargs, expected_output_shape):
    """Runs Conv1DTranspose through layer_test on a fixed (2, 6, 3) input."""
    with testing_utils.use_gpu():
      testing_utils.layer_test(
          keras.layers.Conv1DTranspose,
          kwargs=kwargs,
          input_shape=(2, 6, 3),
          expected_output_shape=expected_output_shape)
Beispiel #26
0
  def _run_test(self, kwargs, expected_output_shape):
    """Runs Conv1D through layer_test on a fixed (2, 7, 3) input."""
    with self.cached_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.Conv1D,
          kwargs=kwargs,
          input_shape=(2, 7, 3),
          expected_output_shape=expected_output_shape)
Beispiel #27
0
 def test_dropout_LSTM(self):
   """LSTM with both input and recurrent dropout enabled."""
   # 2 samples, 3 timesteps, embedding dim 4, 2 units.
   testing_utils.layer_test(
       keras.layers.LSTM,
       kwargs={'units': 2, 'dropout': 0.1, 'recurrent_dropout': 0.1},
       input_shape=(2, 3, 4))
Beispiel #28
0
  def test_flatten_scalar_channels(self):
    """Flatten on rank-1 batches, including the channels_first path."""
    testing_utils.layer_test(keras.layers.Flatten, kwargs={}, input_shape=(3,))

    # Test channels_first: a scalar feature gains a trailing axis.
    data = np.random.random((10,)).astype('float32')
    result = testing_utils.layer_test(
        keras.layers.Flatten,
        kwargs={'data_format': 'channels_first'},
        input_data=data)
    self.assertAllClose(result, np.expand_dims(data, -1))
  def _run_test(self, kwargs):
    """Runs Conv2DTranspose through layer_test on a fixed (2, 7, 6, 3) input."""
    with self.cached_session():
      testing_utils.layer_test(
          keras.layers.Conv2DTranspose,
          kwargs=kwargs,
          input_shape=(2, 7, 6, 3))
Beispiel #30
0
    def _run_test(self, kwargs):
        """Runs SeparableConv2D through layer_test on a fixed (2, 7, 6, 3) input."""
        with self.cached_session(use_gpu=True):
            testing_utils.layer_test(keras.layers.SeparableConv2D,
                                     kwargs=kwargs,
                                     input_shape=(2, 7, 6, 3))