def _testKerasLayer(self, layer_class):
  def kernel_posterior_fn(dtype, shape, name, trainable, add_variable_fn):
    """Set trivially. The function is required to instantiate layer."""
    del name, trainable, add_variable_fn  # unused
    # Deserialized Keras objects do not perform lexical scoping. Any modules
    # that the function requires must be imported within the function.
    import tensorflow as tf  # pylint: disable=g-import-not-at-top,redefined-outer-name
    tfd = tf.contrib.distributions  # pylint: disable=redefined-outer-name

    dist = tfd.Normal(loc=tf.zeros(shape, dtype), scale=tf.ones(shape, dtype))
    batch_ndims = tf.size(dist.batch_shape_tensor())
    return tfd.Independent(dist, reinterpreted_batch_ndims=batch_ndims)

  kwargs = {'units': 3,
            'kernel_posterior_fn': kernel_posterior_fn,
            'kernel_prior_fn': None,
            'bias_posterior_fn': None,
            'bias_prior_fn': None}
  with tf.keras.utils.CustomObjectScope({layer_class.__name__: layer_class}):
    with self.test_session():
      testing_utils.layer_test(
          layer_class, kwargs=kwargs, input_shape=(3, 2))
      testing_utils.layer_test(
          layer_class, kwargs=kwargs, input_shape=(None, None, 2))
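# A minimal usage sketch for the _testKerasLayer helper above; the layer class
# exercised here (tfp.layers.DenseReparameterization) and its import are
# assumptions about the surrounding test suite, not taken from this file.
def test_keras_layer_dense_reparameterization(self):
  import tensorflow_probability as tfp  # assumed dependency
  self._testKerasLayer(tfp.layers.DenseReparameterization)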
def test_locallyconnected_2d(self):
  with self.cached_session():
    num_samples = 8
    filters = 3
    stack_size = 4
    num_row = 6
    num_col = 10

    for padding in ['valid', 'same']:
      for strides in [(1, 1), (2, 2)]:
        for implementation in [1, 2]:
          if padding == 'same' and strides != (1, 1):
            continue

          kwargs = {
              'filters': filters,
              'kernel_size': 3,
              'padding': padding,
              'kernel_regularizer': 'l2',
              'bias_regularizer': 'l2',
              'strides': strides,
              'data_format': 'channels_last',
              'implementation': implementation
          }

          if padding == 'same' and implementation == 1:
            self.assertRaises(ValueError,
                              keras.layers.LocallyConnected2D,
                              **kwargs)
          else:
            testing_utils.layer_test(
                keras.layers.LocallyConnected2D,
                kwargs=kwargs,
                input_shape=(num_samples, num_row, num_col, stack_size))
def test_basic_batchnorm(self):
  testing_utils.layer_test(
      keras.layers.BatchNormalization,
      kwargs={
          'momentum': 0.9,
          'epsilon': 0.1,
          'gamma_regularizer': keras.regularizers.l2(0.01),
          'beta_regularizer': keras.regularizers.l2(0.01)
      },
      input_shape=(3, 4, 2))
  testing_utils.layer_test(
      keras.layers.BatchNormalization,
      kwargs={
          'gamma_initializer': 'ones',
          'beta_initializer': 'ones',
          'moving_mean_initializer': 'zeros',
          'moving_variance_initializer': 'ones'
      },
      input_shape=(3, 4, 2))
  testing_utils.layer_test(
      keras.layers.BatchNormalization,
      kwargs={'scale': False, 'center': False},
      input_shape=(3, 3))
  testing_utils.layer_test(
      normalization.BatchNormalizationV2,
      kwargs={'fused': True},
      input_shape=(3, 3, 3, 3))
  testing_utils.layer_test(
      normalization.BatchNormalizationV2,
      kwargs={'fused': None},
      input_shape=(3, 3, 3))
def test_locallyconnected_1d(self):
  with self.cached_session():
    num_samples = 2
    num_steps = 8
    input_dim = 5
    filter_length = 3
    filters = 4

    for padding in ['valid', 'same']:
      for strides in [1]:
        if padding == 'same' and strides != 1:
          continue
        for data_format in ['channels_first', 'channels_last']:
          for implementation in [1, 2]:
            kwargs = {
                'filters': filters,
                'kernel_size': filter_length,
                'padding': padding,
                'strides': strides,
                'data_format': data_format,
                'implementation': implementation
            }

            if padding == 'same' and implementation == 1:
              self.assertRaises(ValueError,
                                keras.layers.LocallyConnected1D,
                                **kwargs)
            else:
              testing_utils.layer_test(
                  keras.layers.LocallyConnected1D,
                  kwargs=kwargs,
                  input_shape=(num_samples, num_steps, input_dim))
def test_locallyconnected_2d_channels_first(self):
  with self.cached_session():
    num_samples = 8
    filters = 3
    stack_size = 4
    num_row = 6
    num_col = 10

    for implementation in [1, 2]:
      for padding in ['valid', 'same']:
        kwargs = {
            'filters': filters,
            'kernel_size': 3,
            'data_format': 'channels_first',
            'implementation': implementation,
            'padding': padding
        }

        if padding == 'same' and implementation == 1:
          self.assertRaises(ValueError,
                            keras.layers.LocallyConnected2D,
                            **kwargs)
        else:
          testing_utils.layer_test(
              keras.layers.LocallyConnected2D,
              kwargs=kwargs,
              input_shape=(num_samples, num_row, num_col, stack_size))
def test_lambda(self):
  testing_utils.layer_test(
      keras.layers.Lambda,
      kwargs={'function': lambda x: x + 1},
      input_shape=(3, 2))

  testing_utils.layer_test(
      keras.layers.Lambda,
      kwargs={
          'function': lambda x, a, b: x * a + b,
          'arguments': {
              'a': 0.6,
              'b': 0.4
          }
      },
      input_shape=(3, 2))

  # test serialization with function
  def f(x):
    return x + 1

  ld = keras.layers.Lambda(f)
  config = ld.get_config()
  ld = keras.layers.deserialize({
      'class_name': 'Lambda',
      'config': config
  })

  # test with lambda
  ld = keras.layers.Lambda(
      lambda x: keras.backend.concatenate([math_ops.square(x), x]))
  config = ld.get_config()
  ld = keras.layers.Lambda.from_config(config)
def test_spatial_dropout(self):
  testing_utils.layer_test(
      keras.layers.SpatialDropout1D,
      kwargs={'rate': 0.5},
      input_shape=(2, 3, 4))

  testing_utils.layer_test(
      keras.layers.SpatialDropout2D,
      kwargs={'rate': 0.5},
      input_shape=(2, 3, 4, 5))

  testing_utils.layer_test(
      keras.layers.SpatialDropout2D,
      kwargs={'rate': 0.5, 'data_format': 'channels_first'},
      input_shape=(2, 3, 4, 5))

  testing_utils.layer_test(
      keras.layers.SpatialDropout3D,
      kwargs={'rate': 0.5},
      input_shape=(2, 3, 4, 4, 5))

  testing_utils.layer_test(
      keras.layers.SpatialDropout3D,
      kwargs={'rate': 0.5, 'data_format': 'channels_first'},
      input_shape=(2, 3, 4, 4, 5))
def test_upsampling_2d_bilinear(self):
  num_samples = 2
  stack_size = 2
  input_num_row = 11
  input_num_col = 12
  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, stack_size, input_num_row,
                              input_num_col)
    else:
      inputs = np.random.rand(num_samples, input_num_row, input_num_col,
                              stack_size)

    testing_utils.layer_test(keras.layers.UpSampling2D,
                             kwargs={'size': (2, 2),
                                     'data_format': data_format,
                                     'interpolation': 'bilinear'},
                             input_shape=inputs.shape)

    if not context.executing_eagerly():
      for length_row in [2]:
        for length_col in [2, 3]:
          layer = keras.layers.UpSampling2D(
              size=(length_row, length_col),
              data_format=data_format)
          layer.build(inputs.shape)
          outputs = layer(keras.backend.variable(inputs))
          np_output = keras.backend.eval(outputs)
          if data_format == 'channels_first':
            self.assertEqual(np_output.shape[2], length_row * input_num_row)
            self.assertEqual(np_output.shape[3], length_col * input_num_col)
          else:
            self.assertEqual(np_output.shape[1], length_row * input_num_row)
            self.assertEqual(np_output.shape[2], length_col * input_num_col)
def test_relu_with_invalid_arg(self):
  with self.assertRaisesRegexp(
      ValueError, 'max_value of Relu layer cannot be negative value: -10'):
    with self.test_session():
      testing_utils.layer_test(keras.layers.ReLU,
                               kwargs={'max_value': -10},
                               input_shape=(2, 3, 4))
def test_cudnn_rnn_basics(self):
  if test.is_gpu_available(cuda_only=True):
    with self.test_session(use_gpu=True):
      input_size = 10
      timesteps = 6
      units = 2
      num_samples = 32
      for layer_class in [keras.layers.CuDNNGRU, keras.layers.CuDNNLSTM]:
        for return_sequences in [True, False]:
          with keras.utils.CustomObjectScope(
              {'keras.layers.CuDNNGRU': keras.layers.CuDNNGRU,
               'keras.layers.CuDNNLSTM': keras.layers.CuDNNLSTM}):
            testing_utils.layer_test(
                layer_class,
                kwargs={'units': units,
                        'return_sequences': return_sequences},
                input_shape=(num_samples, timesteps, input_size))
        for go_backwards in [True, False]:
          with keras.utils.CustomObjectScope(
              {'keras.layers.CuDNNGRU': keras.layers.CuDNNGRU,
               'keras.layers.CuDNNLSTM': keras.layers.CuDNNLSTM}):
            testing_utils.layer_test(
                layer_class,
                kwargs={'units': units,
                        'go_backwards': go_backwards},
                input_shape=(num_samples, timesteps, input_size))
def test_averagepooling_1d(self):
  for padding in ['valid', 'same']:
    for stride in [1, 2]:
      testing_utils.layer_test(
          keras.layers.AveragePooling1D,
          kwargs={'strides': stride, 'padding': padding},
          input_shape=(3, 5, 4))
def test_dropout(self):
  testing_utils.layer_test(
      keras.layers.Dropout,
      kwargs={'rate': 0.5},
      input_shape=(3, 2))

  testing_utils.layer_test(
      keras.layers.Dropout,
      kwargs={'rate': 0.5, 'noise_shape': [3, 1]},
      input_shape=(3, 2))
def test_basic_batchnorm_v2(self):
  testing_utils.layer_test(
      normalization.BatchNormalizationV2,
      kwargs={'fused': True},
      input_shape=(3, 3, 3, 3))
  testing_utils.layer_test(
      normalization.BatchNormalizationV2,
      kwargs={'fused': None},
      input_shape=(3, 3, 3))
def test_cudnn_rnn_go_backward(self, layer_class, go_backwards):
  input_size = 10
  timesteps = 6
  units = 2
  num_samples = 32
  testing_utils.layer_test(
      layer_class,
      kwargs={'units': units, 'go_backwards': go_backwards},
      input_shape=(num_samples, timesteps, input_size))
def test_implementation_mode_GRU(self, implementation_mode):
  num_samples = 2
  timesteps = 3
  embedding_dim = 4
  units = 2
  testing_utils.layer_test(
      keras.layers.UnifiedGRU,
      kwargs={'units': units, 'implementation': implementation_mode},
      input_shape=(num_samples, timesteps, embedding_dim))
def test_return_sequences_GRU(self):
  num_samples = 2
  timesteps = 3
  embedding_dim = 4
  units = 2
  testing_utils.layer_test(
      keras.layers.GRU,
      kwargs={'units': units, 'return_sequences': True},
      input_shape=(num_samples, timesteps, embedding_dim))
def test_cudnn_rnn_return_sequence(self, layer_class, return_sequences):
  input_size = 10
  timesteps = 6
  units = 2
  num_samples = 32
  testing_utils.layer_test(
      layer_class,
      kwargs={'units': units, 'return_sequences': return_sequences},
      input_shape=(num_samples, timesteps, input_size))
def _run_test(self, kwargs):
  num_samples = 2
  stack_size = 3
  length = 7

  with self.cached_session(use_gpu=True):
    testing_utils.layer_test(
        keras.layers.Conv1D,
        kwargs=kwargs,
        input_shape=(num_samples, length, stack_size))
def test_upsampling_3d(self):
  num_samples = 2
  stack_size = 2
  input_len_dim1 = 10
  input_len_dim2 = 11
  input_len_dim3 = 12

  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                              input_len_dim2, input_len_dim3)
    else:
      inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                              input_len_dim3, stack_size)

    # basic test
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.UpSampling3D,
          kwargs={'size': (2, 2, 2), 'data_format': data_format},
          input_shape=inputs.shape)

      for length_dim1 in [2, 3]:
        for length_dim2 in [2]:
          for length_dim3 in [3]:
            layer = keras.layers.UpSampling3D(
                size=(length_dim1, length_dim2, length_dim3),
                data_format=data_format)
            layer.build(inputs.shape)
            output = layer(keras.backend.variable(inputs))
            if context.executing_eagerly():
              np_output = output.numpy()
            else:
              np_output = keras.backend.eval(output)
            if data_format == 'channels_first':
              assert np_output.shape[2] == length_dim1 * input_len_dim1
              assert np_output.shape[3] == length_dim2 * input_len_dim2
              assert np_output.shape[4] == length_dim3 * input_len_dim3
            else:  # tf
              assert np_output.shape[1] == length_dim1 * input_len_dim1
              assert np_output.shape[2] == length_dim2 * input_len_dim2
              assert np_output.shape[3] == length_dim3 * input_len_dim3

            # compare with numpy
            if data_format == 'channels_first':
              expected_out = np.repeat(inputs, length_dim1, axis=2)
              expected_out = np.repeat(expected_out, length_dim2, axis=3)
              expected_out = np.repeat(expected_out, length_dim3, axis=4)
            else:  # tf
              expected_out = np.repeat(inputs, length_dim1, axis=1)
              expected_out = np.repeat(expected_out, length_dim2, axis=2)
              expected_out = np.repeat(expected_out, length_dim3, axis=3)

            np.testing.assert_allclose(np_output, expected_out)
def _run_test(self, kwargs):
  num_samples = 2
  stack_size = 3
  num_row = 7
  num_col = 6

  with self.cached_session(use_gpu=True):
    testing_utils.layer_test(
        keras.layers.Conv2D,
        kwargs=kwargs,
        input_shape=(num_samples, num_row, num_col, stack_size))
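# A minimal sketch of how the Conv2D _run_test helper above could be driven;
# the specific kwargs here are illustrative assumptions, not the parameterized
# cases used by the actual suite.
def test_conv2d_sketch(self):
  self._run_test(kwargs={'filters': 2, 'kernel_size': (3, 3),
                         'padding': 'same', 'strides': (2, 2)})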
def test_maxpooling_2d(self):
  pool_size = (3, 3)
  for strides in [(1, 1), (2, 2)]:
    testing_utils.layer_test(
        keras.layers.MaxPooling2D,
        kwargs={
            'strides': strides,
            'padding': 'valid',
            'pool_size': pool_size
        },
        input_shape=(3, 5, 6, 4))
def test_dropout_GRU(self):
  num_samples = 2
  timesteps = 3
  embedding_dim = 4
  units = 2
  testing_utils.layer_test(
      keras.layers.GRU,
      kwargs={'units': units,
              'dropout': 0.1,
              'recurrent_dropout': 0.1},
      input_shape=(num_samples, timesteps, embedding_dim))
def test_implementation_mode_GRU(self):
  num_samples = 2
  timesteps = 3
  embedding_dim = 4
  units = 2
  for mode in [0, 1, 2]:
    testing_utils.layer_test(
        keras.layers.GRU,
        kwargs={'units': units, 'implementation': mode},
        input_shape=(num_samples, timesteps, embedding_dim))
def test_activation(self):
  # with string argument
  testing_utils.layer_test(
      keras.layers.Activation,
      kwargs={'activation': 'relu'},
      input_shape=(3, 2))

  # with function argument
  testing_utils.layer_test(
      keras.layers.Activation,
      kwargs={'activation': keras.backend.relu},
      input_shape=(3, 2))
def test_maxpooling_1d(self):
  for padding in ['valid', 'same']:
    for stride in [1, 2]:
      testing_utils.layer_test(
          keras.layers.MaxPooling1D,
          kwargs={'strides': stride, 'padding': padding},
          input_shape=(3, 5, 4))
  testing_utils.layer_test(
      keras.layers.MaxPooling1D,
      kwargs={'data_format': 'channels_first'},
      input_shape=(3, 2, 6))
def DISABLED_test_return_sequences_LSTM(self):
  num_samples = 2
  timesteps = 3
  embedding_dim = 4
  units = 2
  testing_utils.layer_test(
      rnn.LSTM,
      kwargs={
          'units': units,
          'return_sequences': True
      },
      input_shape=(num_samples, timesteps, embedding_dim))
def test_cropping_3d(self):
  num_samples = 2
  stack_size = 2
  input_len_dim1 = 8
  input_len_dim2 = 8
  input_len_dim3 = 8
  croppings = [((2, 2), (1, 1), (2, 3)), 3, (0, 1, 1)]

  for cropping in croppings:
    for data_format in ['channels_last', 'channels_first']:
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                                input_len_dim2, input_len_dim3)
      else:
        inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                                input_len_dim3, stack_size)
      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.Cropping3D,
            kwargs={'cropping': cropping, 'data_format': data_format},
            input_shape=inputs.shape)

      if len(croppings) == 3 and len(croppings[0]) == 2:
        # correctness test
        with self.test_session(use_gpu=True):
          layer = keras.layers.Cropping3D(
              cropping=cropping, data_format=data_format)
          layer.build(inputs.shape)
          output = layer(keras.backend.variable(inputs))
          if context.executing_eagerly():
            np_output = output.numpy()
          else:
            np_output = keras.backend.eval(output)
          # compare with numpy
          if data_format == 'channels_first':
            expected_out = inputs[:, :,
                                  cropping[0][0]:-cropping[0][1],
                                  cropping[1][0]:-cropping[1][1],
                                  cropping[2][0]:-cropping[2][1]]
          else:
            expected_out = inputs[:,
                                  cropping[0][0]:-cropping[0][1],
                                  cropping[1][0]:-cropping[1][1],
                                  cropping[2][0]:-cropping[2][1], :]
          np.testing.assert_allclose(np_output, expected_out)

  # test incorrect use
  with self.assertRaises(ValueError):
    keras.layers.Cropping3D(cropping=(1, 1))
  with self.assertRaises(ValueError):
    keras.layers.Cropping3D(cropping=None)
def test_flatten_scalar_channels(self):
  testing_utils.layer_test(keras.layers.Flatten, kwargs={}, input_shape=(3,))

  # Test channels_first
  inputs = np.random.random((10,)).astype('float32')
  outputs = testing_utils.layer_test(
      keras.layers.Flatten,
      kwargs={'data_format': 'channels_first'},
      input_data=inputs)
  target_outputs = np.expand_dims(inputs, -1)
  self.assertAllClose(outputs, target_outputs)
def test_flatten(self):
  testing_utils.layer_test(
      keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))

  # Test channels_first
  inputs = np.random.random((10, 3, 5, 5)).astype('float32')
  outputs = testing_utils.layer_test(
      keras.layers.Flatten,
      kwargs={'data_format': 'channels_first'},
      input_data=inputs)
  target_outputs = np.reshape(
      np.transpose(inputs, (0, 2, 3, 1)), (-1, 5 * 5 * 3))
  self.assertAllClose(outputs, target_outputs)
def _run_test(self, kwargs, arg, values):
  num_samples = 2
  stack_size = 3
  length = 7

  test_kwargs = copy.copy(kwargs)
  for value in values:
    test_kwargs[arg] = value
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.Conv1D,
          kwargs=test_kwargs,
          input_shape=(num_samples, length, stack_size))
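# A minimal sketch of how the Conv1D _run_test(kwargs, arg, values) helper
# above might be driven; the argument sweep shown is an illustrative
# assumption rather than the suite's real parameterization.
def test_conv1d_dilation_sketch(self):
  self._run_test(
      kwargs={'filters': 2, 'kernel_size': 3, 'padding': 'causal'},
      arg='dilation_rate',
      values=[1, 2])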
def test_spatial_dropout_1d(self):
  testing_utils.layer_test(
      keras.layers.SpatialDropout1D,
      kwargs={'rate': 0.5},
      input_shape=(2, 3, 4))
def test_softmax(self):
  with self.test_session():
    testing_utils.layer_test(keras.layers.Softmax,
                             kwargs={'axis': 1},
                             input_shape=(2, 3, 4))
def test_permute(self):
  testing_utils.layer_test(
      keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))
def test_permute_errors_on_invalid_set_of_dims_indices(self):
  with self.assertRaisesRegexp(ValueError, r'Invalid permutation .*dims.*'):
    testing_utils.layer_test(
        keras.layers.Permute, kwargs={'dims': (1, 4, 2)}, input_shape=(3, 2, 4))
def test_GaussianNoise(self):
  testing_utils.layer_test(
      keras.layers.GaussianNoise,
      kwargs={'stddev': 1.},
      input_shape=(3, 2, 3))
def test_output(self):
  batch_size, dim, output_dim = (3, 4, 2)
  testing_utils.layer_test(
      example.LinearBlockFull,
      kwargs={'units': output_dim},
      input_data=np.ones((batch_size, dim)),
      expected_output_dtype='float32')
def test_elu(self):
  for alpha in [0., .5, -1.]:
    testing_utils.layer_test(
        keras.layers.ELU,
        kwargs={'alpha': alpha},
        input_shape=(2, 3, 4),
        supports_masking=True)
def test_repeat_vector(self):
  testing_utils.layer_test(
      keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))
def test_global_attention_layer():
  testing_utils.layer_test(
      GlobalAttentionLayer, kwargs={}, input_shape=(1, 3, 4))
def test_masking(self):
  with self.cached_session():
    testing_utils.layer_test(
        keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
def test_relu(self):
  with self.test_session():
    testing_utils.layer_test(keras.layers.ReLU,
                             kwargs={'max_value': 10},
                             input_shape=(2, 3, 4))
def test_rescaling_base(self):
  kwargs = {'scale': 0.004}
  testing_utils.layer_test(
      image_preprocessing.Rescaling,
      kwargs=kwargs,
      input_shape=(2, 5, 6, 3),
      expected_output_shape=(None, 5, 6, 3))
def test_upsampling_3d(self):
  num_samples = 2
  stack_size = 2
  input_len_dim1 = 10
  input_len_dim2 = 11
  input_len_dim3 = 12

  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                              input_len_dim2, input_len_dim3)
    else:
      inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                              input_len_dim3, stack_size)

    # basic test
    with self.cached_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.UpSampling3D,
          kwargs={'size': (2, 2, 2), 'data_format': data_format},
          input_shape=inputs.shape)

      for length_dim1 in [2, 3]:
        for length_dim2 in [2]:
          for length_dim3 in [3]:
            layer = keras.layers.UpSampling3D(
                size=(length_dim1, length_dim2, length_dim3),
                data_format=data_format)
            layer.build(inputs.shape)
            output = layer(keras.backend.variable(inputs))
            if context.executing_eagerly():
              np_output = output.numpy()
            else:
              np_output = keras.backend.eval(output)
            if data_format == 'channels_first':
              assert np_output.shape[2] == length_dim1 * input_len_dim1
              assert np_output.shape[3] == length_dim2 * input_len_dim2
              assert np_output.shape[4] == length_dim3 * input_len_dim3
            else:  # tf
              assert np_output.shape[1] == length_dim1 * input_len_dim1
              assert np_output.shape[2] == length_dim2 * input_len_dim2
              assert np_output.shape[3] == length_dim3 * input_len_dim3

            # compare with numpy
            if data_format == 'channels_first':
              expected_out = np.repeat(inputs, length_dim1, axis=2)
              expected_out = np.repeat(expected_out, length_dim2, axis=3)
              expected_out = np.repeat(expected_out, length_dim3, axis=4)
            else:  # tf
              expected_out = np.repeat(inputs, length_dim1, axis=1)
              expected_out = np.repeat(expected_out, length_dim2, axis=2)
              expected_out = np.repeat(expected_out, length_dim3, axis=3)

            np.testing.assert_allclose(np_output, expected_out)
def test_masking(self):
  testing_utils.layer_test(
      keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
def test_thresholded_relu(self):
  testing_utils.layer_test(
      keras.layers.ThresholdedReLU,
      kwargs={'theta': 0.5},
      input_shape=(2, 3, 4),
      supports_masking=True)
def test_GaussianDropout(self):
  testing_utils.layer_test(
      keras.layers.GaussianDropout,
      kwargs={'rate': 0.5},
      input_shape=(3, 2, 3))
def test_prelu_share(self):
  testing_utils.layer_test(
      keras.layers.PReLU,
      kwargs={'shared_axes': 1},
      input_shape=(2, 3, 4),
      supports_masking=True)
def test_basic(self):
  testing_utils.layer_test(example.LinearBlockFull, input_shape=(4, 32))
def test_simple(self):
  testing_utils.layer_test(
      Maxout, kwargs={'num_units': 3}, input_shape=(5, 4, 2, 18))
def test_zero_padding_2d(self):
  num_samples = 2
  stack_size = 2
  input_num_row = 4
  input_num_col = 5
  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.ones((num_samples, stack_size, input_num_row, input_num_col))
    else:
      inputs = np.ones((num_samples, input_num_row, input_num_col, stack_size))

    # basic test
    with self.cached_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.ZeroPadding2D,
          kwargs={'padding': (2, 2), 'data_format': data_format},
          input_shape=inputs.shape)
      testing_utils.layer_test(
          keras.layers.ZeroPadding2D,
          kwargs={'padding': ((1, 2), (3, 4)), 'data_format': data_format},
          input_shape=inputs.shape)

    # correctness test
    with self.cached_session(use_gpu=True):
      layer = keras.layers.ZeroPadding2D(
          padding=(2, 2), data_format=data_format)
      layer.build(inputs.shape)
      output = layer(keras.backend.variable(inputs))
      if context.executing_eagerly():
        np_output = output.numpy()
      else:
        np_output = keras.backend.eval(output)
      if data_format == 'channels_last':
        for offset in [0, 1, -1, -2]:
          np.testing.assert_allclose(np_output[:, offset, :, :], 0.)
          np.testing.assert_allclose(np_output[:, :, offset, :], 0.)
        np.testing.assert_allclose(np_output[:, 2:-2, 2:-2, :], 1.)
      elif data_format == 'channels_first':
        for offset in [0, 1, -1, -2]:
          np.testing.assert_allclose(np_output[:, :, offset, :], 0.)
          np.testing.assert_allclose(np_output[:, :, :, offset], 0.)
        np.testing.assert_allclose(np_output[:, :, 2:-2, 2:-2], 1.)

      layer = keras.layers.ZeroPadding2D(
          padding=((1, 2), (3, 4)), data_format=data_format)
      layer.build(inputs.shape)
      output = layer(keras.backend.variable(inputs))
      if context.executing_eagerly():
        np_output = output.numpy()
      else:
        np_output = keras.backend.eval(output)
      if data_format == 'channels_last':
        for top_offset in [0]:
          np.testing.assert_allclose(np_output[:, top_offset, :, :], 0.)
        for bottom_offset in [-1, -2]:
          np.testing.assert_allclose(np_output[:, bottom_offset, :, :], 0.)
        for left_offset in [0, 1, 2]:
          np.testing.assert_allclose(np_output[:, :, left_offset, :], 0.)
        for right_offset in [-1, -2, -3, -4]:
          np.testing.assert_allclose(np_output[:, :, right_offset, :], 0.)
        np.testing.assert_allclose(np_output[:, 1:-2, 3:-4, :], 1.)
      elif data_format == 'channels_first':
        for top_offset in [0]:
          np.testing.assert_allclose(np_output[:, :, top_offset, :], 0.)
        for bottom_offset in [-1, -2]:
          np.testing.assert_allclose(np_output[:, :, bottom_offset, :], 0.)
        for left_offset in [0, 1, 2]:
          np.testing.assert_allclose(np_output[:, :, :, left_offset], 0.)
        for right_offset in [-1, -2, -3, -4]:
          np.testing.assert_allclose(np_output[:, :, :, right_offset], 0.)
        np.testing.assert_allclose(np_output[:, :, 1:-2, 3:-4], 1.)

  # test incorrect use
  with self.assertRaises(ValueError):
    keras.layers.ZeroPadding2D(padding=(1, 1, 1))
  with self.assertRaises(ValueError):
    keras.layers.ZeroPadding2D(padding=None)
def test_AlphaDropout(self):
  testing_utils.layer_test(
      keras.layers.AlphaDropout,
      kwargs={'rate': 0.2},
      input_shape=(3, 2, 3))
def test_elu(self):
  with self.test_session():
    for alpha in [0., .5, -1.]:
      testing_utils.layer_test(keras.layers.ELU,
                               kwargs={'alpha': alpha},
                               input_shape=(2, 3, 4))
def test_thresholded_relu(self):
  with self.test_session():
    testing_utils.layer_test(keras.layers.ThresholdedReLU,
                             kwargs={'theta': 0.5},
                             input_shape=(2, 3, 4))
def test_invalid_shape(self):
  with self.assertRaisesRegexp(ValueError, r'number of features'):
    testing_utils.layer_test(
        Maxout, kwargs={'num_units': 3}, input_shape=(5, 4, 2, 7))
def test_tensorproduct(self):
  custom_objects = {'TensorProduct': layers.TensorProduct}
  with tf.keras.utils.custom_object_scope(custom_objects):
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 3},
        input_shape=(3, 2))
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 3},
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 3},
        input_shape=(None, None, 2))
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 3},
        input_shape=(3, 4, 5, 2))
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 3, 'data_format': 'channels_first'},
        input_shape=(3, 2, 4, 5))

    # test no bias
    testing_utils.layer_test(
        layers.TensorProduct,
        kwargs={'output_dim': 2, 'use_bias': False},
        input_shape=(3, 5, 6, 4))

    # test bad input channel
    with self.assertRaises(ValueError):
      testing_utils.layer_test(
          layers.TensorProduct,
          kwargs={'output_dim': 3},
          input_shape=(3, 5, 6, None))
def test_cropping_2d(self):
  num_samples = 2
  stack_size = 2
  input_len_dim1 = 9
  input_len_dim2 = 9
  cropping = ((2, 2), (3, 3))

  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                              input_len_dim2)
    else:
      inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                              stack_size)
    with self.cached_session(use_gpu=True):
      # basic test
      testing_utils.layer_test(
          keras.layers.Cropping2D,
          kwargs={'cropping': cropping, 'data_format': data_format},
          input_shape=inputs.shape)
      # correctness test
      layer = keras.layers.Cropping2D(
          cropping=cropping, data_format=data_format)
      layer.build(inputs.shape)
      output = layer(keras.backend.variable(inputs))
      if context.executing_eagerly():
        np_output = output.numpy()
      else:
        np_output = keras.backend.eval(output)
      # compare with numpy
      if data_format == 'channels_first':
        expected_out = inputs[:, :,
                              cropping[0][0]:-cropping[0][1],
                              cropping[1][0]:-cropping[1][1]]
      else:
        expected_out = inputs[:,
                              cropping[0][0]:-cropping[0][1],
                              cropping[1][0]:-cropping[1][1], :]
      np.testing.assert_allclose(np_output, expected_out)

  for data_format in ['channels_first', 'channels_last']:
    if data_format == 'channels_first':
      inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                              input_len_dim2)
    else:
      inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                              stack_size)
    # another correctness test (no cropping)
    with self.cached_session(use_gpu=True):
      cropping = ((0, 0), (0, 0))
      layer = keras.layers.Cropping2D(
          cropping=cropping, data_format=data_format)
      layer.build(inputs.shape)
      output = layer(keras.backend.variable(inputs))
      if context.executing_eagerly():
        np_output = output.numpy()
      else:
        np_output = keras.backend.eval(output)
      # compare with input
      np.testing.assert_allclose(np_output, inputs)

  # test incorrect use
  with self.assertRaises(ValueError):
    keras.layers.Cropping2D(cropping=(1, 1, 1))
  with self.assertRaises(ValueError):
    keras.layers.Cropping2D(cropping=None)
def test_softmax(self):
  testing_utils.layer_test(
      keras.layers.Softmax,
      kwargs={'axis': 1},
      input_shape=(2, 3, 4),
      supports_masking=True)
def test_prelu_share(self):
  with self.test_session():
    testing_utils.layer_test(keras.layers.PReLU,
                             kwargs={'shared_axes': 1},
                             input_shape=(2, 3, 4))
def test_upsampling_1d(self):
  with self.session(use_gpu=True):
    testing_utils.layer_test(
        keras.layers.UpSampling1D, kwargs={'size': 2}, input_shape=(3, 5, 4))
def test_basic(self): """Test layer creation.""" testing_utils.layer_test((model.ReverseComplement), kwargs={'complements': [3, 2, 1, 0, 4]}, input_shape=(1, 10, 5))