def test_calling_with_unsupported_predefined_callbacks(
    self, distribution, cloning):
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics, cloning=cloning)

    dataset = keras_test_lib.get_dataset(distribution)

    def schedule(_):
      return 0.001

    with self.assertRaisesRegexp(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.LearningRateScheduler(schedule)])

    with self.assertRaisesRegexp(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.ReduceLROnPlateau()])
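
# For reference, the two failures above are triggered by the V1
# `GradientDescentOptimizer`; compiling with a Keras V2 optimizer avoids them.
# A hedged sketch only (`keras.optimizers.SGD` is not what this test compiles
# with):
#
#   model.compile(
#       keras.optimizers.SGD(0.001), loss, metrics=metrics, cloning=cloning)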
def test_unsupported_features(self, distribution, cloning):
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics, cloning=cloning)

    dataset = keras_test_lib.get_dataset(distribution)

    # Test with validation split.
    with self.assertRaisesRegexp(
        ValueError, '`validation_split` argument is not '
        'supported when input `x` is a dataset or a '
        'dataset iterator.+'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          validation_split=0.5,
          validation_steps=2)

    # Test with sample weight.
    sample_weight = np.random.random((10,))
    with self.assertRaisesRegexp(
        ValueError,
        '`sample_weight` argument is not supported when input '
        '`x` is a dataset or a dataset iterator.'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          sample_weight=sample_weight)

    # Test without specifying the `steps` argument for a dataset of infinite
    # cardinality.
    dataset = dataset.repeat()
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps_per_epoch` argument'):
      model.fit(dataset, epochs=1, verbose=0)
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.evaluate(dataset, verbose=0)
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.predict(dataset, verbose=0)
def test_callbacks_in_fit(self, distribution, cloning):
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(
        optimizer='sgd', loss='mse', metrics=['mae'], cloning=cloning)

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()

  epochs = 2
  steps_per_epoch = 5
  validation_steps = 3

  model.fit(
      dataset,
      epochs=epochs,
      steps_per_epoch=steps_per_epoch,
      verbose=0,
      validation_data=dataset,
      validation_steps=validation_steps,
      callbacks=[counter])

  if isinstance(distribution,
                (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)):
    # TPU Strategy can have multi-step training via `extended.steps_per_run`.
    # If steps_per_run == 1, then num_batch_call_per_epoch == steps_per_epoch.
    steps_per_run = distribution.extended.steps_per_run
    num_batch_call_per_epoch = steps_per_epoch // steps_per_run
    if steps_per_epoch % steps_per_run:
      num_batch_call_per_epoch += 1
  else:
    num_batch_call_per_epoch = steps_per_epoch

  self.assertDictEqual(
      counter.method_counts, {
          'on_batch_begin': epochs * num_batch_call_per_epoch,
          'on_batch_end': epochs * num_batch_call_per_epoch,
          'on_epoch_begin': epochs,
          'on_epoch_end': epochs,
          'on_test_batch_begin': epochs * validation_steps,
          'on_test_batch_end': epochs * validation_steps,
          'on_test_begin': epochs,
          'on_test_end': epochs,
          'on_train_batch_begin': epochs * num_batch_call_per_epoch,
          'on_train_batch_end': epochs * num_batch_call_per_epoch,
          'on_train_begin': 1,
          'on_train_end': 1
      })
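
# NOTE: `Counter` used above is defined elsewhere in this file and is not part
# of this section. The class below is only a minimal sketch of what such a
# callback could look like (the name `_CounterSketch` and the implementation
# are assumptions, not the actual helper): it tallies how many times each
# `on_*` hook fires, which is what the `method_counts` assertions check.
# Assumes `collections` is imported at the top of the module.
class _CounterSketch(keras.callbacks.Callback):
  """Counts how many times each callback hook is invoked."""

  def __init__(self):
    super(_CounterSketch, self).__init__()
    self.method_counts = collections.defaultdict(int)
    # Shadow every inherited `on_*` hook with a wrapper that counts the call
    # and then delegates to the original (no-op) implementation.
    for method_name in [m for m in dir(self) if m.startswith('on_')]:
      setattr(self, method_name,
              self._wrap_with_count(method_name, getattr(self, method_name)))

  def _wrap_with_count(self, method_name, method):

    def _call_and_count(*args, **kwargs):
      self.method_counts[method_name] += 1
      return method(*args, **kwargs)

    return _call_and_count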
def test_save_load_h5(self, distribution, optimizer, cloning):
  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(optimizer(), 'mse', cloning=cloning)
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      weights_file = tempfile.mktemp('.h5')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(optimizer(), 'mse', cloning=cloning)
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_save_load_h5(self, distribution):
  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      weights_file = tempfile.mktemp('.h5')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_callbacks_in_eval(self, distribution):
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()

  model.evaluate(dataset, steps=5, callbacks=[counter])

  self.assertDictEqual(
      counter.method_counts, {
          'on_test_batch_begin': 5,
          'on_test_batch_end': 5,
          'on_test_begin': 1,
          'on_test_end': 1
      })
def test_save_load_trackable(self, distribution, optimizer, cloning):
  # TODO(b/123533246): Enable the test for TPU once bug is fixed
  if (isinstance(distribution,
                 (tpu_strategy.TPUStrategy, tpu_strategy.TPUStrategyV1)) and
      distribution.extended.steps_per_run > 1):
    self.skipTest('MultiStep TPU Strategy deadlocks with optimizer restore.')
  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(optimizer(), 'mse', cloning=cloning)
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      weights_file = tempfile.mktemp()
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(optimizer(), 'mse', cloning=cloning)
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
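
# Note: unlike the '.h5' path used in `test_save_load_h5`, `tempfile.mktemp()`
# here has no extension, so `save_weights` infers the TensorFlow checkpoint
# (trackable) format. That format also checkpoints optimizer state through the
# model's trackable dependencies, which is the optimizer-restore path the
# skipTest message above refers to.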
def test_callbacks_in_predict(self, distribution,
                              experimental_run_tf_function):
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(
        optimizer='sgd',
        loss='mse',
        metrics=['mae'],
        experimental_run_tf_function=experimental_run_tf_function)

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()

  model.predict(
      keras_test_lib.get_predict_dataset(dataset),
      steps=5,
      callbacks=[counter])

  self.assertDictEqual(
      counter.method_counts, {
          'on_predict_batch_begin': 5,
          'on_predict_batch_end': 5,
          'on_predict_begin': 1,
          'on_predict_end': 1
      })
def test_save_load_h5(self, distribution, optimizer,
                      experimental_run_tf_function):
  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(
          optimizer(),
          'mse',
          experimental_run_tf_function=experimental_run_tf_function)
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      weights_file = tempfile.mktemp('.h5')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(
          optimizer(),
          'mse',
          experimental_run_tf_function=experimental_run_tf_function)
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_save_load_trackable(self, distribution):
  # TODO(b/123533246): Enable the test for TPU once bug is fixed
  if (isinstance(distribution, tpu_strategy.TPUStrategy) and
      distribution.extended.steps_per_run > 1):
    self.skipTest('MultiStep TPU Strategy deadlocks with optimizer restore.')
  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      weights_file = tempfile.mktemp()
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(rms_prop_keras.RMSprop(learning_rate=0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_unsupported_features(self, distribution,
                              experimental_run_tf_function, mode):
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(
          optimizer,
          loss,
          metrics=metrics,
          experimental_run_tf_function=experimental_run_tf_function)

    dataset = keras_test_lib.get_dataset(distribution)

    if experimental_run_tf_function and mode == 'eager':
      exception_error_message = (
          '`validation_split` argument is not supported when data adapter'
          ' is.+')
    else:
      exception_error_message = (
          '`validation_split` argument is not supported when input `x`'
          ' is a dataset or a dataset iterator.+')

    # Test with validation split.
    with self.assertRaisesRegexp(ValueError, exception_error_message):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          validation_split=0.5,
          validation_steps=2)

    # Test with sample weight.
    sample_weight = np.random.random((10,))
    with self.assertRaisesRegexp(
        ValueError,
        '`sample_weight` argument is not supported when input '
        '`x` is a dataset or a dataset iterator.'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          sample_weight=sample_weight)

    # Test without specifying the `steps` argument for a dataset of infinite
    # cardinality.
    dataset = dataset.repeat()
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps_per_epoch` argument'):
      model.fit(dataset, epochs=1, verbose=0)
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.evaluate(dataset, verbose=0)
    with self.assertRaisesRegexp(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.predict(dataset, verbose=0)
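
# NOTE: The `keras_test_lib` helpers used throughout these tests (`get_model`,
# `get_dataset`, `get_predict_dataset`) live in a separate module and are not
# shown in this section. The functions below are only a rough sketch of the
# shape such helpers might have; the layer sizes, batch size, and dataset
# contents are illustrative assumptions, not the actual definitions. Assumes
# `from tensorflow.python.data.ops import dataset_ops` at the top of the
# module.
def _example_get_model():
  x = keras.layers.Input(shape=(3,), name='input')
  y = keras.layers.Dense(4, name='dense')(x)
  return keras.Model(x, y)


def _example_get_dataset(distribution):
  del distribution  # The real helper may batch or pad per strategy.
  inputs = np.zeros((10, 3), dtype=np.float32)
  targets = np.zeros((10, 4), dtype=np.float32)
  dataset = dataset_ops.DatasetV2.from_tensor_slices((inputs, targets))
  # Finite repeat: calling `.repeat()` on the result, as the
  # unsupported-features tests do, is what makes the cardinality infinite.
  return dataset.repeat(100).batch(10)


def _example_get_predict_dataset(distribution):
  del distribution
  inputs = np.zeros((10, 3), dtype=np.float32)
  dataset = dataset_ops.DatasetV2.from_tensor_slices(inputs)
  return dataset.repeat(100).batch(10)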