def test_calling_with_unsupported_predefined_callbacks(self, distribution):
  """Verifies fit() rejects LR-scheduling callbacks with a V1 optimizer.

  LearningRateScheduler and ReduceLROnPlateau mutate the optimizer's
  learning rate and are only supported with a Keras Optimizer V2, so
  fit() must raise ValueError when compiled with the V1
  GradientDescentOptimizer.
  """
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics)

    dataset = keras_test_lib.get_dataset(distribution)

    def schedule(_):
      return 0.001

    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    with self.assertRaisesRegex(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.LearningRateScheduler(schedule)])

    with self.assertRaisesRegex(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.ReduceLROnPlateau()])
def test_calling_with_unsupported_predefined_callbacks(self, distribution):
  """Verifies fit() rejects LR-scheduling callbacks with a V1 optimizer.

  LearningRateScheduler and ReduceLROnPlateau mutate the optimizer's
  learning rate and are only supported with a Keras Optimizer V2, so
  fit() must raise ValueError when compiled with the V1
  GradientDescentOptimizer.
  """
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics)

    dataset = keras_test_lib.get_dataset(distribution)

    def schedule(_):
      return 0.001

    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    with self.assertRaisesRegex(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.LearningRateScheduler(schedule)])

    with self.assertRaisesRegex(
        ValueError, 'You must specify a Keras Optimizer V2 when '
        'using'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          callbacks=[keras.callbacks.ReduceLROnPlateau()])
def test_unsupported_features(self, distribution):
  """Checks fit/evaluate/predict raise on unsupported dataset arguments.

  Covers `validation_split`, `sample_weight`, and missing
  `steps`/`steps_per_epoch` for an infinitely repeating dataset.
  """
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics)

    dataset = keras_test_lib.get_dataset(distribution)

    # Test with validation split.
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    with self.assertRaisesRegex(
        ValueError, '`validation_split` argument is not '
        'supported when input `x` is a dataset or a '
        'dataset iterator.+'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          validation_split=0.5,
          validation_steps=2)

    # Test with sample weight.
    sample_weight = np.random.random((10,))
    with self.assertRaisesRegex(
        ValueError,
        '`sample_weight` argument is not supported when input '
        '`x` is a dataset or a dataset iterator.'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          sample_weight=sample_weight)

    # Test with not specifying the `steps` argument for dataset with
    # infinite cardinality.
    dataset = dataset.repeat()
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps_per_epoch` argument'):
      model.fit(dataset, epochs=1, verbose=0)
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.evaluate(dataset, verbose=0)
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.predict(dataset, verbose=0)
def test_unsupported_features(self, distribution):
  """Checks fit/evaluate/predict raise on unsupported dataset arguments.

  Covers `validation_split`, `sample_weight`, and missing
  `steps`/`steps_per_epoch` for an infinitely repeating dataset.
  """
  with self.cached_session():
    with distribution.scope():
      model = keras_test_lib.get_model()
      optimizer = gradient_descent.GradientDescentOptimizer(0.001)
      loss = 'mse'
      metrics = ['mae']
      model.compile(optimizer, loss, metrics=metrics)

    dataset = keras_test_lib.get_dataset(distribution)

    # Test with validation split.
    # assertRaisesRegexp is a deprecated alias (removed in Python 3.12);
    # use assertRaisesRegex instead.
    with self.assertRaisesRegex(
        ValueError, '`validation_split` argument is not '
        'supported when input `x` is a dataset or a '
        'dataset iterator.+'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          validation_split=0.5,
          validation_steps=2)

    # Test with sample weight.
    sample_weight = np.random.random((10,))
    with self.assertRaisesRegex(
        ValueError,
        '`sample_weight` argument is not supported when input '
        '`x` is a dataset or a dataset iterator.'):
      model.fit(
          dataset,
          epochs=1,
          steps_per_epoch=2,
          verbose=0,
          sample_weight=sample_weight)

    # Test with not specifying the `steps` argument for dataset with
    # infinite cardinality.
    dataset = dataset.repeat()
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps_per_epoch` argument'):
      model.fit(dataset, epochs=1, verbose=0)
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.evaluate(dataset, verbose=0)
    with self.assertRaisesRegex(
        ValueError, 'When passing an infinitely '
        'repeating dataset, you must specify the '
        '`steps` argument'):
      model.predict(dataset, verbose=0)
def test_save_load_h5(self, distribution):
  """Round-trips model weights through an HDF5 file under the strategy."""
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; write into the
      # test's own temp dir (cleaned up by the test framework) instead.
      # The '.h5' suffix selects the HDF5 weight format.
      weights_file = os.path.join(self.get_temp_dir(), 'weights.h5')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_callbacks_in_eval(self, distribution):
  """Asserts evaluate() fires each test-callback hook the right number of times."""
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()
  eval_steps = 5
  model.evaluate(dataset, steps=eval_steps, callbacks=[counter])

  # One begin/end pair per batch, one per evaluation run overall.
  expected = {
      'on_test_batch_begin': eval_steps,
      'on_test_batch_end': eval_steps,
      'on_test_begin': 1,
      'on_test_end': 1,
  }
  self.assertDictEqual(counter.method_counts, expected)
def test_callbacks_in_fit(self, distribution):
  """Asserts fit() fires each callback hook the expected number of times."""
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()

  epochs = 2
  steps_per_epoch = 5
  validation_steps = 3
  model.fit(
      dataset,
      epochs=epochs,
      steps_per_epoch=steps_per_epoch,
      verbose=0,
      validation_data=dataset,
      validation_steps=validation_steps,
      callbacks=[counter])

  if isinstance(distribution, tpu_strategy.TPUStrategy):
    # TPU Strategy can run multiple steps per host call (see
    # extended.steps_per_run), and batch callbacks fire once per host
    # call — so divide, rounding up.  With steps_per_run == 1 this
    # reduces to steps_per_epoch.
    steps_per_run = distribution.extended.steps_per_run
    batch_calls_per_epoch = (
        steps_per_epoch + steps_per_run - 1) // steps_per_run
  else:
    batch_calls_per_epoch = steps_per_epoch

  train_batches = epochs * batch_calls_per_epoch
  val_batches = epochs * validation_steps
  self.assertDictEqual(
      counter.method_counts, {
          'on_batch_begin': train_batches,
          'on_batch_end': train_batches,
          'on_epoch_begin': epochs,
          'on_epoch_end': epochs,
          'on_test_batch_begin': val_batches,
          'on_test_batch_end': val_batches,
          'on_test_begin': epochs,
          'on_test_end': epochs,
          'on_train_batch_begin': train_batches,
          'on_train_batch_end': train_batches,
          'on_train_begin': 1,
          'on_train_end': 1
      })
def test_save_load_h5(self, distribution):
  """Round-trips model weights through an HDF5 file under the strategy."""
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; write into the
      # test's own temp dir (cleaned up by the test framework) instead.
      # The '.h5' suffix selects the HDF5 weight format.
      weights_file = os.path.join(self.get_temp_dir(), 'weights.h5')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_callbacks_in_eval(self, distribution):
  """Asserts evaluate() fires each test-callback hook the right number of times."""
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()
  eval_steps = 5
  model.evaluate(dataset, steps=eval_steps, callbacks=[counter])

  # One begin/end pair per batch, one per evaluation run overall.
  expected = {
      'on_test_batch_begin': eval_steps,
      'on_test_batch_end': eval_steps,
      'on_test_begin': 1,
      'on_test_end': 1,
  }
  self.assertDictEqual(counter.method_counts, expected)
def test_callbacks_in_fit(self, distribution):
  """Asserts fit() fires each callback hook the expected number of times."""
  with distribution.scope():
    model = keras_test_lib.get_model()
    model.compile(optimizer='sgd', loss='mse', metrics=['mae'])

  dataset = keras_test_lib.get_dataset(distribution)
  counter = Counter()

  epochs = 2
  steps_per_epoch = 5
  validation_steps = 3
  model.fit(
      dataset,
      epochs=epochs,
      steps_per_epoch=steps_per_epoch,
      verbose=0,
      validation_data=dataset,
      validation_steps=validation_steps,
      callbacks=[counter])

  if isinstance(distribution, tpu_strategy.TPUStrategy):
    # TPU Strategy can run multiple steps per host call (see
    # extended.steps_per_run), and batch callbacks fire once per host
    # call — so divide, rounding up.  With steps_per_run == 1 this
    # reduces to steps_per_epoch.
    steps_per_run = distribution.extended.steps_per_run
    batch_calls_per_epoch = (
        steps_per_epoch + steps_per_run - 1) // steps_per_run
  else:
    batch_calls_per_epoch = steps_per_epoch

  train_batches = epochs * batch_calls_per_epoch
  val_batches = epochs * validation_steps
  self.assertDictEqual(
      counter.method_counts, {
          'on_batch_begin': train_batches,
          'on_batch_end': train_batches,
          'on_epoch_begin': epochs,
          'on_epoch_end': epochs,
          'on_test_batch_begin': val_batches,
          'on_test_batch_end': val_batches,
          'on_test_begin': epochs,
          'on_test_end': epochs,
          'on_train_batch_begin': train_batches,
          'on_train_batch_end': train_batches,
          'on_train_begin': 1,
          'on_train_end': 1
      })
def test_save_load_trackable_optimizer_v2(self, distribution):
  """Round-trips weights through a TF-format checkpoint with a V2 optimizer."""
  # TODO(b/123533246): Enable the test for TPU once bug is fixed
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; use the test's
      # temp dir.  No '.h5' suffix -> TF-format (trackable) checkpoint.
      weights_file = os.path.join(self.get_temp_dir(), 'weights')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_save_load_trackable(self, distribution):
  """Round-trips weights through a TF-format checkpoint with a V1 optimizer."""
  # TODO(sourabhbajaj): Test fails with optimizer v2 without h5
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; use the test's
      # temp dir.  No '.h5' suffix -> TF-format (trackable) checkpoint.
      weights_file = os.path.join(self.get_temp_dir(), 'weights')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_save_load_trackable_optimizer_v2(self, distribution):
  """Round-trips weights through a TF-format checkpoint with a V2 optimizer."""
  # TODO(b/123533246): Enable the test for TPU once bug is fixed
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; use the test's
      # temp dir.  No '.h5' suffix -> TF-format (trackable) checkpoint.
      weights_file = os.path.join(self.get_temp_dir(), 'weights')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(gradient_descent_keras.SGD(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)
def test_save_load_trackable(self, distribution):
  """Round-trips weights through a TF-format checkpoint with a V1 optimizer."""
  # TODO(sourabhbajaj): Test fails with optimizer v2 without h5
  import os  # function-scope import; only used to build the temp path

  with self.cached_session():
    dataset = keras_test_lib.get_dataset(distribution)
    with distribution.scope():
      model = keras_test_lib.get_model()
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model.fit(dataset, epochs=1, steps_per_epoch=1)

      # tempfile.mktemp is deprecated and race-prone; use the test's
      # temp dir.  No '.h5' suffix -> TF-format (trackable) checkpoint.
      weights_file = os.path.join(self.get_temp_dir(), 'weights')
      model.save_weights(weights_file)

      model_2 = keras_test_lib.get_model()
      model_2.compile(
          gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model_2.load_weights(weights_file)
      model_2.predict(
          keras_test_lib.get_predict_dataset(distribution), steps=2)
      model_2.fit(dataset, epochs=1, steps_per_epoch=1)