def test_saving_overwrite_option_gcs():
    """Check that saving to GCS honors the interactive overwrite prompt."""
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    initial_weights = model.get_weights()
    replacement_weights = [np.random.random(w.shape)
                           for w in initial_weights]
    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        # a filename unique to this test allows parallel test execution
        gcs_filepath = file_io_proxy.get_filepath(
            filename='test_saving_overwrite_option_gcs.h5')
        save_model(model, gcs_filepath)
        model.set_weights(replacement_weights)
        with patch('keras.engine.saving.ask_to_proceed_with_overwrite') as ask:
            # user declines the prompt: original weights must survive on GCS
            ask.return_value = False
            save_model(model, gcs_filepath, overwrite=False)
            ask.assert_called_once()
            reloaded = load_model(gcs_filepath)
            for actual, expected in zip(reloaded.get_weights(),
                                        initial_weights):
                assert_allclose(actual, expected)
            # user accepts the prompt: replacement weights must be written
            ask.return_value = True
            save_model(model, gcs_filepath, overwrite=False)
            assert ask.call_count == 2
            reloaded = load_model(gcs_filepath)
            for actual, expected in zip(reloaded.get_weights(),
                                        replacement_weights):
                assert_allclose(actual, expected)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup
def test_sequential_model_saving_2():
    """Round-trip a model compiled with a custom optimizer and loss,
    saving/loading both via local disk and via the GCS proxy."""
    custom_opt = optimizers.rmsprop
    custom_loss = losses.mse
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(Dense(3))
    model.compile(loss=custom_loss, optimizer=custom_opt(), metrics=['acc'])
    x = np.random.random((1, 3))
    y = np.random.random((1, 3))
    model.train_on_batch(x, y)
    reference_out = model.predict(x)
    # loading needs the custom objects re-supplied by name
    load_kwargs = {'custom_objects': {'custom_opt': custom_opt,
                                      'custom_loss': custom_loss}}
    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname, **load_kwargs)
    os.remove(fname)
    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath, **load_kwargs)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup
    # both restored models must reproduce the original predictions
    for restored in [new_model_disk, new_model_gcs]:
        assert_allclose(reference_out, restored.predict(x), atol=1e-05)
def test_sequential_model_saving_2():
    """Save/load a model that uses a custom optimizer and a custom loss.

    Both the local-disk path and the mocked-GCS path are exercised, and
    the restored models must match the trained model's predictions.
    """
    custom_opt = optimizers.rmsprop
    custom_loss = losses.mse

    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(Dense(3))
    model.compile(loss=custom_loss, optimizer=custom_opt(), metrics=['acc'])

    x = np.random.random((1, 3))
    y = np.random.random((1, 3))
    model.train_on_batch(x, y)
    out = model.predict(x)

    # the custom objects must be passed back in at load time
    custom_objects = {'custom_opt': custom_opt, 'custom_loss': custom_loss}

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname, custom_objects=custom_objects)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath, custom_objects=custom_objects)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    for new_model in [new_model_disk, new_model_gcs]:
        new_out = new_model.predict(x)
        assert_allclose(out, new_out, atol=1e-05)
def test_functional_model_saving():
    """Round-trip a functional-API model through disk and GCS saving."""
    inputs = Input(shape=(3,))
    hidden = Dense(2)(inputs)
    outputs = Dense(3)(hidden)
    model = Model(inputs, outputs)
    model.compile(loss=losses.MSE,
                  optimizer=optimizers.Adam(),
                  metrics=[metrics.categorical_accuracy])

    x = np.random.random((1, 3))
    y = np.random.random((1, 3))
    model.train_on_batch(x, y)
    reference_out = model.predict(x)

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    # both restored copies must predict identically to the original
    for restored in [new_model_disk, new_model_gcs]:
        assert_allclose(reference_out, restored.predict(x), atol=1e-05)
def test_saving_overwrite_option_gcs():
    """The overwrite prompt must gate re-saving a model to GCS.

    Declining the prompt must leave the stored weights untouched;
    accepting it must replace them.
    """
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    org_weights = model.get_weights()
    new_weights = [np.random.random(w.shape) for w in org_weights]

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        # we should not use same filename in several tests to allow for
        # parallel execution
        gcs_filepath = file_io_proxy.get_filepath(
            filename='test_saving_overwrite_option_gcs.h5')
        save_model(model, gcs_filepath)
        model.set_weights(new_weights)

        with patch('keras.engine.saving.ask_to_proceed_with_overwrite') as ask:
            ask.return_value = False
            save_model(model, gcs_filepath, overwrite=False)
            ask.assert_called_once()
            # declined -> the original weights are still what loads back
            new_model = load_model(gcs_filepath)
            for w, org_w in zip(new_model.get_weights(), org_weights):
                assert_allclose(w, org_w)

            ask.return_value = True
            save_model(model, gcs_filepath, overwrite=False)
            assert ask.call_count == 2
            # accepted -> the updated weights are what loads back
            new_model = load_model(gcs_filepath)
            for w, new_w in zip(new_model.get_weights(), new_weights):
                assert_allclose(w, new_w)

        file_io_proxy.delete_file(gcs_filepath)  # cleanup
def test_functional_model_saving():
    """Save and reload a functional-API model, on disk and via GCS."""
    inputs = Input(shape=(3,))
    x = Dense(2)(inputs)
    outputs = Dense(3)(x)

    model = Model(inputs, outputs)
    model.compile(loss=losses.MSE,
                  optimizer=optimizers.Adam(),
                  metrics=[metrics.categorical_accuracy])

    batch_x = np.random.random((1, 3))
    batch_y = np.random.random((1, 3))
    model.train_on_batch(batch_x, batch_y)
    out = model.predict(batch_x)

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    for new_model in [new_model_disk, new_model_gcs]:
        assert_allclose(out, new_model.predict(batch_x), atol=1e-05)
def test_functional_model_saving():
    """A sample model must survive a save/load round trip on disk and GCS."""
    model, x = _get_sample_model_and_input()
    expected_out = model.predict(x)

    # local-disk round trip
    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    # GCS round trip through the file-io proxy
    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    for restored in [new_model_disk, new_model_gcs]:
        assert_allclose(expected_out, restored.predict(x), atol=1e-05)
def test_functional_model_saving():
    """Predictions must be unchanged after saving/loading (disk and GCS)."""
    model, x = _get_sample_model_and_input()
    out = model.predict(x)

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    for new_model in [new_model_disk, new_model_gcs]:
        new_out = new_model.predict(x)
        assert_allclose(out, new_out, atol=1e-05)
def test_save_load_weights_gcs():
    """save_weights/load_weights must round-trip through the GCS proxy."""
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    saved_weights = model.get_weights()

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        # a filename unique to this test allows parallel test execution
        gcs_filepath = file_io_proxy.get_filepath(
            filename='test_save_load_weights_gcs.h5')
        model.save_weights(gcs_filepath)

        # scramble the in-memory weights and confirm they actually changed
        model.set_weights([np.random.random(w.shape) for w in saved_weights])
        for current, saved in zip(model.get_weights(), saved_weights):
            assert not (current == saved).all()

        # loading from GCS must restore the originals
        model.load_weights(gcs_filepath)
        for current, saved in zip(model.get_weights(), saved_weights):
            assert_allclose(current, saved)

        file_io_proxy.delete_file(gcs_filepath)  # cleanup
def test_save_load_weights_gcs():
    """Weights written to GCS must be restorable with load_weights."""
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    org_weights = model.get_weights()

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        # we should not use same filename in several tests to allow for
        # parallel execution
        gcs_filepath = file_io_proxy.get_filepath(
            filename='test_save_load_weights_gcs.h5')
        model.save_weights(gcs_filepath)

        # overwrite in-memory weights with random values
        model.set_weights([np.random.random(w.shape) for w in org_weights])
        for w, org_w in zip(model.get_weights(), org_weights):
            assert not (w == org_w).all()

        model.load_weights(gcs_filepath)
        for w, org_w in zip(model.get_weights(), org_weights):
            assert_allclose(w, org_w)

        file_io_proxy.delete_file(gcs_filepath)  # cleanup
def test_sequential_model_saving():
    """Full round trip of a Sequential model (disk and GCS), including
    checking that further training updates agree between original and
    restored models."""
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(RepeatVector(3))
    model.add(TimeDistributed(Dense(3)))
    model.compile(loss=losses.MSE,
                  optimizer=optimizers.RMSprop(lr=0.0001),
                  metrics=[metrics.categorical_accuracy],
                  sample_weight_mode='temporal')

    x = np.random.random((1, 3))
    y = np.random.random((1, 3, 3))
    model.train_on_batch(x, y)
    reference_out = model.predict(x)

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    # train the original one more step for comparison with the copies
    x2 = np.random.random((1, 3))
    y2 = np.random.random((1, 3, 3))
    model.train_on_batch(x2, y2)
    reference_out_2 = model.predict(x2)

    for restored in [new_model_disk, new_model_gcs]:
        assert_allclose(reference_out, restored.predict(x), atol=1e-05)
        # the optimizer state round-tripped too: one more training step
        # must produce the same result as on the original model
        restored.train_on_batch(x2, y2)
        assert_allclose(reference_out_2, restored.predict(x2), atol=1e-05)
def test_sequential_model_saving():
    """Save/load a compiled Sequential model on disk and on GCS.

    Also verifies that a subsequent training step updates the restored
    models exactly as it updates the original (optimizer state survives).
    """
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(RepeatVector(3))
    model.add(TimeDistributed(Dense(3)))
    model.compile(loss=losses.MSE,
                  optimizer=optimizers.RMSprop(lr=0.0001),
                  metrics=[metrics.categorical_accuracy],
                  sample_weight_mode='temporal')

    x = np.random.random((1, 3))
    y = np.random.random((1, 3, 3))
    model.train_on_batch(x, y)
    out = model.predict(x)

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    new_model_disk = load_model(fname)
    os.remove(fname)

    with tf_file_io_proxy('keras.engine.saving.tf_file_io') as file_io_proxy:
        gcs_filepath = file_io_proxy.get_filepath(filename=fname)
        save_model(model, gcs_filepath)
        file_io_proxy.assert_exists(gcs_filepath)
        new_model_gcs = load_model(gcs_filepath)
        file_io_proxy.delete_file(gcs_filepath)  # cleanup

    x2 = np.random.random((1, 3))
    y2 = np.random.random((1, 3, 3))
    model.train_on_batch(x2, y2)
    out_2 = model.predict(x2)

    for new_model in [new_model_disk, new_model_gcs]:
        new_out = new_model.predict(x)
        assert_allclose(out, new_out, atol=1e-05)
        # test that new updates are the same with both models
        new_model.train_on_batch(x2, y2)
        new_out_2 = new_model.predict(x2)
        assert_allclose(out_2, new_out_2, atol=1e-05)