def test_tuner_not_call_super_search_with_overwrite(final_fit, super_search, tmp_path):
    """A tuner reloaded from a finished run must not search again.

    NOTE(review): another test with this exact name is defined later in the
    file and will shadow this one under pytest collection — confirm which
    version is intended to survive.
    """
    first_run = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    first_run.search(epochs=10)
    first_run.save()

    # Forget the calls made by the first run before reloading.
    super_search.reset_mock()

    reloaded = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    reloaded.search(epochs=10)

    super_search.assert_not_called()
def test_tuner_not_call_super_search_with_overwrite(_, final_fit, super_search, tmp_path):
    """Restoring a completed tuner from disk skips the base-class search."""
    first_run = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    # final_fit is mocked to return (model, history, something) placeholders.
    final_fit.return_value = mock.Mock(), mock.Mock(), mock.Mock()
    first_run.search(x=None, epochs=10, validation_data=None)
    first_run.save()

    # Drop the recorded calls so only the reloaded run is observed.
    super_search.reset_mock()

    reloaded = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    reloaded.search(x=None, epochs=10, validation_data=None)
    super_search.assert_not_called()
def test_greedy_oracle_populate_different_values(get_best_trials):
    """Two consecutive populate_space calls should not yield identical values."""
    hp = keras_tuner.HyperParameters()
    utils.build_graph().build(hp)

    oracle = greedy.GreedyOracle(objective="val_loss", seed=utils.SEED)
    best_trial = mock.Mock()
    best_trial.hyperparameters = hp
    get_best_trials.return_value = [best_trial]

    oracle.update_space(hp)
    first = oracle.populate_space("a")["values"]
    second = oracle.populate_space("b")["values"]

    # At least one hyperparameter must differ between the two proposals.
    assert not all(first[name] == second[name] for name in first)
def test_greedy_oracle_state_hypermodel_is_graph():
    """Round-tripping oracle state keeps the hypermodel a Graph instance."""
    oracle = greedy.GreedyOracle(
        hypermodel=utils.build_graph(),
        objective='val_loss',
    )
    restored_state = oracle.get_state()
    oracle.set_state(restored_state)
    assert isinstance(oracle.hypermodel, graph_module.Graph)
def test_greedy_oracle_stop_reach_max_collision(get_best_trials, compute_values_hash):
    """When every candidate hashes identically, the oracle stops the search."""
    hp = keras_tuner.HyperParameters()
    utils.build_graph().build(hp)

    oracle = greedy.GreedyOracle(objective="val_loss", seed=utils.SEED)
    best_trial = mock.Mock()
    best_trial.hyperparameters = hp
    get_best_trials.return_value = [best_trial]
    # Force every proposal to collide on the same hash so the collision
    # limit is hit immediately.
    compute_values_hash.return_value = 1

    oracle.update_space(hp)
    oracle.populate_space("a")["values"]

    status = oracle.populate_space("b")["status"]
    assert status == keras_tuner.engine.trial.TrialStatus.STOPPED
def test_no_final_fit_without_epochs_and_fov(get_best_models, final_fit, super_search, tmp_path):
    """No final fit should happen when epochs is None and fit_on_val_data is off."""
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    greedy_tuner.search(x=None, epochs=None, fit_on_val_data=False)
    final_fit.assert_not_called()
def test_tuner_does_not_crash_with_distribution_strategy(tmp_path):
    """Building the hypermodel under MirroredStrategy must not raise."""
    strategy = tf.distribute.MirroredStrategy()
    greedy_tuner = greedy.Greedy(
        hypermodel=utils.build_graph(),
        directory=tmp_path,
        distribution_strategy=strategy,
    )
    # Smoke test: a successful build is the whole assertion.
    greedy_tuner.hypermodel.build(greedy_tuner.oracle.hyperparameters)
def test_final_fit_with_specified_epochs(_, final_fit, super_search, tmp_path):
    """An explicit epochs argument is forwarded unchanged to the final fit."""
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    final_fit.return_value = mock.Mock(), mock.Mock(), mock.Mock()
    greedy_tuner.search(x=None, epochs=10, validation_data=None)

    first_call_kwargs = final_fit.call_args_list[0][1]
    assert first_call_kwargs["epochs"] == 10
def test_tuner_call_super_with_early_stopping(_, final_fit, super_search, tmp_path):
    """The base search is invoked with an EarlyStopping callback attached.

    NOTE(review): final_fit is stubbed with a 2-tuple here while sibling
    tests use a 3-tuple — confirm which arity the tuner unpacks.
    """
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    final_fit.return_value = mock.Mock(), mock.Mock()
    greedy_tuner.search(x=None, epochs=10, validation_data=None)
    assert called_with_early_stopping(super_search)
def test_tuner_call_super_with_early_stopping(final_fit, super_search, tmp_path):
    """Searching with explicit epochs still adds EarlyStopping to super().search."""
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    greedy_tuner.search(x=None, epochs=10)
    assert called_with_early_stopping(super_search)
def test_random_oracle_state():
    """get_state/set_state round-trip preserves the hypermodel identity."""
    hypergraph = utils.build_graph()
    oracle = greedy.GreedyOracle(
        hypermodel=hypergraph,
        objective='val_loss',
    )
    oracle.hypermodel = hypergraph
    oracle.set_state(oracle.get_state())
    # Identity check: the very same graph object must survive the round-trip.
    assert oracle.hypermodel is hypergraph
def test_final_fit_with_specified_epochs(final_fit, super_search, tmp_path):
    """epochs=10 passed to search reaches the final fit unmodified."""
    greedy_tuner = greedy.Greedy(
        hypermodel=utils.build_graph(),
        directory=tmp_path,
    )
    greedy_tuner.search(x=None, epochs=10)

    first_call_kwargs = final_fit.call_args_list[0][1]
    assert first_call_kwargs['epochs'] == 10
def test_greedy_oracle_get_state_update_space_can_run():
    """update_space still works after a state round-trip (smoke test)."""
    oracle = greedy.GreedyOracle(
        hypermodel=utils.build_graph(),
        objective='val_loss',
    )
    oracle.set_state(oracle.get_state())

    hp = kerastuner.HyperParameters()
    hp.Boolean('test')
    # Must not raise.
    oracle.update_space(hp)
def test_final_fit_best_epochs_if_epoch_unspecified(best_epochs, final_fit, super_search, tmp_path):
    """With epochs=None the final fit reuses the best epoch count (mocked to 2)."""
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    greedy_tuner.search(
        x=mock.Mock(),
        epochs=None,
        fit_on_val_data=True,
        validation_data=mock.Mock(),
    )
    assert final_fit.call_args_list[0][1]["epochs"] == 2
def test_overwrite_search(fit_fn, base_tuner_search, tmp_path):
    """A complete search run flips the tuner's finished flag."""
    hypergraph = utils.build_graph()
    auto_tuner = tuner_module.AutoTuner(
        oracle=greedy.GreedyOracle(hypergraph, objective='val_loss'),
        hypermodel=hypergraph,
        directory=tmp_path,
    )
    auto_tuner.search(epochs=10)
    assert auto_tuner._finished
def test_super_with_1k_epochs_if_epoch_unspecified(best_epochs, final_fit, super_search, tmp_path):
    """Unspecified epochs defaults the trial search to 1000 epochs + EarlyStopping."""
    greedy_tuner = greedy.Greedy(hypermodel=utils.build_graph(), directory=tmp_path)
    greedy_tuner.search(
        x=mock.Mock(),
        epochs=None,
        fit_on_val_data=True,
        validation_data=mock.Mock(),
    )
    first_call_kwargs = super_search.call_args_list[0][1]
    assert first_call_kwargs['epochs'] == 1000
    assert called_with_early_stopping(super_search)
def test_add_early_stopping(fit_fn, base_tuner_search, tmp_path):
    """The base search receives at least one EarlyStopping callback."""
    hypergraph = utils.build_graph()
    auto_tuner = tuner_module.AutoTuner(
        oracle=greedy.GreedyOracle(hypergraph, objective='val_loss'),
        hypermodel=hypergraph,
        directory=tmp_path,
    )
    auto_tuner.search(x=None, epochs=10)

    callbacks = base_tuner_search.call_args_list[0][1]['callbacks']
    assert any(
        isinstance(cb, tf.keras.callbacks.EarlyStopping) for cb in callbacks
    )
def test_no_epochs(best_epochs, fit_fn, base_tuner_search, tmp_path):
    """With epochs=None the final fit must not carry an EarlyStopping callback.

    best_epochs is mocked to 2 so the tuner has a concrete epoch count to
    reuse for the final fit.
    """
    best_epochs.return_value = 2
    graph = utils.build_graph()
    tuner = tuner_module.AutoTuner(
        oracle=greedy.GreedyOracle(graph, objective='val_loss'),
        hypermodel=graph,
        directory=tmp_path,
    )
    tuner.search(
        x=mock.Mock(),
        epochs=None,
        fit_on_val_data=True,
        validation_data=mock.Mock(),
    )
    # Removed leftover debug print(callbacks); the assertion below is the
    # actual check.
    callbacks = fit_fn.call_args_list[0][1]['callbacks']
    assert not any(
        isinstance(callback, tf.keras.callbacks.EarlyStopping)
        for callback in callbacks
    )
def test_overwrite_search(fit_fn, base_tuner_search, init, tmp_dir):
    """search() on a hand-wired AutoTuner marks the tuner as finished.

    The tuner is built with a mocked __init__ (via the init fixture), so its
    attributes are assigned manually before searching.
    """
    graph = utils.build_graph()
    tuner = tuner_module.AutoTuner(oracle=mock.Mock(), hypermodel=graph)
    tuner.hypermodel = graph
    tuner.directory = tmp_dir
    tuner.project_name = ''
    # Oracle stub that hands back a single best trial with empty hps.
    hp = kerastuner.HyperParameters()
    trial = mock.Mock()
    trial.hyperparameters = hp
    oracle = mock.Mock()
    oracle.get_best_trials.return_value = (trial,)
    # Assign the configured oracle once; the original code first assigned a
    # throwaway mock.Mock() that was overwritten before any read.
    tuner.oracle = oracle
    tuner.search()
    assert tuner._finished
def test_greedy_oracle(fn):
    """Repeated populate calls eventually bucket hps into OPT/ARCH/HYPER groups."""
    oracle = greedy.GreedyOracle(
        hypermodel=utils.build_graph(),
        objective='val_loss',
    )
    hp = kerastuner.HyperParameters()
    best_trial = mock.Mock()
    best_trial.hyperparameters = hp
    fn.return_value = [best_trial]
    oracle.update_space(hp)

    # Exercise the oracle enough times for every category to be visited.
    for trial_id in range(2000):
        oracle._populate_space(str(trial_id))

    assert 'optimizer' in oracle._hp_names[greedy.GreedyOracle.OPT]
    assert 'classification_head_1/dropout' in oracle._hp_names[
        greedy.GreedyOracle.ARCH]
    assert 'image_block_1/block_type' in oracle._hp_names[
        greedy.GreedyOracle.HYPER]
def test_add_early_stopping(fit_fn, base_tuner_search, init, tmp_dir):
    """search() passes an EarlyStopping callback to the base tuner search.

    The tuner is built with a mocked __init__ (via the init fixture), so its
    attributes are assigned manually before searching.
    """
    graph = utils.build_graph()
    tuner = tuner_module.AutoTuner(oracle=mock.Mock(), hypermodel=graph)
    tuner.hypermodel = graph
    tuner.directory = tmp_dir
    tuner.project_name = ''
    # Oracle stub that hands back a single best trial with empty hps.
    hp = kerastuner.HyperParameters()
    trial = mock.Mock()
    trial.hyperparameters = hp
    oracle = mock.Mock()
    oracle.get_best_trials.return_value = (trial,)
    # Assign the configured oracle once; the original code first assigned a
    # throwaway mock.Mock() that was overwritten before any read.
    tuner.oracle = oracle
    tuner.search()
    callbacks = base_tuner_search.call_args_list[0][1]['callbacks']
    assert any(
        isinstance(callback, tf.keras.callbacks.EarlyStopping)
        for callback in callbacks
    )
def test_greedy_oracle_populate_doesnt_crash_with_init_hps(get_best_trials):
    """Populating repeatedly with task-specific initial hps must not raise."""
    hp = kerastuner.HyperParameters()
    graph = utils.build_graph()
    graph.build(hp)

    oracle = greedy.GreedyOracle(
        initial_hps=task_specific.IMAGE_CLASSIFIER,
        objective="val_loss",
        seed=utils.SEED,
    )
    best_trial = mock.Mock()
    best_trial.hyperparameters = hp
    get_best_trials.return_value = [best_trial]

    # Smoke loop: every proposed value set must be buildable by the graph.
    for _ in range(10):
        tf.keras.backend.clear_session()
        values = oracle._populate_space("a")["values"]
        hp = oracle.hyperparameters.copy()
        hp.values = values
        graph.build(hp)
        oracle.update_space(hp)