def test_deadline_stopper():
    """DeadlineStopper halts the optimization once its time budget is spent."""
    # A near-zero budget: the very first iteration already exceeds it,
    # so exactly one iteration time should be recorded.
    stopper = DeadlineStopper(0.0001)
    gp_minimize(bench3, [(-1.0, 1.0)], callback=stopper, n_calls=10, random_state=1)
    assert len(stopper.iter_time) == 1
    assert np.sum(stopper.iter_time) > stopper.total_time

    # A generous budget: all 10 requested calls complete well within it.
    stopper = DeadlineStopper(60)
    gp_minimize(bench3, [(-1.0, 1.0)], callback=stopper, n_calls=10, random_state=1)
    assert len(stopper.iter_time) == 10
    assert np.sum(stopper.iter_time) < stopper.total_time
def test_dump_and_load():
    """An OptimizeResult survives a dump/load round trip, with and without
    the objective function stored alongside it."""
    result = gp_minimize(
        bench3,
        [(-2.0, 2.0)],
        x0=[0.0],
        acq_func="LCB",
        n_calls=2,
        n_random_starts=1,
        random_state=1,
    )

    def roundtrip(res, **dump_kwargs):
        # Serialize into an anonymous temp file, rewind, and read back.
        with tempfile.TemporaryFile() as f:
            dump(res, f, **dump_kwargs)
            f.seek(0)
            return load(f)

    # By default the objective function is stored with the result.
    loaded = roundtrip(result)
    check_optimization_results_equality(result, loaded)
    assert "func" in loaded.specs["args"]

    # store_objective=False strips the objective at dump time.
    loaded = roundtrip(result, store_objective=False)
    check_optimization_results_equality(result, loaded)
    assert "func" not in loaded.specs["args"]

    # Dumping also works after the objective has been deleted from the result.
    del result.specs["args"]["func"]
    loaded = roundtrip(result, store_objective=False)
    check_optimization_results_equality(result, loaded)
    assert "func" not in loaded.specs["args"]
def test_categorical_integer():
    """A dimension given as a single-element integer list is treated as
    categorical, and the lone category is what gets sampled."""

    def objective(params):
        return np.random.uniform()

    dims = [[1]]
    res = gp_minimize(objective, dims, n_calls=2, n_initial_points=2, random_state=1)
    # The only available category must be the one that was evaluated first.
    assert res.x_iters[0][0] == dims[0][0]
def test_hollow_iterations_stopper():
    """HollowIterationsStopper ends the run after n_iterations rounds without
    a sufficiently large improvement over the best value seen so far."""
    FakeResult = namedtuple("Result", ["func_vals"])

    stopper = HollowIterationsStopper(3, 0)
    # The optimization runs for at least n_iterations + 1 evaluations.
    assert not stopper(FakeResult([10, 11, 12]))
    assert stopper(FakeResult([10, 11, 12, 13]))
    # Merely matching the current best does not count as an improvement.
    assert stopper(FakeResult([10, 11, 12, 10]))
    # Each new minimum restarts the n_iterations countdown to beat it.
    assert not stopper(FakeResult([10, 9, 8, 7, 7, 7]))
    assert stopper(FakeResult([10, 9, 8, 7, 7, 7, 7]))

    stopper = HollowIterationsStopper(3, 1.1)
    assert not stopper(FakeResult([10, 11, 12, 8.89]))
    assert stopper(FakeResult([10, 11, 12, 8.9]))
    # Small per-step improvements, each below the 1.1 threshold, still count
    # as hollow iterations unless they accumulate past the threshold in time.
    assert stopper(FakeResult([10, 9.9, 9.8, 9.7]))
    assert not stopper(FakeResult([10, 9.5, 9, 8.5, 8, 7.5]))

    # End-to-end: the callback actually truncates a gp_minimize run, and a
    # larger threshold stops it earlier.
    stopper = HollowIterationsStopper(3, 0)
    result = gp_minimize(bench3, [(-1.0, 1.0)], callback=stopper, n_calls=100, random_state=1)
    assert len(result.func_vals) == 10

    stopper = HollowIterationsStopper(3, 0.1)
    result = gp_minimize(bench3, [(-1.0, 1.0)], callback=stopper, n_calls=100, random_state=1)
    assert len(result.func_vals) == 5

    stopper = HollowIterationsStopper(3, 0.2)
    result = gp_minimize(bench3, [(-1.0, 1.0)], callback=stopper, n_calls=100, random_state=1)
    assert len(result.func_vals) == 4
def test_space_names_in_use_named_args():
    """Dimension names set on the space survive a gp_minimize run that uses
    the use_named_args decorator."""
    space = [Integer(250, 2000, name="n_estimators")]

    @use_named_args(space)
    def objective(n_estimators):
        return n_estimators

    res = gp_minimize(objective, space, n_calls=10, random_state=0)
    best_params = {dim.name: value for dim, value in zip(res.space, res.x)}
    assert "n_estimators" in best_params
    assert res.space.dimensions[0].name == "n_estimators"
def test_n_jobs():
    """Running with n_jobs=2 must evaluate exactly the same points as the
    serial run when the random state is fixed."""
    common = dict(
        acq_optimizer="lbfgs",
        acq_func="EI",
        n_calls=4,
        n_initial_points=2,
        random_state=1,
        noise=1e-10,
    )
    serial = gp_minimize(bench3, [(-2.0, 2.0)], **common)
    parallel = gp_minimize(bench3, [(-2.0, 2.0)], n_jobs=2, **common)
    assert_array_equal(serial.x_iters, parallel.x_iters)
def test_use_given_estimator():
    """Test that gp_minimize does not use default estimator if one is passed
    in explicitly."""
    domain = [(1.0, 2.0), (3.0, 4.0)]
    noise_correct = 1e5
    noise_fake = 1e-10

    # Build an estimator with the "correct" noise, then pass a conflicting
    # noise value directly to gp_minimize; the explicit estimator must win.
    estimator = cook_estimator("GP", domain, noise=noise_correct)
    res = gp_minimize(
        branin,
        domain,
        n_calls=4,
        n_initial_points=2,
        base_estimator=estimator,
        noise=noise_fake,
    )
    assert res["models"][-1].noise == noise_correct
def test_expected_minimum():
    """expected_minimum is deterministic for a fixed random state and does
    not exceed the best observed value when noise is negligible."""
    res = gp_minimize(
        bench3,
        [(-2.0, 2.0)],
        x0=[0.0],
        noise=1e-8,
        n_calls=8,
        n_random_starts=3,
        random_state=1,
    )

    first = expected_minimum(res, random_state=1)
    second = expected_minimum(res, random_state=1)
    x_min, f_min = first
    x_min2, f_min2 = second

    # Holds because the noise is effectively zero.
    assert f_min <= res.fun
    # Same seed, same answer.
    assert x_min == x_min2
    assert f_min == f_min2
def test_mixed_categoricals2(initgen):
    """gp_minimize finds the optimum of a purely categorical space whose
    best point is int("1") + 4."""
    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6]),
    ])

    def objective(param_list):
        first, second = param_list
        return int(first) + second

    res = gp_minimize(
        objective, space, n_calls=12, random_state=1, initial_point_generator=initgen
    )
    assert res["x"] == ["1", 4]
def test_mixed_categoricals(initgen):
    """gp_minimize handles a space mixing categorical and real dimensions;
    the loss int(x) + y * z is minimized near x in {"1","2"}, y=4, z=1.0."""
    space = Space([
        Categorical(name="x", categories=["1", "2", "3"]),
        Categorical(name="y", categories=[4, 5, 6]),
        Real(name="z", low=1.0, high=5.0),
    ])

    def objective(param_list):
        first, second, third = param_list
        return int(first) + second * third

    res = gp_minimize(
        objective, space, n_calls=20, random_state=1, initial_point_generator=initgen
    )
    # Either of the two near-optimal categorical combinations is acceptable.
    assert res["x"] in [["1", 4, 1.0], ["2", 4, 1.0]]
def test_defaults_are_equivalent():
    """The defaults of Optimizer reproduce the defaults of gp_minimize:
    an ask/tell loop and a single gp_minimize call visit the same points."""
    space = [(-5.0, 10.0), (0.0, 15.0)]

    opt = Optimizer(space, random_state=1)
    res_opt = None
    for _ in range(12):
        suggestion = opt.ask()
        res_opt = opt.tell(suggestion, branin(suggestion))

    res_min = gp_minimize(branin, space, n_calls=12, random_state=1)

    assert res_min.space == res_opt.space
    # Tolerate tiny float differences in the sampled points.
    assert np.allclose(res_min.x_iters, res_opt.x_iters)
    assert np.allclose(res_min.x, res_opt.x)

    # get_result() must agree with the result returned by the last tell().
    res_opt2 = opt.get_result()
    assert np.allclose(res_min.x_iters, res_opt2.x_iters)
    assert np.allclose(res_min.x, res_opt2.x)
def check_minimize(
    func,
    y_opt,
    bounds,
    acq_optimizer,
    acq_func,
    margin,
    n_calls,
    n_initial_points=10,
    init_gen="random",
):
    """Run gp_minimize on *func* over *bounds* and assert the best value
    found lies within *margin* of the known optimum *y_opt*."""
    result = gp_minimize(
        func,
        bounds,
        acq_optimizer=acq_optimizer,
        acq_func=acq_func,
        n_initial_points=n_initial_points,
        n_calls=n_calls,
        random_state=1,
        initial_point_generator=init_gen,
        noise=1e-10,
    )
    assert result.fun < y_opt + margin
def test_gpr_default():
    """Smoke test: gp_minimize runs to completion with default settings."""
    gp_minimize(branin, ((-5.0, 10.0), (0.0, 15.0)), n_initial_points=2, n_calls=2)