def test_attributes_result_errors_1():
    """best_score must raise ValueError for an objective never passed to add_search."""
    with pytest.raises(ValueError):
        hyper = Hyperactive()
        hyper.add_search(objective_function, search_space, n_iter=15)
        hyper.run()
        # objective_function1 was not registered via add_search, so this
        # lookup is expected to fail with ValueError.
        hyper.best_score(objective_function1)
def test_random_state_2():
    """Different random_state seeds should produce measurably different best scores."""

    def _best_score_for_seed(seed):
        # One independent optimization run with a fixed seed.
        runner = Hyperactive()
        runner.add_search(
            objective_function,
            search_space,
            n_iter=10,
            initialize={"random": 1},
            random_state=seed,
        )
        runner.run()
        return runner.best_score(objective_function)

    score_seed_1 = _best_score_for_seed(1)
    score_seed_10 = _best_score_for_seed(10)

    # `err` is a module-level tolerance; the two seeded runs must diverge by more.
    assert abs(score_seed_1 - score_seed_10) > err
def test_max_score_0():
    """max_score should terminate the run long before n_iter is exhausted."""

    def objective_function(para):
        # Inverted parabola: global maximum of 0 at x1 == 0.
        return -para["x1"] * para["x1"]

    search_space = {"x1": np.arange(0, 100, 0.1)}

    max_score = -9999
    optimizer = HillClimbingOptimizer(epsilon=0.01, rand_rest_p=0)

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=optimizer,
        n_iter=100000,
        # Start at x1 = 99 (score ~ -9801, already above max_score is False:
        # -9801 > -9999, so the very first evaluations can trigger the stop).
        initialize={"warm_start": [{"x1": 99}]},
        max_score=max_score,
    )
    hyper.run()

    results = hyper.results(objective_function)
    print("\n Results head \n", results.head())
    print("\n Results tail \n", results.tail())
    print("\nN iter:", len(results))

    # The run must have improved past -100 is False; rather: the best score
    # sits strictly between -100 and the max_score stopping threshold.
    assert -100 > hyper.best_score(objective_function) > max_score
def test_attributes_best_score_objective_function_0():
    """best_score returns a plain numeric value after a completed run."""
    hyper = Hyperactive()
    hyper.add_search(objective_function, search_space, n_iter=15)
    hyper.run()

    best = hyper.best_score(objective_function)
    assert isinstance(best, numbers.Number)
def test_initialize_vertices():
    """Two iterations seeded from search-space vertices reach the expected score."""
    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=2,
        initialize={"vertices": 2},
    )
    hyper.run()

    best = hyper.best_score(objective_function)
    # NOTE(review): as written this passes for ANY |best| < 10000.001; the
    # intent may have been `abs(abs(best) - 10000) < 0.001` — confirm against
    # the module-level objective_function/search_space before tightening.
    assert abs(best) - 10000 < 0.001
def test_initialize_grid_0():
    """A single grid-initialized iteration over {-1, 0, 1} should land on the optimum."""
    search_space = {"x1": np.arange(-1, 2, 1)}

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=1,
        initialize={"grid": 1},
    )
    hyper.run()

    # Best score at x1 == 0 is expected to be (near-)zero.
    assert abs(hyper.best_score(objective_function)) < 0.001
def test_initialize_warm_start_0():
    """Warm-starting at the optimum should give a (near-)zero score in one iteration."""
    warm_start_point = {"x1": 0}

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        n_iter=1,
        initialize={"warm_start": [warm_start_point]},
    )
    hyper.run()

    assert abs(hyper.best_score(objective_function)) < 0.001
def test_best_results_0(Optimizer, search_space, objective):
    """best_score must equal the objective re-evaluated at best_para.

    Parametrized over optimizer classes, search spaces and objectives
    (fixtures supplied by the test harness). memory=False forces every
    position to be re-evaluated rather than read from the memory cache.
    """
    # Fix: removed the dead no-op `search_space = search_space` self-assignment.
    objective_function = objective

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=Optimizer(),
        n_iter=10,
        memory=False,
        initialize={"vertices": 2},
    )
    hyper.run()

    # The stored best score must be reproducible from the stored best parameters.
    assert hyper.best_score(objective_function) == objective_function(
        hyper.best_para(objective_function)
    )