def test_user_function_wrapper_callable_single_output():
    """Calling the wrapper directly must match calling its evaluate() method."""
    def double(x):
        return 2 * x

    inputs = np.array([[1], [2], [3]])
    wrapper = UserFunctionWrapper(double)

    via_evaluate = wrapper.evaluate(inputs)
    via_call = wrapper(inputs)

    assert len(via_evaluate) == len(via_call)
    for record_e, record_c in zip(via_evaluate, via_call):
        assert record_e == record_c
def test_user_function_wrapper_evaluation_single_output():
    """evaluate() should emit one record per input row with matching X and Y."""
    def double(x):
        return 2 * x

    inputs = np.array([[1], [2], [3]])
    wrapper = UserFunctionWrapper(double)

    records = wrapper.evaluate(inputs)

    assert len(records) == inputs.shape[0]
    for row, record in zip(inputs, records):
        assert_array_equal(record.X, row)
        assert_array_equal(record.Y, double(row))
def test_user_function_wrapper():
    """Evaluating a 2-d quadratic should yield one 1-d X/Y pair per input row."""
    def sum_of_squares(x):
        # Column-slice form keeps the output 2-d: (n, 1).
        return x[:, 0:1] ** 2 + x[:, 1:2] ** 2

    user_function = UserFunctionWrapper(sum_of_squares)
    results = user_function.evaluate(np.random.rand(10, 2))

    assert len(results) == 10, "A different number of results were expected"
    for res in results:
        assert res.X.ndim == 1, "X results are expected to be 1 dimensional"
        assert res.Y.ndim == 1, "Y results are expected to be 1 dimensional"
def test_user_function_wrapper_evaluation_with_cost():
    """Extra outputs named in extra_output_names are attached to each record."""
    def function(x):
        # Second tuple element is a unit cost per input row.
        return 2 * x, np.array([[1]] * x.shape[0])

    inputs = np.array([[1], [2], [3]])
    wrapper = UserFunctionWrapper(function, extra_output_names=['cost'])

    records = wrapper.evaluate(inputs)

    assert len(records) == inputs.shape[0]
    for i, record in enumerate(records):
        expected_y, expected_cost = function(inputs[i])
        assert_array_equal(record.X, inputs[i])
        assert_array_equal(record.Y, expected_y)
        assert_array_equal(record.cost, expected_cost[0])
def test_user_function_wrapper_evaluation_no_cost():
    """Without extra_output_names every record's cost attribute must be None."""
    def double(x):
        return 2 * x

    inputs = np.array([[1], [2], [3]])
    wrapper = UserFunctionWrapper(double)

    records = wrapper.evaluate(inputs)

    assert len(records) == inputs.shape[0]
    for i, record in enumerate(records):
        assert_array_equal(record.X, inputs[i])
        assert_array_equal(record.Y, double(inputs[i]))
        assert record.cost is None
def test_loop():
    """BayesianOptimizationLoop adds one point per iteration to the 5 initial ones."""
    n_iterations = 5
    x_init = np.random.rand(5, 1)
    y_init = np.random.rand(5, 1)

    # Make GPy model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)
    space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    acquisition = ExpectedImprovement(model)

    # Make loop and collect points
    bo = BayesianOptimizationLoop(model=model, space=space, acquisition=acquisition)
    stopping_condition = FixedIterationsStoppingCondition(n_iterations)
    bo.run_loop(UserFunctionWrapper(f), stopping_condition)

    # Check we got the correct number of points
    assert bo.loop_state.X.shape[0] == n_iterations + 5

    # Check the obtained results
    results = bo.get_results()
    assert results.minimum_location.shape[0] == 1
    assert results.best_found_value_per_iteration.shape[0] == n_iterations + 5
def test_loop():
    """UnknownConstraintBayesianOptimizationLoop collects one point per iteration.

    Builds separate GPy models for the objective and the constraint, runs the
    loop for n_iterations, and checks the loop state grew from the 5 initial
    points accordingly.
    """
    n_iterations = 5
    x_init = np.random.rand(5, 1)
    y_init, y_constraint_init = f(x_init)

    # Make GPy objective model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    # Make GPy constraint model.
    # BUG FIX: fit the constraint GP on the constraint observations
    # (y_constraint_init), not on the objective values; the original
    # computed y_constraint_init but never used it.
    gpy_constraint_model = GPy.models.GPRegression(x_init, y_constraint_init)
    constraint_model = GPyModelWrapper(gpy_constraint_model)

    space = ParameterSpace([ContinuousParameter("x", 0, 1)])
    acquisition = ExpectedImprovement(model)

    # Make loop and collect points
    bo = UnknownConstraintBayesianOptimizationLoop(
        model_objective=model, space=space, acquisition=acquisition, model_constraint=constraint_model
    )
    bo.run_loop(
        UserFunctionWrapper(f, extra_output_names=["Y_constraint"]), FixedIterationsStoppingCondition(n_iterations)
    )

    # Check we got the correct number of points
    assert bo.loop_state.X.shape[0] == n_iterations + 5
def advance(self):
    """Run one outer iteration of batch Bayesian optimization.

    Performs lazy initialization on the first call, then executes a single
    inner-loop step, which evaluates one batch of points.
    """
    first_call = self.iteration == 0
    if first_call:
        logger.info("Initializing Batch Bayesian Optimization")
        self.initialize()

    self.iteration += 1
    logger.info("Starting BOO iteration %d" % self.iteration)

    # One fixed inner-loop iteration per advance() call.
    self.loop.run_loop(UserFunctionWrapper(self._evaluate_batch), 1)
def fmin_fabolas(func, space: ParameterSpace, s_min: float, s_max: float, n_iters: int, n_init: int = 20,
                 marginalize_hypers: bool = True) -> LoopState:
    """
    Simple interface for Fabolas, which optimizes the hyperparameters of machine learning
    algorithms by reasoning across training data set subsets.

    For further details see:
    Fast Bayesian hyperparameter optimization on large datasets
    A. Klein and S. Falkner and S. Bartels and P. Hennig and F. Hutter
    Electronic Journal of Statistics (2017)

    :param func: objective function which gets a hyperparameter configuration x and training dataset size s as input,
        and returns the validation error and the runtime after training x on s datapoints.
    :param space: input space
    :param s_min: minimum training dataset size (linear scale)
    :param s_max: maximum training dataset size (linear scale)
    :param n_iters: number of iterations
    :param n_init: number of initial design points (needs to be smaller than num_iters)
    :param marginalize_hypers: determines whether to use a MAP estimate or to marginalize over the GP hyperparameters
    :return: LoopState with all evaluated data points
    """
    # Latin hypercube sample of hyperparameter configurations for the initial design.
    grid = LatinDesign(space).get_samples(n_init)

    # Inputs carry one extra column for the dataset size s.
    X_init = np.zeros([n_init, grid.shape[1] + 1])
    Y_init = np.zeros([n_init, 1])
    cost_init = np.zeros([n_init])

    # Geometric schedule of subset sizes (s_max/4, s_max/8, ...), ascending,
    # clamped from below at s_min.
    subsets = np.array([s_max // 2 ** i for i in range(2, 10)])[::-1]
    subsets[subsets < s_min] = s_min

    # Evaluate the initial design, cycling through the subset sizes.
    for it in range(n_init):
        subset_size = subsets[it % len(subsets)]
        func_val, cost = func(x=grid[it], s=subset_size)

        X_init[it] = np.concatenate((grid[it], np.array([subset_size])))
        Y_init[it] = func_val
        cost_init[it] = cost

    def wrapper(x):
        # The loop encodes the dataset size in log scale in the last column;
        # convert back to linear scale before calling the user's function.
        y, c = func(x[0, :-1], np.exp(x[0, -1]))
        return np.array([[y]]), np.array([[c]])

    loop = FabolasLoop(X_init=X_init, Y_init=Y_init, cost_init=cost_init, space=space, s_min=s_min, s_max=s_max,
                       marginalize_hypers=marginalize_hypers)
    loop.run_loop(user_function=UserFunctionWrapper(wrapper),
                  stopping_condition=FixedIterationsStoppingCondition(n_iters - n_init))

    return loop.loop_state
def test_user_function_wrapper_invalid_input():
    """evaluate() must reject malformed inputs and malformed function outputs."""
    # invalid input: a 1-d array where a 2-d (n_points, n_dims) array is required
    with pytest.raises(ValueError):
        UserFunctionWrapper(lambda x: 2 * x).evaluate(np.array([1]))

    # invalid function output: wrong dimensionality
    with pytest.raises(ValueError):
        UserFunctionWrapper(lambda x: np.array([2])).evaluate(np.array([[1]]))

    # invalid function output type: plain list instead of an ndarray
    with pytest.raises(ValueError):
        UserFunctionWrapper(lambda x: [2]).evaluate(np.array([[1]]))
def test_loop():
    """ExperimentalDesignLoop adds one point per iteration to the 5 initial ones.

    CONSISTENCY FIX: express the final expectation as n_iterations + 5 (as the
    sibling loop tests do) instead of the magic number 10, so the assertion
    stays correct if n_iterations or the initial design size changes.
    """
    n_iterations = 5
    x_init = np.random.rand(5, 1)
    y_init = np.random.rand(5, 1)

    # Make GPy model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)
    space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    acquisition = ModelVariance(model)

    # Make loop and collect points
    exp_design = ExperimentalDesignLoop(space, model, acquisition)
    exp_design.run_loop(UserFunctionWrapper(f), FixedIterationsStoppingCondition(n_iterations))

    # Check we got the correct number of points: initial design plus one per iteration
    assert exp_design.loop_state.X.shape[0] == n_iterations + 5
def test_random_search_with_init_data():
    """RandomSearch seeded with 5 initial points plus 5 iterations yields 10 records."""
    np.random.seed(42)
    branin_fcn, parameter_space = branin_function()

    # Ensure function returns a value for cost
    def branin_fcn_with_cost(x):
        return branin_fcn(x), np.zeros((x.shape[0], 1))

    wrapped_fcn = UserFunctionWrapper(branin_fcn_with_cost, extra_output_names=['cost'])

    x_init = parameter_space.sample_uniform(5)
    y_init = branin_fcn(x_init)
    cost_init = np.ones([5, 1])

    rs = RandomSearch(parameter_space, x_init=x_init, y_init=y_init, cost_init=cost_init)
    rs.run_loop(wrapped_fcn, 5)

    # 5 initial records + 5 collected ones in every loop-state field.
    assert len(rs.loop_state.Y) == 10
    assert len(rs.loop_state.X) == 10
    assert len(rs.loop_state.cost) == 10
def test_user_function_wrapper_invalid_input():
    """Malformed inputs and malformed function outputs should raise ValueError."""
    valid_input = np.array([[1]])

    # invalid input: 1-d array instead of the required 2-d array
    with pytest.raises(ValueError):
        ufw = UserFunctionWrapper(lambda x: 2 * x)
        ufw.evaluate(np.array([1]))

    # invalid function output: wrong dimensionality
    with pytest.raises(ValueError):
        ufw = UserFunctionWrapper(lambda x: np.array([2]))
        ufw.evaluate(valid_input)

    # invalid function output type: list instead of ndarray
    with pytest.raises(ValueError):
        ufw = UserFunctionWrapper(lambda x: [2])
        ufw.evaluate(valid_input)
def test_user_function_too_few_outputs_outputs_fails():
    """Declaring an extra output the function does not return must fail."""
    inputs = np.array([[1], [2], [3]])
    # 'cost' is declared but the function returns only Y.
    wrapper = UserFunctionWrapper(lambda x: 2 * x, extra_output_names=['cost'])

    with pytest.raises(ValueError):
        wrapper.evaluate(inputs)
def test_user_function_too_many_outputs_outputs_fails():
    """Returning an extra output that was never declared must fail."""
    inputs = np.array([[1], [2], [3]])
    # The function returns a second output, but no extra_output_names were given.
    wrapper = UserFunctionWrapper(lambda x: (2 * x, np.array([1])))

    with pytest.raises(ValueError):
        wrapper.evaluate(inputs)
if focus == 'energy': return opt_result.flatten()[0].reshape(1, -1) if focus == 'constant': return opt_result.flatten()[1:4].reshape(1, -1) else: return opt_result[:, :].reshape(1, -1) else: return opt_result.reshape(1, -1) if n_params == 4: from params_setting import parameter_space_4d as parameter_space else: from params_setting import parameter_space_6d as parameter_space f = UserFunctionWrapper(black_box_func) def main(): print("######################") global target, X0, Y0, values, frac_M, frac_X, bo_flag #target_params = np.array([[0.14,0.4],[1.4,0.03]]) #target = LiX_wrapper(True,'LiF','Rocksalt','JC', # target_params,False,False,eng) target = np.array([[-764.5, 6.012 * 0.99, 6.012 * 0.99, 6.012 * 0.99]]) if focus == 'energy': target_comp = target[0, 0].reshape(1, -1)