Example #1
def test_fixed_iteration_stopping_condition():
    stopping_condition = FixedIterationsStoppingCondition(5)
    loop_state_mock = mock.create_autospec(LoopState)
    loop_state_mock.iteration = 0

    assert stopping_condition.should_stop(loop_state_mock) is False

    loop_state_mock = mock.create_autospec(LoopState)
    loop_state_mock.iteration = 5

    assert stopping_condition.should_stop(loop_state_mock) is True
Example #2
def test_fixed_iteration_stopping_condition():
    n_iterations = 5

    stopping_condition = FixedIterationsStoppingCondition(n_iterations)
    loop_state_mock = mock.create_autospec(LoopState)

    loop_state_mock.iteration = 0
    assert stopping_condition.should_stop(loop_state_mock) is False

    loop_state_mock.iteration = n_iterations - 1
    assert stopping_condition.should_stop(loop_state_mock) is False

    loop_state_mock.iteration = n_iterations
    assert stopping_condition.should_stop(loop_state_mock) is True
Example #3
def test_loop():
    n_iterations = 5

    x_init = np.random.rand(5, 1)
    y_init = np.random.rand(5, 1)

    # Make GPy model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    acquisition = ExpectedImprovement(model)

    # Make loop and collect points
    bo = BayesianOptimizationLoop(model=model,
                                  space=space,
                                  acquisition=acquisition)
    bo.run_loop(UserFunctionWrapper(f),
                FixedIterationsStoppingCondition(n_iterations))

    # Check we got the correct number of points
    assert bo.loop_state.X.shape[0] == n_iterations + 5

    # Check the obtained results
    results = bo.get_results()

    assert results.minimum_location.shape[0] == 1
    assert results.best_found_value_per_iteration.shape[0] == n_iterations + 5
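The test assumes a user function f defined elsewhere in the test module. Any callable mapping an (n, 1) array to an (n, 1) array works; a hypothetical stand-in:

import numpy as np

def f(x):
    # Hypothetical 1-D objective; the original f is not shown in the snippet.
    return np.sin(3.0 * x) + x ** 2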
Example #4
def test_loop():
    n_iterations = 5

    x_init = np.random.rand(5, 1)
    y_init, y_constraint_init = f(x_init)

    # Make GPy objective model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    # Make GPy constraint model
    gpy_constraint_model = GPy.models.GPRegression(x_init, y_constraint_init)
    constraint_model = GPyModelWrapper(gpy_constraint_model)

    space = ParameterSpace([ContinuousParameter("x", 0, 1)])
    acquisition = ExpectedImprovement(model)

    # Make loop and collect points
    bo = UnknownConstraintBayesianOptimizationLoop(
        model_objective=model, space=space, acquisition=acquisition, model_constraint=constraint_model
    )
    bo.run_loop(
        UserFunctionWrapper(f, extra_output_names=["Y_constraint"]), FixedIterationsStoppingCondition(n_iterations)
    )

    # Check we got the correct number of points
    assert bo.loop_state.X.shape[0] == n_iterations + 5
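With extra_output_names=["Y_constraint"], f must return a tuple of (objective, constraint) arrays, matching the unpacking y_init, y_constraint_init = f(x_init) above. A hypothetical stand-in:

import numpy as np

def f(x):
    # Hypothetical objective and constraint values, both of shape (n, 1);
    # the original f is not shown in the snippet.
    y = np.sin(3.0 * x) + x ** 2
    y_constraint = x - 0.5  # sign convention depends on the loop's feasibility acquisition
    return y, y_constraint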
Example #5
def run_optimisation(current_range, freq_range, power_range):
    parameter_space = ParameterSpace([
        ContinuousParameter('current', current_range[0], current_range[1]),
        ContinuousParameter('freq', freq_range[0], freq_range[1]),
        ContinuousParameter('power', power_range[0], power_range[1]),
    ])

    def function(X):
        current = X[:, 0]
        freq = X[:, 1]
        power = X[:, 2]
        out = np.zeros((len(current), 1))
        for g in range(len(current)):
            # Set JPA current, frequency & power for this sample (code elided
            # in the original), then record the SNR; negated to maximise it.
            out[g, 0] = -get_SNR(plot=False)[-1]
        return out

    num_data_points = 10

    design = RandomDesign(parameter_space)
    X = design.get_samples(num_data_points)
    Y = function(X)

    model_gpy = GPRegression(X, Y)
    model_gpy.optimize()
    model_emukit = GPyModelWrapper(model_gpy)

    exp_imprv = ExpectedImprovement(model=model_emukit)
    optimizer = GradientAcquisitionOptimizer(space=parameter_space)
    point_calc = SequentialPointCalculator(exp_imprv, optimizer)

    bayesopt_loop = BayesianOptimizationLoop(model=model_emukit,
                                             space=parameter_space,
                                             acquisition=exp_imprv,
                                             batch_size=1)

    stopping_condition = FixedIterationsStoppingCondition(i_max=100)

    bayesopt_loop.run_loop(q, stopping_condition)

    results = bayesopt_loop.get_results()
    coord_results = results.minimum_location
    min_value = results.minimum_value
    step_results = results.best_found_value_per_iteration
    print(coord_results)
    print(min_value)

    return coord_results, abs(min_value)
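The imports this function relies on are omitted from the snippet; for current emukit releases they would typically be (verify against your installed version):

import numpy as np
from GPy.models import GPRegression
from emukit.bayesian_optimization.acquisitions import ExpectedImprovement
from emukit.bayesian_optimization.loops import BayesianOptimizationLoop
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.initial_designs import RandomDesign
from emukit.core.loop import FixedIterationsStoppingCondition, SequentialPointCalculator
from emukit.core.optimization import GradientAcquisitionOptimizer
from emukit.model_wrappers import GPyModelWrapper

get_SNR and the user function q are defined elsewhere in the original project.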
Example #6
def fmin_fabolas(func, space: ParameterSpace, s_min: float, s_max: float, n_iters: int,
                 n_init: int = 20, marginalize_hypers: bool = True) -> LoopState:
    """
    Simple interface for Fabolas which optimizes the hyperparameters of machine learning algorithms
    by reasoning across training data set subsets. For further details see:

    Fast Bayesian hyperparameter optimization on large datasets
    A. Klein and S. Falkner and S. Bartels and P. Hennig and F. Hutter
    Electronic Journal of Statistics (2017)

    :param func: objective function that takes a hyperparameter configuration x and a training dataset size s,
        and returns the validation error and the runtime after training x on s datapoints.
    :param space: input space
    :param s_min: minimum training dataset size (linear scale)
    :param s_max: maximum training dataset size (linear scale)
    :param n_iters: number of iterations
    :param n_init: number of initial design points (must be smaller than n_iters)
    :param marginalize_hypers: whether to use a MAP estimate or to marginalize over the GP hyperparameters

    :return: LoopState with all evaluated data points
    """
    initial_design = LatinDesign(space)

    grid = initial_design.get_samples(n_init)
    X_init = np.zeros([n_init, grid.shape[1] + 1])
    Y_init = np.zeros([n_init, 1])
    cost_init = np.zeros([n_init])

    subsets = np.array([s_max // 2 ** i for i in range(2, 10)])[::-1]
    idx = np.where(subsets < s_min)[0]

    subsets[idx] = s_min

    for it in range(n_init):
        func_val, cost = func(x=grid[it], s=subsets[it % len(subsets)])

        X_init[it] = np.concatenate((grid[it], np.array([subsets[it % len(subsets)]])))
        Y_init[it] = func_val
        cost_init[it] = cost

    def wrapper(x):
        y, c = func(x[0, :-1], np.exp(x[0, -1]))

        return np.array([[y]]), np.array([[c]])

    loop = FabolasLoop(X_init=X_init, Y_init=Y_init, cost_init=cost_init, space=space, s_min=s_min,
                       s_max=s_max, marginalize_hypers=marginalize_hypers)
    loop.run_loop(user_function=UserFunctionWrapper(wrapper),
                  stopping_condition=FixedIterationsStoppingCondition(n_iters - n_init))

    return loop.loop_state
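A hypothetical call, with a toy objective standing in for a real training routine (all names below are illustrative only):

import numpy as np
from emukit.core import ContinuousParameter, ParameterSpace

def toy_objective(x, s):
    # Hypothetical stand-in: validation error falls with subset size s,
    # cost grows linearly with s; a real objective would train a model here.
    err = float(x[0] ** 2 + 100.0 / s)
    cost = 1e-4 * float(s)
    return err, cost

space = ParameterSpace([ContinuousParameter("lr", 0.0, 1.0)])
state = fmin_fabolas(toy_objective, space, s_min=100, s_max=10000, n_iters=30)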
Example #7
def test_loop():
    n_iterations = 5

    x_init = np.random.rand(5, 1)
    y_init = np.random.rand(5, 1)

    # Make GPy model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    acquisition = ModelVariance(model)

    # Make loop and collect points
    exp_design = ExperimentalDesignLoop(space, model, acquisition)
    exp_design.run_loop(UserFunctionWrapper(f), FixedIterationsStoppingCondition(n_iterations))

    # Check we got the correct number of points
    assert exp_design.loop_state.X.shape[0] == n_iterations + 5
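Unlike the optimization examples, this one drives an experimental-design loop; the emukit imports it assumes are typically:

from emukit.experimental_design import ExperimentalDesignLoop
from emukit.experimental_design.acquisitions import ModelVariance

Here f is the same kind of user function as in the earlier tests; see the hypothetical stand-in after Example #3.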
Example #8
model_gpy = GPRegression(X, Y)
model_gpy.optimize()
model_emukit = GPyModelWrapper(model_gpy)

# Set up Bayesian optimisation routine
exp_imprv = ExpectedImprovement(model=model_emukit)
optimizer = GradientAcquisitionOptimizer(space=parameter_space)
point_calc = SequentialPointCalculator(exp_imprv, optimizer)

# Bayesian optimisation routine
bayesopt_loop = BayesianOptimizationLoop(model=model_emukit,
                                         space=parameter_space,
                                         acquisition=exp_imprv,
                                         batch_size=1)

stopping_condition = FixedIterationsStoppingCondition(i_max=no_BO_sims)
bayesopt_loop.run_loop(q, stopping_condition)


# Results of Bayesian optimisation
coord_results = bayesopt_loop.get_results().minimum_location
min_value = bayesopt_loop.get_results().minimum_value
step_results = bayesopt_loop.get_results().best_found_value_per_iteration
print(coord_results)
print(min_value)

# Save the parameters of the best resonator
results = [coord_results, min_value]
with open('results.txt', 'w') as results_file:
    results_file.write(str(results))
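q and no_BO_sims are defined earlier in the original script. q only needs to map an (n, d) input array to an (n, 1) output array; a hypothetical placeholder for exercising the loop wiring:

import numpy as np

def q(X):
    # Hypothetical placeholder objective: sum of squares over the parameters.
    return np.sum(X ** 2, axis=1, keepdims=True)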
Example #9
    acquisition = LogExpectedImprovement(model)
elif args.acquisition_type == "entropy_search":
    model = BOGP(X_init=X_init, Y_init=Y_init)
    acquisition = EntropySearch(model, space=space)


# if with_gradients:
#    acquisition_optimizer = AcquisitionOptimizer(space)
# else:
acquisition_optimizer = DirectOptimizer(space)

candidate_point_calculator = Sequential(acquisition, acquisition_optimizer)

bo = BayesianOptimizationLoop(model=model, space=space, X_init=X_init, Y_init=Y_init, acquisition=acquisition,
                              candidate_point_calculator=candidate_point_calculator)
bo.run_loop(user_function=obj, stopping_condition=FixedIterationsStoppingCondition(args.num_iterations))

curr_inc = np.inf
traj = []
regret = []
for yi in bo.loop_state.Y:
    if curr_inc > yi[0]:
        curr_inc = yi[0]
    traj.append(curr_inc)
    regret.append(curr_inc - f_opt)

data = dict()
data["regret"] = regret

path = os.path.join(args.output_path, args.benchmark)
os.makedirs(path, exist_ok=True)
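The incumbent trajectory and regret can also be computed without the explicit loop, assuming bo.loop_state.Y has shape (n, 1):

import numpy as np

# Equivalent vectorized form of the incumbent/regret bookkeeping above.
traj = np.minimum.accumulate(bo.loop_state.Y[:, 0])
regret = (traj - f_opt).tolist()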
Example #10
    gpmodel.optimize()
    model = GPyModelWrapper(gpmodel)

acquisition = ExpectedImprovement(model)
acquisition_optimizer = DirectOptimizer(space)

candidate_point_calculator = Sequential(acquisition, acquisition_optimizer)

bo = BayesianOptimizationLoop(model=model, space=space, X_init=X_init, Y_init=Y_init, acquisition=acquisition,
                              candidate_point_calculator=candidate_point_calculator)

overhead = []
st = time.time()
for i in range(args.num_iterations):
    t = time.time()
    bo.run_loop(user_function=obj, stopping_condition=FixedIterationsStoppingCondition(i + X_init.shape[0]))
    overhead.append(time.time() - t)
    print(i)

data = dict()
data["overhead"] = overhead
data["runtime"] = np.cumsum(overhead).tolist()

path = os.path.join(args.output_path, args.benchmark)
os.makedirs(path, exist_ok=True)

fname = os.path.join(path, "%s_run_%d.json" % (args.model_type, args.run_id))

with open(fname, "w") as fh:
    json.dump(data, fh)
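The snippet's header is truncated; the standard-library and numpy imports it uses are:

import json
import os
import time

import numpy as np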
Example #11
    acquisition = LogExpectedImprovement(model)

    # acquisition_optimizer = DirectOptimizer(space)
    acquisition_optimizer = DifferentialEvolution(space)
    candidate_point_calculator = Sequential(acquisition, acquisition_optimizer)

    bo = BayesianOptimizationLoop(model=model, space=space, X_init=X_init, Y_init=Y_init, acquisition=acquisition,
                                  candidate_point_calculator=candidate_point_calculator)
    initial_results = []
    for i in range(X_init.shape[0]):
        initial_results.append(UserFunctionResult(X_init[i], Y_init[i], C_init[i]))
    loop_state = LoopState(initial_results)
    bo.loop_state = loop_state

    bo.run_loop(user_function=obj,
                stopping_condition=FixedIterationsStoppingCondition(args.num_iterations - args.n_init))
    C = bo.loop_state.C
    incumbents = []
    curr_inc = None
    curr_inc_val = np.inf
    for yi, xi in zip(bo.loop_state.Y, bo.loop_state.X):
        if curr_inc_val > yi[0]:
            curr_inc = xi
            curr_inc_val = yi[0]
        incumbents.append(curr_inc)

elif args.method == "gp_ei_per_cost":

    obj = UserFunctionWithCostWrapper(evaluate)

    init_design = RandomDesign(space)