def test_evaluate_min_params():
    res = gp_minimize(bench3, [(-2.0, 2.0)],
                      x0=[0.],
                      noise=1e-8,
                      n_calls=8,
                      n_random_starts=3,
                      random_state=1)

    x_min, f_min = expected_minimum(res, random_state=1)
    x_min2, f_min2 = expected_minimum_random_sampling(res,
                                                      n_random_starts=1000,
                                                      random_state=1)
    plots.plot_gaussian_process(res)
    assert _evaluate_min_params(res, params='result') == res.x
    assert _evaluate_min_params(res, params=[1.]) == [1.]
    assert _evaluate_min_params(res, params='expected_minimum',
                                random_state=1) == x_min
    assert _evaluate_min_params(res,
                                params='expected_minimum',
                                n_minimum_search=20,
                                random_state=1) == x_min
    assert _evaluate_min_params(res,
                                params='expected_minimum_random',
                                n_minimum_search=1000,
                                random_state=1) == x_min2


def plot_optimizer(res, n_iter, max_iters=5):
    if n_iter == 0:
        show_legend = True
    else:
        show_legend = False
    ax = plt.subplot(max_iters, 2, 2 * n_iter + 1)
    # Plot GP(x) + contours
    ax = plot_gaussian_process(res,
                               ax=ax,
                               objective=objective_wo_noise,
                               noise_level=noise_level,
                               show_legend=show_legend,
                               show_title=True,
                               show_next_point=False,
                               show_acq_func=False)
    ax.set_ylabel("")
    ax.set_xlabel("")
    if n_iter < max_iters - 1:
        ax.get_xaxis().set_ticklabels([])
    # Plot EI(x)
    ax = plt.subplot(max_iters, 2, 2 * n_iter + 2)
    ax = plot_gaussian_process(res,
                               ax=ax,
                               noise_level=noise_level,
                               show_legend=show_legend,
                               show_title=False,
                               show_next_point=True,
                               show_acq_func=True,
                               show_observations=False,
                               show_mu=False)
    ax.set_ylabel("")
    ax.set_xlabel("")
    if n_iter < max_iters - 1:
        ax.get_xaxis().set_ticklabels([])
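#############################################################################
# A minimal sketch of how ``plot_optimizer`` could be driven from an
# ask/tell loop (illustrative only; ``objective``, ``objective_wo_noise``
# and ``noise_level`` are assumed to be the noisy 1-D test function and its
# noise level used throughout these examples):

opt_demo = Optimizer([(-2.0, 2.0)], "GP", acq_func="EI",
                     n_initial_points=5, acq_optimizer="sampling",
                     random_state=42)

plt.figure(figsize=(8, 14))
for i in range(10):
    next_x = opt_demo.ask()
    f_val = objective(next_x)
    res = opt_demo.tell(next_x, f_val)
    # Plot only once a surrogate model has been fit, i.e. after the
    # 5 initial random points.
    if i >= 5:
        plot_optimizer(res, n_iter=i - 5, max_iters=5)
plt.tight_layout()
plt.show()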
    "noise_level": noise_level,
    "show_legend": True,
    "show_title": True,
    "show_next_point": False,
    "show_acq_func": True
}

#############################################################################
# We run an optimization loop with standard settings

for i in range(30):
    next_x = opt.ask()
    f_val = objective(next_x)
    opt.tell(next_x, f_val)
# The same output could be created with opt.run(objective, n_iter=30)
_ = plot_gaussian_process(opt.get_result(), **plot_args)
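#############################################################################
# A minimal sketch of the ``opt.run`` shortcut mentioned above; the
# optimizer settings are illustrative, and a fresh instance is used so the
# 30 evaluations are not appended to the ones already collected by ``opt``:

opt_run = Optimizer([(-2.0, 2.0)], "GP", n_initial_points=5,
                    acq_optimizer="sampling", random_state=1)
res_run = opt_run.run(objective, n_iter=30)
_ = plot_gaussian_process(res_run, **plot_args)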

#############################################################################
# We see that a minimum is found and "exploited".
#
# Now let's set kappa and xi to other values and
# pass them to the optimizer:
acq_func_kwargs = {"xi": 10000, "kappa": 10000}
#############################################################################

opt = Optimizer([(-2.0, 2.0)],
                "GP",
                n_initial_points=3,
                acq_optimizer="sampling",
                acq_func_kwargs=acq_func_kwargs)
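#############################################################################
# A short sketch of how an optimizer with these acquisition settings could
# be run and plotted; the number of calls is illustrative, and a separate
# instance is used so ``opt`` stays untouched for the later snippets:

opt_exploit = Optimizer([(-2.0, 2.0)],
                        "GP",
                        n_initial_points=3,
                        acq_optimizer="sampling",
                        acq_func_kwargs=acq_func_kwargs)
res_exploit = opt_exploit.run(objective, n_iter=20)
_ = plot_gaussian_process(res_exploit, **plot_args)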
#############################################################################
# Plot the 5 iterations following the 5 random points

for n_iter in range(5):
    # Plot true function.
    plt.subplot(5, 2, 2 * n_iter + 1)

    if n_iter == 0:
        show_legend = True
    else:
        show_legend = False

    ax = plot_gaussian_process(res,
                               n_calls=n_iter,
                               objective=f_wo_noise,
                               noise_level=noise_level,
                               show_legend=show_legend,
                               show_title=False,
                               show_next_point=False,
                               show_acq_func=False)
    ax.set_ylabel("")
    ax.set_xlabel("")
    # Plot EI(x)
    plt.subplot(5, 2, 2 * n_iter + 2)
    ax = plot_gaussian_process(res,
                               n_calls=n_iter,
                               show_legend=show_legend,
                               show_title=False,
                               show_mu=False,
                               show_acq_func=True,
                               show_observations=False,
                               show_next_point=True)
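# Render the 5 x 2 grid of surrogate-model and acquisition-function plots.
plt.show()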
#########################################################################
# Like the ``*_minimize()`` functions, the first few points are suggestions
# from the initial point generator, as there is no data yet with which to
# fit a surrogate model.


for i in range(9):
    next_x = opt.ask()
    f_val = objective(next_x)
    res = opt.tell(next_x, f_val)

#########################################################################
# We can now plot the random suggestions and the first model that has been
# fit:
_ = plot_gaussian_process(res, objective=objective_wo_noise,
                          noise_level=noise_level,
                          show_next_point=False,
                          show_acq_func=True)
plt.show()
#########################################################################
# Let us sample a few more points and plot the optimizer again:


for i in range(10):
    next_x = opt.ask()
    f_val = objective(next_x)
    res = opt.tell(next_x, f_val)

_ = plot_gaussian_process(res, objective=objective_wo_noise,
                          noise_level=noise_level,
                          show_next_point=True,
                          show_acq_func=True)
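# Display the final surrogate model and acquisition function.
plt.show()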