# NOTE(review): this chunk was truncated mid-statement at both ends by the
# newline-mangling; the opening `bo = BayesOptSearch(est, X,` and the final
# suptitle arguments were restored from the duplicate chunk below — confirm
# against the original source. It is otherwise a duplicate of that chunk.
#
# OOP interface for optimising hyper-parameters and applying the results.
# The objective is not put through log here; if it were, it should be
# bounded away from zero via objective_log_bound.
bo = BayesOptSearch(est, X, param_bounds,
                    objective_log=False, objective_log_bound=100,
                    num_initial_evaluations=5)

# optimisation starts here; use results and apply them to the model
best_params = bo.optimize(num_iter=5)
est.set_parameters_from_dict(best_params)

est.fit(X)
visualise_fit_2d(est, X)
plt.suptitle("Original fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))

# the 1D objective model can only be visualised for a single parameter
if len(param_bounds) == 1:
    plt.figure()
    plot_bayesopt_model_1d(bo)
    plt.title("Objective")

# now change the data, with a different length scale
X = np.random.randn(200, D) * .1

# reset the optimiser, but initialise it from the old model;
# sample 3 random points to update it on the new data
best_params = bo.re_initialise(new_data=X, num_initial_evaluations=3)

# this optimisation now runs on the "new" objective
best_params = bo.optimize(num_iter=3)
est.set_parameters_from_dict(best_params)

est.fit(X)
visualise_fit_2d(est, X)
plt.suptitle("New fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))
# Demo of the self-tuning estimator: KernelExpLiteGaussianAdaptive runs
# Bayesian optimisation of its hyper-parameters internally on fit(), and
# re-learns them (warm-started from the previous ones) when fit() is
# called again on new data.
est = KernelExpLiteGaussianAdaptive(sigma=1., lmbda=.001, D=D, N=N,
                                    # these parameters are all optional, to control Bayesian opt.
                                    num_initial_evaluations=3, num_evaluations=3,
                                    minimum_size_learning=100,
                                    # these depend on how much data changes between the "fit" calls
                                    num_initial_evaluations_relearn=3,
                                    num_evaluations_relearn=3,
                                    # this can be used to adjust search spaces or include more parameters
                                    # by default, only sigma is optimised
                                    param_bounds={'sigma': [-3, 3]}
                                    )

# automatically sets parameters
est.fit(X)

# only for illustration purpose
plt.figure()
plot_bayesopt_model_1d(est.bo)
plt.title("Original fit")

visualise_fit_2d(est, X)
plt.suptitle("Original fit")

# now change the data, with a different length scale
X = np.random.randn(N, D) * .1

# re-learns parameters, but starts from the previous ones
est.fit(X)
visualise_fit_2d(est, X)
plt.suptitle("New fit")

# only for illustration purpose
# NOTE(review): this chunk was truncated mid-statement by the newline-mangling;
# the constructor opening `est = KernelExpLiteGaussianAdaptive(sigma=1.,
# lmbda=.001, D=D, N=N,` was restored from the duplicate chunk above — confirm
# against the original source. It is otherwise a duplicate of that chunk.
#
# Demo of the self-tuning estimator: hyper-parameters are learned via
# Bayesian optimisation inside fit(), and re-learned on subsequent fits.
est = KernelExpLiteGaussianAdaptive(sigma=1., lmbda=.001, D=D, N=N,
                                    # these parameters are all optional, to control Bayesian opt.
                                    num_initial_evaluations=3, num_evaluations=3,
                                    minimum_size_learning=100,
                                    # these depend on how much data changes between the "fit" calls
                                    num_initial_evaluations_relearn=3,
                                    num_evaluations_relearn=3,
                                    # this can be used to adjust search spaces or include more parameters
                                    # by default, only sigma is optimised
                                    param_bounds={'sigma': [-3, 3]})

# automatically sets parameters
est.fit(X)

# only for illustration purpose
plt.figure()
plot_bayesopt_model_1d(est.bo)
plt.title("Original fit")

visualise_fit_2d(est, X)
plt.suptitle("Original fit")

# now change the data, with a different length scale
X = np.random.randn(N, D) * .1

# re-learns parameters, but starts from the previous ones
est.fit(X)
visualise_fit_2d(est, X)
plt.suptitle("New fit")

# only for illustration purpose
# OOP interface for optimising hyper-parameters and applying the results.
# The objective is not put through log here; if it were, it should be
# bounded away from zero via objective_log_bound.
bo = BayesOptSearch(est, X, param_bounds,
                    objective_log=False, objective_log_bound=100,
                    num_initial_evaluations=5)

# optimisation starts here; use results and apply them to the model
best_params = bo.optimize(num_iter=5)
est.set_parameters_from_dict(best_params)

est.fit(X)
visualise_fit_2d(est, X)
plt.suptitle("Original fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))

# the 1D objective model can only be visualised for a single parameter
if len(param_bounds) == 1:
    plt.figure()
    plot_bayesopt_model_1d(bo)
    plt.title("Objective")

# now change the data, with a different length scale
X = np.random.randn(200, D) * .1

# reset the optimiser, but initialise it from the old model;
# sample 3 random points to update it on the new data
best_params = bo.re_initialise(new_data=X, num_initial_evaluations=3)

# this optimisation now runs on the "new" objective
best_params = bo.optimize(num_iter=3)
est.set_parameters_from_dict(best_params)

est.fit(X)
visualise_fit_2d(est, X)
# NOTE(review): the source chunk was cut off after the trailing `%` here; the
# arguments were restored from the parallel suptitle call above — confirm.
plt.suptitle("New fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))