        # oop interface for optimising and using results
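
        # --- assumed setup, not part of the original snippet ---
        # import paths follow the layout of the kernel_exp_family example scripts
        # and may differ between versions of the package
        import numpy as np
        import matplotlib.pyplot as plt

        from kernel_exp_family.estimators.lite.gaussian import KernelExpLiteGaussian
        from kernel_exp_family.estimators.parameter_search_bo import BayesOptSearch
        from kernel_exp_family.examples.tools import visualise_fit_2d, plot_bayesopt_model_1d

        # toy data: N points in D dimensions
        N, D = 200, 2
        X = np.random.randn(N, D)

        # estimator with initial hyper-parameters (placeholder values)
        sigma, lmbda = 1., 0.01
        est = KernelExpLiteGaussian(sigma, lmbda, D, N)

        # assumed search space: an interval for the kernel bandwidth 'sigma'
        # (placeholder bounds)
        param_bounds = {'sigma': [-3, 3]}
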
        # the objective is not log-transformed here; if it were, it might need
        # to be bounded away from zero
        bo = BayesOptSearch(est,
                            X,
                            param_bounds,
                            objective_log=False,
                            objective_log_bound=100,
                            num_initial_evaluations=5)

        # optimisation starts here; the results are then applied to the model
        best_params = bo.optimize(num_iter=5)
        est.set_parameters_from_dict(best_params)
        est.fit(X)

        visualise_fit_2d(est, X)
        plt.suptitle("Fit with optimised parameters %s\nOptimised over: %s" %
                     (str(est.get_parameters()), str(param_bounds)))
        if len(param_bounds) == 1:
            plt.figure()
            plot_bayesopt_model_1d(bo)
            plt.title("Objective")

        # now change the data, using a different length scale
        X = np.random.randn(200, D) * .1

        # reset the optimiser, but initialise it from the old model;
        # sample 3 random points to update it
        bo.re_initialise(new_data=X, num_initial_evaluations=3)

        # this optimisation now runs on the "new" objective
        best_params = bo.optimize(num_iter=3)
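
        # (not in the original snippet) the dictionary returned by the second
        # optimisation run can be applied exactly as before to refit the
        # estimator on the new data
        est.set_parameters_from_dict(best_params)
        est.fit(X)
        visualise_fit_2d(est, X)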
Code example #2
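The second example begins inside a cross-validation loop over kernel bandwidths; the sketch below is an assumed setup (imports, toy data, bandwidth grid, and result arrays, all with placeholder values) that the original snippet does not show.

    # --- assumed setup, not part of the original example ---
    # import paths follow the layout of the kernel_exp_family example scripts
    # and may differ between versions of the package
    import numpy as np
    import matplotlib.pyplot as plt

    from kernel_exp_family.estimators.lite.gaussian import KernelExpLiteGaussian
    from kernel_exp_family.examples.tools import visualise_fit_2d

    # toy data: N points in D dimensions, a fixed regulariser, and a grid of
    # log bandwidths to cross-validate over (placeholder values)
    N, D = 200, 2
    X = np.random.randn(N, D)
    lmbda = 0.01
    log_sigmas = np.linspace(-3, 3, 10)

    # mean, 10th and 90th percentile of the cross-validation objective per bandwidth
    O = np.zeros(len(log_sigmas))
    O_lower = np.zeros(len(log_sigmas))
    O_upper = np.zeros(len(log_sigmas))
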
    for i, log_sigma in enumerate(log_sigmas):
        est = KernelExpLiteGaussian(np.exp(log_sigma), lmbda, D, N)

        # xval_result is an array of shape num_repetitions x num_folds,
        # each entry containing the cross-validation objective
        xval_result = est.xvalidate_objective(X, num_folds=5, num_repetitions=2)
        O[i] = np.mean(xval_result)
        O_lower[i] = np.percentile(xval_result, 10)
        O_upper[i] = np.percentile(xval_result, 90)
    
    # best parameter
    best_log_sigma = log_sigmas[np.argmin(O)]
    
    # visualisation
    plt.figure()
    plt.plot([best_log_sigma, best_log_sigma], [np.min(O), np.max(O)], 'r')
    plt.plot(log_sigmas, O, 'b-')
    plt.plot(log_sigmas, O_lower, 'b--')
    plt.plot(log_sigmas, O_upper, 'b--')
    plt.xlim([np.min(log_sigmas) - 1, np.max(log_sigmas) + 1])
    plt.xlabel("log sigma")
    plt.ylabel("objective")
    plt.title("lmbda=%.4f" % lmbda)
    plt.legend(["Best sigma", "Mean objective", "10th percentile", "90th percentile"])
    plt.tight_layout()
    
    est.sigma = np.exp(best_log_sigma)
    est.fit(X)
    visualise_fit_2d(est, X)
    plt.show()
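
    # (not in the original snippet) report the selected bandwidth and, as a
    # sanity check, re-run cross-validation at the chosen value using only
    # calls that already appear above
    print("best sigma: %.4f" % np.exp(best_log_sigma))
    xval_best = est.xvalidate_objective(X, num_folds=5, num_repetitions=2)
    print("objective at best sigma: %.4f" % np.mean(xval_best))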