Code Example #1
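Note: this snippet assumes that KernelExpLiteGaussianLowRank, BayesOptSearch and a module-level logger have already been imported from the kernel_exp_family package; the import lines are not part of the original example.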
class KernelExpLiteGaussianLowRankAdaptive(KernelExpLiteGaussianLowRank):
    def __init__(self,
                 sigma,
                 lmbda,
                 D,
                 N,
                 eta=0.1,
                 cg_tol=1e-3,
                 cg_maxiter=None,
                 num_initial_evaluations=3,
                 num_evaluations=3,
                 minimum_size_learning=100,
                 num_initial_evaluations_relearn=1,
                 num_evaluations_relearn=1,
                 param_bounds={'sigma': [-3, 3]}):
        KernelExpLiteGaussianLowRank.__init__(self, sigma, lmbda, D, N, eta,
                                              cg_tol, cg_maxiter)

        self.bo = None
        self.param_bounds = param_bounds
        self.num_initial_evaluations = num_initial_evaluations
        self.num_iter = num_evaluations
        self.minimum_size_learning = minimum_size_learning

        self.n_initial_relearn = num_initial_evaluations_relearn
        self.n_iter_relearn = num_evaluations_relearn

        self.learning_parameters = False

    def fit(self, X):
        # avoid infinite recursion from x-validation fit call
        if not self.learning_parameters and len(X) >= self.minimum_size_learning:
            self.learning_parameters = True
            if self.bo is None:
                logger.info("Bayesian optimisation from scratch.")
                self.bo = BayesOptSearch(
                    self,
                    X,
                    self.param_bounds,
                    n_initial=self.num_initial_evaluations)
                best_params = self.bo.optimize(self.num_iter)
            else:
                logger.info("Bayesian optimisation using prior model.")
                self.bo.re_initialise(X, self.n_initial_relearn)
                best_params = self.bo.optimize(self.n_iter_relearn)

            self.set_parameters_from_dict(best_params)
            self.learning_parameters = False
            logger.info("Learnt %s" % str(self.get_parameters()))

        # standard fit call from superclass
        KernelExpLiteGaussianLowRank.fit(self, X)
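
For orientation, below is a minimal usage sketch of the adaptive estimator defined above; it is not part of the original example. The synthetic data, the sigma/lmbda starting values and the choice N=len(X) are illustrative assumptions following the pattern of the library's demo scripts, and only methods that appear in the snippets on this page are called.

import numpy as np

# illustrative data: 200 points in 2 dimensions
D = 2
X = np.random.randn(200, D)

# construct the adaptive estimator; sigma and lmbda are placeholder starting values,
# and N is assumed to match the number of data points
est = KernelExpLiteGaussianLowRankAdaptive(sigma=2., lmbda=1., D=D, N=len(X))

# len(X) >= minimum_size_learning (default 100), so this call first runs Bayesian
# optimisation over param_bounds and then performs the usual low-rank fit
est.fit(X)
print(est.get_parameters())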
Code Example #2
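The same adaptive estimator as in Code Example #1, here with a different source formatting; the same import assumptions apply.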
class KernelExpLiteGaussianLowRankAdaptive(KernelExpLiteGaussianLowRank):
    def __init__(
        self,
        sigma,
        lmbda,
        D,
        N,
        eta=0.1,
        cg_tol=1e-3,
        cg_maxiter=None,
        num_initial_evaluations=3,
        num_evaluations=3,
        minimum_size_learning=100,
        num_initial_evaluations_relearn=1,
        num_evaluations_relearn=1,
        param_bounds={"sigma": [-3, 3]},
    ):
        KernelExpLiteGaussianLowRank.__init__(self, sigma, lmbda, D, N, eta, cg_tol, cg_maxiter)

        self.bo = None
        self.param_bounds = param_bounds
        self.num_initial_evaluations = num_initial_evaluations
        self.num_iter = num_evaluations
        self.minimum_size_learning = minimum_size_learning

        self.n_initial_relearn = num_initial_evaluations_relearn
        self.n_iter_relearn = num_evaluations_relearn

        self.learning_parameters = False

    def fit(self, X):
        # avoid infinite recursion from x-validation fit call
        if not self.learning_parameters and len(X) >= self.minimum_size_learning:
            self.learning_parameters = True
            if self.bo is None:
                logger.info("Bayesian optimisation from scratch.")
                self.bo = BayesOptSearch(self, X, self.param_bounds, n_initial=self.num_initial_evaluations)
                best_params = self.bo.optimize(self.num_iter)
            else:
                logger.info("Bayesian optimisation using prior model.")
                self.bo.re_initialise(X, self.n_initial_relearn)
                best_params = self.bo.optimize(self.n_iter_relearn)

            self.set_parameters_from_dict(best_params)
            self.learning_parameters = False
            logger.info("Learnt %s" % str(self.get_parameters()))

        # standard fit call from superclass
        KernelExpLiteGaussianLowRank.fit(self, X)
Code Example #4
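This excerpt continues a longer demo script: it assumes that numpy (np), matplotlib.pyplot (plt), an already-constructed estimator est, a BayesOptSearch instance bo, the dictionary param_bounds, the dimension D, and the plotting helpers visualise_fit_2d and plot_bayesopt_model_1d are all defined earlier in that script.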
est.set_parameters_from_dict(best_params)
est.fit(X)

visualise_fit_2d(est, X)
plt.suptitle("Original fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))
if len(param_bounds) == 1:
    plt.figure()
    plot_bayesopt_model_1d(bo)
    plt.title("Objective")

# now change the data so that it has a different length scale
X = np.random.randn(200, D) * .1

# re-initialise the optimiser on the new data, but keep the old model as a
# prior; sample 3 random points to update it
best_params = bo.re_initialise(new_data=X, num_initial_evaluations=3)

# this optimisation now runs on the "new" objective
best_params = bo.optimize(num_iter=3)
est.set_parameters_from_dict(best_params)
est.fit(X)

visualise_fit_2d(est, X)
plt.suptitle("New fit %s\nOptimised over: %s" %
             (str(est.get_parameters()), str(param_bounds)))

if len(param_bounds) == 1:
    plt.figure()
    plot_bayesopt_model_1d(bo)
    plt.title("New objective")