Code Example #1
File: pmc_banana.py  Project: karlnapf/kameleon_rks
    def get_OracleKernelAdaptiveLangevin_instance(D, target_log_pdf):
        step_size = 1.
        m = 500
        N = 5000
        Z = sample_banana(N, D, bananicity, V)

        surrogate = KernelExpFiniteGaussian(sigma=10, lmbda=.001, m=m, D=D)
        surrogate.fit(Z)

        if False:  # disabled: optionally tune sigma via Bayesian optimisation
            param_bounds = {'sigma': [-2, 3]}
            bo = BayesOptSearch(surrogate, Z, param_bounds)
            best_params = bo.optimize()
            surrogate.set_parameters_from_dict(best_params)

        if False:  # disabled: alternatively set sigma from the median heuristic
            sigma = 1. / gamma_median_heuristic(Z)
            surrogate.set_parameters_from_dict({'sigma': sigma})

        logger.info("kernel exp family uses %s" % surrogate.get_parameters())

        if False:  # disabled: visualise the fitted surrogate on a grid
            import matplotlib.pyplot as plt
            Xs = np.linspace(-30, 30, 50)
            Ys = np.linspace(-20, 40, 50)
            visualise_fit_2d(surrogate, Z, Xs, Ys)
            plt.show()

        instance = OracleKernelAdaptiveLangevin(D, target_log_pdf, surrogate,
                                                step_size)

        return instance
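The disabled branch above calls gamma_median_heuristic, which the excerpt does not show. A minimal hypothetical sketch of one common convention (the exact convention used in kameleon_rks may differ, and the _sketch name is mine) derives the inverse bandwidth from the median pairwise squared distance of the data:

    import numpy as np
    from scipy.spatial.distance import pdist

    def gamma_median_heuristic_sketch(Z):
        # median of all pairwise squared Euclidean distances of the data
        median_sq_dist = np.median(pdist(Z, 'sqeuclidean'))
        # one common convention: gamma = 1 / median squared distance
        return 1.0 / median_sq_dist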
Code Example #2
class KernelExpLiteGaussianLowRankAdaptive(KernelExpLiteGaussianLowRank):
    def __init__(self,
                 sigma,
                 lmbda,
                 D,
                 N,
                 eta=0.1,
                 cg_tol=1e-3,
                 cg_maxiter=None,
                 num_initial_evaluations=3,
                 num_evaluations=3,
                 minimum_size_learning=100,
                 num_initial_evaluations_relearn=1,
                 num_evaluations_relearn=1,
                 param_bounds={'sigma': [-3, 3]}):
        KernelExpLiteGaussianLowRank.__init__(self, sigma, lmbda, D, N, eta,
                                              cg_tol, cg_maxiter)

        self.bo = None
        self.param_bounds = param_bounds
        self.num_initial_evaluations = num_initial_evaluations
        self.num_iter = num_evaluations
        self.minimum_size_learning = minimum_size_learning

        self.n_initial_relearn = num_initial_evaluations_relearn
        self.n_iter_relearn = num_evaluations_relearn

        self.learning_parameters = False

    def fit(self, X):
        # avoid infinite recursion from x-validation fit call
        if not self.learning_parameters and len(X) >= self.minimum_size_learning:
            self.learning_parameters = True
            if self.bo is None:
                logger.info("Bayesian optimisation from scratch.")
                self.bo = BayesOptSearch(
                    self,
                    X,
                    self.param_bounds,
                    n_initial=self.num_initial_evaluations)
                best_params = self.bo.optimize(self.num_iter)
            else:
                logger.info("Bayesian optimisation using prior model.")
                self.bo.re_initialise(X, self.n_initial_relearn)
                best_params = self.bo.optimize(self.n_iter_relearn)

            self.set_parameters_from_dict(best_params)
            self.learning_parameters = False
            logger.info("Learnt %s" % str(self.get_parameters()))

        # standard fit call from superclass
        KernelExpLiteGaussianLowRank.fit(self, X)
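One caveat in the signature above: param_bounds={'sigma': [-3, 3]} is a mutable default argument, evaluated once at definition time and shared by every call that relies on it. The class only stores the dict, so this is harmless here, but the usual defensive idiom is the None default. A minimal sketch (class name hypothetical):

    class Example:
        # create a fresh dict per call instead of sharing one dict
        # object across every instance that uses the default
        def __init__(self, param_bounds=None):
            if param_bounds is None:
                param_bounds = {'sigma': [-3, 3]}
            self.param_bounds = param_bounds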
Code Example #3
    def fit(self, X):
        # avoid infinite recursion from x-validation fit call
        if not self.learning_parameters and len(X) >= self.minimum_size_learning:
            self.learning_parameters = True
            if self.bo is None:
                logger.info("Bayesian optimisation from scratch.")
                self.bo = BayesOptSearch(self, X, self.param_bounds, num_initial_evaluations=self.num_initial_evaluations)
                best_params = self.bo.optimize(self.num_iter)
            else:
                logger.info("Bayesian optimisation using prior model.")
                self.bo.re_initialise(X, self.n_initial_relearn)
                best_params = self.bo.optimize(self.n_iter_relearn)

            self.set_parameters_from_dict(best_params)
            self.learning_parameters = False
            logger.info("Learnt %s" % str(self.get_parameters()))

        # standard fit call from superclass
        KernelExpStein.fit(self, X)
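Note that this variant delegates to KernelExpStein.fit and passes num_initial_evaluations= to BayesOptSearch, whereas the low-rank variant in the next example passes n_initial=; the snippets appear to target different versions of the BayesOptSearch interface, so check the signature of the version you use.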
Code Example #4
    def fit(self, X):
        # avoid infinite recursion from x-validation fit call
        if not self.learning_parameters and len(X) >= self.minimum_size_learning:
            self.learning_parameters = True
            if self.bo is None:
                logger.info("Bayesian optimisation from scratch.")
                self.bo = BayesOptSearch(self, X, self.param_bounds, n_initial=self.num_initial_evaluations)
                best_params = self.bo.optimize(self.num_iter)
            else:
                logger.info("Bayesian optimisation using prior model.")
                self.bo.re_initialise(X, self.n_initial_relearn)
                best_params = self.bo.optimize(self.n_iter_relearn)

            self.set_parameters_from_dict(best_params)
            self.learning_parameters = False
            logger.info("Learnt %s" % str(self.get_parameters()))

        # standard fit call from superclass
        KernelExpLiteGaussianLowRank.fit(self, X)
Code Example #5
    for est in estimators:
        print(est.__class__.__name__)

        est.fit(X)

        # specify bounds of parameters to search for
        param_bounds = {
            #             'lmbda': [-5,0], # fixed lmbda, uncomment to include in search
            'sigma': [-2, 3],
        }

        # oop interface for optimising and using results
        # the objective is not log-transformed here; if it is, it might need bounding away from zero
        bo = BayesOptSearch(est,
                            X,
                            param_bounds,
                            objective_log=False,
                            objective_log_bound=100,
                            num_initial_evaluations=5)

        # optimisation starts here, use results and apply to model
        best_params = bo.optimize(num_iter=5)
        est.set_parameters_from_dict(best_params)
        est.fit(X)

        visualise_fit_2d(est, X)
        plt.suptitle("Original fit %s\nOptimised over: %s" %
                     (str(est.get_parameters()), str(param_bounds)))
        if len(param_bounds) == 1:
            plt.figure()
            plot_bayesopt_model_1d(bo)
            plt.title("Objective")
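The loop above assumes an estimators list and data X defined earlier in the file, which the excerpt does not show. A minimal hypothetical setup, reusing the constructor seen in Code Example #1, might look like:

    import numpy as np
    # import path assumed from kernel_exp_family; adjust to your installation
    from kernel_exp_family.estimators.finite.gaussian import KernelExpFiniteGaussian

    N, D = 500, 2
    X = np.random.randn(N, D)  # placeholder data; the demo uses its own
    estimators = [
        KernelExpFiniteGaussian(sigma=2., lmbda=.001, m=100, D=D),
    ]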
Code Example #6
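This excerpt visualises a KMC lite run, then re-initialises a KMC finite sampler: the lite samples are thinned by random permutation, a fresh KernelExpFiniteGaussian surrogate is tuned with BayesOptSearch and fitted on them, and MCMC is run again under a conservative adaptation schedule.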
    visualise_trace(samples,
                    log_pdf,
                    accepted,
                    log_pdf_density=surrogate,
                    step_sizes=step_sizes)
    plt.suptitle("KMC lite %s, acceptance rate: %.2f" % \
                 (surrogate.__class__.__name__, np.mean(accepted)))

    # now initialise KMC finite with the samples from the surrogate, and run for more iterations;
    # learn parameters before starting
    thinned = samples[np.random.permutation(len(samples))[:N]]
    surrogate2 = KernelExpFiniteGaussian(sigma=2, lmbda=0.001, D=D, m=N)
    surrogate2.set_parameters_from_dict(
        BayesOptSearch(surrogate2, thinned, {
            'sigma': [-3, 3]
        }).optimize(3))
    surrogate2.fit(thinned)

    # now use a conservative adaptation schedule, or none at all if confident in the oracle samples
    schedule2 = lambda t: 0.01 if t < 3000 else 0.
    kmc2 = KMC(surrogate2, target, momentum, kmc.num_steps_min,
               kmc.num_steps_max, kmc.step_size[0], kmc.step_size[1],
               schedule2, acc_star)

    # run MCMC
    samples2, proposals2, accepted2, acc_prob2, log_pdf2, times2, step_sizes = mini_mcmc(
        kmc2, start, num_iter, D)
    visualise_trace(samples2,
                    log_pdf2,
                    accepted2,
                    log_pdf_density=surrogate2,
                    step_sizes=step_sizes)