# Exemplo n.º 1 (Example no. 1 — scraped snippet header)
# 0 (score/vote count carried over from the scrape)
 def get_OracleKernelAdaptiveLangevin_instance(D, target_log_pdf):
     """Build an OracleKernelAdaptiveLangevin sampler for *target_log_pdf*.

     A finite-feature kernel exponential family surrogate is fitted to
     oracle samples drawn from the banana density, then wrapped in the
     adaptive Langevin instance that is returned.
     """
     # Fixed sampler/model configuration.
     step_size = 1.
     m = 500
     N = 5000

     # Draw oracle samples and fit the surrogate gradient model to them.
     # NOTE(review): bananicity and V are module-level globals — not visible here.
     Z = sample_banana(N, D, bananicity, V)
     surrogate = KernelExpFiniteGaussian(sigma=10, lmbda=.001, m=m, D=D)
     surrogate.fit(Z)

     # Disabled toggle: tune the kernel bandwidth via Bayesian optimisation.
     if False:
         param_bounds = {'sigma': [-2, 3]}
         bo = BayesOptSearch(surrogate, Z, param_bounds)
         best_params = bo.optimize()
         surrogate.set_parameters_from_dict(best_params)

     # Disabled toggle: kernel bandwidth from the median heuristic.
     if False:
         sigma = 1. / gamma_median_heuristic(Z)
         surrogate.set_parameters_from_dict({'sigma': sigma})

     logger.info("kernel exp family uses %s" % surrogate.get_parameters())

     # Disabled toggle: visual sanity check of the fitted surrogate.
     if False:
         import matplotlib.pyplot as plt
         Xs = np.linspace(-30, 30, 50)
         Ys = np.linspace(-20, 40, 50)
         visualise_fit_2d(surrogate, Z, Xs, Ys)
         plt.show()

     return OracleKernelAdaptiveLangevin(D, target_log_pdf, surrogate, step_size)
# Exemplo n.º 2 (Example no. 2 — scraped snippet header; near-duplicate of example 1)
# 0 (score/vote count carried over from the scrape)
    def get_OracleKernelAdaptiveLangevin_instance(D, target_log_pdf):
        """Create an OracleKernelAdaptiveLangevin instance for the given
        target, using a kernel exponential family surrogate fitted to
        samples from the banana density.
        """
        step_size = 1.
        num_basis = 500    # finite-feature count of the surrogate (m)
        num_oracle = 5000  # number of oracle samples the surrogate is fit on

        # NOTE(review): bananicity and V come from enclosing scope — confirm.
        oracle_samples = sample_banana(num_oracle, D, bananicity, V)

        surrogate = KernelExpFiniteGaussian(sigma=10, lmbda=.001,
                                            m=num_basis, D=D)
        surrogate.fit(oracle_samples)

        if False:  # optional: tune sigma via Bayesian optimisation
            param_bounds = {'sigma': [-2, 3]}
            bo = BayesOptSearch(surrogate, oracle_samples, param_bounds)
            best_params = bo.optimize()
            surrogate.set_parameters_from_dict(best_params)

        if False:  # optional: sigma from the median heuristic
            sigma = 1. / gamma_median_heuristic(oracle_samples)
            surrogate.set_parameters_from_dict({'sigma': sigma})

        logger.info("kernel exp family uses %s" % surrogate.get_parameters())

        if False:  # optional: visualise the fitted surrogate
            import matplotlib.pyplot as plt
            Xs = np.linspace(-30, 30, 50)
            Ys = np.linspace(-20, 40, 50)
            visualise_fit_2d(surrogate, oracle_samples, Xs, Ys)
            plt.show()

        instance = OracleKernelAdaptiveLangevin(D, target_log_pdf, surrogate,
                                                step_size)
        return instance
# Exemplo n.º 3 (Example no. 3 — scraped snippet header)
# 0 (score/vote count; the fragment below was truncated mid-expression by the
#    scrape and contains two overlapping copies of the same script tail)
        kmc, start, num_iter, D)

    visualise_trace(samples,
                    log_pdf,
                    accepted,
                    log_pdf_density=surrogate,
                    step_sizes=step_sizes)
    plt.suptitle("KMC lite %s, acceptance rate: %.2f" % \
                 (surrogate.__class__.__name__, np.mean(accepted)))

    # now initialise KMC finite with the samples from the surrogate, and run for more
    # learn parameters before starting
    thinned = samples[np.random.permutation(len(samples))[:N]]
    surrogate2 = KernelExpFiniteGaussian(sigma=2, lmbda=0.001, D=D, m=N)
    surrogate2.set_parameters_from_dict(
        BayesOptSearch(surrogate2, thinned, {
            'sigma': [-3, 3]
        }).optimize(3))
    surrogate2.fit(thinned)

    # now use conservative schedule, or None at all if confident in oracle samples
    schedule2 = lambda t: 0.01 if t < 3000 else 0.
    kmc2 = KMC(surrogate2, target, momentum, kmc.num_steps_min,
               kmc.num_steps_max, kmc.step_size[0], kmc.step_size[1],
               schedule2, acc_star)

    # run MCMC
    samples2, proposals2, accepted2, acc_prob2, log_pdf2, times2, step_sizes = mini_mcmc(
        kmc2, start, num_iter, D)
    visualise_trace(samples2,
                    log_pdf2,
                    accepted2,
    # set to around 5000-10000 iterations to have KMC lite explored all of the support
    start = np.zeros(D)
    start[1] = -3
    num_iter = 500
    
    # run MCMC
    samples, proposals, accepted, acc_prob, log_pdf, times, step_sizes = mini_mcmc(kmc, start, num_iter, D)
    
    visualise_trace(samples, log_pdf, accepted, log_pdf_density=surrogate, step_sizes=step_sizes)
    plt.suptitle("KMC lite %s, acceptance rate: %.2f" % \
                 (surrogate.__class__.__name__, np.mean(accepted)))
    
    # now initialise KMC finite with the samples from the surrogate, and run for more
    # learn parameters before starting
    thinned = samples[np.random.permutation(len(samples))[:N]]
    surrogate2 = KernelExpFiniteGaussian(sigma=2, lmbda=0.001, D=D, m=N)
    surrogate2.set_parameters_from_dict(BayesOptSearch(surrogate2, thinned, {'sigma': [-3,3]}).optimize(3))
    surrogate2.fit(thinned)
    
    # now use conservative schedule, or None at all if confident in oracle samples
    schedule2 = lambda t: 0.01 if t < 3000 else 0.
    kmc2 = KMC(surrogate2, target,
              momentum, kmc.num_steps_min, kmc.num_steps_max, kmc.step_size[0], kmc.step_size[1],
              schedule2, acc_star)

    # run MCMC
    samples2, proposals2, accepted2, acc_prob2, log_pdf2, times2, step_sizes = mini_mcmc(kmc2, start, num_iter, D)
    visualise_trace(samples2, log_pdf2, accepted2, log_pdf_density=surrogate2, step_sizes=step_sizes)
    plt.suptitle("KMC finite, %s, acceptance rate: %.2f" % \
                 (surrogate.__class__.__name__, np.mean(accepted2)))
    plt.show()