    # problem size parameters, feel free to change
    D = 2
    N = 1000
    
    # target is the banana density; fall back to a Gaussian if Theano is not present
    if banana_available:
        target = Banana(D=D)
    else:
        target = IsotropicZeroMeanGaussian(D=D)

    samplers = [
                StandardMetropolis(target, D),
                AdaptiveMetropolis(target, D),
                KernelAdaptiveMetropolis(target, D, N=200),
               ]

    for sampler in samplers:
        # MCMC parameters, feel free to increase the number of iterations
        start = np.zeros(D)
        num_iter = 1000
        
        # run MCMC
        samples, proposals, accepted, acc_prob, log_pdf, times, step_sizes = mini_mcmc(sampler, start, num_iter, D)
        
        visualise_trace(samples, log_pdf, accepted, step_sizes=step_sizes)
        plt.suptitle("%s, acceptance rate: %.2f" % \
                     (sampler.__class__.__name__, np.mean(accepted)))
        
    plt.show()
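Both examples on this page are excerpts from larger scripts and omit their imports. The numerics and plotting imports are unambiguous; the sampler, target and tooling classes come from the surrounding package, whose module paths the excerpt does not show:

import numpy as np
import matplotlib.pyplot as plt
# Banana, IsotropicZeroMeanGaussian, StandardMetropolis, AdaptiveMetropolis,
# KernelAdaptiveMetropolis, KMC, KernelExpFiniteGaussian, BayesOptSearch,
# mini_mcmc and visualise_trace are imported from the surrounding package;
# their module paths are not shown in the excerpt and are not guessed here.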
Example #2
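This snippet starts mid-script: surrogate, target, momentum and the leapfrog parameters are defined earlier in the source file and are not part of the excerpt. Below is a minimal sketch of such a setup; every name and value in it is an assumption rather than taken from the source (the surrogate constructor is assumed analogous to the KernelExpFiniteGaussian call further down):

    # assumed setup, not part of the original excerpt
    D = 2
    N = 500
    target = Banana(D=D)                       # as in Example #1
    momentum = IsotropicZeroMeanGaussian(D=D)  # momentum distribution for HMC
    # lite kernel exponential family surrogate for the target's gradient;
    # constructor assumed to mirror KernelExpFiniteGaussian below
    surrogate = KernelExpLiteGaussian(sigma=20., lmbda=0.001, D=D, N=N)
    # leapfrog integrator parameters (assumed values)
    num_steps_min, num_steps_max = 10, 50
    step_size_min, step_size_max = 0.01, 0.1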
    # KMC sampler instance; the schedule here also controls updating the surrogate.
    # This is a very liberal schedule, i.e. constant adaptation, which is
    # necessary if KMC is not initialised with oracle samples.
    schedule = lambda t: 0.001
    acc_star = 0.7
    kmc = KMC(surrogate, target, momentum, num_steps_min, num_steps_max,
              step_size_min, step_size_max, schedule, acc_star)

    # MCMC parameters
    # set to around 5000-10000 iterations to let KMC lite explore all of the support
    start = np.zeros(D)
    start[1] = -3
    num_iter = 500

    # run MCMC
    samples, proposals, accepted, acc_prob, log_pdf, times, step_sizes = mini_mcmc(
        kmc, start, num_iter, D)

    visualise_trace(samples,
                    log_pdf,
                    accepted,
                    log_pdf_density=surrogate,
                    step_sizes=step_sizes)
    plt.suptitle("KMC lite %s, acceptance rate: %.2f" % \
                 (surrogate.__class__.__name__, np.mean(accepted)))

    # now initialise KMC finite with the samples from the surrogate, and run
    # for more iterations; learn its parameters before starting
    thinned = samples[np.random.permutation(len(samples))[:N]]
    surrogate2 = KernelExpFiniteGaussian(sigma=2, lmbda=0.001, D=D, m=N)
    surrogate2.set_parameters_from_dict(
        BayesOptSearch(surrogate2, thinned, {'sigma': [-3, 3]}).optimize(3))
    surrogate2.fit(thinned)

    # now use a conservative schedule, or no schedule (None) at all if
    # confident in the oracle samples
    schedule2 = lambda t: 0.01 if t < 3000 else 0.
    kmc2 = KMC(surrogate2, target, momentum, num_steps_min, num_steps_max,
               step_size_min, step_size_max, schedule2, acc_star)
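The excerpt ends with the construction of kmc2. A plausible continuation, mirroring the KMC lite run above, is sketched here; the iteration count is an assumption, following the 5000-10000 range suggested in the comments:

    # assumed continuation, not part of the original excerpt
    num_iter2 = 5000
    samples2, proposals2, accepted2, acc_prob2, log_pdf2, times2, step_sizes2 = mini_mcmc(
        kmc2, start, num_iter2, D)
    visualise_trace(samples2, log_pdf2, accepted2,
                    log_pdf_density=surrogate2, step_sizes=step_sizes2)
    plt.suptitle("KMC finite %s, acceptance rate: %.2f" % \
                 (surrogate2.__class__.__name__, np.mean(accepted2)))
    plt.show()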
Example #3

    # target is the Glass posterior; fall back to a Gaussian if it is not available
    if glass_available:
        target = GlassPosterior()
        target.set_up()
    else:
        target = IsotropicZeroMeanGaussian(D=D)

    # transition kernel, pick any
    samplers = [
                get_am_instance(target),
                get_mh_instance(target),
                get_kam_instance(target),
                get_kmc_instance(target)
               ]
    
    for sampler in samplers:
        
        # MCMC parameters
        # small number of iterations here to keep runtime short, feel free to increase
        start = np.zeros(D)
        num_iter = 50
        
        # run MCMC
        samples, proposals, accepted, acc_prob, log_pdf, times, step_sizes = mini_mcmc(sampler, start, num_iter, D)
        
        visualise_trace(samples, log_pdf, accepted, step_sizes=step_sizes, idx0=1, idx1=6)
        
        plt.suptitle("%s on %s, acceptance rate: %.2f" % \
                     (sampler.__class__.__name__, target.__class__.__name__, np.mean(accepted)))
        
    plt.show()
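The get_am_instance, get_mh_instance, get_kam_instance and get_kmc_instance factories are defined elsewhere in the source file. For orientation, a minimal, assumed sketch of the simplest one; the dimension is a guess and the constructor call follows StandardMetropolis(target, D) from Example #1:

# assumed sketch of one factory helper, not part of the original excerpt
def get_mh_instance(target):
    # assumed dimension of the Glass posterior (consistent with the
    # idx0=1, idx1=6 trace indices used above)
    D = 9
    return StandardMetropolis(target, D)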