Example #1
from numpy import asarray, inf

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
# PlottingOutput path assumed by analogy with StatisticsOutput
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale


def main():
    distribution = Banana()
    # alternative target; uncomment these lines to visualise it first
    # distribution = Flower(amplitude=6, frequency=6, variance=1, radius=10, dimension=8)
    # Visualise.visualise_distribution(distribution)
    # show()

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # Kameleon sampler that keeps adapting its scale (stop_adapt=inf)
    mcmc_sampler = KameleonWindowLearnScale(distribution,
                                            kernel,
                                            stop_adapt=inf)

    start = asarray([0., -5.])
    mcmc_params = MCMCParams(start=start, num_iterations=30000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(
        PlottingOutput(distribution,
                       plot_from=3000,
                       colour_by_likelihood=False,
                       num_samples_plot=0))
    chain.append_mcmc_output(StatisticsOutput(plot_times=False))
    chain.run()

    # empirical quantiles of the post-burn-in samples
    print(distribution.emp_quantiles(chain.samples[10000:]))
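Note: emp_quantiles appears to compare the samples against the target's density contours at a grid of nominal probability levels, so for a well-mixed chain the printed values should sit close to those nominal levels. Example #2 below runs the same check on exact samples as a baseline.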
Example #2
from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.distribution.Ring import Ring


def main():
    # exact samples from the targets; their empirical quantiles serve
    # as a baseline for what a perfect sampler should produce
    dist = Ring(dimension=50)
    X = dist.sample(10000).samples
    # print(X[:, 2:dist.dimension])
    print(dist.emp_quantiles(X))

    dist2 = Banana(dimension=50)
    X2 = dist2.sample(10000).samples

    print(dist2.emp_quantiles(X2))
Example #3
from numpy import zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
# Kameleon path assumed by analogy with the other samplers
from kameleon_mcmc.mcmc.samplers.Kameleon import Kameleon


def main():
    distribution = Banana(dimension=8)

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # plain Kameleon: works off a fixed set of samples from the target
    # instead of adapting from the chain history
    mcmc_sampler = Kameleon(distribution, kernel,
                            distribution.sample(100).samples)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=20000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True))
    chain.run()
Example #4
from numpy import asarray
from pickle import dump

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis


def main():
    distribution = Banana(dimension=2, bananicity=0.03, V=100.0)

    mcmc_sampler = StandardMetropolis(distribution)

    start = asarray([0., -2.])
    mcmc_params = MCMCParams(start=start, num_iterations=10000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True, lag=1000))
    # chain.append_mcmc_output(PlottingOutput(distribution, plot_from=1, num_samples_plot=0,
    #                                         colour_by_likelihood=False))

    chain.run()

    # pickle the finished chain; binary mode is required for pickle
    with open("std_metropolis_chain_gaussian.bin", 'wb') as f:
        dump(chain, f)
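To load the pickled chain back later, standard pickle usage applies:

from pickle import load

with open("std_metropolis_chain_gaussian.bin", 'rb') as f:
    chain = load(f)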
Example #5
from numpy import zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
# KameleonWindow path assumed by analogy with the other samplers
from kameleon_mcmc.mcmc.samplers.KameleonWindow import KameleonWindow


def main():
    distribution = Banana(dimension=8, bananicity=0.1, V=100.0)

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # window variant: adapts from the running chain history
    mcmc_sampler = KameleonWindow(distribution, kernel)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=80000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    # chain.append_mcmc_output(PlottingOutput(distribution, plot_from=3000))
    chain.append_mcmc_output(StatisticsOutput(plot_times=True))
    chain.run()

    # note: quantiles here are computed over all samples, burn-in included
    print(distribution.emp_quantiles(chain.samples))
Example #6
import os
import sys

from numpy import eye, zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolis import AdaptiveMetropolis
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis

if __name__ == '__main__':
    # experiment directory named after this script
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Banana(dimension=8, bananicity=0.03, V=100)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        distribution.sample(1000).samples)
    sigma = 10  # override the median heuristic with a fixed value
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    burnin = 20000
    num_iterations = 40000

    mcmc_sampler = KameleonWindowLearnScale(distribution,
                                            kernel,
                                            stop_adapt=burnin)
    # initial mean/covariance estimates for the adaptive Metropolis samplers
    mean_est = zeros(distribution.dimension, dtype="float64")
    cov_est = 1.0 * eye(distribution.dimension)
    cov_est[0, 0] = distribution.V
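The snippet ends mid-setup. A minimal sketch of how it might continue, reusing only constructors that appear in the other examples here (the adaptive-Metropolis samplers are omitted because their constructor arguments never appear in these snippets):

    # hypothetical continuation: run the Kameleon sampler against a plain
    # Metropolis baseline on the same target
    for sampler in [mcmc_sampler, StandardMetropolis(distribution)]:
        chain = MCMCChain(sampler, MCMCParams(start=zeros(distribution.dimension),
                                              num_iterations=num_iterations))
        chain.append_mcmc_output(StatisticsOutput(plot_times=False))
        chain.run()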
Example #7
import os
import sys

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale

if __name__ == '__main__':
    # usage guard, reconstructed around the truncated snippet
    if len(sys.argv) != 3:
        print("usage:", str(sys.argv[0]).split(
            os.sep)[-1], "<experiment_dir_base> <number_of_experiments>")
        print("example:")
        print("python " + str(sys.argv[0]).split(
            os.sep)[-1] + " /nfs/home1/ucabhst/kameleon_experiments/ 3")
        sys.exit()

    experiment_dir_base = str(sys.argv[1])
    n = int(sys.argv[2])

    # loop over parameters here

    experiment_dir = experiment_dir_base + str(os.path.abspath(
        sys.argv[0])).split(os.sep)[-1].split(".")[0] + os.sep
    print("running experiments", n, "times at base", experiment_dir)

    distribution = Banana(dimension=100, bananicity=0.1, V=100)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        distribution.sample(1000).samples)
    sigma = 15  # override the median heuristic with a fixed value
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    for i in range(n):

        mcmc_samplers = []

        burnin = 40000
        num_iterations = 80000

        mcmc_samplers.append(
            KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin))
Example #8
from matplotlib.pyplot import axis, savefig
from numpy import linspace

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.distribution.Flower import Flower
from kameleon_mcmc.distribution.Ring import Ring
from kameleon_mcmc.tools.Visualise import Visualise

if __name__ == '__main__':
    distributions = [Ring(), Banana(), Flower()]
    for d in distributions:
        Xs, Ys = d.get_plotting_bounds()
        resolution = 250
        Xs = linspace(Xs[0], Xs[1], resolution)
        Ys = linspace(Ys[0], Ys[1], resolution)

        Visualise.visualise_distribution(d, Xs=Xs, Ys=Ys)
        axis("off")
        savefig("heatmap_" + d.__class__.__name__ + ".eps",
                bbox_inches='tight')
Example #9
    # Gradient of the Matern kernel k(x, .) with respect to x, evaluated
    # against every row of Y. Fragment of the MaternKernel class; the class
    # header and imports (numpy's shape, reshape, sqrt and matplotlib's
    # imshow, show) are not part of this snippet.
    def gradient(self, x, Y):
        assert (len(shape(x)) == 1)
        assert (len(shape(Y)) == 2)
        assert (len(x) == shape(Y)[1])

        if self.nu == 1.5 or self.nu == 2.5:
            x_2d = reshape(x, (1, len(x)))
            # for half-integer nu, the gradient factors through the Matern
            # kernel of order nu - 1 with a rescaled length-scale rho
            lower_order_rho = self.rho * sqrt(2 * (self.nu - 1)) / sqrt(
                2 * self.nu)
            lower_order_kernel = MaternKernel(lower_order_rho, self.nu - 1,
                                              self.sigma)
            k = lower_order_kernel.kernel(x_2d, Y)
            differences = Y - x
            G = (1.0 / lower_order_rho ** 2) * (k.T * differences)
            return G
        else:
            raise NotImplementedError()


if __name__ == '__main__':
    distribution = Banana()
    Z = distribution.sample(50).samples
    Z2 = distribution.sample(50).samples
    kernel = MaternKernel(5.0, nu=1.5, sigma=2.0)
    # heatmap of the cross-kernel matrix between the two sample sets
    K = kernel.kernel(Z, Z2)
    imshow(K, interpolation="nearest")
    # G = kernel.gradient(Z[0], Z2)
    # print(G)
    show()
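A quick sanity check for the analytic gradient above is a central finite-difference approximation of k(x, y_j) with respect to x. This is a sketch rather than part of the library; it assumes kernel.kernel accepts 2-D arrays as in the snippet and reuses Z, Z2 and kernel from the __main__ block:

from numpy import allclose, reshape, zeros_like

def finite_difference_gradient(kernel, x, Y, h=1e-5):
    # numerically differentiate k(x, y_j) with respect to each coordinate of x
    G = zeros_like(Y)
    for d in range(len(x)):
        e = zeros_like(x)
        e[d] = h
        k_plus = kernel.kernel(reshape(x + e, (1, len(x))), Y)
        k_minus = kernel.kernel(reshape(x - e, (1, len(x))), Y)
        G[:, d] = (k_plus - k_minus).ravel() / (2 * h)
    return G

x = Z[0]
print(allclose(kernel.gradient(x, Z2),
               finite_difference_gradient(kernel, x, Z2), atol=1e-4))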