Example #1
from numpy import asarray, inf

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput  # path assumed by analogy with StatisticsOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale


def main():
    distribution = Banana()
    # Alternative target; uncommenting also needs the Flower, Visualise,
    # and matplotlib show imports:
    # distribution = Flower(amplitude=6, frequency=6, variance=1, radius=10, dimension=8)
    # Visualise.visualise_distribution(distribution)
    # show()

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # Kameleon sampler with a learned scale that never stops adapting
    mcmc_sampler = KameleonWindowLearnScale(distribution,
                                            kernel,
                                            stop_adapt=inf)

    start = asarray([0, -5.])
    mcmc_params = MCMCParams(start=start, num_iterations=30000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(
        PlottingOutput(distribution,
                       plot_from=3000,
                       colour_by_likelihood=False,
                       num_samples_plot=0))
    chain.append_mcmc_output(StatisticsOutput(plot_times=False))
    chain.run()

    # Empirical quantiles of the post-burn-in samples under the target
    print(distribution.emp_quantiles(chain.samples[10000:]))
Example #2
from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.distribution.Ring import Ring


def main():
    # Sanity check: empirical quantiles of 10000 samples from two
    # 50-dimensional toy targets
    dist = Ring(dimension=50)
    X = dist.sample(10000).samples
    # print(X[:, 2:dist.dimension])
    print(dist.emp_quantiles(X))

    dist2 = Banana(dimension=50)
    X2 = dist2.sample(10000).samples

    print(dist2.emp_quantiles(X2))
Example #3
from numpy import zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.Kameleon import Kameleon  # path assumed by analogy with the other samplers


def main():
    distribution = Banana(dimension=8)

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # Kameleon proposal built from a fixed set of 100 oracle samples of the target
    mcmc_sampler = Kameleon(distribution, kernel,
                            distribution.sample(100).samples)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=20000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True))
    chain.run()
Example #4
from numpy import zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.KameleonWindow import KameleonWindow  # path assumed by analogy with the other samplers


def main():
    distribution = Banana(dimension=8, bananicity=0.1, V=100.0)

    sigma = 5
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    # Kameleon proposal adapted from a sliding window of the chain history
    mcmc_sampler = KameleonWindow(distribution, kernel)

    start = zeros(distribution.dimension)
    mcmc_params = MCMCParams(start=start, num_iterations=80000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    # chain.append_mcmc_output(PlottingOutput(distribution, plot_from=3000))
    chain.append_mcmc_output(StatisticsOutput(plot_times=True))
    chain.run()

    print(distribution.emp_quantiles(chain.samples))
Example #5
from pickle import dump  # `dump` assumed to be pickle.dump

from numpy import asarray

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis


def main():
    distribution = Banana(dimension=2, bananicity=0.03, V=100.0)

    # Plain random-walk Metropolis as a baseline
    mcmc_sampler = StandardMetropolis(distribution)

    start = asarray([0., -2.])
    mcmc_params = MCMCParams(start=start, num_iterations=10000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)

    chain.append_mcmc_output(StatisticsOutput(plot_times=True, lag=1000))
    # chain.append_mcmc_output(PlottingOutput(distribution, plot_from=1, num_samples_plot=0,
    #                                         colour_by_likelihood=False))

    chain.run()

    # Pickle the finished chain; binary mode is required for pickle
    with open("std_metropolis_chain_gaussian.bin", "wb") as f:
        dump(chain, f)
Example #6
import os
import sys

from numpy import eye, zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolis import AdaptiveMetropolis
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis

if __name__ == '__main__':
    # Experiment directory named after this script's file name
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Banana(dimension=8, bananicity=0.03, V=100)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        distribution.sample(1000).samples)
    sigma = 10  # overrides the median heuristic with a fixed value
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    burnin = 20000
    num_iterations = 40000

    mcmc_sampler = KameleonWindowLearnScale(distribution,
                                            kernel,
                                            stop_adapt=burnin)
    mean_est = zeros(distribution.dimension, dtype="float64")
    cov_est = 1.0 * eye(distribution.dimension)
    cov_est[0, 0] = distribution.V
Example #7
import os
import sys

from numpy import eye, zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolis import AdaptiveMetropolis
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis

if __name__ == '__main__':
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Banana(dimension=8, bananicity=0.1, V=100)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        distribution.sample(1000).samples)
    sigma = 10  # overrides the median heuristic with a fixed value
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    burnin = 40000
    num_iterations = 80000

    # Candidate samplers; uncomment one to run
    # mcmc_sampler = KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin)
    mean_est = zeros(distribution.dimension, dtype="float64")
    cov_est = 1.0 * eye(distribution.dimension)
    cov_est[0, 0] = distribution.V
    # mcmc_sampler = AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est)
    # mcmc_sampler = AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est)
Example #8
from matplotlib.pyplot import axis, savefig
from numpy import linspace

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.distribution.Flower import Flower
from kameleon_mcmc.distribution.Ring import Ring
from kameleon_mcmc.tools.Visualise import Visualise

if __name__ == '__main__':
    distributions = [Ring(), Banana(), Flower()]
    for d in distributions:
        # Evaluate each density on a 250x250 grid over its plotting bounds
        Xs, Ys = d.get_plotting_bounds()
        resolution = 250
        Xs = linspace(Xs[0], Xs[1], resolution)
        Ys = linspace(Ys[0], Ys[1], resolution)

        Visualise.visualise_distribution(d, Xs=Xs, Ys=Ys)
        axis("off")
        savefig("heatmap_" + d.__class__.__name__ + ".eps",
                bbox_inches='tight')
Example #9
import os
import sys

from numpy import eye, zeros

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolis import AdaptiveMetropolis
from kameleon_mcmc.mcmc.samplers.AdaptiveMetropolisLearnScale import \
    AdaptiveMetropolisLearnScale
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import \
    KameleonWindowLearnScale
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis

if __name__ == '__main__':
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Banana(dimension=8, bananicity=0.03, V=100)
    sigma = GaussianKernel.get_sigma_median_heuristic(
        distribution.sample(1000).samples)
    sigma = 10  # overrides the median heuristic with a fixed value
    print("using sigma", sigma)
    kernel = GaussianKernel(sigma=sigma)

    burnin = 20000
    num_iterations = 40000

    # Candidate samplers; uncomment one to run
    # mcmc_sampler = KameleonWindowLearnScale(distribution, kernel, stop_adapt=burnin)
    mean_est = zeros(distribution.dimension, dtype="float64")
    cov_est = 1.0 * eye(distribution.dimension)
    cov_est[0, 0] = distribution.V
    # mcmc_sampler = AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est)
    # mcmc_sampler = AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est)
Example #10
import os
import sys

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.experiments.SingleChainExperiment import SingleChainExperiment
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.GMMMetropolis import GMMMetropolis
from kameleon_mcmc.mcmc.samplers.StandardMetropolis import StandardMetropolis

if __name__ == '__main__':
    experiment_dir = str(os.path.abspath(sys.argv[0])).split(
        os.sep)[-1].split(".")[0] + os.sep

    distribution = Banana(dimension=8, bananicity=0.1, V=100)

    burnin = 40000
    num_iterations = 80000

    # Metropolis sampler whose proposal is a Gaussian mixture fitted by EM
    mcmc_sampler = GMMMetropolis(distribution,
                                 num_components=10,
                                 num_sample_discard=500,
                                 num_samples_gmm=1000,
                                 num_samples_when_to_switch=10000,
                                 num_runs_em=10)
    # Adaptive-Metropolis baselines:
    # mean_est = zeros(distribution.dimension, dtype="float64")
    # cov_est = 1.0 * eye(distribution.dimension)
    # cov_est[0, 0] = distribution.V
    # mcmc_sampler = AdaptiveMetropolisLearnScale(distribution, mean_est=mean_est, cov_est=cov_est)
    # mcmc_sampler = AdaptiveMetropolis(distribution, mean_est=mean_est, cov_est=cov_est)
Example #11
    # Method of MaternKernel (the module needs: from numpy import reshape, shape, sqrt)
    def gradient(self, x, Y):
        # Gradient of k(x, .) evaluated at each row of Y
        assert len(shape(x)) == 1
        assert len(shape(Y)) == 2
        assert len(x) == shape(Y)[1]

        if self.nu == 1.5 or self.nu == 2.5:
            x_2d = reshape(x, (1, len(x)))
            # The derivative is expressed via the Matern kernel of order
            # nu - 1 with a rescaled length-scale
            lower_order_rho = self.rho * sqrt(2 * (self.nu - 1)) / sqrt(
                2 * self.nu)
            lower_order_kernel = MaternKernel(lower_order_rho, self.nu - 1,
                                              self.sigma)
            k = lower_order_kernel.kernel(x_2d, Y)
            differences = Y - x
            G = (1.0 / lower_order_rho ** 2) * (k.T * differences)
            return G
        else:
            raise NotImplementedError()


if __name__ == '__main__':
    # Smoke test: visualise the kernel matrix between two Banana sample sets
    # (needs Banana and matplotlib's imshow/show imported at module level)
    distribution = Banana()
    Z = distribution.sample(50).samples
    Z2 = distribution.sample(50).samples
    kernel = MaternKernel(5.0, nu=1.5, sigma=2.0)
    K = kernel.kernel(Z, Z2)
    imshow(K, interpolation="nearest")
    # G = kernel.gradient(Z[0], Z2)
    # print(G)
    show()