def main():
    # define the MCMC target distribution
    # possible distributions are in kameleon_mcmc.distribution: Banana, Flower, Ring
#    distribution = Banana(dimension=2, bananicity=0.03, V=100.0)
    distribution = Ring()
    
    # create an instance of the Kameleon sampler that learns the kernel length scale;
    # it can be replaced by any other sampler in kameleon_mcmc.mcmc.samplers
    kernel = GaussianKernel(sigma=5)
    mcmc_sampler = KameleonWindowLearnScale(distribution, kernel, stop_adapt=inf, nu2=0.05)
    
    # mcmc chain and its parameters
    start = asarray([0,-3])
    mcmc_params = MCMCParams(start=start, num_iterations=30000)
    chain = MCMCChain(mcmc_sampler, mcmc_params)
    
    # plot the chain state every iteration (from iteration 2000 onwards) and print running statistics
    chain.append_mcmc_output(PlottingOutput(distribution, plot_from=2000))
    chain.append_mcmc_output(StatisticsOutput())
    
    # run the MCMC chain
    chain.run()
    
    # print empirical quantiles, discarding burn-in samples
    burnin = 10000
    print(distribution.emp_quantiles(chain.samples[burnin:]))
    
    Visualise.visualise_distribution(distribution, chain.samples)
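
For reference, a plausible set of imports and an entry-point guard for the snippet above; apart from Ring and Visualise (whose paths appear in the later examples), the module paths here are assumptions based on the package layout hinted at in the comments, so they may need adjusting:

from numpy import asarray, inf

from kameleon_mcmc.distribution.Ring import Ring
from kameleon_mcmc.tools.Visualise import Visualise
# the following paths are assumptions and may differ between versions
from kameleon_mcmc.kernel.GaussianKernel import GaussianKernel
from kameleon_mcmc.mcmc.MCMCChain import MCMCChain
from kameleon_mcmc.mcmc.MCMCParams import MCMCParams
from kameleon_mcmc.mcmc.output.PlottingOutput import PlottingOutput
from kameleon_mcmc.mcmc.output.StatisticsOutput import StatisticsOutput
from kameleon_mcmc.mcmc.samplers.KameleonWindowLearnScale import KameleonWindowLearnScale

if __name__ == '__main__':
    main()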
Example #2
    def plot_proposal(self, ys):
        # visualise the target density itself
        Visualise.visualise_distribution(self.distribution, Z=self.Z, Xs=self.Xs, Ys=self.Ys)
        
        # Kameleon instance used to compute the proposal constants
        mcmc_hammer = Kameleon(self.distribution, self.kernel, self.Z, \
                               self.nu2, self.gamma)
        
        # plot the proposal distribution around each y
        for y in ys:
            mu, L_R = mcmc_hammer.compute_constants(y)
            gaussian = Gaussian(mu, L_R, is_cholesky=True)
            
            hold(True)
            Visualise.contour_plot_density(gaussian)
            hold(False)
            draw()
Example #3
    def plot(self, y=array([[-2, -2]]), gradient_scale=None, plot_data=False):
        
        # grid of G values, evaluated on the Xs x Ys mesh below
        G = zeros((len(self.Ys), len(self.Xs)))
    
        # for plotting the gradient field: U and V hold one component of the gradient each
        if gradient_scale is not None:
            GXs2 = linspace(self.Xs.min(), self.Xs.max(), 30)
            GYs2 = linspace(self.Ys.min(), self.Ys.max(), 20)
            X, Y = meshgrid(GXs2, GYs2)
            U = zeros(shape(X))
            V = zeros(shape(Y))
    
        # evaluate G at the grid points spanned by Xs and Ys
        for i in range(len(self.Xs)):
#            print i, "/", len(self.Xs)
            for j in range(len(self.Ys)):
                x_2d = array([[self.Xs[i], self.Ys[j]]])
                y_2d = reshape(y, (1, -1))
                G[j, i] = self.compute(x_2d, y_2d, self.Z, self.beta)
    
        # gradient at lower resolution
        if gradient_scale is not None:
            for i in range(len(GXs2)):
#                print i, "/", len(GXs2)
                for j in range(len(GYs2)):
                    x_1d = array([GXs2[i], GYs2[j]])
                    y_2d = reshape(y, (1, -1))
                    G_grad = self.compute_gradient(x_1d, y_2d, self.Z, self.beta)
                    U[j, i] = -G_grad[0, 0]
                    V[j, i] = -G_grad[0, 1]
    
        # plot G together with the Z points and y
        y_2d = reshape(y, (1, -1))
        Visualise.plot_array(self.Xs, self.Ys, G)
        
        if gradient_scale is not None:
            hold(True)
            quiver(X, Y, U, V, color='y', scale=gradient_scale)
            hold(False)

        if plot_data:
            hold(True)
            Visualise.plot_data(self.Z, y_2d)
            hold(False)
Example #4
    def __init__(self, distribution=None, plot_from=0, lag=1, num_samples_plot=2000,
                 colour_by_likelihood=True):
        ion()
        self.distribution = distribution
        self.plot_from = plot_from
        self.lag = lag
        self.num_samples_plot = num_samples_plot
        self.colour_by_likelihood = colour_by_likelihood
        
        if distribution is not None:
            self.Xs, self.Ys = Visualise.get_plotting_arrays(distribution)
Example #5
from matplotlib.pyplot import axis, savefig
from numpy import linspace

from kameleon_mcmc.distribution.Banana import Banana
from kameleon_mcmc.distribution.Flower import Flower
from kameleon_mcmc.distribution.Ring import Ring
from kameleon_mcmc.tools.Visualise import Visualise

if __name__ == '__main__':
    distributions = [Ring(), Banana(), Flower()]
    for d in distributions:
        Xs, Ys = d.get_plotting_bounds()
        resolution = 250
        Xs = linspace(Xs[0], Xs[1], resolution)
        Ys = linspace(Ys[0], Ys[1], resolution)

        Visualise.visualise_distribution(d, Xs=Xs, Ys=Ys)
        axis("Off")
        savefig("heatmap_" + d.__class__.__name__ + ".eps",
                bbox_inches='tight')
Example #6
    def update(self, mcmc_chain, step_output):
        if mcmc_chain.iteration > self.plot_from and mcmc_chain.iteration%self.lag==0:
            if mcmc_chain.mcmc_sampler.distribution.dimension==2:
                subplot(2, 3, 1)
                if self.distribution is not None:
                    Visualise.plot_array(self.Xs, self.Ys, self.P)
                
                # only plot a number of random samples otherwise this is too slow
                if self.num_samples_plot>0:
                    num_plot=min(mcmc_chain.iteration-1,self.num_samples_plot)
                    indices=permutation(mcmc_chain.iteration)[:num_plot]
                else:
                    num_plot=mcmc_chain.iteration-1
                    indices=arange(num_plot)
                    
                samples=mcmc_chain.samples[0:mcmc_chain.iteration]
                samples_to_plot=mcmc_chain.samples[indices]
                
                # still plot all likelihoods
                likelihoods=mcmc_chain.log_liks[0:mcmc_chain.iteration]
                likelihoods_to_plot=mcmc_chain.log_liks[indices]
                proposal_1d=step_output.proposal_object.samples[0,:]
                
                y = samples[len(samples) - 1]
                
                # plot samples, coloured by likelihood, or just connect
                if self.colour_by_likelihood:
                    likelihoods_to_plot=likelihoods_to_plot.copy()
                    likelihoods_to_plot=likelihoods_to_plot-likelihoods_to_plot.min()
                    likelihoods_to_plot=likelihoods_to_plot/likelihoods_to_plot.max()
                    
                    cm=get_cmap("jet")
                    for i in range(len(samples_to_plot)):
                        color = cm(likelihoods_to_plot[i])
                        plot(samples_to_plot[i, 0], samples_to_plot[i, 1], "o",
                             color=color, zorder=1)
                else:
                    plot(samples_to_plot[:,0], samples_to_plot[:,1], "m", zorder=1)
                
                plot(y[0], y[1], 'r*', markersize=15.0)
                plot(proposal_1d[0], proposal_1d[1], 'y*', markersize=15.0)
                if self.distribution is not None:
                    Visualise.contour_plot_density(mcmc_chain.mcmc_sampler.Q, self.Xs, \
                                                   self.Ys, log_domain=False)
                else:
                    Visualise.contour_plot_density(mcmc_chain.mcmc_sampler.Q)
#                    axis('equal')
                
                xlabel("$x_1$")
                ylabel("$x_2$")
                if self.num_samples_plot>0:
                    title(str(self.num_samples_plot) + " random samples")
            
                subplot(2, 3, 2)
                plot(samples[:, 0], 'b')
                title("Trace $x_1$")
                
                subplot(2, 3, 3)
                plot(samples[:, 1], 'b')
                title("Trace $x_2$")
                
                subplot(2, 3, 4)
                plot(mcmc_chain.log_liks[0:mcmc_chain.iteration], 'b')
                title("Log-likelihood")
                
                if len(samples) > 2:
                    subplot(2, 3, 5)
                    hist(samples[:, 0])
                    title("Histogram $x_1$")
            
                    subplot(2, 3, 6)
                    hist(samples[:, 1])
                    title("Histogram $x_2$")
            else:
                # if target dimension is not two, plot traces
                num_plots=mcmc_chain.mcmc_sampler.distribution.dimension
                samples=mcmc_chain.samples[0:mcmc_chain.iteration]
                likelihoods=mcmc_chain.log_liks[0:mcmc_chain.iteration]
                num_y = int(round(sqrt(num_plots)))
                num_x = num_plots // num_y + 1
                for i in range(num_plots):
                    subplot(num_y, num_x, i+1)
                    plot(samples[:, i], 'b')
                    title("Trace $x_" +str(i) + "$")
                    
                subplot(num_y, num_x, num_plots+1)
                plot(likelihoods)
                title("Log-Likelihood")
                
            suptitle(mcmc_chain.mcmc_sampler.__class__.__name__)
            show()
            draw()
            clf()
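
The matplotlib and numpy names used throughout update() would typically be pulled in at module level; a plausible (assumed) import block, using only standard pyplot and numpy functions, is sketched below:

# names assumed by the update() method above
from matplotlib.pyplot import clf, draw, get_cmap, hist, plot, show, \
    subplot, suptitle, title, xlabel, ylabel
from numpy import arange, sqrt
from numpy.random import permutation

from kameleon_mcmc.tools.Visualise import Visualise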