def plot(self, y=None, gradient_scale=None, plot_data=False):
    """Plot the function G evaluated over the grid (self.Xs, self.Ys).

    Optionally overlays a quiver field of the negative gradient (on a
    coarser 30x20 grid) and the data points self.Z together with y.

    Parameters
    ----------
    y : array_like, optional
        1-D point at which G is conditioned; defaults to [-2, -2].
    gradient_scale : float, optional
        If given, the negative gradient field is drawn with
        quiver(..., scale=gradient_scale).
    plot_data : bool
        If True, also draw self.Z and y via Visualise.plot_data.
    """
    if y is None:
        # BUGFIX: the old default array([[-2, -2]]) was 2-D, so
        # len(y) == 1 and reshape(y, (1, len(y))) below raised
        # ValueError (2 elements cannot fit shape (1, 1)).
        y = array([-2, -2])

    # y as a single row vector; hoisted out of the loops below,
    # where it was recomputed every iteration despite being invariant.
    y_2d = reshape(y, (1, len(y)))

    # where to evaluate G
    G = zeros((len(self.Ys), len(self.Xs)))

    # for plotting the gradient field, U and V hold one gradient
    # component each, on a coarser grid than G
    if gradient_scale is not None:
        GXs2 = linspace(self.Xs.min(), self.Xs.max(), 30)
        GYs2 = linspace(self.Ys.min(), self.Ys.max(), 20)
        X, Y = meshgrid(GXs2, GYs2)
        U = zeros(shape(X))
        V = zeros(shape(Y))

    # evaluate g at a set of points in Xs and Ys
    for i in range(len(self.Xs)):
        for j in range(len(self.Ys)):
            x_2d = array([[self.Xs[i], self.Ys[j]]])
            G[j, i] = self.compute(x_2d, y_2d, self.Z, self.beta)

    # gradient at lower resolution
    if gradient_scale is not None:
        for i in range(len(GXs2)):
            for j in range(len(GYs2)):
                x_1d = array([GXs2[i], GYs2[j]])
                G_grad = self.compute_gradient(x_1d, y_2d, self.Z, self.beta)
                # negated so the arrows point downhill
                U[j, i] = -G_grad[0, 0]
                V[j, i] = -G_grad[0, 1]

    # plot g and Z points and y
    Visualise.plot_array(self.Xs, self.Ys, G)

    if gradient_scale is not None:
        hold(True)
        quiver(X, Y, U, V, color='y', scale=gradient_scale)
        hold(False)

    if plot_data:
        hold(True)
        Visualise.plot_data(self.Z, y_2d)
        hold(False)
def update(self, mcmc_chain, step_output):
    """Visualise the current state of an MCMC chain.

    Only draws when iteration > self.plot_from and iteration is a
    multiple of self.lag. For a 2-D target it shows the samples (with
    the latest sample and current proposal highlighted), the proposal
    density, per-dimension traces, the log-likelihood trace, and
    per-dimension histograms. For any other dimension it draws a grid
    of per-dimension traces plus the log-likelihood.

    Parameters
    ----------
    mcmc_chain : chain object providing iteration, samples, log_liks
        and mcmc_sampler (with distribution.dimension and Q).
    step_output : step result providing proposal_object.samples.
    """
    # guard clause instead of nesting the whole body in an if
    if mcmc_chain.iteration <= self.plot_from \
            or mcmc_chain.iteration % self.lag != 0:
        return

    if mcmc_chain.mcmc_sampler.distribution.dimension == 2:
        subplot(2, 3, 1)

        if self.distribution is not None:
            Visualise.plot_array(self.Xs, self.Ys, self.P)

        # only plot a number of random samples, otherwise this is too slow
        if self.num_samples_plot > 0:
            num_plot = min(mcmc_chain.iteration - 1, self.num_samples_plot)
            indices = permutation(mcmc_chain.iteration)[:num_plot]
        else:
            num_plot = mcmc_chain.iteration - 1
            indices = arange(num_plot)

        samples = mcmc_chain.samples[0:mcmc_chain.iteration]
        samples_to_plot = mcmc_chain.samples[indices]
        likelihoods_to_plot = mcmc_chain.log_liks[indices]

        proposal_1d = step_output.proposal_object.samples[0, :]
        y = samples[len(samples) - 1]

        # plot samples, coloured by likelihood, or just connect
        if self.colour_by_likelihood:
            # normalise the likelihoods to [0, 1] for the colour map;
            # copy first so the chain's stored values are not modified
            likelihoods_to_plot = likelihoods_to_plot.copy()
            likelihoods_to_plot = likelihoods_to_plot - likelihoods_to_plot.min()
            likelihoods_to_plot = likelihoods_to_plot / likelihoods_to_plot.max()

            cm = get_cmap("jet")
            for i in range(len(samples_to_plot)):
                color = cm(likelihoods_to_plot[i])
                plot(samples_to_plot[i, 0], samples_to_plot[i, 1], "o",
                     color=color, zorder=1)
        else:
            plot(samples_to_plot[:, 0], samples_to_plot[:, 1], "m", zorder=1)

        # latest sample (red star) and current proposal (yellow star)
        plot(y[0], y[1], 'r*', markersize=15.0)
        plot(proposal_1d[0], proposal_1d[1], 'y*', markersize=15.0)

        if self.distribution is not None:
            Visualise.contour_plot_density(mcmc_chain.mcmc_sampler.Q, self.Xs, \
                                           self.Ys, log_domain=False)
        else:
            Visualise.contour_plot_density(mcmc_chain.mcmc_sampler.Q)

        xlabel("$x_1$")
        ylabel("$x_2$")
        if self.num_samples_plot > 0:
            title(str(self.num_samples_plot) + " random samples")

        subplot(2, 3, 2)
        plot(samples[:, 0], 'b')
        title("Trace $x_1$")
        subplot(2, 3, 3)
        plot(samples[:, 1], 'b')
        title("Trace $x_2$")
        subplot(2, 3, 4)
        plot(mcmc_chain.log_liks[0:mcmc_chain.iteration], 'b')
        title("Log-likelihood")

        if len(samples) > 2:
            subplot(2, 3, 5)
            hist(samples[:, 0])
            title("Histogram $x_1$")
            subplot(2, 3, 6)
            hist(samples[:, 1])
            title("Histogram $x_2$")
    else:
        # if target dimension is not two, plot traces
        num_plots = mcmc_chain.mcmc_sampler.distribution.dimension
        samples = mcmc_chain.samples[0:mcmc_chain.iteration]
        likelihoods = mcmc_chain.log_liks[0:mcmc_chain.iteration]

        # BUGFIX: subplot() needs integer grid sizes; round() and the
        # plain "/" yield floats under Python 3's true division.
        # int(...) and "//" behave identically under Python 2.
        num_y = int(round(sqrt(num_plots)))
        num_x = num_plots // num_y + 1

        for i in range(num_plots):
            subplot(num_y, num_x, i + 1)
            plot(samples[:, i], 'b')
            title("Trace $x_" + str(i) + "$")

        # extra cell for the likelihood trace (num_x has +1 headroom)
        subplot(num_y, num_x, num_plots + 1)
        plot(likelihoods)
        title("Log-Likelihood")

    suptitle(mcmc_chain.mcmc_sampler.__class__.__name__)
    show()
    draw()
    clf()