x = x.view(1, 784)
save_to = this_dir + '/local_params' + str(samp_i) + '.pt'
load_from = save_to
logposterior = lambda aa: model.logposterior_func2(x=x, z=aa)
print('optimizing local', samp_i)
z = optimize_local_expressive_only_sample(logposterior, model, x,
                                          save_to=save_to, load_from=load_from)
z = z.view(-1, z_size)
z = z.data.cpu().numpy()
# print(z)
plot_kde(ax, samps=z, xlimits=xlimits, ylimits=ylimits, cmap='Reds')

# # Plot prob
# col += 1
# ax = plt.subplot2grid((rows, cols), (samp_i, col), frameon=False)
# Ws, logpW, logqW = model.sample_W()  # _, [1], [1]
# func = lambda zs: log_bernoulli(model.decode(Ws, Variable(torch.unsqueeze(zs, 1))),
#                                 Variable(torch.unsqueeze(samp, 0))) \
#        + Variable(torch.unsqueeze(lognormal4(torch.Tensor(zs), torch.zeros(2), torch.zeros(2)), 1))
# plot_isocontours2_exp(ax, func, cmap='Greens', legend=legend)
# if samp_i == 0: ax.annotate('p(z,x|W2)', xytext=(.1, 1.1), xy=(0, 1), textcoords='axes fraction')
# func = lambda zs: lognormal4(torch.Tensor(zs), torch.zeros(2), torch.zeros(2))
# plot_isocontours(ax, func, cmap='Blues', alpha=.3)
# func = lambda zs: lognormal4(torch.Tensor(zs), torch.squeeze(mean.data), torch.squeeze(logvar.data))
# plot_isocontours(ax, func, cmap='Reds')
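# The `plot_kde` helper called above is defined elsewhere in this repo; the
# sketch below is an illustrative guess at its behavior (name and signature
# are assumptions, not the repo's actual implementation). It fits scipy's
# gaussian_kde to the [N, 2] samples and fills contours of the density.
def plot_kde_sketch(ax, samps, xlimits=[-6, 6], ylimits=[-6, 6], cmap='Reds'):
    import numpy as np
    from scipy.stats import gaussian_kde
    kde = gaussian_kde(samps.T)                       # gaussian_kde wants [d, N]
    xs = np.linspace(xlimits[0], xlimits[1], 100)
    ys = np.linspace(ylimits[0], ylimits[1], 100)
    X, Y = np.meshgrid(xs, ys)
    Z = kde(np.vstack([X.ravel(), Y.ravel()])).reshape(X.shape)
    ax.contourf(X, Y, Z, cmap=cmap)                   # filled density contours
    ax.set_xticks([])
    ax.set_yticks([])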
print('\nPosterior', p_i, posterior_names[p_i])
posterior = ttp.posterior_class(posteriors[p_i])
ax = plt.subplot2grid((rows, columns), (p_i, 0), frameon=False)  # , colspan=3
plot_isocontours(ax, posterior.run_log_post, cmap='Blues')
if p_i == 0:
    ax.annotate('Posterior', xytext=(.3, 1.1), xy=(0, 1), textcoords='axes fraction')

for q_i in range(len(models)):
    print(model_names[q_i])
    ax = plt.subplot2grid((rows, columns), (p_i, q_i + 1), frameon=False)  # , colspan=3
    model = models[q_i](posteriors[p_i])
    # model.train(10000, save_to=home + '/Documents/tmp/vars.ckpt')
    model.train(9999000, save_to='')
    samps = model.sample(1000)
    plot_kde(ax, samps, cmap='Reds')
    plot_isocontours(ax, posterior.run_log_post, cmap='Blues', alpha=alpha)
    if p_i == 0:
        ax.annotate(model_names[q_i], xytext=(.38, 1.1), xy=(0, 1), textcoords='axes fraction')

# plt.show()
plt.savefig(home + '/Documents/tmp/plots.png')
print('saved')
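# `plot_isocontours` is likewise external to this fragment. A minimal sketch
# of the usual pattern, assuming `logprob_func` (e.g. posterior.run_log_post
# above) maps an [M, 2] array of points to an [M] array of log-densities;
# everything here is illustrative, not the repo's implementation.
def plot_isocontours_sketch(ax, logprob_func, xlimits=[-6, 6], ylimits=[-6, 6],
                            cmap='Blues', alpha=1.0, numticks=101):
    import numpy as np
    xs = np.linspace(xlimits[0], xlimits[1], numticks)
    ys = np.linspace(ylimits[0], ylimits[1], numticks)
    X, Y = np.meshgrid(xs, ys)
    points = np.stack([X.ravel(), Y.ravel()], axis=1)  # [numticks**2, 2]
    Z = np.exp(logprob_func(points)).reshape(X.shape)  # back to density scale
    ax.contour(X, Y, Z, cmap=cmap, alpha=alpha)
    ax.set_xticks([])
    ax.set_yticks([])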
load_from = save_to
logposterior = lambda aa: model.logposterior_func2(x=x, z=aa)
print('optimizing local flow', samp_i)
# z = optimize_local_expressive_only_sample(logposterior, model, x, save_to=save_to, load_from=load_from)
# z = optimize_local_expressive_only_sample_2(logposterior, model, x)
z = optimize_local_expressive_only_sample(logposterior, model, x)
z = z.view(-1, z_size)
z = z.data.cpu().numpy()
# plot_kde(ax, samps=z, xlimits=xlimits, ylimits=ylimits, cmap='Reds')
plot_kde(ax, samps=z, xlimits=xlimits, ylimits=ylimits, cmap='Blues')

# flow = optimize_local_flow1(logposterior, model, x)
# func = lambda zs: flow.logprob(torch.Tensor(zs).cuda())
# _ = plot_isocontours_new(ax, func, cmap='Blues', xlimits=xlimits, ylimits=ylimits, legend=False, levels=[])

# # Plot prob
# col += 1
# ax = plt.subplot2grid((rows, cols), (samp_i, col), frameon=False)
# Ws, logpW, logqW = model.sample_W()  # _, [1], [1]
# func = lambda zs: log_bernoulli(model.decode(Ws, Variable(torch.unsqueeze(zs, 1))),
#                                 Variable(torch.unsqueeze(samp, 0))) \
#        + Variable(torch.unsqueeze(lognormal4(torch.Tensor(zs), torch.zeros(2), torch.zeros(2)), 1))
# plot_isocontours2_exp(ax, func, cmap='Greens', legend=legend)
# if samp_i == 0: ax.annotate('p(z,x|W2)', xytext=(.1, 1.1), xy=(0, 1), textcoords='axes fraction')
# func = lambda zs: lognormal4(torch.Tensor(zs), torch.zeros(2), torch.zeros(2))
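# `optimize_local_expressive_only_sample` is defined elsewhere in the repo.
# To convey the idea only: below is a sketch of the simplest form of local
# (per-datapoint) posterior optimization, fitting a diagonal Gaussian q(z) to
# `logposterior` by maximizing a single-sample ELBO and then sampling from it.
# All names and hyperparameters are assumptions; the real routine presumably
# optimizes a more expressive (flow-based) q.
def optimize_local_gaussian_sketch(logposterior, z_size, n_steps=1000, n_samples=50):
    import math
    import torch
    mean = torch.zeros(1, z_size, requires_grad=True)
    logvar = torch.zeros(1, z_size, requires_grad=True)
    optimizer = torch.optim.Adam([mean, logvar], lr=1e-2)
    for _ in range(n_steps):
        eps = torch.randn(1, z_size)
        z = mean + torch.exp(0.5 * logvar) * eps             # reparameterization
        # log q(z) of the current diagonal Gaussian, evaluated at its own sample
        logq = (-0.5 * (math.log(2 * math.pi) + logvar + eps ** 2)).sum()
        loss = -(logposterior(z) - logq)                     # negative single-sample ELBO
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    with torch.no_grad():
        eps = torch.randn(n_samples, z_size)
        return mean + torch.exp(0.5 * logvar) * eps          # [n_samples, z_size]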
    load_from = (home + '/Documents/tmp/' + posterior_names[p_i] + '_'
                 + model_names[q_i] + '.ckpt')
else:
    load_from = ''

print(model_names[q_i])
ax = plt.subplot2grid((rows, columns), (q_i + 1, p_i + 1), frameon=False)  # , colspan=3
model = models[q_i](posteriors[p_i])
if train_:
    model.train(100000, save_to=save_to, load_from=load_from)
model.load_params(load_from)
samps = model.sample(100)
plot_kde(ax, samps, cmap='Blues')
plot_isocontours(ax, posterior.run_log_post, cmap='Greys', alpha=alpha)
# if p_i == 0: ax.annotate(model_names[q_i], xytext=(.38, 1.1), xy=(0, 1), textcoords='axes fraction')

# plt.show()
plt.savefig(home + '/Documents/tmp/plots.png')
print('saved')
plt.savefig(home + '/Documents/tmp/plots.eps')
print('saved')
plt.savefig(home + '/Documents/tmp/plots.pdf')
print('saved')

# if __name__ == '__main__':
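# The save_to / load_from checkpointing above is handled inside the model
# classes. A minimal sketch of the pattern for a PyTorch module follows; this
# is an assumption (the .ckpt paths above suggest a TensorFlow-style saver, so
# the actual mechanism may well differ), and the names are illustrative.
def save_params_sketch(model, path):
    import torch
    torch.save(model.state_dict(), path)        # persist parameter tensors only

def load_params_sketch(model, path):
    import torch
    if path != '':                               # empty path means "skip loading"
        model.load_state_dict(torch.load(path))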