pi = get_model(args, target=target).to(args.device)
p = StandardNormal((target.size,)).to(args.device)
model_id = get_model_id(args)

state_dict = torch.load(path_check)
pi.load_state_dict(state_dict)

##############
## Sampling ##
##############

print('Sampling...')
pi = pi.eval()

with torch.no_grad():
    # Draw base samples and push them through the flow transforms.
    z = p.sample(eval_args.num_samples)
    for t in pi.transforms:
        z, _ = t(z)
    theta = z

imgs = target.vec2img(theta).cpu().float().unsqueeze(1)

############
## Sample ##
############

path_samples = os.path.join(exp_path, 'samples.png')
vutils.save_image(imgs, fp=path_samples, nrow=eval_args.nrow)

# Rescale ground-truth and corrupted images from [-1, 1] to [0, 1] before saving.
data_true = (target.img.unsqueeze(0).unsqueeze(0) + 1) / 2
data_corr = (target.img_corrupted.unsqueeze(0).unsqueeze(0) + 1) / 2
# NOTE: the original call was truncated at this point; the filenames and the
# parallel save of the corrupted image are assumptions.
vutils.save_image(data_true, fp=os.path.join(exp_path, 'data_true.png'), nrow=1)
vutils.save_image(data_corr, fp=os.path.join(exp_path, 'data_corr.png'), nrow=1)
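# Hedged sketch (not part of the original script): it can be convenient to also save
# the ground truth, the corrupted observation, and a few samples as a single grid for
# side-by-side inspection. Shapes are assumed to be [*, 1, H, W]; the filename is an
# assumption.
comparison = torch.cat([data_true.cpu(), data_corr.cpu(), imgs[:6]], dim=0)
vutils.save_image(comparison, fp=os.path.join(exp_path, 'comparison.png'), nrow=4)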
##############
## Sampling ##
##############

if args.num_dims == 2:
    print('Sampling...')

    # Make dir
    if not os.path.exists('figures'):
        os.mkdir('figures')

    # Learned distribution
    z = p.sample(num_samples=args.num_samples)
    for t in pi.transforms:
        z, _ = t(z)
    theta = z.detach().numpy()

    plt.figure(figsize=(args.pixels / args.dpi, args.pixels / args.dpi), dpi=args.dpi)
    if args.num_bits is not None:
        plt.hist2d(theta[:, 0], theta[:, 1], bins=list(range(2**args.num_bits + 1)), density=True)
    else:
        plt.hist2d(theta[:, 0], theta[:, 1], bins=100, density=True)

    if args.minimal:
        plt.axis('off')
    else:
        plt.title('Learned Distribution')
        plt.colorbar()
        if args.num_bits is not None:
            plt.xticks(list(range(2**args.num_bits)))
            plt.yticks(list(range(2**args.num_bits)))
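    # Hedged continuation (the excerpt ends before the figure is written out): the
    # histogram would typically be saved into the 'figures' directory created above.
    # The filename below is an assumption, not taken from the original script.
    plt.savefig(os.path.join('figures', 'learned_distribution.png'))
    plt.close()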