def main(args):
    """Run nested sampling on a correlated multivariate Gaussian likelihood."""
    from nnest import NestedSampler

    def loglike(x):
        # Unit variances with constant pairwise correlation args.corr.
        cov = np.eye(args.x_dim) + args.corr * (1 - np.eye(args.x_dim))
        return multivariate_normal.logpdf(x, mean=np.zeros(args.x_dim), cov=cov)

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 3. * x

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise)
def main(args):
    """Run nested sampling on a Gaussian-mixture likelihood."""
    from nnest import NestedSampler

    mixture = GaussianMix()

    def loglike(z):
        # Evaluate the mixture per point; element [0] is the log-likelihood.
        return np.array([mixture(point)[0] for point in z])

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 10. * x

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise)
def main(args):
    """Run nested sampling on the (negative) Rosenbrock likelihood."""
    from nnest import NestedSampler

    def rosenbrock(x):
        # Negative Rosenbrock function evaluated at a single point.
        return -sum(100.0 * (x[1:] - x[:-1] ** 2.0) ** 2.0 + (1 - x[:-1]) ** 2.0)

    def loglike(z):
        return np.array([rosenbrock(x) for x in z])

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 5. * x

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise,
                num_test_samples=args.test_samples,
                test_mcmc_steps=args.test_mcmc_steps)
def main(args):
    """Run nested sampling on the (negative) Himmelblau likelihood."""
    from nnest import NestedSampler

    def loglike(z):
        # Vectorised over the batch: columns are the two coordinates.
        a, b = z[:, 0], z[:, 1]
        return -(a ** 2 + b - 11.) ** 2 - (a + b ** 2 - 7.) ** 2

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 5. * x

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise,
                num_test_samples=args.test_samples,
                test_mcmc_steps=args.test_mcmc_steps)
def main(args):
    """Run nested sampling on a likelihood chosen by name via args.likelihood.

    Improvements over the original: the args.likelihood.lower() call is made
    once instead of once per elif branch, and the long elif chain is replaced
    by a dispatch table mapping likelihood name -> (constructor, transform).

    Raises:
        ValueError: if args.likelihood is not a known likelihood name.
    """
    from nnest import NestedSampler
    from nnest.distributions import GeneralisedNormal
    from nnest.likelihoods import Himmelblau, Rosenbrock, Gaussian, Eggbox, GaussianShell, GaussianMix

    # Optional non-Gaussian base distribution for the flow.
    if args.base_dist == 'gen_normal':
        base_dist = GeneralisedNormal(torch.zeros(args.x_dim),
                                      torch.ones(args.x_dim),
                                      torch.tensor(args.beta))
    else:
        base_dist = None

    # name -> (likelihood factory, unit-cube-to-prior transform).
    # Factories are lazy so only the selected likelihood is constructed.
    dispatch = {
        'himmelblau': (lambda: Himmelblau(args.x_dim), lambda x: 5 * x),
        'rosenbrock': (lambda: Rosenbrock(args.x_dim), lambda x: 5 * x),
        'gaussian': (lambda: Gaussian(args.x_dim, args.corr, lim=3), lambda x: 3 * x),
        'eggbox': (lambda: Eggbox(args.x_dim), lambda x: x * 5 * np.pi),
        'shell': (lambda: GaussianShell(args.x_dim), lambda x: 5 * x),
        'mixture': (lambda: GaussianMix(args.x_dim), lambda x: 10 * x),
    }
    try:
        make_like, transform = dispatch[args.likelihood.lower()]
    except KeyError:
        raise ValueError('Likelihood not found') from None
    like = make_like()

    log_dir = os.path.join(args.log_dir, args.likelihood)
    log_dir += args.log_suffix

    sampler = NestedSampler(
        like.x_dim, like, transform=transform, log_dir=log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu, base_dist=base_dist,
        scale=args.scale, flow=args.flow)

    start_time = time.time()
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, jitter=args.jitter,
                mcmc_num_chains=args.mcmc_num_chains,
                mcmc_dynamic_step_size=not args.mcmc_fixed_step_size)
    end_time = time.time()
    print('Run time %s' % datetime.timedelta(seconds=end_time - start_time))
def test_rosenbrock():
    """Check the sampler recovers the expected log-evidence (about -5.80)
    for the 2D Rosenbrock likelihood, within max_evidence_error."""
    likelihood = Rosenbrock(2)
    sampler = NestedSampler(2, likelihood, transform=lambda u: 5 * u,
                            num_live_points=1000, hidden_dim=16, num_layers=1,
                            num_blocks=3, num_slow=0, flow='spline')
    sampler.run(mcmc_num_chains=10, mcmc_dynamic_step_size=False)
    assert np.abs(sampler.logz + 5.80) <= max_evidence_error
def main(args):
    """Run nested sampling on the (negative) Rosenbrock likelihood with an
    optional generalised-normal base distribution for the flow."""
    from nnest import NestedSampler
    from nnest.distributions import GeneralisedNormal

    def rosenbrock(x):
        # Negative Rosenbrock function evaluated at a single point.
        return -sum(100.0 * (x[1:] - x[:-1] ** 2.0) ** 2.0 + (1 - x[:-1]) ** 2.0)

    def loglike(z):
        return np.array([rosenbrock(x) for x in z])

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 5. * x

    if args.base_dist == 'gen_normal':
        base_dist = GeneralisedNormal(torch.zeros(args.x_dim),
                                      torch.ones(args.x_dim),
                                      torch.tensor(args.beta))
    else:
        base_dist = None

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu, base_dist=base_dist,
        scale=args.scale)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise,
                num_test_mcmc_samples=args.test_samples,
                test_mcmc_steps=args.test_mcmc_steps)
def main(args):
    """Run nested sampling on the (negative) Himmelblau likelihood with an
    optional generalised-normal base distribution for the flow."""
    from nnest import NestedSampler
    from nnest.distributions import GeneralisedNormal

    def loglike(z):
        # Vectorised over the batch: columns are the two coordinates.
        a, b = z[:, 0], z[:, 1]
        return -(a ** 2 + b - 11.) ** 2 - (a + b ** 2 - 7.) ** 2

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 5. * x

    if args.base_dist == 'gen_normal':
        base_dist = GeneralisedNormal(torch.zeros(args.x_dim),
                                      torch.ones(args.x_dim),
                                      torch.tensor(args.beta))
    else:
        base_dist = None

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu, base_dist=base_dist,
        scale=args.scale)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise,
                num_test_mcmc_samples=args.test_samples,
                test_mcmc_steps=args.test_mcmc_steps)
def main(args):
    """Run nested sampling on a Gaussian-mixture likelihood that also returns
    derived parameters.

    Fix over the original: loglike called g(x) TWICE per point (once for the
    log-likelihood, once for the derived values). Likelihood evaluations are
    the dominant cost in nested sampling, so each point is now evaluated once
    and the (logl, derived) pair is split afterwards.
    """
    from nnest import NestedSampler
    from nnest.distributions import GeneralisedNormal

    g = GaussianMix(nderived=args.num_derived)

    def loglike(z):
        # One evaluation per point; g(x) returns (log-likelihood, derived).
        results = [g(x) for x in z]
        logls = np.array([r[0] for r in results])
        derived = np.array([r[1] for r in results])
        return logls, derived

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 10. * x

    if args.base_dist == 'gen_normal':
        base_dist = GeneralisedNormal(torch.zeros(args.x_dim),
                                      torch.ones(args.x_dim),
                                      torch.tensor(args.beta))
    else:
        base_dist = None

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, num_derived=args.num_derived,
        use_gpu=args.use_gpu, base_dist=base_dist, scale=args.scale)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise)
def main(args):
    """Run nested sampling on a correlated multivariate Gaussian likelihood
    with an optional generalised-normal base distribution for the flow."""
    from nnest import NestedSampler
    from nnest.distributions import GeneralisedNormal

    def loglike(x):
        # Unit variances with constant pairwise correlation args.corr.
        cov = np.eye(args.x_dim) + args.corr * (1 - np.eye(args.x_dim))
        return multivariate_normal.logpdf(x, mean=np.zeros(args.x_dim), cov=cov)

    def transform(x):
        # Scale the sampler's unit cube onto the prior range.
        return 3. * x

    if args.base_dist == 'gen_normal':
        base_dist = GeneralisedNormal(torch.zeros(args.x_dim),
                                      torch.ones(args.x_dim),
                                      torch.tensor(args.beta))
    else:
        base_dist = None

    sampler = NestedSampler(
        args.x_dim, loglike, transform=transform, log_dir=args.log_dir,
        num_live_points=args.num_live_points, hidden_dim=args.hidden_dim,
        num_layers=args.num_layers, num_blocks=args.num_blocks,
        num_slow=args.num_slow, use_gpu=args.use_gpu, base_dist=base_dist,
        scale=args.scale)
    sampler.run(train_iters=args.train_iters, mcmc_steps=args.mcmc_steps,
                volume_switch=args.switch, noise=args.noise)
def run(cosmo, data, command_line):
    """Drive an NN-accelerated sampling run from MontePython-style state.

    Depending on data.NN_arguments['sampler'], either runs NestedSampler
    (with a prior transform from the sampler's cube to physical parameters)
    or MCMCSampler (with boundary rejection via data.boundary_loglike).
    Both branches mutate data.mcmc_parameters[...]['current'] as a side
    effect of likelihood evaluation.

    NOTE(review): `sampler.compute_lkl` is an external helper (presumably
    MontePython's sampler module) — its exact contract is not visible here.
    """
    derived_param_names = data.get_mcmc_parameters(['derived'])
    # NOTE(review): NN_param_names and nDerived are assigned but never used
    # below; code reads data.NN_param_names directly instead.
    NN_param_names = data.NN_param_names
    nDims = len(data.NN_param_names)
    nDerived = len(derived_param_names)
    if data.NN_arguments['sampler'].lower() == 'nested':
        def prior(cube):
            """Map sampler cube coordinates to physical parameter values."""
            # NN uses cube -1 to 1 so convert to 0 to 1
            cube = cube / 2 + 0.5
            if len(cube.shape) == 1:
                # Single point: still return a 2D (1, nDims) array.
                theta = [0.0] * nDims
                for i, name in enumerate(data.NN_param_names):
                    theta[i] = data.mcmc_parameters[name]['prior'] \
                        .map_from_unit_interval(cube[i])
                return np.array([theta])
            else:
                # Batch of points: map each row independently.
                thetas = []
                for c in cube:
                    theta = [0.0] * nDims
                    for i, name in enumerate(data.NN_param_names):
                        theta[i] = data.mcmc_parameters[name]['prior'] \
                            .map_from_unit_interval(c[i])
                    thetas.append(theta)
                return np.array(thetas)

        def loglike(thetas):
            """Evaluate the cosmological likelihood for a batch of points."""
            logls = []
            for theta in thetas:
                # Best-effort fast/slow bookkeeping; missing keys are ignored.
                try:
                    data.check_for_slow_step(theta)
                except KeyError:
                    pass
                # Push the point into the shared parameter state.
                for i, name in enumerate(data.NN_param_names):
                    data.mcmc_parameters[name]['current'] = theta[i]
                data.update_cosmo_arguments()
                # Compute likelihood
                logl = sampler.compute_lkl(cosmo, data)
                if not np.isfinite(logl):
                    # Non-finite values are reported but still appended.
                    print('Nan encountered in likelihood')
                    print(data.mcmc_parameters)
                logls.append(logl)
            logls = np.array(logls)
            return logls

        nn = NestedSampler(data.NN_arguments['x_dim'], loglike,
                           transform=prior, append_run_num=False,
                           hidden_dim=data.NN_arguments['hidden_dim'],
                           num_slow=data.NN_arguments['num_slow'],
                           num_derived=data.NN_arguments['num_derived'],
                           batch_size=data.NN_arguments['batch_size'],
                           flow=data.NN_arguments['flow'],
                           num_blocks=data.NN_arguments['num_blocks'],
                           num_layers=data.NN_arguments['hidden_layers'],
                           log_dir=data.NN_arguments['log_dir'],
                           num_live_points=data.NN_arguments['n_live_points'])
        nn.run(train_iters=data.NN_arguments['train_iters'],
               volume_switch=data.NN_arguments['switch'],
               mcmc_steps=data.NN_arguments['mcmc_steps'],
               dlogz=data.NN_arguments['evidence_tolerance'],
               mcmc_batch_size=1)
    else:
        def loglike(thetas):
            """Batch likelihood with hard prior-boundary rejection."""
            logls = []
            for theta in thetas:
                # Best-effort fast/slow bookkeeping; missing keys are ignored.
                try:
                    data.check_for_slow_step(theta)
                except KeyError:
                    pass
                # Count violated prior bounds; value[1]/value[2] are the
                # lower/upper limits, with -1 or None meaning "unbounded".
                flag = 0
                for i, name in enumerate(data.NN_param_names):
                    value = data.mcmc_parameters[name]['initial']
                    if ((str(value[1]) != str(-1) and value[1] is not None)
                            and (theta[i] < value[1])):
                        flag += 1  # if a boundary value is reached, increment
                    elif ((str(value[2]) != str(-1) and value[2] is not None)
                            and theta[i] > value[2]):
                        flag += 1  # same
                if flag == 0:
                    # Inside the prior box: evaluate the real likelihood.
                    for i, name in enumerate(data.NN_param_names):
                        data.mcmc_parameters[name]['current'] = theta[i]
                    data.update_cosmo_arguments()
                    # Compute likelihood
                    logl = sampler.compute_lkl(cosmo, data)
                    if not np.isfinite(logl):
                        print('Nan encountered in likelihood')
                        print(data.mcmc_parameters)
                else:
                    # Outside the prior box: assign the penalty value.
                    logl = data.boundary_loglike
                logls.append(logl)
            logls = np.array(logls)
            return logls

        nn = MCMCSampler(data.NN_arguments['x_dim'], loglike,
                         append_run_num=False,
                         hidden_dim=data.NN_arguments['hidden_dim'],
                         num_slow=data.NN_arguments['num_slow'],
                         num_derived=data.NN_arguments['num_derived'],
                         batch_size=data.NN_arguments['batch_size'],
                         flow=data.NN_arguments['flow'],
                         num_blocks=data.NN_arguments['num_blocks'],
                         num_layers=data.NN_arguments['hidden_layers'],
                         log_dir=data.NN_arguments['log_dir'])
        nn.run(train_iters=data.NN_arguments['train_iters'],
               mcmc_steps=data.NN_arguments['mcmc_steps'],
               bootstrap_fileroot=data.NN_arguments['bootstrap_fileroot'],
               bootstrap_match='*__*.txt',
               bootstrap_iters=1)