def main(args):
    assert 11_000_000 >= args.num_datapoints, (
        "the Higgs dataset has only 11,000,000 data points"
    )
    # the full dataset takes hours for plain HMC!
    if args.dataset == 'higgs':
        _, fetch = load_dataset(
            HIGGS, shuffle=False, num_datapoints=args.num_datapoints
        )
        data, obs = fetch()
    else:
        data, obs = (np.random.normal(size=(10, 28)), np.ones(10))

    hmcecs_key, hmc_key = random.split(random.PRNGKey(args.rng_seed))

    # choose the inner kernel for HMCECS
    if args.inner_kernel == 'hmc':
        inner_kernel = HMC(model)
    else:
        inner_kernel = NUTS(model)

    start = time.time()
    losses, hmcecs_samples = run_hmcecs(hmcecs_key, args, data, obs, inner_kernel)
    hmcecs_runtime = time.time() - start

    start = time.time()
    hmc_samples = run_hmc(hmc_key, args, data, obs, inner_kernel)
    hmc_runtime = time.time() - start

    summary_plot(losses, hmc_samples, hmcecs_samples, hmc_runtime, hmcecs_runtime)
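
# `main` above relies on a `run_hmcecs` helper that is not shown here. Below is a
# minimal sketch of what it might look like, assuming the model exposes a latent
# site named "theta" and that `args` carries num_svi_steps, subsample_size,
# num_blocks, num_warmup, and num_samples; treat those names as illustrative
# assumptions, not as the example's actual definitions.
from numpyro.infer.autoguide import AutoDelta

def run_hmcecs(hmcecs_key, args, data, obs, inner_kernel):
    svi_key, mcmc_key = random.split(hmcecs_key)
    # fit a point estimate (AutoDelta) to use as the reference point for the
    # Taylor proxy's second-order expansion of the log likelihood
    guide = AutoDelta(model)
    svi = SVI(model, guide, numpyro.optim.Adam(step_size=1e-3), loss=Trace_ELBO())
    svi_result = svi.run(svi_key, args.num_svi_steps, data, obs, args.subsample_size)
    ref_params = {"theta": svi_result.params["theta_auto_loc"]}
    # the proxy lets HMCECS estimate the full-data likelihood from a subsample
    kernel = HMCECS(
        inner_kernel, num_blocks=args.num_blocks, proxy=HMCECS.taylor_proxy(ref_params)
    )
    mcmc = MCMC(kernel, num_warmup=args.num_warmup, num_samples=args.num_samples)
    mcmc.run(mcmc_key, data, obs, args.subsample_size)
    mcmc.print_summary()
    return svi_result.losses, mcmc.get_samples()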
def run_inference(model, capture_history, sex, rng_key, args):
    if args.algo == "NUTS":
        kernel = NUTS(model)
    elif args.algo == "HMC":
        kernel = HMC(model)
    mcmc = MCMC(
        kernel,
        num_warmup=args.num_warmup,
        num_samples=args.num_samples,
        num_chains=args.num_chains,
        progress_bar="NUMPYRO_SPHINXBUILD" not in os.environ,
    )
    mcmc.run(rng_key, capture_history, sex)
    mcmc.print_summary()
    return mcmc.get_samples()
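
# A hypothetical smoke-test invocation of the helper above, with
# types.SimpleNamespace standing in for the parsed argparse namespace and
# synthetic binary data in place of a real capture-history matrix; `model` is
# assumed to be one of the example's capture-recapture models.
import numpy as np
from types import SimpleNamespace

args = SimpleNamespace(algo="NUTS", num_warmup=1000, num_samples=1000, num_chains=1)
capture_history = np.random.binomial(1, 0.3, size=(100, 7))  # 100 animals, 7 occasions
sex = np.random.binomial(1, 0.5, size=100)
samples = run_inference(model, capture_history, sex, random.PRNGKey(0), args)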
def run_inference(model, at_bats, hits, rng_key, args):
    if args.algo == "NUTS":
        kernel = NUTS(model)
    elif args.algo == "HMC":
        kernel = HMC(model)
    elif args.algo == "SA":
        kernel = SA(model)
    mcmc = MCMC(
        kernel,
        num_warmup=args.num_warmup,
        num_samples=args.num_samples,
        num_chains=args.num_chains,
        progress_bar=not ("NUMPYRO_SPHINXBUILD" in os.environ or args.disable_progbar),
    )
    mcmc.run(rng_key, at_bats, hits)
    return mcmc.get_samples()
def run_inference(
    model: Callable,
    at_bats: jnp.ndarray,
    hits: jnp.ndarray,
    rng_key: jnp.ndarray,
    *,
    num_warmup: int = 1500,
    num_samples: int = 3000,
    num_chains: int = 1,
    algo_name: str = "NUTS",
) -> Dict[str, jnp.ndarray]:
    if algo_name == "NUTS":
        kernel = NUTS(model)
    elif algo_name == "HMC":
        kernel = HMC(model)
    elif algo_name == "SA":
        kernel = SA(model)
    else:
        raise ValueError(f"Unknown algorithm name: {algo_name!r}")
    mcmc = MCMC(
        kernel, num_warmup=num_warmup, num_samples=num_samples, num_chains=num_chains
    )
    mcmc.run(rng_key, at_bats, hits)
    return mcmc.get_samples()
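
# For context, a minimal fully pooled baseball model of the kind this helper is
# typically run with; this is a sketch under the usual NumPyro imports, and the
# real example defines several pooling variants that may differ in detail.
import numpyro
import numpyro.distributions as dist

def fully_pooled(at_bats, hits=None):
    # a single shared batting ability phi for all players
    phi = numpyro.sample("phi", dist.Uniform(0.0, 1.0))
    with numpyro.plate("num_players", at_bats.shape[0]):
        return numpyro.sample("obs", dist.Binomial(at_bats, probs=phi), obs=hits)

# example call with made-up data:
# samples = run_inference(
#     fully_pooled,
#     jnp.array([45, 45, 45]),
#     jnp.array([18, 17, 16]),
#     random.PRNGKey(0),
#     algo_name="NUTS",
# )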
def benchmark_hmc(args, features, labels):
    rng_key = random.PRNGKey(1)
    start = time.time()
    # a MAP estimate taken from the following source:
    # https://github.com/google/edward2/blob/master/examples/no_u_turn_sampler/logistic_regression.py#L117
    ref_params = {
        "coefs": jnp.array([
            +2.03420663e00, -3.53567265e-02, -1.49223924e-01, -3.07049364e-01, -1.00028366e-01,
            -1.46827862e-01, -1.64167881e-01, -4.20344204e-01, +9.47479829e-02, -1.12681836e-02,
            +2.64442056e-01, -1.22087866e-01, -6.00568838e-02, -3.79419506e-01, -1.06668741e-01,
            -2.97053963e-01, -2.05253899e-01, -4.69537191e-02, -2.78072730e-02, -1.43250525e-01,
            -6.77954629e-02, -4.34899796e-03, +5.90927452e-02, +7.23133609e-02, +1.38526391e-02,
            -1.24497898e-01, -1.50733739e-02, -2.68872194e-02, -1.80925727e-02, +3.47936489e-02,
            +4.03552800e-02, -9.98773426e-03, +6.20188080e-02, +1.15002751e-01, +1.32145107e-01,
            +2.69109547e-01, +2.45785132e-01, +1.19035013e-01, -2.59744357e-02, +9.94279515e-04,
            +3.39266285e-02, -1.44057125e-02, -6.95222765e-02, -7.52013028e-02, +1.21171586e-01,
            +2.29205526e-02, +1.47308692e-01, -8.34354162e-02, -9.34122875e-02, -2.97472421e-02,
            -3.03937674e-01, -1.70958012e-01, -1.59496680e-01, -1.88516974e-01, -1.20889175e00,
        ])
    }
    if args.algo == "HMC":
        step_size = jnp.sqrt(0.5 / features.shape[0])
        trajectory_length = step_size * args.num_steps
        kernel = HMC(
            model,
            step_size=step_size,
            trajectory_length=trajectory_length,
            adapt_step_size=False,
            dense_mass=args.dense_mass,
        )
        subsample_size = None
    elif args.algo == "NUTS":
        kernel = NUTS(model, dense_mass=args.dense_mass)
        subsample_size = None
    elif args.algo == "HMCECS":
        subsample_size = 1000
        inner_kernel = NUTS(
            model,
            init_strategy=init_to_value(values=ref_params),
            dense_mass=args.dense_mass,
        )
        # note: with subsample_size=1000 and num_blocks=100, each MCMC step refreshes
        # one block of 10 subsample indices, so it takes about 50,000 MCMC steps to
        # cycle through the whole dataset
        kernel = HMCECS(
            inner_kernel, num_blocks=100, proxy=HMCECS.taylor_proxy(ref_params)
        )
    elif args.algo == "SA":
        # NB: this kernel requires large num_warmup and num_samples,
        # and it runs much faster on GPU than on CPU
        kernel = SA(
            model, adapt_state_size=1000, init_strategy=init_to_value(values=ref_params)
        )
        subsample_size = None
    elif args.algo == "FlowHMCECS":
        subsample_size = 1000
        guide = AutoBNAFNormal(model, num_flows=1, hidden_factors=[8])
        svi = SVI(model, guide, numpyro.optim.Adam(0.01), Trace_ELBO())
        svi_result = svi.run(random.PRNGKey(2), 2000, features, labels)
        params, losses = svi_result.params, svi_result.losses
        plt.plot(losses)
        plt.show()

        neutra = NeuTraReparam(guide, params)
        neutra_model = neutra.reparam(model)
        neutra_ref_params = {"auto_shared_latent": jnp.zeros(55)}
        # no need to adapt the mass matrix if the flow does a good job
        inner_kernel = NUTS(
            neutra_model,
            init_strategy=init_to_value(values=neutra_ref_params),
            adapt_mass_matrix=False,
        )
        kernel = HMCECS(
            inner_kernel, num_blocks=100, proxy=HMCECS.taylor_proxy(neutra_ref_params)
        )
    else:
        raise ValueError(
            "Invalid algorithm; choose one of 'HMC', 'NUTS', 'HMCECS', 'SA', or 'FlowHMCECS'."
        )
    mcmc = MCMC(kernel, num_warmup=args.num_warmup, num_samples=args.num_samples)
    mcmc.run(rng_key, features, labels, subsample_size, extra_fields=("accept_prob",))
    print("Mean accept prob:", jnp.mean(mcmc.get_extra_fields()["accept_prob"]))
    mcmc.print_summary(exclude_deterministic=False)
    print("\nMCMC elapsed time:", time.time() - start)
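
# benchmark_hmc assumes a logistic regression `model` whose likelihood plate
# supports subsampling, which is what HMCECS exploits. A sketch consistent with
# the 55-dimensional reference coefficients above (the real example's model may
# differ in details):
def model(data, labels, subsample_size=None):
    dim = data.shape[1]
    coefs = numpyro.sample("coefs", dist.Normal(jnp.zeros(dim), jnp.ones(dim)))
    # when subsample_size is given, the plate yields a random batch of indices
    # and NumPyro rescales the batch log likelihood to the full dataset
    with numpyro.plate("N", data.shape[0], subsample_size=subsample_size) as idx:
        logits = jnp.dot(data[idx], coefs)
        return numpyro.sample("obs", dist.Bernoulli(logits=logits), obs=labels[idx])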
def test_pickle_discrete_hmc(kernel):
    mcmc = MCMC(kernel(HMC(bernoulli_model)), num_warmup=10, num_samples=10)
    mcmc.run(random.PRNGKey(0))
    pickled_mcmc = pickle.loads(pickle.dumps(mcmc))
    test_util.check_close(mcmc.get_samples(), pickled_mcmc.get_samples())
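
# The test above is presumably parametrized over NumPyro's discrete-latent
# wrappers, e.g. pytest.mark.parametrize("kernel", [DiscreteHMCGibbs, MixedHMC]).
# A sketch of a compatible `bernoulli_model` (the actual test model may differ):
import numpyro
import numpyro.distributions as dist

def bernoulli_model():
    # one continuous latent for the inner HMC kernel and one discrete latent
    # for the Gibbs / mixed updates
    p = numpyro.sample("p", dist.Beta(1.0, 1.0))
    with numpyro.plate("N", 10):
        numpyro.sample("x", dist.Bernoulli(p))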