Code example #1
import logging
import sys
import time
from typing import Callable, Dict

from torch import Tensor

logger = logging.getLogger(__name__)


def run_inference(
    pyro_model: Callable[[Tensor, Tensor, Tensor, bool, bool], None],
    X: Tensor,
    Y: Tensor,
    Yvar: Tensor,
    num_samples: int = 512,
    warmup_steps: int = 1024,
    thinning: int = 16,
    use_input_warping: bool = False,
    max_tree_depth: int = 6,
    use_saas: bool = False,
    disable_progbar: bool = False,
) -> Dict[str, Tensor]:
    start = time.time()
    try:
        from pyro.infer.mcmc import NUTS, MCMC
    except ImportError:  # pragma: no cover
        raise RuntimeError("Cannot call run_inference without pyro installed!")
    kernel = NUTS(
        pyro_model,
        jit_compile=True,
        full_mass=True,
        ignore_jit_warnings=True,
        max_tree_depth=max_tree_depth,
    )
    mcmc = MCMC(
        kernel,
        warmup_steps=warmup_steps,
        num_samples=num_samples,
        disable_progbar=disable_progbar,
    )
    mcmc.run(
        # there is an issue with jit-compilation and cuda
        # for now, we run MCMC on the CPU.
        X.cpu(),
        Y.cpu(),
        Yvar.cpu(),
        use_input_warping=use_input_warping,
        use_saas=use_saas,
    )
    # route the printed summary through the logger, restoring stdout even
    # if summary() raises
    orig_std_out = sys.stdout.write
    sys.stdout.write = logger.info
    try:
        mcmc.summary()
    finally:
        sys.stdout.write = orig_std_out
    logger.info(f"MCMC elapsed time: {time.time() - start}")
    samples = mcmc.get_samples()
    if use_saas:
        # compute the SAAS lengthscales and discard the raw parameters
        inv_length_sq = (
            samples["kernel_tausq"].unsqueeze(-1) * samples["_kernel_inv_length_sq"]
        )
        samples["lengthscale"] = (1.0 / inv_length_sq).sqrt()  # pyre-ignore [16]
        del samples["kernel_tausq"], samples["_kernel_inv_length_sq"]
    # thin
    for k, v in samples.items():
        # apply thinning and move back to X's device
        samples[k] = v[::thinning].to(device=X.device)
    return samples
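
To show how this helper is wired up end to end, here is a minimal usage sketch. `toy_pyro_model` is a hypothetical stand-in (a single Gaussian mean site), not the model this helper was written for; it only needs to accept the `(X, Y, Yvar, use_input_warping, use_saas)` arguments that `mcmc.run` forwards to it.

import pyro
import pyro.distributions as dist
import torch

def toy_pyro_model(X, Y, Yvar, use_input_warping=False, use_saas=False):
    # hypothetical stand-in: a single latent mean, so NUTS has one site to sample
    mean = pyro.sample("mean", dist.Normal(0.0, 1.0))
    pyro.sample("obs", dist.Normal(mean, Yvar.sqrt().squeeze(-1)), obs=Y.squeeze(-1))

X = torch.rand(10, 3)
Y = torch.randn(10, 1)
Yvar = torch.full_like(Y, 1e-2)
samples = run_inference(
    toy_pyro_model, X, Y, Yvar,
    num_samples=64, warmup_steps=64, thinning=4, disable_progbar=True,
)
print({k: v.shape for k, v in samples.items()})  # "mean" keeps 64 // 4 = 16 draws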
Code example #2
import matplotlib.pyplot as plt
import pyro
import seaborn as sns
from pyro.infer import HMC, MCMC

def monte_carlo(y):
    pyro.clear_param_store()

    # create a Hamiltonian Monte Carlo kernel with a step size of 0.1
    hmc_kernel = HMC(conditioned_model, step_size=0.1)
    # create a Markov chain Monte Carlo sampler with the HMC kernel,
    # drawing 500 samples after 100 warmup iterations
    mcmc = MCMC(hmc_kernel, num_samples=500, warmup_steps=100)
    mcmc.run(model, y)

    # num_samples larger than the chain length randomly resamples the
    # 500 retained draws
    sample_dict = mcmc.get_samples(num_samples=5000)
    plt.figure(figsize=(8, 6))
    # histplot replaces the deprecated sns.distplot
    sns.histplot(sample_dict["p"].numpy(), kde=True)
    plt.xlabel("Observed probability value")
    plt.ylabel("Observed frequency")
    plt.show()
    mcmc.summary(prob=0.95)

    return sample_dict
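
`monte_carlo` assumes that `model` and `conditioned_model` already exist in the enclosing scope. A minimal sketch of what they might look like for a coin-flip model with a latent success probability `p` (hypothetical definitions, chosen to match the "p" site plotted above):

import torch
import pyro.distributions as dist
from pyro import poutine

def model(y):
    # latent success probability with a uniform Beta(1, 1) prior
    p = pyro.sample("p", dist.Beta(1.0, 1.0))
    with pyro.plate("data", len(y)):
        return pyro.sample("obs", dist.Bernoulli(p))

def conditioned_model(model, y):
    # condition the "obs" site on the observed flips, then run the model
    return poutine.condition(model, data={"obs": y})(y)

samples = monte_carlo(torch.tensor([1.0, 0.0, 1.0, 1.0, 0.0]))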
Code example #3
import GPy
import matplotlib.pyplot as plt
import seaborn as sns
from pyro.infer import NUTS, MCMC

# `x`, `f`, `kernel`, `model`, and `ss` (assumed to be an active-subspaces
# transform whose transform() returns an (active, inactive) pair) are
# defined earlier in the original script.
gp = GPy.models.GPRegression(
    ss.transform(x.detach().numpy())[0],
    f.reshape(-1, 1).detach().numpy(),
    kernel,
)
gp.optimize_restarts(5, verbose=False)

# Use the No-U-Turn Sampler (NUTS), a Hamiltonian Monte Carlo variant, to
# sample from the posterior of the original model.
num_chains = 1
num_samples = 100
nuts_kernel = NUTS(model)  # renamed so the GPy `kernel` above is not shadowed
mcmc = MCMC(
    nuts_kernel,
    num_samples=num_samples,
    warmup_steps=100,
    num_chains=num_chains,
)
mcmc.run(f)
mcmc.summary()
mcmc_samples = mcmc.get_samples(group_by_chain=True)
print(mcmc_samples.keys())
chains = mcmc_samples["input"]
print(chains.shape)  # (num_chains, num_samples, input_dim)

# Show the posterior distribution of each component of the input (input_dim).
for i in range(5):
    plt.figure(figsize=(6, 4))
    # flatten the (chain, sample) axes; histplot replaces the deprecated distplot
    sns.histplot(chains[:, :, i].reshape(-1).numpy(), kde=True)
    plt.title("Full model")
    plt.xlabel("input component {}".format(i + 1))
    plt.show()

# Posterior mean of the active variable under the original model
print(ss.transform(chains[0])[0].mean())
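
As a small follow-up (not part of the original snippet), the same `chains` tensor can be summarized per component without plotting:

# chains has shape (num_chains, num_samples, input_dim); collapse the chain
# axis and report a mean and standard deviation per component.
flat = chains.reshape(-1, chains.shape[-1])
for i in range(flat.shape[-1]):
    print("input component {}: mean={:.3f}, std={:.3f}".format(
        i + 1, flat[:, i].mean().item(), flat[:, i].std().item()))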