Example #1
# NOTE: this test targets the legacy Pyro 0.x API (mcmc_run._traces(),
# param_with_module_name); newer Pyro releases expose a different MCMC interface.
import logging
from collections import defaultdict

import torch

import pyro.distributions as dist
from pyro.contrib.gp.models import (SparseGPRegression, VariationalGP,
                                    VariationalSparseGP)
from pyro.infer.mcmc import HMC, MCMC
from pyro.params import param_with_module_name

logger = logging.getLogger(__name__)


def test_hmc(model_class, X, y, kernel, likelihood):
    # Sparse models take inducing inputs (here simply X) as an extra argument.
    if model_class is SparseGPRegression or model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X, likelihood)
    else:
        gp = model_class(X, y, kernel, likelihood)

    kernel.set_prior("variance",
                     dist.Uniform(torch.tensor(0.5), torch.tensor(1.5)))
    kernel.set_prior("lengthscale",
                     dist.Uniform(torch.tensor(1.0), torch.tensor(3.0)))

    # A short fixed-step HMC run; 10 samples is enough for a smoke test.
    hmc_kernel = HMC(gp.model, step_size=1)
    mcmc_run = MCMC(hmc_kernel, num_samples=10)

    # Collect posterior samples for each site of interest from the raw traces.
    post_trace = defaultdict(list)
    for trace, _ in mcmc_run._traces():
        variance_name = param_with_module_name(kernel.name, "variance")
        post_trace["variance"].append(trace.nodes[variance_name]["value"])
        lengthscale_name = param_with_module_name(kernel.name, "lengthscale")
        post_trace["lengthscale"].append(
            trace.nodes[lengthscale_name]["value"])
        # The variational models expose additional latent sites (f or u).
        if model_class is VariationalGP:
            f_name = param_with_module_name(gp.name, "f")
            post_trace["f"].append(trace.nodes[f_name]["value"])
        if model_class is VariationalSparseGP:
            u_name = param_with_module_name(gp.name, "u")
            post_trace["u"].append(trace.nodes[u_name]["value"])

    # Report the posterior mean of each collected quantity.
    for param in post_trace:
        param_mean = torch.mean(torch.stack(post_trace[param]), 0)
        logger.info("Posterior mean - {}".format(param))
        logger.info(param_mean)
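
A minimal usage sketch (not from the original source): the kernel, likelihood, and model classes below are Pyro 0.x's pyro.contrib.gp API, and the toy data shapes are assumptions.

import torch
from pyro.contrib.gp.kernels import RBF
from pyro.contrib.gp.likelihoods import Gaussian
from pyro.contrib.gp.models import VariationalGP

X = torch.linspace(0.0, 5.0, 10)             # assumed toy inputs
y = torch.sin(X) + 0.1 * torch.randn(10)     # assumed noisy targets
kernel = RBF(input_dim=1)
likelihood = Gaussian()
test_hmc(VariationalGP, X, y, kernel, likelihood)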
Example #2
# NOTE: import_by_string is a project-local helper that resolves a dotted path
# to a Python object; it is assumed to be defined elsewhere in this codebase.
import torch
from pyro.infer.mcmc import MCMC, NUTS


def run_pyro_nuts(data, pfile, n_samples, params):
    # Import the model and, if it exists, the transformed_data function
    # from the generated Pyro module.
    model = import_by_string(pfile + ".model")
    assert model is not None, "model couldn't be imported"
    transformed_data = import_by_string(pfile + ".transformed_data")
    if transformed_data is not None:
        transformed_data(data)  # assumed to update data in place (return unused)

    # NUTS with a fixed step size; warm up for half the requested samples.
    nuts_kernel = NUTS(model, step_size=0.0855)
    mcmc_run = MCMC(nuts_kernel,
                    num_samples=n_samples,
                    warmup_steps=int(n_samples / 2))
    posteriors = {k: [] for k in params}

    # Legacy Pyro 0.x API: iterate over raw traces and record each site's value.
    for trace, _ in mcmc_run._traces(data, params):
        for k in posteriors:
            posteriors[k].append(trace.nodes[k]['value'])

    #posteriors["sigma"] = list(map(torch.exp, posteriors["log_sigma"]))
    #del posteriors["log_sigma"]

    posterior_means = {
        k: torch.mean(torch.stack(posteriors[k]), 0)
        for k in posteriors
    }
    return posterior_means
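
A hedged invocation sketch: the module name "eight_schools", the data dict, and the site names "mu" and "tau" are invented for illustration, assuming a Stan-style model compiled to a Pyro module on the import path.

import torch

# Hypothetical data and site names; adjust to whatever the generated module expects.
data = {"J": 8,
        "y": torch.tensor([28., 8., -3., 7., -1., 1., 18., 12.]),
        "sigma": torch.tensor([15., 10., 16., 11., 9., 11., 10., 18.])}
posterior_means = run_pyro_nuts(data, "eight_schools", n_samples=1000,
                                params=["mu", "tau"])
print(posterior_means)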