Example #1
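All of these snippets appear to come from Pyro's test suite and are shown without their import boilerplate. A minimal sketch of the common imports they assume (taking Pyro 1.x as the baseline; assert_equal and assert_close are helpers from Pyro's own tests, and this rmse is an assumed reconstruction of the helper the test files define):

import logging
from collections import defaultdict

import torch

import pyro
import pyro.distributions as dist
import pyro.poutine as poutine
from pyro.infer import HMC, MCMC, Predictive
from pyro.infer.reparam import ConjugateReparam

logger = logging.getLogger(__name__)


def rmse(a, b):
    # root-mean-squared error between two tensors
    return (a - b).pow(2).mean().sqrt()

The GP examples additionally assume the model classes from pyro.contrib.gp.models (plus PyroSample from pyro.nn in the newer variant). Note also that the snippets span two generations of Pyro's MCMC API: older ones query the object returned by MCMC(...).run(data) through EmpiricalMarginal or .marginal(...).empirical[...] (e.g. Examples #6-#8, #12, #16), while newer ones call mcmc.run(data) and read samples back with mcmc.get_samples() (e.g. Examples #4, #14, #17).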
def test_beta_binomial_hmc():
    num_samples = 1000
    total = 10
    counts = dist.Binomial(total, 0.3).sample()
    concentration1 = torch.tensor(0.5)
    concentration0 = torch.tensor(1.5)

    prior = dist.Beta(concentration1, concentration0)
    likelihood = dist.Beta(1 + counts, 1 + total - counts)
    posterior = dist.Beta(concentration1 + counts,
                          concentration0 + total - counts)

    def model():
        prob = pyro.sample("prob", prior)
        pyro.sample("counts", dist.Binomial(total, prob), obs=counts)

    reparam_model = poutine.reparam(model,
                                    {"prob": ConjugateReparam(likelihood)})

    kernel = HMC(reparam_model)
    # In Pyro 1.x, MCMC.run() returns None, so Predictive below receives no
    # posterior samples and instead draws num_samples fresh samples from the
    # reparameterized model, whose "prob" marginal is already the exact
    # conjugate posterior under ConjugateReparam.
    samples = MCMC(kernel, num_samples, warmup_steps=0).run()
    pred = Predictive(reparam_model, samples, num_samples=num_samples)
    trace = pred.get_vectorized_trace()
    samples = trace.nodes["prob"]["value"]

    assert_close(samples.mean(), posterior.mean, atol=0.01)
    assert_close(samples.std(), posterior.variance.sqrt(), atol=0.01)
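The closed-form posterior asserted against is the standard Beta-Binomial conjugate update: observing counts successes in total trials maps a Beta(concentration1, concentration0) prior to a Beta(concentration1 + counts, concentration0 + total - counts) posterior, which is exactly what the two assert_close checks compare the sample moments to.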
Example #2
def test_hmc(model_class, X, y, kernel, likelihood):
    if model_class is SparseGPRegression or model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X, likelihood)
    else:
        gp = model_class(X, y, kernel, likelihood)

    kernel.set_prior("variance",
                     dist.Uniform(torch.tensor(0.5), torch.tensor(1.5)))
    kernel.set_prior("lengthscale",
                     dist.Uniform(torch.tensor(1.0), torch.tensor(3.0)))

    hmc_kernel = HMC(gp.model, step_size=1)
    mcmc_run = MCMC(hmc_kernel, num_samples=10)

    post_trace = defaultdict(list)
    # _traces() is a private generator of the pre-1.0 MCMC API, yielding
    # (trace, log_weight) pairs for each posterior sample
    for trace, _ in mcmc_run._traces():
        variance_name = param_with_module_name(kernel.name, "variance")
        post_trace["variance"].append(trace.nodes[variance_name]["value"])
        lengthscale_name = param_with_module_name(kernel.name, "lengthscale")
        post_trace["lengthscale"].append(
            trace.nodes[lengthscale_name]["value"])
        if model_class is VariationalGP:
            f_name = param_with_module_name(gp.name, "f")
            post_trace["f"].append(trace.nodes[f_name]["value"])
        if model_class is VariationalSparseGP:
            u_name = param_with_module_name(gp.name, "u")
            post_trace["u"].append(trace.nodes[u_name]["value"])

    for param in post_trace:
        param_mean = torch.mean(torch.stack(post_trace[param]), 0)
        logger.info("Posterior mean - {}".format(param))
        logger.info(param_mean)
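Example #14 below is this same GP test rewritten for the Pyro 1.x API: PyroSample priors replace set_prior, and mcmc.get_samples() replaces the manual loop over traces.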
Example #3
def test_hmc_conjugate_gaussian(fixture,
                                num_samples,
                                warmup_steps,
                                hmc_params,
                                expected_means,
                                expected_precs,
                                mean_tol,
                                std_tol):
    pyro.get_param_store().clear()
    hmc_kernel = HMC(fixture.model, **hmc_params)
    mcmc = MCMC(hmc_kernel, num_samples, warmup_steps)
    mcmc.run(fixture.data)
    samples = mcmc.get_samples()
    for i in range(1, fixture.chain_len + 1):
        param_name = 'loc_' + str(i)
        marginal = samples[param_name]
        latent_loc = marginal.mean(0)
        latent_std = marginal.var(0).sqrt()
        expected_mean = torch.ones(fixture.dim) * expected_means[i - 1]
        expected_std = 1 / torch.sqrt(torch.ones(fixture.dim) * expected_precs[i - 1])

        # Actual vs expected posterior means for the latents
        logger.debug('Posterior mean (actual) - {}'.format(param_name))
        logger.debug(latent_loc)
        logger.debug('Posterior mean (expected) - {}'.format(param_name))
        logger.debug(expected_mean)
        assert_equal(rmse(latent_loc, expected_mean).item(), 0.0, prec=mean_tol)

        # Actual vs expected posterior standard deviations for the latents
        logger.debug('Posterior std (actual) - {}'.format(param_name))
        logger.debug(latent_std)
        logger.debug('Posterior std (expected) - {}'.format(param_name))
        logger.debug(expected_std)
        assert_equal(rmse(latent_std, expected_std).item(), 0.0, prec=std_tol)
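The fixture appears to be a chain of conjugate Gaussians (hence fixture.chain_len and the loc_i site names), so the expected posterior means and precisions passed into the test are available in closed form.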
Example #4
def test_logistic_regression(step_size, trajectory_length, num_steps,
                             adapt_step_size, adapt_mass_matrix, full_mass):
    dim = 3
    data = torch.randn(2000, dim)
    true_coefs = torch.arange(1., dim + 1.)
    labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample()

    def model(data):
        coefs_mean = pyro.param('coefs_mean', torch.zeros(dim))
        coefs = pyro.sample('beta', dist.Normal(coefs_mean, torch.ones(dim)))
        y = pyro.sample('y',
                        dist.Bernoulli(logits=(coefs * data).sum(-1)),
                        obs=labels)
        return y

    hmc_kernel = HMC(model,
                     step_size=step_size,
                     trajectory_length=trajectory_length,
                     num_steps=num_steps,
                     adapt_step_size=adapt_step_size,
                     adapt_mass_matrix=adapt_mass_matrix,
                     full_mass=full_mass)
    mcmc = MCMC(hmc_kernel,
                num_samples=500,
                warmup_steps=100,
                disable_progbar=True)
    mcmc.run(data)
    samples = mcmc.get_samples()['beta']
    assert_equal(rmse(true_coefs, samples.mean(0)).item(), 0.0, prec=0.1)
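Note that coefs_mean is declared with pyro.param rather than pyro.sample: HMC treats param sites as fixed values read from the param store, so 'beta' is the only latent being sampled here.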
Example #5
def test_bernoulli_latent_model(jit):
    def model(data):
        y_prob = pyro.sample("y_prob", dist.Beta(1.0, 1.0))
        y = pyro.sample("y", dist.Bernoulli(y_prob))
        with pyro.plate("data", data.shape[0]):
            z = pyro.sample("z", dist.Bernoulli(0.65 * y + 0.1))
            pyro.sample("obs", dist.Normal(2.0 * z, 1.0), obs=data)
        pyro.sample("nuisance", dist.Bernoulli(0.3))

    N = 2000
    y_prob = torch.tensor(0.3)
    y = dist.Bernoulli(y_prob).sample(torch.Size((N, )))
    z = dist.Bernoulli(0.65 * y + 0.1).sample()
    data = dist.Normal(2.0 * z, 1.0).sample()
    hmc_kernel = HMC(
        model,
        trajectory_length=1,
        max_plate_nesting=1,
        jit_compile=jit,
        ignore_jit_warnings=True,
    )
    mcmc = MCMC(hmc_kernel, num_samples=600, warmup_steps=200)
    mcmc.run(data)
    samples = mcmc.get_samples()
    assert_equal(samples["y_prob"].mean(0), y_prob, prec=0.06)
Example #6
def test_dirichlet_categorical(jit):
    def model(data):
        concentration = torch.tensor([1.0, 1.0, 1.0])
        p_latent = pyro.sample('p_latent', dist.Dirichlet(concentration))
        pyro.sample("obs", dist.Categorical(p_latent), obs=data)
        return p_latent

    true_probs = torch.tensor([0.1, 0.6, 0.3])
    data = dist.Categorical(true_probs).sample(sample_shape=(torch.Size((2000,))))
    hmc_kernel = HMC(model, trajectory_length=1, jit_compile=jit, ignore_jit_warnings=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=200, warmup_steps=100).run(data)
    posterior = mcmc_run.marginal('p_latent').empirical['p_latent']
    assert_equal(posterior.mean, true_probs, prec=0.02)
Example #7
def test_normal_gamma_with_dual_averaging():
    def model(data):
        concentration = torch.tensor([1.0, 1.0])
        rate = torch.tensor([1.0, 1.0])
        p_latent = pyro.sample('p_latent', dist.Gamma(concentration, rate))
        pyro.sample("obs", dist.Normal(3, p_latent), obs=data)
        return p_latent

    true_std = torch.tensor([0.5, 2])
    data = dist.Normal(3, true_std).sample(sample_shape=(torch.Size((2000, ))))
    hmc_kernel = HMC(model, trajectory_length=1, adapt_step_size=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=200, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='p_latent')
    assert_equal(posterior.mean, true_std, prec=0.05)
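The "dual averaging" in the test name is the step-size adaptation behind adapt_step_size=True: during warmup Pyro tunes the leapfrog step size with the dual-averaging scheme familiar from NUTS.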
Example #8
def test_categorical_dirichlet():
    def model(data):
        concentration = torch.tensor([1.0, 1.0, 1.0])
        p_latent = pyro.sample('p_latent', dist.Dirichlet(concentration))
        pyro.sample("obs", dist.Categorical(p_latent), obs=data)
        return p_latent

    true_probs = torch.tensor([0.1, 0.6, 0.3])
    data = dist.Categorical(true_probs).sample(
        sample_shape=(torch.Size((2000, ))))
    hmc_kernel = HMC(model, step_size=0.01, num_steps=3)
    mcmc_run = MCMC(hmc_kernel, num_samples=200, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='p_latent')
    assert_equal(posterior.mean, true_probs, prec=0.02)
Example #9
def test_singular_matrix_catch(jit, op):
    def potential_energy(z):
        return op(z['cov']).sum()

    init_params = {'cov': torch.eye(3)}
    potential_fn = potential_energy if not jit else torch.jit.trace(potential_energy, init_params)
    hmc_kernel = HMC(potential_fn=potential_fn, adapt_step_size=False,
                     num_steps=10, step_size=1e-20)
    hmc_kernel.initial_params = init_params
    hmc_kernel.setup(warmup_steps=0)
    # set up an invalid cache (a singular 'cov') to trigger a singular-matrix
    # error in op (e.g. torch.inverse); the kernel should recover and continue
    hmc_kernel._cache({'cov': torch.ones(3, 3)}, torch.tensor(0.), {'cov': torch.zeros(3, 3)})

    samples = init_params
    for _ in range(10):
        samples = hmc_kernel.sample(samples)
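There is no assertion in this test: it passes as long as the ten sample() calls complete, i.e. as long as the kernel catches the singular-matrix error raised by op on the poisoned cache instead of letting it propagate.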
Example #10
def test_gamma_normal(jit):
    def model(data):
        concentration = torch.tensor([1.0, 1.0])
        rate = torch.tensor([1.0, 1.0])
        p_latent = pyro.sample('p_latent', dist.Gamma(concentration, rate))
        pyro.sample("obs", dist.Normal(3, p_latent), obs=data)
        return p_latent

    true_std = torch.tensor([0.5, 2])
    data = dist.Normal(3, true_std).sample(sample_shape=(torch.Size((2000,))))
    hmc_kernel = HMC(model, trajectory_length=1, step_size=0.03, adapt_step_size=False,
                     jit_compile=jit, ignore_jit_warnings=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=200, warmup_steps=200).run(data)
    posterior = mcmc_run.marginal(['p_latent']).empirical['p_latent']
    assert_equal(posterior.mean, true_std, prec=0.05)
Example #11
def test_gamma_normal():
    def model(data):
        concentration = torch.tensor([1.0, 1.0])
        rate = torch.tensor([1.0, 1.0])
        p_latent = pyro.sample("p_latent", dist.Gamma(concentration, rate))
        pyro.sample("obs", dist.Normal(3, p_latent), obs=data)
        return p_latent

    true_std = torch.tensor([0.5, 2])
    data = dist.Normal(3, true_std).sample(sample_shape=(torch.Size((2000, ))))
    hmc_kernel = HMC(model, num_steps=15, step_size=0.01, adapt_step_size=True)
    mcmc = MCMC(hmc_kernel, num_samples=200, warmup_steps=200)
    mcmc.run(data)
    samples = mcmc.get_samples()
    assert_equal(samples["p_latent"].mean(0), true_std, prec=0.05)
Example #12
def test_bernoulli_beta_with_dual_averaging():
    def model(data):
        alpha = torch.tensor([1.1, 1.1])
        beta = torch.tensor([1.1, 1.1])
        p_latent = pyro.sample('p_latent', dist.Beta(alpha, beta))
        pyro.sample('obs', dist.Bernoulli(p_latent), obs=data)
        return p_latent

    true_probs = torch.tensor([0.9, 0.1])
    data = dist.Bernoulli(true_probs).sample(
        sample_shape=(torch.Size((1000, ))))
    hmc_kernel = HMC(model, trajectory_length=1, adapt_step_size=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=800, warmup_steps=500).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='p_latent')
    assert_equal(posterior.mean, true_probs, prec=0.05)
Example #13
def test_beta_bernoulli(jit):
    def model(data):
        alpha = torch.tensor([1.1, 1.1])
        beta = torch.tensor([1.1, 1.1])
        p_latent = pyro.sample('p_latent', dist.Beta(alpha, beta))
        with pyro.plate("data", data.shape[0], dim=-2):
            pyro.sample('obs', dist.Bernoulli(p_latent), obs=data)
        return p_latent

    true_probs = torch.tensor([0.9, 0.1])
    data = dist.Bernoulli(true_probs).sample(sample_shape=(torch.Size((1000,))))
    hmc_kernel = HMC(model, trajectory_length=1, max_plate_nesting=2,
                     jit_compile=jit, ignore_jit_warnings=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=800, warmup_steps=500).run(data)
    posterior = mcmc_run.marginal(["p_latent"]).empirical["p_latent"]
    assert_equal(posterior.mean, true_probs, prec=0.05)
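Here data has shape (1000, 2) while p_latent has shape (2,); declaring the plate at dim=-2 makes the 1000 draws the second-to-last batch dimension so they broadcast against the two Bernoulli probabilities, and max_plate_nesting=2 tells HMC how many batch dimensions to reserve.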
Example #14
def test_hmc(model_class, X, y, kernel, likelihood):
    if model_class is SparseGPRegression or model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X.clone(), likelihood)
    else:
        gp = model_class(X, y, kernel, likelihood)

    kernel.variance = PyroSample(dist.Uniform(torch.tensor(0.5), torch.tensor(1.5)))
    kernel.lengthscale = PyroSample(dist.Uniform(torch.tensor(1.0), torch.tensor(3.0)))

    hmc_kernel = HMC(gp.model, step_size=1)
    mcmc = MCMC(hmc_kernel, num_samples=10)
    mcmc.run()

    for name, param in mcmc.get_samples().items():
        param_mean = torch.mean(param, 0)
        logger.info("Posterior mean - {}".format(name))
        logger.info(param_mean)
Example #15
def test_logistic_regression(step_size, trajectory_length, num_steps,
                             adapt_step_size, adapt_mass_matrix, full_mass):
    dim = 3
    data = torch.randn(2000, dim)
    true_coefs = torch.arange(1., dim + 1.)
    labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample()

    def model(data):
        coefs_mean = torch.zeros(dim)
        coefs = pyro.sample('beta', dist.Normal(coefs_mean, torch.ones(dim)))
        y = pyro.sample('y', dist.Bernoulli(logits=(coefs * data).sum(-1)), obs=labels)
        return y

    hmc_kernel = HMC(model,
                     step_size=step_size,
                     trajectory_length=trajectory_length,
                     num_steps=num_steps,
                     adapt_step_size=adapt_step_size,
                     adapt_mass_matrix=adapt_mass_matrix,
                     full_mass=full_mass)
    mcmc_run = MCMC(hmc_kernel, num_samples=500, warmup_steps=100, disable_progbar=True).run(data)
    beta_posterior = mcmc_run.marginal(['beta']).empirical['beta']
    assert_equal(rmse(true_coefs, beta_posterior.mean).item(), 0.0, prec=0.1)
Example #16
def test_logistic_regression_with_dual_averaging():
    dim = 3
    true_coefs = torch.arange(1., dim + 1.)
    data = torch.randn(2000, dim)
    labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample()

    def model(data):
        coefs_mean = torch.zeros(dim)
        coefs = pyro.sample('beta', dist.Normal(coefs_mean, torch.ones(dim)))
        y = pyro.sample('y',
                        dist.Bernoulli(logits=(coefs * data).sum(-1)),
                        obs=labels)
        return y

    hmc_kernel = HMC(model, trajectory_length=1, adapt_step_size=True)
    mcmc_run = MCMC(hmc_kernel, num_samples=500, warmup_steps=100).run(data)
    posterior = EmpiricalMarginal(mcmc_run, sites='beta')
    assert_equal(rmse(posterior.mean, true_coefs).item(), 0.0, prec=0.1)
Example #17
def test_beta_bernoulli(jit):
    def model(data):
        alpha = torch.tensor([1.1, 1.1])
        beta = torch.tensor([1.1, 1.1])
        p_latent = pyro.sample("p_latent", dist.Beta(alpha, beta))
        with pyro.plate("data", data.shape[0], dim=-2):
            pyro.sample("obs", dist.Bernoulli(p_latent), obs=data)
        return p_latent

    true_probs = torch.tensor([0.9, 0.1])
    data = dist.Bernoulli(true_probs).sample(
        sample_shape=(torch.Size((1000, ))))
    hmc_kernel = HMC(
        model,
        trajectory_length=1,
        max_plate_nesting=2,
        jit_compile=jit,
        ignore_jit_warnings=True,
    )
    mcmc = MCMC(hmc_kernel, num_samples=800, warmup_steps=500)
    mcmc.run(data)
    samples = mcmc.get_samples()
    assert_equal(samples["p_latent"].mean(0), true_probs, prec=0.05)