Example #1
def test_log_prob_with_different_x():

    num_dim = 2

    prior = MultivariateNormal(loc=zeros(num_dim),
                               covariance_matrix=eye(num_dim))
    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNPE_C(prior)
    theta, x = simulate_for_sbi(simulator, prior, 1000)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()

    _ = posterior.sample((10, ), x=ones(1, num_dim))
    theta = posterior.sample((10, ), ones(1, num_dim))
    posterior.log_prob(theta, x=ones(num_dim))
    posterior.log_prob(theta, x=ones(1, num_dim))
    posterior = posterior.set_default_x(ones(1, num_dim))
    posterior.log_prob(theta, x=None)
    posterior.sample((10, ), x=None)

    # Both must fail due to batch size of x > 1.
    with pytest.raises(ValueError):
        posterior.log_prob(theta, x=ones(2, num_dim))
    with pytest.raises(ValueError):
        posterior.sample((2, ), x=ones(2, num_dim))
Example #2
def example_posterior():
    """Return an inferred `NeuralPosterior` for interactive examination."""
    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        show_progress_bars=False,
    )
    theta, x = simulate_for_sbi(simulator,
                                prior,
                                1000,
                                simulation_batch_size=10,
                                num_workers=6)
    _ = inference.append_simulations(theta, x).train()
    return inference.build_posterior().set_default_x(x_o)
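
A minimal usage sketch for the posterior returned above (an illustrative addition, assuming the same imports as the example itself):

def examine_example_posterior():
    # `set_default_x` was already called, so no `x` argument is needed here.
    posterior = example_posterior()
    samples = posterior.sample((1000, ))
    log_probs = posterior.log_prob(samples)
    return samples, log_probs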
Example #3
def test_c2st_snpe_on_linearGaussian_different_dims(set_seed):
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    This example has a different number of parameters theta than data dimensions x.
    It also implicitly tests simulation_batch_size=1.

    Args:
        set_seed: fixture for manual seeding
    """

    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = zeros(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(theta,
                               likelihood_shift,
                               likelihood_cov,
                               num_discarded_dims=discard_dims)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        density_estimator="maf",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                2000,
                                simulation_batch_size=1)  # type: ignore
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()
    samples = posterior.sample((num_samples, ), x=x_o)

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #4
def test_inference_with_restriction_estimator():

    # likelihood_mean will be likelihood_shift+theta
    num_dim = 3
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)
    x_o = zeros(1, num_dim)
    num_samples = 500

    def linear_gaussian_nan(theta,
                            likelihood_shift=likelihood_shift,
                            likelihood_cov=likelihood_cov):
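        # Mark simulations as invalid (NaN) whenever the first parameter is negative.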
        condition = theta[:, 0] < 0.0
        x = linear_gaussian(theta, likelihood_shift, likelihood_cov)
        x[condition] = float("nan")

        return x

    prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))
    target_samples = samples_true_posterior_linear_gaussian_uniform_prior(
        x_o,
        likelihood_shift=likelihood_shift,
        likelihood_cov=likelihood_cov,
        num_samples=num_samples,
        prior=prior,
    )

    simulator, prior = prepare_for_sbi(linear_gaussian_nan, prior)
    restriction_estimator = RestrictionEstimator(prior=prior)
    proposals = [prior]
    num_rounds = 2

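    # Each round: simulate from the newest proposal, append the simulations, train
    # the restriction estimator (not needed after the final round), and restrict
    # the prior away from regions that produced invalid (NaN) outputs.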
    for r in range(num_rounds):
        theta, x = simulate_for_sbi(simulator, proposals[-1], 1000)
        restriction_estimator.append_simulations(theta, x)
        if r < num_rounds - 1:
            _ = restriction_estimator.train()
        proposals.append(restriction_estimator.restrict_prior())

    all_theta, all_x, _ = restriction_estimator.get_simulations()

    # Any method can be used in combination with the `RestrictionEstimator`.
    inference = SNPE_C(prior=prior)
    posterior_estimator = inference.append_simulations(all_theta,
                                                       all_x).train()

    # Build posterior.
    posterior = DirectPosterior(
        prior=prior,
        posterior_estimator=posterior_estimator).set_default_x(x_o)

    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #5
def test_inference_with_user_sbi_problems(user_simulator: Callable, user_prior):
    """
    Test inference with combinations of user-defined simulators, priors, and observations x_o.
    """

    simulator, prior = prepare_for_sbi(user_simulator, user_prior)
    inference = SNPE_C(prior, density_estimator="maf", show_progress_bars=False,)

    # Run inference.
    theta, x = simulate_for_sbi(simulator, prior, 100)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=2)
    _ = inference.build_posterior()
Example #6
def test_api_snpe_c_posterior_correction(
    sample_with_mcmc, mcmc_method, prior_str, set_seed
):
    """Test that leakage correction applied to sampling works, with both MCMC and
    rejection.

    Args:
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        density_estimator="maf",
        simulation_batch_size=50,
        sample_with_mcmc=sample_with_mcmc,
        mcmc_method=mcmc_method,
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator, prior, 1000)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=5)
    posterior = inference.build_posterior()
    posterior = posterior.set_sample_with_mcmc(sample_with_mcmc).set_mcmc_method(
        mcmc_method
    )

    # Posterior should be corrected for leakage even if num_rounds is just 1.
    samples = posterior.sample((10,), x=x_o)

    # Evaluate the samples to check correction factor.
    posterior.log_prob(samples, x=x_o)
Example #7
def test_api_snpe_c_posterior_correction(sample_with, mcmc_method, prior_str):
    """Test that leakage correction applied to sampling works, with both MCMC and
    rejection.

    """

    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta, likelihood_shift, likelihood_cov),
        prior)
    inference = SNPE_C(prior, show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator, prior, 1000)
    posterior_estimator = inference.append_simulations(theta, x).train()
    potential_fn, theta_transform = posterior_estimator_based_potential(
        posterior_estimator, prior, x_o)
    if sample_with == "mcmc":
        posterior = MCMCPosterior(
            potential_fn=potential_fn,
            theta_transform=theta_transform,
            proposal=prior,
            method=mcmc_method,
        )
    elif sample_with == "rejection":
        posterior = RejectionPosterior(
            potential_fn=potential_fn,
            proposal=prior,
            theta_transform=theta_transform,
        )

    # Posterior should be corrected for leakage even if num_rounds is just 1.
    samples = posterior.sample((10, ))

    # Evaluate the samples to check correction factor.
    _ = posterior.log_prob(samples)
Example #8
def test_c2st_snpe_external_data_on_linearGaussian(set_seed):
    """Test whether SNPE C infers well a simple example with available ground truth.

    Args:
        set_seed: fixture for manual seeding
    """

    num_dim = 2

    device = "cpu"
    configure_default_device(device)
    x_o = zeros(1, num_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    infer = SNPE_C(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=1000,
        density_estimator="nsf",
        show_progress_bars=False,
        sample_with_mcmc=False,
        device=device,
    )

    external_theta = prior.sample((500, ))
    external_x = simulator(external_theta)

    infer.provide_presimulated(external_theta, external_x)

    posterior = infer(num_simulations=500,
                      training_batch_size=50).set_default_x(x_o)
    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #9
def example_posterior():
    """Return an inferred `NeuralPosterior` for interactive examination."""
    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    infer = SNPE_C(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=10,
        num_workers=6,
        show_progress_bars=False,
        sample_with_mcmc=False,
    )

    return infer(num_simulations=1000).set_default_x(x_o)
Example #10
def test_passing_custom_density_estimator(arg):

    x_numel = 2
    y_numel = 2
    hidden_features = 10
    num_components = 1
    mdn = MultivariateGaussianMDN(
        features=x_numel,
        context_features=y_numel,
        hidden_features=hidden_features,
        hidden_net=nn.Sequential(
            nn.Linear(y_numel, hidden_features),
            nn.ReLU(),
            nn.Dropout(p=0.0),
            nn.Linear(hidden_features, hidden_features),
            nn.ReLU(),
            nn.Linear(hidden_features, hidden_features),
            nn.ReLU(),
        ),
        num_components=num_components,
        custom_initialization=True,
    )
    if arg == "nn":
        # Just the nn.Module.
        density_estimator = mdn
    elif arg == "fun":
        # A function returning the nn.Module.
        density_estimator = lambda batch_theta, batch_x: mdn
    else:
        density_estimator = arg
    prior = MultivariateNormal(torch.zeros(2), torch.eye(2))
    _ = SNPE_C(prior=prior, density_estimator=density_estimator)
Example #11
def test_c2st_multi_round_snpe_on_linearGaussian(method_str: str, set_seed):
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    Args:
        set_seed: fixture for manual seeding.
    """

    num_dim = 2
    x_o = zeros((1, num_dim))
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    if method_str == "snpe_c_non_atomic":
        density_estimator = utils.posterior_nn("mdn", num_components=5)
        method_str = "snpe_c"
    else:
        density_estimator = "maf"

    simulator, prior = prepare_for_sbi(simulator, prior)
    creation_args = dict(
        simulator=simulator,
        prior=prior,
        density_estimator=density_estimator,
        show_progress_bars=False,
    )

    if method_str == "snpe_b":
        infer = SNPE_B(simulation_batch_size=10, **creation_args)
        posterior1 = infer(num_simulations=1000)
        posterior1.set_default_x(x_o)
        posterior = infer(num_simulations=1000, proposal=posterior1)
    elif method_str == "snpe_c":
        infer = SNPE_C(simulation_batch_size=50,
                       sample_with_mcmc=False,
                       **creation_args)
        posterior = infer(num_simulations=500, num_atoms=10).set_default_x(x_o)
        posterior = infer(num_simulations=1000,
                          num_atoms=10,
                          proposal=posterior).set_default_x(x_o)

    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg=method_str)
Example #12
def test_z_scoring_warning():

    # Create data with large variance.
    num_dim = 2
    theta = torch.ones(100, num_dim)
    x = torch.rand(100, num_dim)
    x[:50] += 1e7

    # Make sure a warning is raised because z-scoring will map these data to duplicate
    # data points.
    with pytest.warns(UserWarning, match="Z-scoring these simulation outputs"):
        SNPE_C(utils.BoxUniform(zeros(num_dim), ones(num_dim))).append_simulations(
            theta, x).train(max_num_epochs=1)
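
Why the warning fires (a sketch of the mechanism, not of sbi internals): z-scoring data whose variance is dominated by a large offset collapses distinct simulations onto near-duplicate values.

import torch

x = torch.rand(100, 2)
x[:50] += 1e7
z = (x - x.mean(0)) / x.std(0)
# Within-cluster variation (~0.3) is negligible against the overall std (~5e6),
# so each half z-scores to almost identical rows.
print(z[:50].std(0), z[50:].std(0))  # both are tiny compared to 1.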
Example #13
def test_inference_with_user_sbi_problems(user_simulator: Callable,
                                          user_prior):
    """
    Test inference with combinations of user-defined simulators, priors, and observations x_o.
    """

    infer = SNPE_C(
        *prepare_for_sbi(user_simulator, user_prior),
        density_estimator="maf",
        simulation_batch_size=1,
        show_progress_bars=False,
    )

    # Run inference.
    _ = infer(num_rounds=1, num_simulations_per_round=100, max_num_epochs=2)
Example #14
simulator, prior = prepare_for_sbi(w_sim_wrapper, model.prior)


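# Builder for a custom posterior network: when `density_estimator` is a callable,
# sbi invokes it with example batches of theta and x and expects an nn.Module back.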
def build_custom_post_net(batch_theta, batch_x):
    flow_lik, flow_post = func.set_up_networks(model.prior.base_dist.low,
                                               model.prior.base_dist.high,
                                               dim_post=model.nbr_params)

    return flow_post


print(summary_stats_obs)

print(summary_stats_obs_w)

inference = SNPE_C(simulator, prior, density_estimator=build_custom_post_net)

start = time.time()

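# Seed all RNGs and force deterministic cuDNN for reproducibility.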
torch.manual_seed(seed)
np.random.seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

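# Draw one parameter set from the prior and simulate a single dataset of 19 summary statistics.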
prior_samples = prior.sample(sample_shape=(1,))
data_sets = torch.zeros(1, 19)

for i in range(1):
    data_sets[i, :] = w_sim_wrapper(prior_samples[i, :])

print(prior_samples)
Example #15
def test_log_prob_with_different_x():

    num_dim = 2

    prior = MultivariateNormal(loc=zeros(num_dim), covariance_matrix=eye(num_dim))
    posterior = SNPE_C(*prepare_for_sbi(diagonal_linear_gaussian, prior))(
        num_rounds=1, num_simulations_per_round=1000
    )

    _ = posterior.sample((10,), x=ones(1, num_dim))
    theta = posterior.sample((10,), ones(1, num_dim))
    posterior.log_prob(theta, x=ones(num_dim))
    posterior.log_prob(theta, x=ones(1, num_dim))
    posterior = posterior.set_default_x(ones(1, num_dim))
    posterior.log_prob(theta, x=None)
    posterior.sample((10,), x=None)

    # Both must fail due to batch size of x > 1.
    with pytest.raises(ValueError):
        posterior.log_prob(theta, x=ones(2, num_dim))
    with pytest.raises(ValueError):
        posterior.sample((2,), x=ones(2, num_dim))
Example #16
def test_c2st_snpe_on_linearGaussian(
    num_dim: int,
    prior_str: str,
    set_seed,
):
    """Test whether SNPE C infers well a simple example with available ground truth.

    Args:
        set_seed: fixture for manual seeding
    """

    x_o = zeros(1, num_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
        gt_posterior = true_posterior_linear_gaussian_mvn_prior(
            x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
        target_samples = gt_posterior.sample((num_samples, ))
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))
        target_samples = samples_true_posterior_linear_gaussian_uniform_prior(
            x_o,
            likelihood_shift,
            likelihood_cov,
            prior=prior,
            num_samples=num_samples)

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                2000,
                                simulation_batch_size=1000)
    _ = inference.append_simulations(theta, x).train(training_batch_size=100)
    posterior = inference.build_posterior().set_default_x(x_o)
    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")

    # Checks for log_prob()
    if prior_str == "gaussian":
        # For the Gaussian prior, we compute the KL divergence between ground truth and posterior.
        dkl = get_dkl_gaussian_prior(posterior, x_o[0], likelihood_shift,
                                     likelihood_cov, prior_mean, prior_cov)

        max_dkl = 0.15

        assert (
            dkl < max_dkl
        ), f"D-KL={dkl} is more than 2 stds above the average performance."

    elif prior_str == "uniform":
        # Check whether the returned probability outside of the support is zero.
        posterior_prob = get_prob_outside_uniform_prior(posterior, num_dim)
        assert (
            posterior_prob == 0.0
        ), "The posterior probability outside of the prior support is not zero"

        # Check whether normalization (i.e. scaling up the density due
        # to leakage into regions without prior support) scales up the density by the
        # correct factor.
        (
            posterior_likelihood_unnorm,
            posterior_likelihood_norm,
            acceptance_prob,
        ) = get_normalization_uniform_prior(posterior, prior, x_o)
        # The acceptance probability should be *exactly* the ratio of the unnormalized
        # and the normalized likelihood. However, we allow for an error margin of 1%,
        # since the estimation of the acceptance probability is random (based on
        # rejection sampling).
        assert (
            acceptance_prob * 0.99 < posterior_likelihood_unnorm /
            posterior_likelihood_norm < acceptance_prob * 1.01
        ), "Normalizing the posterior density using the acceptance probability failed."
Example #17
def test_sample_conditional(set_seed):
    """
    Test whether sampling from the conditional gives the same results as evaluating.

    This compares samples that get smoothed with a Gaussian kde to evaluating the
    conditional log-probability with `eval_conditional_density`.

    `eval_conditional_density` is itself tested in `sbiutils_test.py`. Here, we use
    a bimodal posterior to test the conditional.
    """

    num_dim = 3
    dim_to_sample_1 = 0
    dim_to_sample_2 = 2

    x_o = zeros(1, num_dim)

    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.1 * eye(num_dim)

    prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))

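    # Randomly flipping the sign of likelihood_shift yields a bimodal posterior.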
    def simulator(theta):
        if torch.rand(1) > 0.5:
            return linear_gaussian(theta, likelihood_shift, likelihood_cov)
        else:
            return linear_gaussian(theta, -likelihood_shift, likelihood_cov)

    net = utils.posterior_nn("maf", hidden_features=20)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        density_estimator=net,
        show_progress_bars=False,
    )

    # We need a pretty big dataset to properly model the bimodality.
    theta, x = simulate_for_sbi(simulator, prior, 10000)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=50)
    posterior = inference.build_posterior().set_default_x(x_o)
    samples = posterior.sample((50, ))

    # Evaluate the conditional density by drawing samples and smoothing with a
    # Gaussian kde.
    cond_samples = posterior.sample_conditional(
        (500, ),
        condition=samples[0],
        dims_to_sample=[dim_to_sample_1, dim_to_sample_2])
    _ = utils.pairplot(
        cond_samples,
        limits=[[-2, 2], [-2, 2], [-2, 2]],
        fig_size=(2, 2),
        diag="kde",
        upper="kde",
    )

    limits = [[-2, 2], [-2, 2], [-2, 2]]

    density = gaussian_kde(cond_samples.numpy().T, bw_method="scott")

    X, Y = np.meshgrid(
        np.linspace(
            limits[0][0],
            limits[0][1],
            50,
        ),
        np.linspace(
            limits[1][0],
            limits[1][1],
            50,
        ),
    )
    positions = np.vstack([X.ravel(), Y.ravel()])
    sample_kde_grid = np.reshape(density(positions).T, X.shape)

    # Evaluate the conditional with eval_conditional_density.
    eval_grid = utils.eval_conditional_density(
        posterior,
        condition=samples[0],
        dim1=dim_to_sample_1,
        dim2=dim_to_sample_2,
        limits=torch.tensor([[-2, 2], [-2, 2], [-2, 2]]),
    )

    # Compare the two densities.
    sample_kde_grid = sample_kde_grid / np.sum(sample_kde_grid)
    eval_grid = eval_grid / torch.sum(eval_grid)

    error = np.abs(sample_kde_grid - eval_grid.numpy())

    max_err = np.max(error)
    assert max_err < 0.0025
Example #18
def test_c2st_snpe_on_linearGaussian_different_dims():
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    This example has a different number of parameters theta than data dimensions x.
    It also implicitly tests simulation_batch_size=1, whether the prior can be
    `None`, and whether we can stop and resume training.

    """

    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = zeros(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o,
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta,
                                      likelihood_shift,
                                      likelihood_cov,
                                      num_discarded_dims=discard_dims),
        prior,
    )
    # Test whether prior can be `None`.
    inference = SNPE_C(prior=None,
                       density_estimator="maf",
                       show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                2000,
                                simulation_batch_size=1)  # type: ignore

    inference = inference.append_simulations(theta, x)
    posterior_estimator = inference.train(
        max_num_epochs=10)  # Test whether we can stop and resume.
    posterior_estimator = inference.train(resume_training=True,
                                          force_first_round_loss=True)
    posterior = DirectPosterior(
        prior=prior,
        posterior_estimator=posterior_estimator).set_default_x(x_o)
    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #19
def test_mdn_conditional_density(num_dim: int = 3, cond_dim: int = 1):
    """Test whether the conditional density infered from MDN parameters of a
    `DirectPosterior` matches analytical results for MVN. This uses a n-D joint and
    conditions on the last m values to generate a conditional.

    Gaussian prior used for easier ground truthing of conditional posterior.

    Args:
        num_dim: Dimensionality of the MVN.
        cond_dim: Dimensionality of the condition.
    """

    assert (
        num_dim > cond_dim
    ), "The number of dimensions needs to be greater than that of the condition!"

    x_o = zeros(1, num_dim)
    num_samples = 1000
    num_simulations = 2700
    condition = 0.1 * ones(1, num_dim)

    dims = list(range(num_dim))
    dims2sample = dims[-cond_dim:]
    dims2condition = dims[:-cond_dim]

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    joint_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    joint_cov = joint_posterior.covariance_matrix
    joint_mean = joint_posterior.loc

    conditional_mean, conditional_cov = conditional_of_mvn(
        joint_mean, joint_cov, condition[0, dims2condition])
    conditional_dist_gt = MultivariateNormal(conditional_mean, conditional_cov)

    conditional_samples_gt = conditional_dist_gt.sample((num_samples, ))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(density_estimator="mdn", show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                num_simulations,
                                simulation_batch_size=1000)
    posterior_mdn = inference.append_simulations(
        theta, x).train(training_batch_size=100)
    conditioned_mdn = ConditionedMDN(posterior_mdn,
                                     x_o,
                                     condition=condition,
                                     dims_to_sample=[0])
    conditional_samples_sbi = conditioned_mdn.sample((num_samples, ))
    check_c2st(
        conditional_samples_sbi,
        conditional_samples_gt,
        alg="analytic_mdn_conditioning_of_direct_posterior",
    )
Example #20
def test_c2st_multi_round_snpe_on_linearGaussian(method_str: str, set_seed):
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    Args:
        set_seed: fixture for manual seeding.
    """

    num_dim = 2
    x_o = zeros((1, num_dim))
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o, likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    if method_str == "snpe_c_non_atomic":
        density_estimator = utils.posterior_nn("mdn", num_components=5)
        method_str = "snpe_c"
    elif method_str == "snpe_a":
        density_estimator = "mdn_snpe_a"
    else:
        density_estimator = "maf"

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta, likelihood_shift, likelihood_cov),
        prior)
    creation_args = dict(
        simulator=simulator,
        prior=prior,
        density_estimator=density_estimator,
        show_progress_bars=False,
    )

    if method_str == "snpe_b":
        inference = SNPE_B(**creation_args)
        theta, x = simulate_for_sbi(simulator,
                                    prior,
                                    500,
                                    simulation_batch_size=10)
        _ = inference.append_simulations(theta, x).train()
        posterior1 = inference.build_posterior().set_default_x(x_o)
        theta, x = simulate_for_sbi(simulator,
                                    posterior1,
                                    1000,
                                    simulation_batch_size=10)
        _ = inference.append_simulations(theta, x, proposal=posterior1).train()
        posterior = inference.build_posterior().set_default_x(x_o)
    elif method_str == "snpe_c":
        inference = SNPE_C(**creation_args)
        theta, x = simulate_for_sbi(simulator,
                                    prior,
                                    500,
                                    simulation_batch_size=50)
        _ = inference.append_simulations(theta, x).train()
        posterior1 = inference.build_posterior().set_default_x(x_o)
        theta, x = simulate_for_sbi(simulator,
                                    posterior1,
                                    1000,
                                    simulation_batch_size=50)
        _ = inference.append_simulations(theta, x, proposal=posterior1).train()
        posterior = inference.build_posterior().set_default_x(x_o)
    elif method_str == "snpe_a":
        inference = SNPE_A(**creation_args)
        proposal = prior
        final_round = False
        num_rounds = 3
        for r in range(num_rounds):
            if r == 2:
                final_round = True
            theta, x = simulate_for_sbi(simulator,
                                        proposal,
                                        500,
                                        simulation_batch_size=50)
            inference = inference.append_simulations(theta,
                                                     x,
                                                     proposal=proposal)
            _ = inference.train(max_num_epochs=200, final_round=final_round)
            posterior = inference.build_posterior().set_default_x(x_o)
            proposal = posterior

    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg=method_str)