Example #1
def test_api_sre_on_linearGaussian(num_dim: int):
    """Test inference API of SRE with linear Gaussian model.

    Avoids intensive computation to allow fast testing of the API.

    Args:
        num_dim: parameter dimension of the Gaussian model
    """

    x_o = zeros(num_dim)
    prior = MultivariateNormal(loc=zeros(num_dim),
                               covariance_matrix=eye(num_dim))

    infer = SRE(
        *prepare_for_sbi(diagonal_linear_gaussian, prior),
        classifier="resnet",
        simulation_batch_size=50,
        mcmc_method="slice_np",
        show_progress_bars=False,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=1000,
                      max_num_epochs=5)

    posterior.sample(sample_shape=(10, ),
                     x=x_o,
                     mcmc_parameters={"num_chains": 2})
Example #2
def test_log_prob_with_different_x(snpe_method: type):

    num_dim = 2

    prior = MultivariateNormal(loc=zeros(num_dim),
                               covariance_matrix=eye(num_dim))
    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = snpe_method(prior)
    theta, x = simulate_for_sbi(simulator, prior, 1000)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()

    _ = posterior.sample((10, ), x=ones(1, num_dim))
    theta = posterior.sample((10, ), ones(1, num_dim))
    posterior.log_prob(theta, x=ones(num_dim))
    posterior.log_prob(theta, x=ones(1, num_dim))
    posterior = posterior.set_default_x(ones(1, num_dim))
    posterior.log_prob(theta, x=None)
    posterior.sample((10, ), x=None)

    # Both must fail due to batch size of x > 1.
    with pytest.raises(ValueError):
        posterior.log_prob(theta, x=ones(2, num_dim))
    with pytest.raises(ValueError):
        posterior.sample(2, x=ones(2, num_dim))
Example #3
def test_api_sre_sampling_methods(mcmc_method: str, prior_str: str, set_seed):
    """Test leakage correction both for MCMC and rejection sampling.

    Args:
        mcmc_method: which mcmc method to use for sampling
        prior_str: one of "gaussian" or "uniform"
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    x_o = zeros(num_dim)
    if prior_str == "gaussian":
        prior = MultivariateNormal(loc=zeros(num_dim),
                                   covariance_matrix=eye(num_dim))
    else:
        prior = utils.BoxUniform(low=-1.0 * ones(num_dim), high=ones(num_dim))

    infer = SRE(
        *prepare_for_sbi(diagonal_linear_gaussian, prior),
        classifier="resnet",
        simulation_batch_size=50,
        mcmc_method=mcmc_method,
        show_progress_bars=False,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=200,
                      max_num_epochs=5)

    posterior.sample(sample_shape=(10, ), x=x_o)
Example #4
def test_log_prob_with_different_x(snpe_method: type, x_o_batch_dim: int):

    num_dim = 2

    prior = MultivariateNormal(loc=zeros(num_dim),
                               covariance_matrix=eye(num_dim))
    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = snpe_method(prior=prior)
    theta, x = simulate_for_sbi(simulator, prior, 1000)
    posterior_estimator = inference.append_simulations(
        theta, x).train(max_num_epochs=3)

    if x_o_batch_dim == 0:
        x_o = ones(num_dim)
    elif x_o_batch_dim == 1:
        x_o = ones(1, num_dim)
    elif x_o_batch_dim == 2:
        x_o = ones(2, num_dim)
    else:
        raise NotImplementedError

    posterior = DirectPosterior(posterior_estimator=posterior_estimator,
                                prior=prior).set_default_x(x_o)
    samples = posterior.sample((10, ))
    _ = posterior.log_prob(samples)
Example #5
def example_posterior():
    """Return an inferred `NeuralPosterior` for interactive examination."""
    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        show_progress_bars=False,
    )
    theta, x = simulate_for_sbi(simulator,
                                prior,
                                1000,
                                simulation_batch_size=10,
                                num_workers=6)
    _ = inference.append_simulations(theta, x).train()
    return inference.build_posterior().set_default_x(x_o)
Example #6
def test_api_sre_on_linearGaussian(num_dim: int):
    """Test inference API of SRE with linear Gaussian model.

    Avoids intensive computation to allow fast testing of the API.

    Args:
        num_dim: parameter dimension of the Gaussian model
    """

    prior = MultivariateNormal(loc=zeros(num_dim), covariance_matrix=eye(num_dim))

    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNRE_B(
        classifier="resnet",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator, prior, 1000, simulation_batch_size=50)
    ratio_estimator = inference.append_simulations(theta, x).train(max_num_epochs=5)

    for num_trials in [1, 2]:
        x_o = zeros(num_trials, num_dim)
        potential_fn, theta_transform = ratio_estimator_based_potential(
            ratio_estimator=ratio_estimator, prior=prior, x_o=x_o
        )
        posterior = MCMCPosterior(
            potential_fn=potential_fn,
            theta_transform=theta_transform,
            proposal=prior,
            num_chains=2,
        )
        posterior.sample(sample_shape=(10,))
Example #7
def test_mdn_with_1D_uniform_prior():
    """
    Note, we have this test because for 1D uniform priors, mdn log prob evaluation
    results in batch_size x batch_size return. This is probably because Uniform does
    not allow for event dimension > 1 and somewhere in pyknos it is used as if this was
    possible.
    Casting to BoxUniform solves it.
    """
    num_dim = 1
    x_o = torch.tensor([[1.0]])
    num_samples = 100

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior = Uniform(low=torch.zeros(num_dim), high=torch.ones(num_dim))

    def simulator(theta: Tensor) -> Tensor:
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE(prior, density_estimator="mdn")

    theta, x = simulate_for_sbi(simulator, prior, 100)
    _ = inference.append_simulations(theta, x).train(training_batch_size=50)
    posterior = inference.build_posterior().set_default_x(x_o)
    samples = posterior.sample((num_samples,))
    log_probs = posterior.log_prob(samples)

    assert log_probs.shape == torch.Size([num_samples])
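A minimal sketch of the cast described in the docstring above: wrapping a one-dimensional torch.distributions.Uniform in sbi's BoxUniform (an Independent distribution with event_dim=1) makes log_prob return one value per sample instead of one per dimension. This is purely illustrative; the test above relies on prepare_for_sbi handling the cast.

import torch
from sbi import utils

raw_prior = torch.distributions.Uniform(torch.zeros(1), torch.ones(1))
box_prior = utils.BoxUniform(low=torch.zeros(1), high=torch.ones(1))

theta = box_prior.sample((5,))          # shape (5, 1)
print(raw_prior.log_prob(theta).shape)  # torch.Size([5, 1]): one value per dim
print(box_prior.log_prob(theta).shape)  # torch.Size([5]): one value per sample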
Example #8
def test_api_snl_on_linearGaussian(num_dim: int, set_seed):
    """Test API for inference on linear Gaussian model using SNL.

    Avoids expensive computations by training on few simulations and generating few
    posterior samples.

    Args:
        num_dim: parameter dimension of the Gaussian model
    """
    num_samples = 10
    x_o = zeros(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNL(
        prior,
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                1000,
                                simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=5)
    posterior = inference.build_posterior().set_default_x(x_o)

    posterior.sample(sample_shape=(num_samples, ),
                     x=x_o,
                     mcmc_parameters={"thin": 3})
Example #9
def test_api_sre_on_linearGaussian(num_dim: int):
    """Test inference API of SRE with linear Gaussian model.

    Avoids intensive computation to allow fast testing of the API.

    Args:
        num_dim: parameter dimension of the Gaussian model
    """

    prior = MultivariateNormal(loc=zeros(num_dim), covariance_matrix=eye(num_dim))

    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNRE_B(
        prior,
        classifier="resnet",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator, prior, 1000, simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=5)
    posterior = inference.build_posterior()

    for num_trials in [1, 2]:
        x_o = zeros(num_trials, num_dim)
        posterior.sample(sample_shape=(10,), x=x_o, mcmc_parameters={"num_chains": 2})
Example #10
def mdn_inference_with_different_methods(method, set_seed):

    num_dim = 2
    x_o = torch.tensor([[1.0, 0.0]])
    num_samples = 500
    num_simulations = 2000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov
    )
    target_samples = gt_posterior.sample((num_samples,))

    def simulator(theta: Tensor) -> Tensor:
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = method(prior, density_estimator="mdn")

    theta, x = simulate_for_sbi(simulator, prior, num_simulations)
    _ = inference.append_simulations(theta, x).train(training_batch_size=50)
    posterior = inference.build_posterior().set_default_x(x_o)

    samples = posterior.sample((num_samples,))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg=f"{method}")
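For context, the classifier two-sample test (c2st) behind check_c2st trains a classifier to distinguish the two sample sets; cross-validated accuracy near 0.5 means the sets are statistically indistinguishable. A self-contained sketch of the idea, not sbi's implementation (the scikit-learn classifier choice here is an assumption):

import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.neural_network import MLPClassifier

def c2st_sketch(samples_p: np.ndarray, samples_q: np.ndarray, folds: int = 5) -> float:
    """Cross-validated accuracy of a classifier separating two sample sets."""
    data = np.concatenate([samples_p, samples_q])
    labels = np.concatenate([np.zeros(len(samples_p)), np.ones(len(samples_q))])
    clf = MLPClassifier(hidden_layer_sizes=(64,), max_iter=500)
    return cross_val_score(clf, data, labels, cv=folds, scoring="accuracy").mean()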
Example #11
def test_api_snl_sampling_methods(mcmc_method: str, prior_str: str, set_seed):
    """Runs SNL on linear Gaussian and tests sampling from posterior via mcmc.

    Args:
        mcmc_method: which mcmc method to use for sampling
        prior_str: one of "gaussian" or "uniform"
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    num_samples = 10
    x_o = zeros((1, num_dim))

    if prior_str == "gaussian":
        prior = MultivariateNormal(loc=zeros(num_dim),
                                   covariance_matrix=eye(num_dim))
    else:
        prior = utils.BoxUniform(-1.0 * ones(num_dim), ones(num_dim))

    infer = SNL(
        *prepare_for_sbi(diagonal_linear_gaussian, prior),
        simulation_batch_size=50,
        mcmc_method="slice_np",
        show_progress_bars=False,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=200,
                      max_num_epochs=5)

    posterior.sample(sample_shape=(num_samples, ),
                     x=x_o,
                     mcmc_parameters={"thin": 3})
Example #12
def example_posterior():
    """Return an inferred `NeuralPosterior` for interactive examination."""
    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    infer = SNPE_C(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=10,
        num_workers=6,
        show_progress_bars=False,
        sample_with_mcmc=False,
    )

    return infer(num_simulations=1000).set_default_x(x_o)
Example #13
def test_api_snl_on_linearGaussian(num_dim: int, set_seed):
    """Test API for inference on linear Gaussian model using SNL.

    Avoids expensive computations by training on few simulations and generating few
    posterior samples.

    Args:
        num_dim: parameter dimension of the Gaussian model
    """
    num_samples = 10
    x_o = zeros(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    infer = SNL(
        *prepare_for_sbi(diagonal_linear_gaussian, prior),
        simulation_batch_size=50,
        mcmc_method="slice_np",
        show_progress_bars=False,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=1000,
                      max_num_epochs=5)

    posterior.sample(sample_shape=(num_samples, ),
                     x=x_o,
                     mcmc_parameters={"thin": 3})
Example #14
def test_example_posterior(snpe_method: type):
    """Return an inferred `NeuralPosterior` for interactive examination."""
    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    if snpe_method == SNPE_A:
        extra_kwargs = dict(final_round=True)
    else:
        extra_kwargs = dict()

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta, likelihood_shift, likelihood_cov),
        prior)
    inference = snpe_method(prior, show_progress_bars=False, **extra_kwargs)
    theta, x = simulate_for_sbi(simulator,
                                prior,
                                1000,
                                simulation_batch_size=10,
                                num_workers=6)
    _ = inference.append_simulations(theta, x).train()

    posterior = inference.build_posterior().set_default_x(x_o)
    assert posterior is not None
Example #15
def run_snpe(total_runs=10, num_generation=6, seed=46, nde='maf'):
    torch.manual_seed(seed)
    num_workers = 16  # for parallel execution of simulations
    Ndata = 3000
    use_embedding_net = True
    use_mcmc = False  # becomes very slow, but can handle leakage
    result_posterior = []
    store_time = []

    prior = utils.BoxUniform(low=torch.tensor([-2.0, -1.0]),
                             high=torch.tensor([2.0, 1.0]))

    simulator_sbi, prior = prepare_for_sbi(simulator, prior)
    
    x_o = np.load("target_ts.npy")
    x_o = torch.tensor(x_o)
    x_o = x_o.reshape((1,100))

    #NN for summary statistic 
    embedding_net = SummaryNet()
    
    for run in range(total_runs):
        print(f"staring run {run}")

        theta_store = []
        time_ticks = []
        posteriors = []
        proposal = prior

        if use_embedding_net:
            neural_posterior = utils.posterior_nn(model=nde,
                                                  embedding_net=embedding_net,
                                                  hidden_features=10,
                                                  num_transforms=2)
        else:
            neural_posterior = utils.posterior_nn(model=nde,
                                                  hidden_features=10,
                                                  num_transforms=2)
        
        inference = SNPE_C(prior=prior, density_estimator=neural_posterior)
    
        for i in range(num_generation):
            print(f"staring round {i}")
            time_begin = time.time()
            theta, x = simulate_for_sbi(simulator_sbi, proposal, num_simulations=Ndata, num_workers=num_workers)
            
            density_estimator = inference.append_simulations(theta, x, proposal=proposal).train()
            posterior = inference.build_posterior(density_estimator, sample_with_mcmc=use_mcmc)
            print("building post done")
            posteriors.append(posterior)
            proposal = posterior.set_default_x(x_o)

            posterior_samples = posterior.sample((Ndata,), x=x_o).numpy()
            print("Post samples done")
            theta_store.append(posterior_samples)
            time_ticks.append(time.time() - time_begin)
        
        result_posterior.append(theta_store)
        store_time.append(time_ticks)
    return np.asarray(result_posterior), np.asarray(store_time), posteriors
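A hedged usage sketch for the helper above. Like run_snpe itself, it assumes a module-level simulator, a SummaryNet embedding-net class, and a target_ts.npy file on disk; the output shapes follow from the code, given the 2D prior.

# Hypothetical driver with smaller settings for a quick check.
samples_arr, times_arr, posteriors = run_snpe(total_runs=2, num_generation=3)
print(samples_arr.shape)  # (total_runs, num_generation, Ndata, 2)
print(times_arr.shape)    # (total_runs, num_generation)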
Example #16
def test_c2st_sre_on_linearGaussian_different_dims(set_seed):
    """Test whether SRE infers well a simple example with available ground truth.

    In this example, theta has a different dimensionality than x. This test
    also acts as the only functional test for SRE not marked as slow.

    Args:
        set_seed: fixture for manual seeding
    """

    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = ones(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(theta,
                               likelihood_shift,
                               likelihood_cov,
                               num_discarded_dims=discard_dims)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SRE(
        prior,
        classifier="resnet",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                5000,
                                simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()
    samples = posterior.sample((num_samples, ),
                               x=x_o,
                               mcmc_parameters={"thin": 3})

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #17
def test_c2st_snl_on_linearGaussian(num_dim: int, prior_str: str, set_seed):
    """Test SNL on linear Gaussian, comparing to ground truth posterior via c2st.

    Args:
        num_dim: parameter dimension of the Gaussian model
        prior_str: one of "gaussian" or "uniform"
        set_seed: fixture for manual seeding
    """

    x_o = zeros((1, num_dim))
    num_samples = 500

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
        gt_posterior = true_posterior_linear_gaussian_mvn_prior(
            x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
        target_samples = gt_posterior.sample((num_samples, ))
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))
        target_samples = samples_true_posterior_linear_gaussian_uniform_prior(
            x_o,
            likelihood_shift,
            likelihood_cov,
            prior=prior,
            num_samples=num_samples)

    simulator = lambda theta: linear_gaussian(theta, likelihood_shift,
                                              likelihood_cov)

    infer = SNL(
        *prepare_for_sbi(simulator, prior),
        mcmc_method="slice_np",
        show_progress_bars=False,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=1000).set_default_x(x_o)

    samples = posterior.sample(sample_shape=(num_samples, ),
                               mcmc_parameters={"thin": 3})

    # Check performance based on c2st accuracy.
    check_c2st(samples, target_samples, alg=f"snle_a-{prior_str}-prior")

    # TODO: We do not have a test for SNL log_prob() because the output
    # density is not normalized, so a KL divergence does not make sense.
    if prior_str == "uniform":
        # Check whether the returned probability outside of the support is zero.
        posterior_prob = get_prob_outside_uniform_prior(posterior, num_dim)
        assert (
            posterior_prob == 0.0
        ), "The posterior probability outside of the prior support is not zero"
Example #18
def test_c2st_multi_round_snl_on_linearGaussian(num_trials: int):
    """Test SNL on linear Gaussian, comparing to ground truth posterior via c2st."""

    num_dim = 2
    x_o = zeros((num_trials, num_dim))
    num_samples = 500
    num_simulations_per_round = 600 * num_trials

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o, likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta, likelihood_shift, likelihood_cov),
        prior)
    inference = SNLE(show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                num_simulations_per_round,
                                simulation_batch_size=50)
    likelihood_estimator = inference.append_simulations(theta, x).train()
    potential_fn, theta_transform = likelihood_estimator_based_potential(
        prior=prior, likelihood_estimator=likelihood_estimator, x_o=x_o)
    posterior1 = MCMCPosterior(
        proposal=prior,
        potential_fn=potential_fn,
        theta_transform=theta_transform,
        thin=5,
        num_chains=20,
    )

    theta, x = simulate_for_sbi(simulator,
                                posterior1,
                                num_simulations_per_round,
                                simulation_batch_size=50)
    likelihood_estimator = inference.append_simulations(theta, x).train()
    potential_fn, theta_transform = likelihood_estimator_based_potential(
        prior=prior, likelihood_estimator=likelihood_estimator, x_o=x_o)
    posterior = MCMCPosterior(
        proposal=prior,
        potential_fn=potential_fn,
        theta_transform=theta_transform,
        thin=5,
        num_chains=20,
    )

    samples = posterior.sample(sample_shape=(num_samples, ))

    # Check performance based on c2st accuracy.
    check_c2st(samples, target_samples, alg="multi-round-snl")
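The two rounds above differ only in the proposal used for simulation. As an equivalent sketch (same API and names as the test), the pattern compresses into a loop:

# Sketch: the two-round SNL scheme above, written as a loop.
proposal = prior
for _ in range(2):
    theta, x = simulate_for_sbi(simulator, proposal, num_simulations_per_round,
                                simulation_batch_size=50)
    likelihood_estimator = inference.append_simulations(theta, x).train()
    potential_fn, theta_transform = likelihood_estimator_based_potential(
        prior=prior, likelihood_estimator=likelihood_estimator, x_o=x_o)
    proposal = MCMCPosterior(proposal=prior, potential_fn=potential_fn,
                             theta_transform=theta_transform, thin=5,
                             num_chains=20)
samples = proposal.sample(sample_shape=(num_samples,))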
Example #19
def test_c2st_multi_round_snpe_on_linearGaussian(method_str: str, set_seed):
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    Args:
        method_str: one of "snpe_b", "snpe_c", or "snpe_c_non_atomic"
        set_seed: fixture for manual seeding.
    """

    num_dim = 2
    x_o = zeros((1, num_dim))
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    if method_str == "snpe_c_non_atomic":
        density_estimator = utils.posterior_nn("mdn", num_components=5)
        method_str = "snpe_c"
    else:
        density_estimator = "maf"

    simulator, prior = prepare_for_sbi(simulator, prior)
    creation_args = dict(
        simulator=simulator,
        prior=prior,
        density_estimator=density_estimator,
        show_progress_bars=False,
    )

    if method_str == "snpe_b":
        infer = SNPE_B(simulation_batch_size=10, **creation_args)
        posterior1 = infer(num_simulations=1000)
        posterior1.set_default_x(x_o)
        posterior = infer(num_simulations=1000, proposal=posterior1)
    elif method_str == "snpe_c":
        infer = SNPE_C(simulation_batch_size=50,
                       sample_with_mcmc=False,
                       **creation_args)
        posterior = infer(num_simulations=500, num_atoms=10).set_default_x(x_o)
        posterior = infer(num_simulations=1000,
                          num_atoms=10,
                          proposal=posterior).set_default_x(x_o)

    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg=method_str)
Example #20
def test_c2st_snpe_on_linearGaussian_different_dims(set_seed):
    """Test whether SNPE B/C infer well a simple example with available ground truth.

    In this example, theta has a different dimensionality than x. This also
    implicitly tests simulation_batch_size=1.

    Args:
        set_seed: fixture for manual seeding
    """

    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = zeros(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(theta,
                               likelihood_shift,
                               likelihood_cov,
                               num_discarded_dims=discard_dims)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        density_estimator="maf",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                2000,
                                simulation_batch_size=1)  # type: ignore
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()
    samples = posterior.sample((num_samples, ), x=x_o)

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #21
def test_c2st_snl_on_linearGaussian_different_dims(set_seed):
    """Test whether SNL infers well a simple example with available ground truth.

    In this example, theta has a different dimensionality than x. This test
    also acts as the only functional test for SNL not marked as slow.

    Args:
        set_seed: fixture for manual seeding
    """

    device = "cpu"
    configure_default_device(device)
    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = ones(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    simulator = lambda theta: linear_gaussian(theta,
                                              likelihood_shift,
                                              likelihood_cov,
                                              num_discarded_dims=discard_dims)

    infer = SNL(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=50,
        mcmc_method="slice_np",
        show_progress_bars=False,
        device=device,
    )

    posterior = infer(num_rounds=1,
                      num_simulations_per_round=5000)  # type: ignore
    samples = posterior.sample((num_samples, ),
                               x=x_o,
                               mcmc_parameters={"thin": 3})

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snle_a")
Example #22
def test_c2st_multi_round_snl_on_linearGaussian(set_seed):
    """Test SNL on linear Gaussian, comparing to ground truth posterior via c2st.

    Args:
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    x_o = zeros((1, num_dim))
    num_samples = 500

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    simulator = lambda theta: linear_gaussian(theta, likelihood_shift,
                                              likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNL(
        prior,
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                750,
                                simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train()
    posterior1 = inference.build_posterior(mcmc_method="slice_np_vectorized",
                                           mcmc_parameters={
                                               "thin": 5,
                                               "num_chains": 20
                                           }).set_default_x(x_o)

    theta, x = simulate_for_sbi(simulator,
                                posterior1,
                                750,
                                simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior().copy_hyperparameters_from(
        posterior1)

    samples = posterior.sample(sample_shape=(num_samples, ),
                               mcmc_parameters={"thin": 3})

    # Check performance based on c2st accuracy.
    check_c2st(samples, target_samples, alg="multi-round-snl")
Example #23
def test_inference_with_restriction_estimator():

    # likelihood_mean will be likelihood_shift+theta
    num_dim = 3
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)
    x_o = zeros(1, num_dim)
    num_samples = 500

    def linear_gaussian_nan(theta,
                            likelihood_shift=likelihood_shift,
                            likelihood_cov=likelihood_cov):
        condition = theta[:, 0] < 0.0
        x = linear_gaussian(theta, likelihood_shift, likelihood_cov)
        x[condition] = float("nan")

        return x

    prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))
    target_samples = samples_true_posterior_linear_gaussian_uniform_prior(
        x_o,
        likelihood_shift=likelihood_shift,
        likelihood_cov=likelihood_cov,
        num_samples=num_samples,
        prior=prior,
    )

    simulator, prior = prepare_for_sbi(linear_gaussian_nan, prior)
    restriction_estimator = RestrictionEstimator(prior=prior)
    proposals = [prior]
    num_rounds = 2

    for r in range(num_rounds):
        theta, x = simulate_for_sbi(simulator, proposals[-1], 1000)
        restriction_estimator.append_simulations(theta, x)
        if r < num_rounds - 1:
            _ = restriction_estimator.train()
        proposals.append(restriction_estimator.restrict_prior())

    all_theta, all_x, _ = restriction_estimator.get_simulations()

    # Any method can be used in combination with the `RestrictionEstimator`.
    inference = SNPE_C(prior=prior)
    posterior_estimator = inference.append_simulations(all_theta,
                                                       all_x).train()

    # Build posterior.
    posterior = DirectPosterior(
        prior=prior,
        posterior_estimator=posterior_estimator).set_default_x(x_o)

    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg=f"{SNPE_C}")
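As the comment above notes, the restricted simulations are method-agnostic. A minimal sketch that swaps in SNLE, reusing the likelihood-potential pattern from Example #18 and the names all_theta, all_x, prior, x_o, and num_samples from the test:

# Sketch: feed the same restricted simulations to SNLE instead of SNPE_C.
inference = SNLE(prior=prior, show_progress_bars=False)
likelihood_estimator = inference.append_simulations(all_theta, all_x).train()
potential_fn, theta_transform = likelihood_estimator_based_potential(
    likelihood_estimator=likelihood_estimator, prior=prior, x_o=x_o)
posterior = MCMCPosterior(potential_fn=potential_fn,
                          theta_transform=theta_transform,
                          proposal=prior)
samples = posterior.sample((num_samples,))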
Example #24
def test_c2st_snle_external_data_on_linearGaussian(set_seed):
    """Test whether SNPE C infers well a simple example with available ground truth.

    Args:
        set_seed: fixture for manual seeding
    """

    num_dim = 2

    device = "cpu"
    configure_default_device(device)
    x_o = zeros(1, num_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    prior_mean = zeros(num_dim)
    prior_cov = eye(num_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    gt_posterior = true_posterior_linear_gaussian_mvn_prior(
        x_o[0], likelihood_shift, likelihood_cov, prior_mean, prior_cov)
    target_samples = gt_posterior.sample((num_samples, ))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    infer = SNL(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=1000,
        show_progress_bars=False,
        device=device,
    )

    external_theta = prior.sample((1000, ))
    external_x = simulator(external_theta)

    infer.provide_presimulated(external_theta, external_x)

    posterior = infer(
        num_rounds=1,
        num_simulations_per_round=1000,
        training_batch_size=100,
    ).set_default_x(x_o)
    samples = posterior.sample((num_samples, ))

    # Compute the c2st and assert it is near chance level of 0.5.
    check_c2st(samples, target_samples, alg="snpe_c")
Example #25
def test_api_snl_sampling_methods(sampling_method: str, prior_str: str,
                                  set_seed):
    """Runs SNL on linear Gaussian and tests sampling from posterior via mcmc.

    Args:
        mcmc_method: which mcmc method to use for sampling
        prior_str: use gaussian or uniform prior
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    num_samples = 10
    num_trials = 2
    # HMC with a uniform prior needs a well-trained likelihood estimator.
    num_simulations = 10000 if sampling_method == "hmc" else 1000
    x_o = zeros((num_trials, num_dim))
    # Testing multiple chains is cheap when vectorized.
    num_chains = 3 if sampling_method == "slice_np_vectorized" else 1
    if sampling_method == "rejection":
        sample_with = "rejection"
    else:
        sample_with = "mcmc"

    if prior_str == "gaussian":
        prior = MultivariateNormal(loc=zeros(num_dim),
                                   covariance_matrix=eye(num_dim))
    else:
        prior = utils.BoxUniform(-1.0 * ones(num_dim), ones(num_dim))

    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNL(prior, show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator,
                                prior,
                                num_simulations,
                                simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=5)
    posterior = inference.build_posterior(
        sample_with=sample_with,
        mcmc_method=sampling_method).set_default_x(x_o)

    posterior.sample(
        sample_shape=(num_samples, ),
        x=x_o,
        mcmc_parameters={
            "thin": 3,
            "num_chains": num_chains
        },
    )
Example #26
def test_api_snpe_c_posterior_correction(
    sample_with_mcmc, mcmc_method, prior_str, set_seed
):
    """Test that leakage correction applied to sampling works, with both MCMC and
    rejection.

    Args:
        set_seed: fixture for manual seeding
    """

    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))

    def simulator(theta):
        return linear_gaussian(theta, likelihood_shift, likelihood_cov)

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(
        prior,
        density_estimator="maf",
        simulation_batch_size=50,
        sample_with_mcmc=sample_with_mcmc,
        mcmc_method=mcmc_method,
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator, prior, 1000)
    _ = inference.append_simulations(theta, x).train(max_num_epochs=5)
    posterior = inference.build_posterior()
    posterior = posterior.set_sample_with_mcmc(sample_with_mcmc).set_mcmc_method(
        mcmc_method
    )

    # Posterior should be corrected for leakage even if num_rounds is just 1.
    samples = posterior.sample((10,), x=x_o)

    # Evaluate the samples to check correction factor.
    posterior.log_prob(samples, x=x_o)
Example #27
def flexible():
    num_dim = 3
    x_o = torch.ones(1, num_dim)
    prior_mean = torch.zeros(num_dim)
    prior_cov = torch.eye(num_dim)

    # flexible interface
    prior = torch.distributions.MultivariateNormal(loc=prior_mean,
                                                   covariance_matrix=prior_cov)
    simulator, prior = prepare_for_sbi(diagonal_linear_gaussian, prior)
    inference = SNPE(simulator, prior)
    posterior = inference(num_simulations=500)
    posterior.sample((100, ), x=x_o)

    return posterior
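A brief usage sketch for the returned posterior (same legacy API as the example; the alternative observation is a hypothetical value for illustration):

posterior = flexible()
x_new = torch.zeros(1, 3)  # hypothetical alternative observation
samples = posterior.sample((100,), x=x_new)
log_probs = posterior.log_prob(samples, x=x_new)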
Example #28
def test_api_snpe_c_posterior_correction(sample_with, mcmc_method, prior_str):
    """Test that leakage correction applied to sampling works, with both MCMC and
    rejection.

    """

    num_dim = 2
    x_o = zeros(1, num_dim)

    # likelihood_mean will be likelihood_shift+theta
    likelihood_shift = -1.0 * ones(num_dim)
    likelihood_cov = 0.3 * eye(num_dim)

    if prior_str == "gaussian":
        prior_mean = zeros(num_dim)
        prior_cov = eye(num_dim)
        prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    else:
        prior = utils.BoxUniform(-2.0 * ones(num_dim), 2.0 * ones(num_dim))

    simulator, prior = prepare_for_sbi(
        lambda theta: linear_gaussian(theta, likelihood_shift, likelihood_cov),
        prior)
    inference = SNPE_C(prior, show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator, prior, 1000)
    posterior_estimator = inference.append_simulations(theta, x).train()
    potential_fn, theta_transform = posterior_estimator_based_potential(
        posterior_estimator, prior, x_o)
    if sample_with == "mcmc":
        posterior = MCMCPosterior(
            potential_fn=potential_fn,
            theta_transform=theta_transform,
            proposal=prior,
            method=mcmc_method,
        )
    elif sample_with == "rejection":
        posterior = RejectionPosterior(
            potential_fn=potential_fn,
            proposal=prior,
            theta_transform=theta_transform,
        )

    # Posterior should be corrected for leakage even if num_rounds is just 1.
    samples = posterior.sample((10, ))

    # Evaluate the samples to check correction factor.
    _ = posterior.log_prob(samples)
Example #29
def test_inference_with_2d_x(embedding, method):

    num_dim = 2
    num_samples = 10
    num_simulations = 100

    prior = utils.BoxUniform(zeros(num_dim), torch.ones(num_dim))

    simulator, prior = prepare_for_sbi(simulator_2d, prior)

    theta_o = torch.ones(1, num_dim)

    if method == SNPE:
        net_provider = utils.posterior_nn(
            model="mdn",
            embedding_net=embedding(),
        )
        sample_kwargs = {"sample_with_mcmc": True}
        num_trials = 1
    elif method == SNLE:
        net_provider = utils.likelihood_nn(model="mdn",
                                           embedding_net=embedding())
        sample_kwargs = {}
        num_trials = 2
    else:
        net_provider = utils.classifier_nn(
            model="mlp",
            embedding_net_x=embedding(),
        )
        sample_kwargs = {
            "mcmc_method": "slice_np_vectorized",
            "mcmc_parameters": {
                "num_chains": 2
            },
        }
        num_trials = 2

    inference = method(prior, net_provider, show_progress_bars=False)
    theta, x = simulate_for_sbi(simulator, prior, num_simulations)
    _ = inference.append_simulations(theta, x).train(training_batch_size=100,
                                                     max_num_epochs=10)
    x_o = simulator(theta_o.repeat(num_trials, 1))
    posterior = inference.build_posterior(**sample_kwargs).set_default_x(x_o)

    posterior.log_prob(
        posterior.sample((num_samples, ), show_progress_bars=False))
Example #30
def test_inference_with_2d_x(embedding, method):

    num_dim = 2
    num_samples = 10
    num_simulations = 100

    prior = utils.BoxUniform(zeros(num_dim), torch.ones(num_dim))

    simulator, prior = prepare_for_sbi(simulator_2d, prior)

    theta_o = torch.ones(1, num_dim)
    x_o = simulator(theta_o)

    if method == SNPE:
        kwargs = dict(
            density_estimator=utils.posterior_nn(
                model="mdn",
                embedding_net=embedding(),
            ),
            sample_with_mcmc=True,
        )
    elif method == SNLE:
        kwargs = dict(density_estimator=utils.likelihood_nn(
            model="mdn", embedding_net=embedding()))
    else:
        kwargs = dict(density_estimator=utils.classifier_nn(
            model="mlp",
            embedding_net_x=embedding(),
        ))

    infer = method(
        simulator,
        prior,
        1,
        1,
        show_progress_bars=False,
        mcmc_method="slice_np",
        **kwargs,
    )

    posterior = infer(num_simulations=num_simulations,
                      training_batch_size=100,
                      max_num_epochs=10).set_default_x(x_o)

    posterior.log_prob(
        posterior.sample((num_samples, ), show_progress_bars=False))