def test_c2st_sre_on_linearGaussian_different_dims(set_seed):
    """Test whether SRE infers well a simple example with available ground truth.

    This example has different number of parameters theta than number of x. This
    test also acts as the only functional test for SRE not marked as slow.

    Args:
        set_seed: fixture for manual seeding
    """
    theta_dim = 3
    x_dim = 2
    discard_dims = theta_dim - x_dim

    x_o = ones(1, x_dim)
    num_samples = 1000

    # likelihood_mean will be likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean = zeros(theta_dim)
    prior_cov = eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SRE(
        prior,
        classifier="resnet",
        show_progress_bars=False,
    )

    theta, x = simulate_for_sbi(simulator, prior, 5000, simulation_batch_size=50)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()
    samples = posterior.sample((num_samples,), x=x_o, mcmc_parameters={"thin": 3})

    # Compute the c2st and assert it is near chance level of 0.5.
    # Fix: label the check with the algorithm actually under test (SRE) instead
    # of the copy-pasted "snpe_c" — the alg string only affects the assertion
    # message in check_c2st, but a wrong label makes failures misleading.
    check_c2st(samples, target_samples, alg="sre")
def test_c2st_snpe_on_linearGaussian_different_dims(set_seed):
    """Check that SNPE B/C recover a tractable posterior with mismatched dims.

    The number of parameters theta differs from the number of data dimensions x.
    Using simulation_batch_size=1 also implicitly exercises unbatched simulation.

    Args:
        set_seed: fixture for manual seeding
    """
    theta_dim, x_dim = 3, 2
    discard_dims = theta_dim - x_dim
    num_samples = 1000
    x_o = zeros(1, x_dim)

    # The likelihood mean is likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean, prior_cov = zeros(theta_dim), eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    simulator, prior = prepare_for_sbi(simulator, prior)
    inference = SNPE_C(prior, density_estimator="maf", show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator, prior, 2000, simulation_batch_size=1)
    _ = inference.append_simulations(theta, x).train()
    posterior = inference.build_posterior()
    samples = posterior.sample((num_samples,), x=x_o)

    # Posterior samples should be indistinguishable from ground truth (c2st near 0.5).
    check_c2st(samples, target_samples, alg="snpe_c")
def test_c2st_snl_on_linearGaussian_different_dims(set_seed):
    """Check that SNL recovers a tractable posterior when theta and x dims differ.

    This also serves as the only functional test for SNL not marked as slow.

    Args:
        set_seed: fixture for manual seeding
    """
    device = "cpu"
    configure_default_device(device)

    theta_dim, x_dim = 3, 2
    discard_dims = theta_dim - x_dim
    num_samples = 1000
    x_o = ones(1, x_dim)

    # The likelihood mean is likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean, prior_cov = zeros(theta_dim), eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o[0],
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    infer = SNL(
        *prepare_for_sbi(simulator, prior),
        simulation_batch_size=50,
        mcmc_method="slice_np",
        show_progress_bars=False,
        device=device,
    )

    posterior = infer(num_rounds=1, num_simulations_per_round=5000)
    samples = posterior.sample((num_samples,), x=x_o, mcmc_parameters={"thin": 3})

    # Posterior samples should be indistinguishable from ground truth (c2st near 0.5).
    check_c2st(samples, target_samples, alg="snle_a")
def test_c2st_snl_on_linearGaussian():
    """Check that SNL recovers a tractable posterior on a linear-Gaussian task.

    theta has more dimensions than x (the extra dimensions are discarded by the
    simulator). This also serves as the only functional test for SNL not marked
    as slow.
    """
    theta_dim, x_dim = 3, 2
    discard_dims = theta_dim - x_dim
    num_samples = 1000
    num_simulations = 3100
    x_o = zeros(1, x_dim)

    # The likelihood mean is likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean, prior_cov = zeros(theta_dim), eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o,
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    simulator, prior = prepare_for_sbi(simulator, prior)

    density_estimator = likelihood_nn("maf", num_transforms=3)
    inference = SNLE(density_estimator=density_estimator, show_progress_bars=False)

    theta, x = simulate_for_sbi(
        simulator, prior, num_simulations, simulation_batch_size=50
    )
    likelihood_estimator = inference.append_simulations(theta, x).train()

    potential_fn, theta_transform = likelihood_estimator_based_potential(
        prior=prior, likelihood_estimator=likelihood_estimator, x_o=x_o
    )
    posterior = MCMCPosterior(
        proposal=prior,
        potential_fn=potential_fn,
        theta_transform=theta_transform,
        method="slice_np_vectorized",
        num_chains=5,
        thin=10,
    )
    samples = posterior.sample((num_samples,))

    # Posterior samples should be indistinguishable from ground truth (c2st near 0.5).
    check_c2st(samples, target_samples, alg="snle_a")
def test_c2st_snpe_on_linearGaussian_different_dims():
    """Check that SNPE B/C recover a tractable posterior with mismatched dims.

    theta has more dimensions than x. simulation_batch_size=1 is exercised
    implicitly, as are passing ``prior=None`` to the inference object and
    stopping and resuming training.
    """
    theta_dim, x_dim = 3, 2
    discard_dims = theta_dim - x_dim
    num_samples = 1000
    x_o = zeros(1, x_dim)

    # The likelihood mean is likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean, prior_cov = zeros(theta_dim), eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o,
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    simulator, prior = prepare_for_sbi(simulator, prior)

    # The prior is deliberately omitted here to test that `prior=None` works.
    inference = SNPE_C(prior=None, density_estimator="maf", show_progress_bars=False)

    theta, x = simulate_for_sbi(simulator, prior, 2000, simulation_batch_size=1)
    inference = inference.append_simulations(theta, x)

    # Stop training early, then resume from the checkpointed state.
    posterior_estimator = inference.train(max_num_epochs=10)
    posterior_estimator = inference.train(
        resume_training=True, force_first_round_loss=True
    )
    posterior = DirectPosterior(
        prior=prior, posterior_estimator=posterior_estimator
    ).set_default_x(x_o)
    samples = posterior.sample((num_samples,))

    # Posterior samples should be indistinguishable from ground truth (c2st near 0.5).
    check_c2st(samples, target_samples, alg="snpe_c")
def test_c2st_sre_on_linearGaussian():
    """Check that SRE recovers a tractable posterior on a linear-Gaussian task.

    theta has more dimensions than x (the extra dimensions are discarded by the
    simulator). This also serves as the only functional test for SRE not marked
    as slow.
    """
    theta_dim, x_dim = 3, 2
    discard_dims = theta_dim - x_dim
    num_samples = 1000
    num_simulations = 2100

    # The likelihood mean is likelihood_shift + theta.
    likelihood_shift = -1.0 * ones(x_dim)
    likelihood_cov = 0.3 * eye(x_dim)

    prior_mean, prior_cov = zeros(theta_dim), eye(theta_dim)
    prior = MultivariateNormal(loc=prior_mean, covariance_matrix=prior_cov)

    def simulator(theta):
        return linear_gaussian(
            theta, likelihood_shift, likelihood_cov, num_discarded_dims=discard_dims
        )

    simulator, prior = prepare_for_sbi(simulator, prior)

    inference = SNRE_B(classifier="resnet", show_progress_bars=False)
    theta, x = simulate_for_sbi(
        simulator, prior, num_simulations, simulation_batch_size=100
    )
    ratio_estimator = inference.append_simulations(theta, x).train()

    num_trials = 1
    x_o = zeros(num_trials, x_dim)
    target_samples = samples_true_posterior_linear_gaussian_mvn_prior_different_dims(
        x_o,
        likelihood_shift,
        likelihood_cov,
        prior_mean,
        prior_cov,
        num_discarded_dims=discard_dims,
        num_samples=num_samples,
    )

    potential_fn, theta_transform = ratio_estimator_based_potential(
        ratio_estimator=ratio_estimator, prior=prior, x_o=x_o
    )
    posterior = MCMCPosterior(
        potential_fn=potential_fn,
        theta_transform=theta_transform,
        proposal=prior,
        thin=5,
        num_chains=2,
    )
    samples = posterior.sample((num_samples,))

    # Posterior samples should be indistinguishable from ground truth (c2st near 0.5).
    check_c2st(samples, target_samples, alg=f"snre-{num_trials}trials")