def test_sample(self):
    """RejectionABC with a fixed seed should reproduce the known posterior sample."""
    # setup backend
    dummy = BackendDummy()

    # define a uniform prior distribution over the (mu, sigma) box
    lb = np.array([-5, 0])
    ub = np.array([5, 10])
    prior = Uniform(lb, ub, seed=1)

    # define a Gaussian model
    model = Gaussian(prior, mu=2.1, sigma=5.0, seed=1)

    # define sufficient statistics for the model
    stat_calc = Identity(degree=2, cross=0)

    # define a distance function
    dist_calc = Euclidean(stat_calc)

    # create fake observed data
    y_obs = model.simulate(1)

    # use the rejection sampling scheme
    sampler = RejectionABC(model, dist_calc, dummy, seed=1)
    journal = sampler.sample(y_obs, 10, 1, 0.1)
    samples = journal.get_parameters()

    # test shape of samples
    samples_shape = np.shape(samples)
    self.assertEqual(samples_shape, (10, 2))

    # Compute posterior mean.
    # FIX: use assertAlmostEqual instead of exact float equality, which is
    # brittle across platforms and numpy versions.
    self.assertAlmostEqual(np.average(np.asarray(samples[:, 0])),
                           1.6818856447333246)
    self.assertAlmostEqual(np.average(np.asarray(samples[:, 1])),
                           8.4384177826766518)
class PenLogRegTests(unittest.TestCase):
    """Tests for the penalized logistic-regression approximate likelihood."""

    def setUp(self):
        self.prior = Uniform([-5.0, 5.0], [5.0, 10.0], seed=1)
        self.model = Gaussian(self.prior, mu=1.1, sigma=1.0, seed=1)
        self.stat_calc = Identity(degree=2, cross=0)
        self.likfun = PenLogReg(self.stat_calc, self.model,
                                n_simulate=100, n_folds=10)

    def test_likelihood(self):
        # Checks whether wrong input type produces error message
        self.assertRaises(TypeError, self.likfun.likelihood, 3.4, [2, 1])
        self.assertRaises(TypeError, self.likfun.likelihood, [2, 4], 3.4)

        # create observed data
        y_obs = self.model.simulate(1)

        # create fake simulated data
        self.model.set_parameters(np.array([1.1, 1.0]))
        y_sim = self.model.simulate(100)

        comp_likelihood = self.likfun.likelihood(y_obs, y_sim)
        expected_likelihood = 4.39965563272
        # FIX: take abs() of the difference. The original one-sided check
        # would also pass for a likelihood arbitrarily far BELOW the
        # expected value, hiding regressions.
        self.assertLess(abs(comp_likelihood - expected_likelihood), 10e-2)
class SemiautomaticTests(unittest.TestCase):
    """Tests for the semi-automatic summary-statistics selection."""

    def setUp(self):
        # define prior and model
        prior = Uniform([150, 5], [200, 25])
        self.model = Gaussian(prior, seed=1)

        # define backend
        self.backend = Backend()

        # define statistics
        # (removed an unused Identity(degree=1) attribute that nothing in
        # this class referenced)
        self.statistics_cal = Identity(degree=2, cross=False)

        # Initialize summaryselection
        self.summaryselection = Semiautomatic(self.model, self.statistics_cal,
                                              self.backend, n_samples=1000,
                                              seed=1)

    def test_transformation(self):
        # Transform statistics extraction by composing the learned
        # transformation with the raw statistics.
        self.statistics_cal.statistics = lambda x, f2=self.summaryselection.transformation, f1=self.statistics_cal.statistics: f2(f1(x))

        y_obs = self.model.simulate(10)
        extracted_statistics_10 = self.statistics_cal.statistics(y_obs)
        self.assertEqual(np.shape(extracted_statistics_10), (10, 2))

        y_obs = self.model.simulate(1)
        extracted_statistics_1 = self.statistics_cal.statistics(y_obs)
        # FIX: abs() makes the tolerance check two-sided; the original
        # one-sided form passed for arbitrarily negative differences.
        self.assertLess(abs(extracted_statistics_1[0, 0] - 111.012664458), 10e-2)
        self.assertLess(abs(extracted_statistics_1[0, 1] - (-63.224510811)), 10e-2)
def setUp(self):
    """Build the seeded fixtures shared by the PenLogReg likelihood tests."""
    # quadratic identity statistics (no cross terms)
    self.stat_calc = Identity(degree=2, cross=0)
    # seeded uniform prior and Gaussian model
    self.prior = Uniform([-5.0, 5.0], [5.0, 10.0], seed=1)
    self.model = Gaussian(self.prior, mu=1.1, sigma=1.0, seed=1)
    # likelihood function trained on 100 simulations with 10 CV folds
    self.likfun = PenLogReg(self.stat_calc, self.model,
                            n_simulate=100, n_folds=10)
def setUp(self):
    """Construct model, backend and Semiautomatic summary selection."""
    self.stat_calc = Identity(degree=1, cross=0)
    # prior and model
    prior = Uniform([150, 5], [200, 25])
    self.model = Gaussian(prior, seed=1)
    # backend
    self.backend = Backend()
    # statistics used for the selection
    self.statistics_cal = Identity(degree=2, cross=False)
    # summary selection trained on 1000 prior samples
    self.summaryselection = Semiautomatic(self.model, self.statistics_cal,
                                          self.backend, n_samples=1000,
                                          seed=1)
def infer_parameters():
    """Run PMCABC inference for a Gaussian model of human heights.

    Returns the journal produced by the sampler.

    NOTE(review): this function references a module-level ``backend`` that is
    not defined here -- confirm it is created elsewhere in the file before
    running this standalone.
    """
    # define observation for true parameters mean=170, std=15
    y_obs = [160.82499176, 167.24266737, 185.71695756, 153.7045709,
             163.40568812, 140.70658699, 169.59102084, 172.81041696,
             187.38782738, 179.66358934, 176.63417241, 189.16082803,
             181.98288443, 170.18565017, 183.78493886, 166.58387299,
             161.9521899, 155.69213073, 156.17867343, 144.51580379,
             170.29847515, 197.96767899, 153.36646527, 162.22710198,
             158.70012047, 178.53470703, 170.77697743, 164.31392633,
             165.88595994, 177.38083686, 146.67058471763457,
             179.41946565658628, 238.02751620619537, 206.22458790620766,
             220.89530574344568, 221.04082532837026, 142.25301427453394,
             261.37656571434275, 171.63761180867033, 210.28121820385866,
             237.29130237612236, 175.75558340169619, 224.54340549862235,
             197.42448680731226, 165.88273684581381, 166.55094082844519,
             229.54308602661584, 222.99844054358519, 185.30223966014586,
             152.69149367593846, 206.94372818527413, 256.35498655339154,
             165.43140916577741, 250.19273595481803, 148.87781549665536,
             223.05547559193792, 230.03418198709608, 146.13611923127021,
             138.24716809523139, 179.26755740864527, 141.21704876815426,
             170.89587081800852, 222.96391329259626, 188.27229523693822,
             202.67075179617672, 211.75963110985992, 217.45423324370509]

    # define prior
    from abcpy.distributions import Uniform
    prior = Uniform([150, 5], [200, 25], seed=1)

    # define the model
    from abcpy.models import Gaussian
    model = Gaussian(prior, seed=1)

    # define statistics
    from abcpy.statistics import Identity
    statistics_calculator = Identity(degree=2, cross=False)

    # define distance
    from abcpy.distances import LogReg
    distance_calculator = LogReg(statistics_calculator)

    # define kernel (multivariate Student-t perturbation)
    from abcpy.distributions import MultiStudentT
    mean, cov, df = np.array([.0, .0]), np.eye(2), 3.
    kernel = MultiStudentT(mean, cov, df, seed=1)

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC(model, distance_calculator, kernel, backend, seed=1)

    # sample from scheme
    T, n_sample, n_samples_per_param = 3, 250, 10
    eps_arr = np.array([.75])
    epsilon_percentile = 10
    journal = sampler.sample(y_obs, T, eps_arr, n_sample,
                             n_samples_per_param, epsilon_percentile)

    return journal
class GaussianTests(unittest.TestCase):
    """Unit tests for the Gaussian model wrapper."""

    def setUp(self):
        self.prior = Uniform([-1.0, 0.0], [1.0, 1.0], seed=1)
        self.model = Gaussian(self.prior, 0, 1, seed=1)

    def test_simulate(self):
        # with seed=1 the model must reproduce this exact draw sequence
        observed = self.model.simulate(10)
        self.assertIsInstance(observed, list)
        reference = [
            1.6243453636632417,
            -0.61175641365007538,
            -0.5281717522634557,
            -1.0729686221561705,
            0.86540762932467852,
            -2.3015386968802827,
            1.74481176421648,
            -0.76120690089510279,
            0.31903909605709857,
            -0.24937037547741009,
        ]
        self.assertEqual(observed, reference)

    def test_get_parameters(self):
        # after sampling from the prior there must be exactly two parameters
        self.model.sample_from_prior()
        self.assertEqual(len(self.model.get_parameters()), 2)

    def test_set_parameters(self):
        # a bare scalar is rejected with a TypeError
        self.assertRaises(TypeError, self.model.set_parameters, 3.4)
        # invalid parameter vectors return False
        self.assertFalse(self.model.set_parameters([1, 3, 2]))
        self.assertFalse(self.model.set_parameters([2, -1]))
def setUp(self):
    """Shared fixtures: dummy backend, seeded model, distance and kernel."""
    # find spark and initialize it
    self.backend = BackendDummy()
    # uniform prior over the (mu, sigma) box [-5, 5] x [0, 10]
    prior = Uniform(np.array([-5, 0]), np.array([5, 10]), seed=1)
    # Gaussian model under test
    self.model = Gaussian(prior, mu=2.1, sigma=5.0, seed=1)
    # Euclidean distance on quadratic identity statistics
    self.dist_calc = Euclidean(Identity(degree=2, cross=0))
    # fake observation drawn from the model itself
    self.observation = self.model.simulate(1)
    # perturbation kernel
    # NOTE(review): 3-dimensional kernel for a 2-parameter model -- copied
    # from the original fixture, confirm this is intentional.
    self.kernel = MultiNormal(np.array([-13.0, .0, 7.0]), np.eye(3), seed=1)
def setUp(self):
    """Fixtures for model selection: observation, two candidate models,
    statistics and backend."""
    # define observation for true parameters mean=170, std=15
    self.y_obs = [160.82499176]

    self.model_array = [None] * 2
    # Model 1: Gaussian
    self.model_array[0] = Gaussian(Uniform([150, 5], [200, 25]), seed=1)
    # Model 2: Student t
    self.model_array[1] = Student_t(Uniform([150, 1], [200, 30]), seed=1)

    # statistics and backend shared by both candidate models
    self.statistics_calc = Identity(degree=2, cross=False)
    self.backend = Backend()
def setUp(self):
    """Seeded uniform prior over (mu, sigma) and a standard-normal model."""
    self.prior = Uniform([-1.0, 0.0], [1.0, 1.0], seed=1)
    # mu=0, sigma=1, fixed seed for reproducibility
    self.model = Gaussian(self.prior, 0, 1, seed=1)
def setUp(self):
    """Seeded model plus the synthetic-likelihood function under test."""
    self.prior = Uniform([-5.0, 5.0], [5.0, 10.0], seed=1)
    self.model = Gaussian(self.prior, mu=1.1, sigma=1.0, seed=1)
    # quadratic identity statistics, no cross terms
    self.stat_calc = Identity(degree=2, cross=0)
    # NOTE: "SynLiklihood" is the library's own (misspelled) class name
    self.likfun = SynLiklihood(self.stat_calc)
class RSMCABCTests(unittest.TestCase):
    """Tests for the replenishment SMC-ABC (RSMCABC) sampler."""

    def setUp(self):
        # find spark and initialize it
        self.backend = BackendDummy()

        # define a uniform prior distribution
        lb = np.array([-5, 0])
        ub = np.array([5, 10])
        prior = Uniform(lb, ub, seed=1)

        # define a Gaussian model
        self.model = Gaussian(prior, mu=2.1, sigma=5.0, seed=1)

        # define a distance function
        stat_calc = Identity(degree=2, cross=0)
        self.dist_calc = Euclidean(stat_calc)

        # create fake observed data
        self.observation = self.model.simulate(1)

        # define kernel
        mean = np.array([-13.0, .0, 7.0])
        cov = np.eye(3)
        self.kernel = MultiNormal(mean, cov, seed=1)

    def test_sample(self):
        # use the RSMCABC scheme for T = 1
        steps, n_sample, n_simulate = 1, 10, 1
        sampler = RSMCABC(self.model, self.dist_calc, self.kernel,
                          self.backend, seed=1)
        journal = sampler.sample(self.observation, steps, n_sample, n_simulate)
        samples = (journal.get_parameters(), journal.get_weights())

        # Compute posterior mean
        mu_post_sample = np.asarray(samples[0][:, 0])
        sigma_post_sample = np.asarray(samples[0][:, 1])
        post_weights = np.asarray(samples[1][:, 0])
        mu_post_mean = np.average(mu_post_sample, weights=post_weights)
        sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

        # test shape of sample
        # FIX: sigma shape was previously computed from mu_post_sample
        # (copy-paste bug), so the sigma column was never actually checked.
        self.assertEqual(np.shape(mu_post_sample), (10, ))
        self.assertEqual(np.shape(sigma_post_sample), (10, ))
        self.assertEqual(np.shape(post_weights), (10, ))
        #self.assertEqual((mu_post_mean, sigma_post_mean), (,))

        # use the RSMCABC scheme for T = 2
        steps, n_sample, n_simulate = 2, 10, 1
        sampler = RSMCABC(self.model, self.dist_calc, self.kernel,
                          self.backend, seed=1)
        journal = sampler.sample(self.observation, steps, n_sample, n_simulate)
        samples = (journal.get_parameters(), journal.get_weights())

        # Compute posterior mean
        mu_post_sample = np.asarray(samples[0][:, 0])
        sigma_post_sample = np.asarray(samples[0][:, 1])
        post_weights = np.asarray(samples[1][:, 0])
        mu_post_mean = np.average(mu_post_sample, weights=post_weights)
        sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

        # test shape of sample (same copy-paste fix as above)
        self.assertEqual(np.shape(mu_post_sample), (10, ))
        self.assertEqual(np.shape(sigma_post_sample), (10, ))
        self.assertEqual(np.shape(post_weights), (10, ))
        # FIX: abs() makes the tolerance checks two-sided; the original
        # one-sided form passed for arbitrarily negative differences.
        self.assertLess(abs(mu_post_mean - (-0.349310337252)), 10e-2)
        self.assertLess(abs(sigma_post_mean - 6.30221177368), 10e-2)
def test_sample(self):
    """PMC sampler should reproduce the seeded posterior means for T=1 and T=2."""
    # setup backend
    backend = BackendDummy()

    # define a uniform prior distribution
    lb = np.array([-5, 0])
    ub = np.array([5, 10])
    prior = Uniform(lb, ub, seed=1)

    # define a Gaussian model
    model = Gaussian(prior, mu=2.1, sigma=5.0, seed=1)

    # define sufficient statistics for the model
    stat_calc = Identity(degree=2, cross=0)

    # create fake observed data
    y_obs = model.simulate(1)

    # Define the likelihood function
    likfun = SynLiklihood(stat_calc)

    # define the perturbation kernel
    mean = np.array([-13.0, .0, 7.0])
    cov = np.eye(3)
    kernel = MultiNormal(mean, cov, seed=1)

    # use the PMC scheme for T = 1
    T, n_sample, n_samples_per_param = 1, 10, 100
    sampler = PMC(model, likfun, kernel, backend, seed=1)
    journal = sampler.sample(y_obs, T, n_sample, n_samples_per_param,
                             covFactor=np.array([.1, .1]), iniPoints=None)
    samples = (journal.get_parameters(), journal.get_weights())

    # Compute posterior mean
    mu_post_sample = np.asarray(samples[0][:, 0])
    sigma_post_sample = np.asarray(samples[0][:, 1])
    post_weights = np.asarray(samples[1][:, 0])
    mu_post_mean = np.average(mu_post_sample, weights=post_weights)
    sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

    # test shape of sample
    # FIX: sigma shape was previously computed from mu_post_sample
    # (copy-paste bug), so the sigma column was never actually checked.
    self.assertEqual(np.shape(mu_post_sample), (10, ))
    self.assertEqual(np.shape(sigma_post_sample), (10, ))
    self.assertEqual(np.shape(post_weights), (10, ))
    self.assertLess(abs(mu_post_mean - (-1.48953333102)), 1e-10)
    self.assertLess(abs(sigma_post_mean - 6.50695612708), 1e-10)

    # use the PMC scheme for T = 2
    T, n_sample, n_samples_per_param = 2, 10, 100
    sampler = PMC(model, likfun, kernel, backend, seed=1)
    journal = sampler.sample(y_obs, T, n_sample, n_samples_per_param,
                             covFactor=np.array([.1, .1]), iniPoints=None)
    samples = (journal.get_parameters(), journal.get_weights())

    # Compute posterior mean
    mu_post_sample = np.asarray(samples[0][:, 0])
    sigma_post_sample = np.asarray(samples[0][:, 1])
    post_weights = np.asarray(samples[1][:, 0])
    mu_post_mean = np.average(mu_post_sample, weights=post_weights)
    sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

    # test shape of sample (same copy-paste fix as above)
    self.assertEqual(np.shape(mu_post_sample), (10, ))
    self.assertEqual(np.shape(sigma_post_sample), (10, ))
    self.assertEqual(np.shape(post_weights), (10, ))
    self.assertLess(abs(mu_post_mean - (-1.4033145848)), 1e-10)
    self.assertLess(abs(sigma_post_mean - 7.05175546876), 1e-10)
class PMCABCTests(unittest.TestCase):
    """Tests for the PMCABC sampler and its importance-weight computation."""

    def setUp(self):
        # find spark and initialize it
        self.backend = BackendDummy()

        # define a uniform prior distribution
        lb = np.array([-5, 0])
        ub = np.array([5, 10])
        prior = Uniform(lb, ub, seed=1)

        # define a Gaussian model
        self.model = Gaussian(prior, mu=2.1, sigma=5.0, seed=1)

        # define a distance function
        stat_calc = Identity(degree=2, cross=0)
        self.dist_calc = Euclidean(stat_calc)

        # create fake observed data
        self.observation = self.model.simulate(1)

        # define kernel
        mean = np.array([-13.0, .0, 7.0])
        cov = np.eye(3)
        self.kernel = MultiNormal(mean, cov, seed=1)

    def test_calculate_weight(self):
        """Weights: uniform before broadcasts, kernel-based afterwards."""
        n_samples = 2
        rc = _RemoteContextPMCABC(self.backend, self.model, self.dist_calc,
                                  self.kernel, self.observation, n_samples, 1)
        theta = np.array([1.0])

        # before any accepted parameters are broadcast, weight is 1/n_samples
        weight = rc._calculate_weight(theta)
        self.assertEqual(weight, 0.5)

        accepted_parameters = np.array([[1.0], [1.0 + np.sqrt(2)]])
        accepted_weights = np.array([[.5], [.5]])
        accepted_cov_mat = np.array([[1.0]])
        rc._update_broadcasts(self.backend, accepted_parameters,
                              accepted_weights, accepted_cov_mat)
        weight = rc._calculate_weight(theta)
        expected_weight = (2.0 * np.sqrt(2.0 * np.pi)) / (
            (1 + np.exp(-1)) * 100)
        self.assertEqual(weight, expected_weight)

    def test_sample(self):
        # use the PMCABC scheme for T = 1
        T, n_sample, n_simulate, eps_arr, eps_percentile = 1, 10, 1, [.1], 10
        sampler = PMCABC(self.model, self.dist_calc, self.kernel,
                         self.backend, seed=1)
        journal = sampler.sample(self.observation, T, eps_arr, n_sample,
                                 n_simulate, eps_percentile)
        samples = (journal.get_parameters(), journal.get_weights())

        # Compute posterior mean
        mu_post_sample = np.asarray(samples[0][:, 0])
        sigma_post_sample = np.asarray(samples[0][:, 1])
        post_weights = np.asarray(samples[1][:, 0])
        mu_post_mean = np.average(mu_post_sample, weights=post_weights)
        sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

        # test shape of sample
        # FIX: sigma shape was previously computed from mu_post_sample
        # (copy-paste bug), so the sigma column was never actually checked.
        self.assertEqual(np.shape(mu_post_sample), (10, ))
        self.assertEqual(np.shape(sigma_post_sample), (10, ))
        self.assertEqual(np.shape(post_weights), (10, ))
        #self.assertEqual((mu_post_mean, sigma_post_mean), (,))

        # use the PMCABC scheme for T = 2
        T, n_sample, n_simulate, eps_arr, eps_percentile = 2, 10, 1, [.1, .05], 10
        sampler = PMCABC(self.model, self.dist_calc, self.kernel,
                         self.backend, seed=1)
        journal = sampler.sample(self.observation, T, eps_arr, n_sample,
                                 n_simulate, eps_percentile)
        samples = (journal.get_parameters(), journal.get_weights())

        # Compute posterior mean
        mu_post_sample = np.asarray(samples[0][:, 0])
        sigma_post_sample = np.asarray(samples[0][:, 1])
        post_weights = np.asarray(samples[1][:, 0])
        mu_post_mean = np.average(mu_post_sample, weights=post_weights)
        sigma_post_mean = np.average(sigma_post_sample, weights=post_weights)

        # test shape of sample (same copy-paste fix as above)
        self.assertEqual(np.shape(mu_post_sample), (10, ))
        self.assertEqual(np.shape(sigma_post_sample), (10, ))
        self.assertEqual(np.shape(post_weights), (10, ))
        # FIX: abs() makes the tolerance checks two-sided; the original
        # one-sided form passed for arbitrarily negative differences.
        self.assertLess(abs(mu_post_mean - 3.80593164247), 10e-2)
        self.assertLess(abs(sigma_post_mean - 7.21421951262), 10e-2)