def setUp(self):
    # uniform initial pdf over [-5, 5]
    init_pdf = pb.UniPdf(np.array([-5.]), np.array([5.]))
    # p(x_t | x_{t-1}): Gaussian with variance 2, identity mapping, zero offset
    p_xt_xtp = pb.MLinGaussCPdf(np.array([[2.]]), np.array([[1.]]), np.array([0.]))
    # p(y_t | x_t): Gaussian with variance 1, identity mapping, zero offset
    p_yt_xt = pb.MLinGaussCPdf(np.array([[1.]]), np.array([[1.]]), np.array([0.]))
    # particle filter with 20 particles
    self.pf = pb.ParticleFilter(20, init_pdf, p_xt_xtp, p_yt_xt)
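# Usage sketch, assuming ParticleFilter follows the pb.Filter interface
# (bayes(yt) performs one Bayes update, posterior() returns the empirical pdf):
# feed observations in one at a time and read off the posterior mean.
#
#     for yt in (np.array([0.5]), np.array([1.2]), np.array([-0.3])):
#         self.pf.bayes(yt)                      # update particle weights and resample
#         estimate = self.pf.posterior().mean()  # mean of the empirical posterior pdf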
def test_eval_log(self):
    x = np.array([0.])
    cond = np.array([0.])
    norm = pb.MLinGaussCPdf(np.array([[1.]]), np.array([[1.]]), np.array([-1.]))
    expected = np.array([
        1.48671951473e-06,
        0.000133830225765,
        0.00443184841194,
        0.0539909665132,
        0.241970724519,
        0.398942280401,
        0.241970724519,
        0.0539909665132,
        0.00443184841194,
        0.000133830225765,
        1.48671951473e-06,
    ])
    for i in range(0, 11):
        # cond is set to [1.], which should produce mean = [0.]
        x[0] = i - 5.
        cond[0] = 1.
        res = exp(norm.eval_log(x, cond))
        self.assertApproxEqual(res, expected[i])

        # cond is set to [456.78], which should produce mean = [455.78]
        x[0] = i - 5. + 455.78
        cond[0] = 456.78
        res = exp(norm.eval_log(x, cond))
        self.assertApproxEqual(res, expected[i])
def setUp(self):
    # constructor parameters:
    self.A = np.array([[1., 0.], [0., 2.], [-1., -1.]])
    self.b = np.array([-0.5, -1., -1.5])
    self.covariance = np.array([[1., 0., 0.], [0., 2., 0.], [0., 0., 3.]])

    # expected values:
    self.variance = np.array([1., 2., 3.])  # diagonal elements of covariance
    self.shape = 3  # shape of random variable (and mean)
    self.cond_shape = 2
    self.test_conds = np.array([  # array of test conditions (shared by various tests)
        [0., 0.],
        [1., 0.],
        [0., -1.],
    ])
    self.cond_means = np.array([  # array of mean values that match (first n entries of) test_conds
        self.b,
        np.array([1., 0., -1.]) + self.b,  # computed from self.A
        np.array([0., -2., 1.]) + self.b,  # computed from self.A
    ])
    self.gauss = pb.MLinGaussCPdf(self.covariance, self.A, self.b)
def test_different_base_class(self):
    cov = np.array([[1.]])
    mean = np.array([-14.568])
    condlognorm = pb.MLinGaussCPdf(cov, np.array([[1.]]), np.array([0.]),
                                   base_class=pb.LogNormPdf)
    lognorm = pb.LogNormPdf(mean, cov)

    self.assertEqual(condlognorm.mean(mean), lognorm.mean())
    self.assertEqual(condlognorm.variance(mean), lognorm.variance())
    for x in np.array([[-0.4], [2.4], [4.5], [12.5]]):
        self.assertEqual(condlognorm.eval_log(x, mean), lognorm.eval_log(x))
    for i in range(30):
        # only test that samples are positive
        self.assertTrue(condlognorm.sample(mean)[0] >= 0)
def test_rvs(self):
    self.assertEqual(self.gauss.rv.dimension, 3)
    self.assertEqual(self.gauss.cond_rv.dimension, 2)

    a, b, c, d = pb.RVComp(2, 'a'), pb.RVComp(1, 'b'), pb.RVComp(1, 'c'), pb.RVComp(1, 'd')
    rv = pb.RV(a, b)
    cond_rv = pb.RV(c, d)
    gauss = pb.MLinGaussCPdf(self.covariance, self.A, self.b, rv, cond_rv)
    self.assertTrue(gauss.rv.contains(a))
    self.assertTrue(gauss.rv.contains(b))
    self.assertFalse(gauss.rv.contains(c))
    self.assertFalse(gauss.rv.contains(d))
    self.assertFalse(gauss.cond_rv.contains(a))
    self.assertFalse(gauss.cond_rv.contains(b))
    self.assertTrue(gauss.cond_rv.contains(c))
    self.assertTrue(gauss.cond_rv.contains(d))
def setUp(self):
    ide = np.array([[1.]])  # 1x1 identity matrix
    self.gauss = pb.MLinGaussCPdf(ide, ide, np.array([0.]))
    self.uni = pb.UniPdf(np.array([0.]), np.array([2.]))
    self.prod = pb.ProdCPdf((self.gauss, self.uni))
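# Sketch of how the product above might be used, assuming the anonymous ProdCPdf
# chains its factors as p(x0, x1) = p(x0 | x1) * p(x1) and therefore works with
# a concatenated 2-dimensional vector:
#
#     x = np.array([0.7, 1.3])       # [x0, x1]
#     log_p = self.prod.eval_log(x)  # should equal the sum of the factors' log-densities
#     draw = self.prod.sample()      # one 2-element sample from the joint density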