def test_riemannian_gradient(self):
    """Test Riemannian gradient of a Gamma node.

    For conjugate-exponential-family nodes, the Riemannian (natural)
    gradient of the VB lower bound equals the difference between the
    VB-EM updated natural parameters and the current natural
    parameters.  Verify this identity both with and without observed
    children attached to the node.
    """

    #
    # Without observations
    #

    # Construct model with random prior parameters
    a = np.random.rand()
    b = np.random.rand()
    tau = Gamma(a, b)
    # Random initialization so the node is not already at the fixed point
    tau.initialize_from_parameters(np.random.rand(), np.random.rand())
    # Natural parameters before the update
    phi0 = tau.phi
    # Riemannian gradient at the current state
    g = tau.get_riemannian_gradient()
    # Natural parameters after one VB-EM update
    tau.update()
    phi1 = tau.phi
    # The gradient must equal the natural-parameter step of a VB-EM update
    self.assertAllClose(g[0], phi1[0] - phi0[0])
    self.assertAllClose(g[1], phi1[1] - phi0[1])

    #
    # With observations
    #

    # Construct model: tau is the precision of an observed Gaussian
    a = np.random.rand()
    b = np.random.rand()
    tau = Gamma(a, b)
    mu = np.random.randn()
    Y = GaussianARD(mu, tau)
    Y.observe(np.random.randn())
    # Random initialization so the node is not already at the fixed point
    tau.initialize_from_parameters(np.random.rand(), np.random.rand())
    # Natural parameters before the update
    phi0 = tau.phi
    # Riemannian gradient at the current state
    g = tau.get_riemannian_gradient()
    # Natural parameters after one VB-EM update
    tau.update()
    phi1 = tau.phi
    # The identity must also hold when child messages contribute
    self.assertAllClose(g[0], phi1[0] - phi0[0])
    self.assertAllClose(g[1], phi1[1] - phi0[1])
def test_lower_bound_contribution(self):
    """Test the lower bound contribution of a Gamma node.

    First check that an observed Gamma node contributes exactly its
    log density.  Then check that, for a single latent Gamma precision
    with an observed Gaussian child, the summed lower bound
    contributions reproduce the exact marginal log likelihood, which
    is a Student-t density:

        p(Y) = p(Y,X) / p(X|Y) = p(Y|X) p(X) / p(X|Y)
    """
    # Observed Gamma node: contribution is the Gamma log pdf at y
    a = 15
    b = 21
    y = 4
    x = Gamma(a, b)
    x.observe(y)
    testing.assert_allclose(
        x.lower_bound_contribution(),
        (
            a * np.log(b)
            + (a - 1) * np.log(y)
            - b * y
            - special.gammaln(a)
        )
    )

    # Just one latent node so we'll get the exact marginal likelihood
    a = 2.3
    b = 4.1
    x = 1.9
    y = 4.8
    tau = Gamma(a, b)
    Y = GaussianARD(x, tau)
    Y.observe(y)

    # Student-t parameters of the exact marginal p(Y)
    mu = x
    nu = 2 * a
    s2 = b / a
    # Conjugate Gamma posterior parameters after observing y
    a_post = a + 0.5
    b_post = b + 0.5 * (y - x)**2

    tau.update()
    # Posterior natural parameters are [-b_post, a_post]
    testing.assert_allclose(
        [-b_post, a_post],
        tau.phi
    )
    # Summed contributions equal the Student-t log density of y
    testing.assert_allclose(
        Y.lower_bound_contribution() + tau.lower_bound_contribution(),
        (
            special.gammaln((nu + 1) / 2)
            - special.gammaln(nu / 2)
            - 0.5 * np.log(nu)
            - 0.5 * np.log(np.pi)
            - 0.5 * np.log(s2)
            - 0.5 * (nu + 1) * np.log(
                1 + (y - mu)**2 / (nu * s2)
            )
        )
    )