import lab as B
import numpy as np
from scipy.stats import multivariate_normal

# NOTE: `Normal`, `approx`, and `allclose` are assumed to be provided by the
# package under test and its test utilities (e.g. `from stheno import Normal`
# and `from .util import approx, allclose`).


def test_normal_lazy_mean_var_diag():
    # The lazy `mean_var_diag` should only be called when neither the mean nor the
    # diagonal of the variance exists. Otherwise, it is more efficient to just
    # construct the other one. We go over all branches of the `if`-statement.

    # Neither the mean nor the diagonal of the variance exists yet:
    # `mean_var_diag` is called.
    dist = Normal(lambda: B.ones(3, 1), lambda: B.eye(3), mean_var_diag=lambda: (8, 9))
    approx(dist.marginals(), (8, 9))
    approx(dist.mean, 8)
    approx(dist.var_diag, 9)

    # The mean already exists: only the diagonal of the variance is constructed.
    dist = Normal(lambda: B.ones(3, 1), lambda: B.eye(3), mean_var_diag=lambda: (8, 9))
    approx(dist.mean, B.ones(3, 1))
    approx(dist.marginals(), (B.ones(3), B.ones(3)))
    approx(dist.var_diag, B.ones(3))

    # The diagonal of the variance already exists: only the mean is constructed.
    dist = Normal(lambda: B.ones(3, 1), lambda: B.eye(3), mean_var_diag=lambda: (8, 9))
    approx(dist.var_diag, B.ones(3))
    approx(dist.marginals(), (B.ones(3), B.ones(3)))
    approx(dist.mean, B.ones(3, 1))

    # Both already exist: `mean_var_diag` is not needed at all.
    dist = Normal(lambda: B.ones(3, 1), lambda: B.eye(3), mean_var_diag=lambda: (8, 9))
    approx(dist.var_diag, B.ones(3))
    approx(dist.mean, B.ones(3, 1))
    approx(dist.marginals(), (B.ones(3), B.ones(3)))
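# For context, here is a minimal sketch of the lazy-construction branching that
# the test above exercises. All names (`LazyNormal`, `_construct_*`, `_mean`,
# `_var_diag`) are hypothetical and for illustration only; this is not the
# actual implementation of `Normal`. The point is that `mean_var_diag` is
# consulted only when neither quantity is cached yet, and a cached one
# short-circuits it.


class LazyNormal:
    def __init__(self, mean, var_diag, mean_var_diag=None):
        self._construct_mean = mean
        self._construct_var_diag = var_diag
        self._construct_mean_var_diag = mean_var_diag
        self._mean = None
        self._var_diag = None

    @property
    def mean(self):
        # Construct and cache the mean on first access.
        if self._mean is None:
            self._mean = self._construct_mean()
        return self._mean

    @property
    def var_diag(self):
        # Construct and cache the diagonal of the variance on first access.
        if self._var_diag is None:
            self._var_diag = self._construct_var_diag()
        return self._var_diag

    def marginals(self):
        if (
            self._mean is None
            and self._var_diag is None
            and self._construct_mean_var_diag is not None
        ):
            # Neither exists yet: a single call constructs both at once.
            self._mean, self._var_diag = self._construct_mean_var_diag()
        # Otherwise at least one is already cached, and the properties above
        # lazily construct whichever is still missing.
        return self.mean, self.var_diag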
def test_normal():
    mean = np.random.randn(3, 1)
    chol = np.random.randn(3, 3)
    var = chol.dot(chol.T)
    dist = Normal(mean, var)
    dist_sp = multivariate_normal(mean[:, 0], var)

    # Test the second moment.
    allclose(dist.m2, var + mean.dot(mean.T))

    # Test marginals. Consistent with `test_normal_lazy_mean_var_diag` above,
    # `marginals` returns the marginal means and variances.
    marg_mean, marg_var = dist.marginals()
    allclose(marg_mean, mean.squeeze())
    allclose(marg_var, np.diag(var))

    # Test `logpdf` and `entropy` against SciPy.
    for _ in range(5):
        x = np.random.randn(3, 10)
        allclose(dist.logpdf(x), dist_sp.logpdf(x.T), desc='logpdf')
        allclose(dist.entropy(), dist_sp.entropy(), desc='entropy')

    # Test that inputs to `logpdf` are converted appropriately.
    allclose(dist.logpdf(np.array([0, 1, 2])), dist.logpdf([0, 1, 2]))
    allclose(dist.logpdf(np.array([0, 1, 2])), dist.logpdf((0, 1, 2)))

    # Test that the output of `logpdf` is flattened appropriately.
    assert np.shape(dist.logpdf(np.ones((3, 1)))) == ()
    assert np.shape(dist.logpdf(np.ones((3, 2)))) == (2,)

    # Test the KL divergence against a Monte Carlo estimate.
    mean2 = np.random.randn(3, 1)
    chol2 = np.random.randn(3, 3)
    var2 = chol2.dot(chol2.T)
    dist2 = Normal(mean2, var2)
    samples = dist.sample(50000)
    kl_est = np.mean(dist.logpdf(samples)) - np.mean(dist2.logpdf(samples))
    kl = dist.kl(dist2)
    assert np.abs(kl_est - kl) / np.abs(kl) < 5e-2, 'kl sampled'
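# The Monte Carlo check above is only meaningful because the KL divergence
# between two Gaussians has a closed form. For reference, here is a minimal
# NumPy sketch of that standard identity (not necessarily how `Normal.kl` is
# implemented); `gaussian_kl` is a hypothetical helper for illustration.


def gaussian_kl(mean0, var0, mean1, var1):
    """Compute KL(N(mean0, var0) || N(mean1, var1)) via the standard identity

        0.5 * (tr(V1^-1 V0) + (m1 - m0)^T V1^-1 (m1 - m0)
               - k + log det V1 - log det V0).
    """
    k = mean0.shape[0]
    chol0 = np.linalg.cholesky(var0)
    chol1 = np.linalg.cholesky(var1)
    # Solving against the Cholesky factor gives the Mahalanobis term stably:
    # ||L1^-1 (m1 - m0)||^2 = (m1 - m0)^T V1^-1 (m1 - m0).
    diff = np.linalg.solve(chol1, mean1 - mean0)
    trace = np.trace(np.linalg.solve(var1, var0))
    # log det V = 2 * sum(log(diag(L))) for V = L L^T.
    logdet0 = 2 * np.sum(np.log(np.diag(chol0)))
    logdet1 = 2 * np.sum(np.log(np.diag(chol1)))
    return 0.5 * (trace + diff.T.dot(diff).item() - k + logdet1 - logdet0)


# Under the assumptions above, `gaussian_kl(mean, var, mean2, var2)` should
# agree with both `dist.kl(dist2)` and the Monte Carlo estimate `kl_est` up to
# sampling error.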