def test_normal_sampling():
    """Check that samples from a `Normal` reproduce its mean and variance."""
    for mean in [0, 1]:
        dist = Normal(mean, 3 * B.eye(np.int32, 200))

        # Noiseless samples: the empirical variance should match the
        # diagonal of the covariance, which is 3.
        draws = dist.sample(2000)
        approx(B.mean(draws), mean, atol=5e-2)
        approx(B.std(draws) ** 2, 3, atol=5e-2)

        # Noisy samples: observation noise of 2 adds to the variance,
        # giving 5 in total.
        draws = dist.sample(2000, noise=2)
        approx(B.mean(draws), mean, atol=5e-2)
        approx(B.std(draws) ** 2, 5, atol=5e-2)
def test_normal_sampling():
    """Test sampling with dtype conversion, printing, and zero-mean detection."""
    # NOTE(review): `np.integer` is an abstract dtype; converting it is the
    # point of this test, but it is deprecated in modern NumPy — confirm.
    d = Normal(3 * np.eye(200, dtype=np.integer))

    def sample_var(**kw_args):
        # Empirical variance of a thousand samples from `d`.
        return np.std(d.sample(1000, **kw_args)) ** 2

    # Sampling must handle the integer covariance and reproduce the
    # variance of 3, and 3 + 2 = 5 when noise of 2 is added.
    assert np.abs(sample_var() - 3) <= 5e-2, 'full'
    assert np.abs(sample_var(noise=2) - 5) <= 5e-2, 'full 2'

    # `__str__` and `__repr__` should defer to `RandomVector`'s versions.
    assert str(d) == RandomVector.__str__(d)
    assert repr(d) == RandomVector.__repr__(d)

    # Zero-mean determination: only a distribution constructed without an
    # explicit non-zero mean should be flagged as zero-mean.
    assert Normal(np.eye(3))._zero_mean
    assert not Normal(np.eye(3), np.random.randn(3, 1))._zero_mean
    inputs = np.random.randn(3)
    assert GP(1)(inputs)._zero_mean
    assert not GP(1, 1)(inputs)._zero_mean
    assert GP(1, 0)(inputs)._zero_mean
def test_normal_sampling():
    """Check sampling moments and determinism under an explicit random state."""
    for mean in [0, 1]:
        dist = Normal(mean, 3 * B.eye(np.int32, 200))

        # Without noise the empirical variance should be the diagonal
        # value of the covariance, 3.
        draws = dist.sample(2000)
        approx(B.mean(draws), mean, atol=5e-2)
        approx(B.std(draws) ** 2, 3, atol=5e-2)

        # Observation noise of 2 raises the variance to 5.
        draws = dist.sample(2000, noise=2)
        approx(B.mean(draws), mean, atol=5e-2)
        approx(B.std(draws) ** 2, 5, atol=5e-2)

        # Sampling with identically seeded random states must give
        # identical samples and return the updated state.
        state, sample1 = dist.sample(B.create_random_state(B.dtype(dist), seed=0))
        state, sample2 = dist.sample(B.create_random_state(B.dtype(dist), seed=0))
        assert isinstance(state, B.RandomState)
        approx(sample1, sample2)
def test_normal_comparison():
    """Structured covariances must agree with their dense equivalents."""

    def compare(d1, d2):
        # `d1` and `d2` represent the same distribution, so all the
        # quantities below must coincide up to numerical error.
        xs = d1.sample(100)
        allclose(d1.logpdf(xs), d2.logpdf(xs), desc='logpdf')
        allclose(d1.entropy(), d2.entropy(), desc='entropy')
        allclose(d1.kl(d2), 0.)
        allclose(d1.kl(d1), 0.)
        allclose(d2.kl(d2), 0.)
        allclose(d2.kl(d1), 0.)
        assert d1.w2(d1) <= 1e-3
        assert d1.w2(d2) <= 1e-3
        assert d2.w2(d1) <= 1e-3
        assert d2.w2(d2) <= 1e-3

    # Diagonal covariance versus its dense equivalent.
    loc = np.random.randn(3, 1)
    diag = np.random.randn(3) ** 2
    compare(Normal(np.diag(diag), loc), Normal(Diagonal(diag), loc))

    # Uniformly diagonal covariance versus its dense equivalent.
    loc = np.random.randn(3, 1)
    scale = np.random.randn() ** 2
    compare(Normal(np.eye(3) * scale, loc),
            Normal(UniformlyDiagonal(scale, 3), loc))
def test_normal_comparison():
    """Structured covariances must agree with their dense equivalents."""

    def checks(d1, d2):
        # Yield nose-style checks asserting that `d1` and `d2` represent
        # the same distribution.
        xs = d1.sample(100)
        yield ok, allclose(d1.logpdf(xs), d2.logpdf(xs)), 'logpdf'
        yield ok, allclose(d1.entropy(), d2.entropy()), 'entropy'
        yield ok, allclose(d1.kl(d2), 0.), 'kl 1'
        yield ok, allclose(d1.kl(d1), 0.), 'kl 2'
        yield ok, allclose(d2.kl(d2), 0.), 'kl 3'
        yield ok, allclose(d2.kl(d1), 0.), 'kl 4'
        yield le, d1.w2(d1), 5e-4, 'w2 1'
        yield le, d1.w2(d2), 5e-4, 'w2 2'
        yield le, d2.w2(d1), 5e-4, 'w2 3'
        yield le, d2.w2(d2), 5e-4, 'w2 4'

    # Diagonal covariance versus its dense equivalent.
    loc = np.random.randn(3, 1)
    diag = np.random.randn(3) ** 2
    for check in checks(Normal(np.diag(diag), loc),
                        Normal(Diagonal(diag), loc)):
        yield check

    # Uniformly diagonal covariance versus its dense equivalent.
    loc = np.random.randn(3, 1)
    scale = np.random.randn() ** 2
    for check in checks(Normal(np.eye(3) * scale, loc),
                        Normal(UniformlyDiagonal(scale, 3), loc)):
        yield check
def test_normal():
    """Compare `Normal` against `scipy.stats.multivariate_normal`."""
    loc = np.random.randn(3, 1)
    root = np.random.randn(3, 3)
    cov = root.dot(root.T)  # Random PSD covariance.
    dist = Normal(cov, loc)
    ref = multivariate_normal(loc[:, 0], cov)

    # Second moment: E[x x^T] = var + mean mean^T.
    yield assert_allclose, dist.m2, cov + loc.dot(loc.T)

    # Marginals: means with two-standard-deviation bounds.
    marg_mean, lower, upper = dist.marginals()
    yield assert_allclose, loc.squeeze(), marg_mean
    yield assert_allclose, lower, marg_mean - 2 * np.diag(cov) ** .5
    yield assert_allclose, upper, marg_mean + 2 * np.diag(cov) ** .5

    # `logpdf` and `entropy` must match SciPy's reference values.
    for _ in range(5):
        x = np.random.randn(3, 10)
        yield ok, allclose(dist.logpdf(x), ref.logpdf(x.T)), 'logpdf'
        yield ok, allclose(dist.entropy(), ref.entropy()), 'entropy'

    # Inputs to `logpdf` should be converted appropriately: lists and
    # tuples behave like arrays.
    yield assert_allclose, \
        dist.logpdf(np.array([0, 1, 2])), \
        dist.logpdf([0, 1, 2])
    yield assert_allclose, \
        dist.logpdf(np.array([0, 1, 2])), \
        dist.logpdf((0, 1, 2))

    # The output of `logpdf` should be flattened appropriately: a scalar
    # for a single input and a vector for a batch.
    yield eq, np.shape(dist.logpdf(np.ones((3, 1)))), ()
    yield eq, np.shape(dist.logpdf(np.ones((3, 2)))), (2, )

    # KL divergence against a Monte Carlo estimate.
    loc2 = np.random.randn(3, 1)
    root2 = np.random.randn(3, 3)
    dist2 = Normal(root2.dot(root2.T), loc2)
    xs = dist.sample(50000)
    kl_est = np.mean(dist.logpdf(xs)) - np.mean(dist2.logpdf(xs))
    kl = dist.kl(dist2)
    yield ok, np.abs(kl_est - kl) / np.abs(kl) < 5e-2, 'kl sampled'
def test_normal():
    """Compare `Normal` against `scipy.stats.multivariate_normal`."""
    loc = np.random.randn(3, 1)
    root = np.random.randn(3, 3)
    cov = root.dot(root.T)  # Random PSD covariance.
    dist = Normal(cov, loc)
    ref = multivariate_normal(loc[:, 0], cov)

    # Second moment: E[x x^T] = var + mean mean^T.
    allclose(dist.m2, cov + loc.dot(loc.T))

    # Marginals: means with two-standard-deviation bounds.
    marg_mean, lower, upper = dist.marginals()
    allclose(loc.squeeze(), marg_mean)
    allclose(lower, marg_mean - 2 * np.diag(cov) ** .5)
    allclose(upper, marg_mean + 2 * np.diag(cov) ** .5)

    # `logpdf` and `entropy` must match SciPy's reference values.
    for _ in range(5):
        x = np.random.randn(3, 10)
        allclose(dist.logpdf(x), ref.logpdf(x.T), desc='logpdf')
        allclose(dist.entropy(), ref.entropy(), desc='entropy')

    # The output of `logpdf` should be flattened appropriately: a scalar
    # for a single input and a vector for a batch.
    assert np.shape(dist.logpdf(np.ones((3, 1)))) == ()
    assert np.shape(dist.logpdf(np.ones((3, 2)))) == (2, )

    # KL divergence against a Monte Carlo estimate.
    loc2 = np.random.randn(3, 1)
    root2 = np.random.randn(3, 3)
    dist2 = Normal(root2.dot(root2.T), loc2)
    xs = dist.sample(50000)
    kl_est = np.mean(dist.logpdf(xs)) - np.mean(dist2.logpdf(xs))
    kl = dist.kl(dist2)
    assert np.abs(kl_est - kl) / np.abs(kl) < 5e-2, 'kl sampled'
def test_normal_sampling():
    """Sampling must work for every supported covariance structure."""
    # NOTE(review): `np.integer` is an abstract dtype; converting it is
    # part of what is tested here, but it is deprecated in modern NumPy.
    cases = [
        (Normal(3 * np.eye(200, dtype=np.integer)), 'full'),
        (Normal(Diagonal(3 * np.ones(200, dtype=np.integer))), 'diag'),
        (Normal(UniformlyDiagonal(3, 200)), 'unif'),
    ]
    for dist, label in cases:
        # The variance is 3 on the diagonal; noise of 2 brings it to 5.
        yield le, np.abs(np.std(dist.sample(1000)) ** 2 - 3), 5e-2, label
        yield le, np.abs(np.std(dist.sample(1000, noise=2)) ** 2 - 5), 5e-2, \
            label + ' 2'

    # Test `__str__` and `__repr__` on the last distribution; both should
    # defer to `RandomVector`'s versions.
    yield eq, str(dist), RandomVector.__str__(dist)
    yield eq, repr(dist), RandomVector.__repr__(dist)