def test_construction():
    """Smoke-test `GP` construction, resolution of means and kernels, and
    construction of finite-dimensional distributions."""
    graph = Graph()
    prior = GP(EQ(), graph=graph)
    x = np.random.randn(10, 1)

    # Means and kernels must accept raw inputs and finite-dimensional
    # distributions in every combination.
    prior.mean(x)
    prior.mean(prior(x))
    prior.kernel(x)
    prior.kernel(prior(x))
    prior.kernel(x, x)
    prior.kernel(prior(x), x)
    prior.kernel(x, prior(x))
    prior.kernel(prior(x), prior(x))
    prior.kernel.elwise(x)
    prior.kernel.elwise(prior(x))
    prior.kernel.elwise(x, x)
    prior.kernel.elwise(prior(x), x)
    prior.kernel.elwise(x, prior(x))
    prior.kernel.elwise(prior(x), prior(x))

    # Test resolution of kernel and mean.
    k = EQ()
    m = TensorProductMean(lambda t: t ** 2)
    for mean_args, mean_type in [((k,), ZeroMean),
                                 ((k, 5), ScaledMean),
                                 ((k, 1), OneMean),
                                 ((k, 0), ZeroMean),
                                 ((k, m), TensorProductMean)]:
        assert isinstance(GP(*mean_args, graph=graph).mean, mean_type)
    for kernel_arg, kernel_type in [(k, EQ),
                                    (5, ScaledKernel),
                                    (1, OneKernel),
                                    (0, ZeroKernel)]:
        assert isinstance(GP(kernel_arg, graph=graph).kernel, kernel_type)

    # Test construction of finite-dimensional distribution.
    dist = GP(k, m, graph=graph)(x)
    allclose(dist.var, k(x))
    allclose(dist.mean, m(x))
def test_marginals():
    """Test `marginals` of a finite-dimensional distribution.

    Converted from nose-style yield tests (removed in pytest 4.0) to plain
    asserts, consistent with the pytest-style tests in this file.
    """
    model = Graph()
    p = GP(EQ(), TensorProductMean(lambda x: x ** 2), graph=model)
    x = np.linspace(0, 5, 10)

    # Check that `marginals` outputs the right thing: the mean and a
    # two-standard-deviation band around it.
    mean, lower, upper = p(x).marginals()
    var = B.diag(p.kernel(x))
    allclose(mean, p.mean(x)[:, 0])
    allclose(lower, p.mean(x)[:, 0] - 2 * var ** .5)
    allclose(upper, p.mean(x)[:, 0] + 2 * var ** .5)

    # Test correctness: at the observations the posterior mean must
    # reproduce the data with (numerically) zero uncertainty.
    y = p(x).sample()
    mean, lower, upper = (p | (x, y))(x).marginals()
    allclose(mean, y[:, 0])
    assert B.mean(B.abs(upper - lower)) <= 1e-5

    # Far away from the data, the prior mean and prior width (4 = 2 * 2
    # standard deviations of a unit-variance kernel) must be recovered.
    mean, lower, upper = (p | (x, y))(x + 100).marginals()
    allclose(mean, p.mean(x + 100)[:, 0])
    allclose(upper - lower, 4 * np.ones(10))
def test_sum_other():
    """Test summing a `GP` with a constant.

    Converted from nose-style yield tests (removed in pytest 4.0) to plain
    asserts and `pytest.raises`, consistent with the pytest-style tests in
    this file.
    """
    model = Graph()
    p1 = GP(EQ(), TensorProductMean(lambda x: x ** 2), graph=model)
    p2 = p1 + 5.
    p3 = 5. + p1
    p4 = model.sum(5., p1)
    x = np.random.randn(5, 1)

    # Adding a constant shifts the mean but leaves the kernel untouched,
    # regardless of how the sum is constructed.
    allclose(p1.mean(x) + 5., p2.mean(x))
    allclose(p1.mean(x) + 5., p3.mean(x))
    allclose(p1.mean(x) + 5., p4.mean(x))
    allclose(p1.kernel(x), p2.kernel(x))
    allclose(p1.kernel(x), p3.kernel(x))
    allclose(p1.kernel(x), p4.kernel(x))

    # The shifted processes remain perfectly correlated with the original.
    allclose(p1.kernel(p2(x), p3(x)), p1.kernel(x))
    allclose(p1.kernel(p2(x), p4(x)), p1.kernel(x))

    # Check that a `GP` cannot be summed with a `Normal`.
    with pytest.raises(NotImplementedError):
        p1 + Normal(np.eye(3))
    with pytest.raises(NotImplementedError):
        Normal(np.eye(3)) + p1
def test_mul_other():
    # NOTE(review): this yield-style test is shadowed by the identical,
    # pytest-style `test_mul_other` defined later in this file, so it is
    # never collected or run. Yield tests were also removed in pytest 4.0.
    # Recommend deleting this duplicate in favour of the pytest-style one.
    model = Graph()
    p1 = GP(EQ(), TensorProductMean(lambda x: x ** 2), graph=model)
    p2 = 5. * p1
    p3 = p1 * 5.
    x = np.random.randn(5, 1)
    # Scaling by a constant scales the mean linearly and the kernel
    # quadratically.
    yield assert_allclose, 5. * p1.mean(x), p2.mean(x)
    yield assert_allclose, 5. * p1.mean(x), p3.mean(x)
    yield assert_allclose, 25. * p1.kernel(x), p2.kernel(x)
    yield assert_allclose, 25. * p1.kernel(x), p3.kernel(x)
    yield assert_allclose, model.kernels[p2, p3](x, x), 25. * p1.kernel(x)
    # Check that a `GP` cannot be multiplied with a `Normal`.
    yield raises, NotImplementedError, lambda: p1 * Normal(np.eye(3))
    yield raises, NotImplementedError, lambda: Normal(np.eye(3)) * p1
def test_mul_other():
    """Test multiplying a `GP` by a constant: the mean scales linearly and
    the kernel quadratically, on both sides of the operator."""
    graph = Graph()
    prior = GP(EQ(), TensorProductMean(lambda t: t ** 2), graph=graph)
    scaled_left = 5. * prior
    scaled_right = prior * 5.
    x = np.random.randn(5, 1)

    allclose(5. * prior.mean(x), scaled_left.mean(x))
    allclose(5. * prior.mean(x), scaled_right.mean(x))
    allclose(25. * prior.kernel(x), scaled_left.kernel(x))
    allclose(25. * prior.kernel(x), scaled_right.kernel(x))
    allclose(graph.kernels[scaled_left, scaled_right](x, x),
             25. * prior.kernel(x))

    # A `GP` cannot be multiplied with a `Normal`.
    with pytest.raises(NotImplementedError):
        prior * Normal(np.eye(3))
    with pytest.raises(NotImplementedError):
        Normal(np.eye(3)) * prior