Example #1
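The examples below are test functions for a Gaussian-process graph API; the names match older releases of the Stheno library. A minimal sketch of the setup they appear to assume is given here; the exact import paths and the test helpers (assert_allclose/allclose, le, and raises) are assumptions about the surrounding test suite rather than confirmed parts of the library.

import numpy as np
import pytest

# Assumed library imports: an older Stheno-style API that exposes `Graph`,
# `Obs`, and `SparseObs` at the top level.
from stheno import GP, EQ, Delta, Graph, Obs, SparseObs, Unique

# Assumed test helpers: `assert_allclose`/`allclose` compare array-like
# results (optionally taking a description and tolerances), `le` asserts
# `x <= y`, and `raises` checks that a callable raises the given exception.
# The yield-style examples use them nose-style, as `yield helper, arg, ...`.
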
def test_case_additive_model():
    # Test an additive model.

    model = Graph()
    p1 = GP(EQ(), graph=model)
    p2 = GP(EQ(), graph=model)
    p3 = p1 + p2

    n = 5
    x = np.linspace(0, 10, n)[:, None]
    y1 = p1(x).sample()
    y2 = p2(x).sample()

    # First, test independence:
    yield assert_allclose, model.kernels[p2, p1](x), np.zeros((n, n))
    yield assert_allclose, model.kernels[p1, p2](x), np.zeros((n, n))

    # Now run through some test cases:
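    # The observations are noiseless samples, so in each case the posterior
    # mean of the queried process at `x` should reproduce the corresponding
    # sample(s) exactly.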

    obs = Obs(p1(x), y1)
    post = (p3 | obs) | ((p2 | obs)(x), y2)
    yield assert_allclose, post(x).mean, y1 + y2

    obs = Obs(p2(x), y2)
    post = (p3 | obs) | ((p1 | obs)(x), y1)
    yield assert_allclose, post(x).mean, y1 + y2

    obs = Obs(p1(x), y1)
    post = (p2 | obs) | ((p3 | obs)(x), y1 + y2)
    yield assert_allclose, post(x).mean, y2

    obs = Obs(p3(x), y1 + y2)
    post = (p2 | obs) | ((p1 | obs)(x), y1)
    yield assert_allclose, post(x).mean, y2

    yield assert_allclose, p3.condition(x, y1 + y2)(x).mean, y1 + y2
Example #2
def test_observations_and_conditioning():
    model = Graph()
    p1 = GP(EQ(), graph=model)
    p2 = GP(EQ(), graph=model)
    p = p1 + p2
    x = np.linspace(0, 5, 10)
    y = p(x).sample()
    y1 = p1(x).sample()

    # Test all ways of conditioning, including shorthands.
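    # `ref=p` lets bare inputs such as `x` stand in for `p(x)`; the checks
    # below verify that both spellings produce the same observations.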
    obs1 = Obs(p(x), y)
    obs2 = Obs(x, y, ref=p)
    yield assert_allclose, obs1.y, obs2.y
    yield assert_allclose, obs1.K_x, obs2.K_x

    obs3 = Obs((p(x), y), (p1(x), y1))
    obs4 = Obs((x, y), (p1(x), y1), ref=p)
    yield assert_allclose, obs3.y, obs4.y
    yield assert_allclose, obs3.K_x, obs4.K_x

    def assert_equal_mean_var(x, *ys):
        for y in ys:
            yield assert_allclose, x.mean, y.mean
            yield assert_allclose, x.var, y.var

    for test in assert_equal_mean_var(
            p.condition(x, y)(x),
            p.condition(p(x), y)(x),
            (p | (x, y))(x),
            (p | (p(x), y))(x),
            p.condition(obs1)(x),
            p.condition(obs2)(x),
            (p | obs1)(x),
            (p | obs2)(x)):
        yield test

    for test in assert_equal_mean_var(
            p.condition((x, y), (p1(x), y1))(x),
            p.condition((p(x), y), (p1(x), y1))(x),
            (p | [(x, y), (p1(x), y1)])(x),
            (p | [(p(x), y), (p1(x), y1)])(x),
            p.condition(obs3)(x),
            p.condition(obs4)(x),
            (p | obs3)(x),
            (p | obs4)(x)):
        yield test

    # Check conditioning multiple processes at once.
    p1_post, p2_post, p_post = (p1, p2, p) | obs1
    p1_post, p2_post, p_post = p1_post(x), p2_post(x), p_post(x)
    p1_post2, p2_post2, p_post2 = (p1 | obs1)(x), (p2 | obs1)(x), (p | obs1)(x)

    yield assert_allclose, p1_post.mean, p1_post2.mean
    yield assert_allclose, p1_post.var, p1_post2.var
    yield assert_allclose, p2_post.mean, p2_post2.mean
    yield assert_allclose, p2_post.var, p2_post2.var
    yield assert_allclose, p_post.mean, p_post2.mean
    yield assert_allclose, p_post.var, p_post2.var

    # Test `At` check.
    yield raises, ValueError, lambda: Obs(0, 0)
    yield raises, ValueError, lambda: Obs((0, 0), (0, 0))
    yield raises, ValueError, lambda: SparseObs(0, p, (0, 0))
Example #3
def test_case_blr():
    model = Graph()
    x = np.linspace(0, 10, 100)

    slope = GP(1, graph=model)
    intercept = GP(1, graph=model)
    f = slope * (lambda x: x) + intercept
    y = f + 1e-2 * GP(Delta(), graph=model)

    # Sample observations, true slope, and intercept.
    y_obs, true_slope, true_intercept = \
        model.sample(y(x), slope(0), intercept(0))

    # Predict.
    post_slope, post_intercept = (slope, intercept) | Obs(y(x), y_obs)
    mean_slope, mean_intercept = post_slope(0).mean, post_intercept(0).mean

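    # With 100 noisy observations of the line, the posterior means should
    # recover the sampled slope and intercept to within the 5e-2 tolerance.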
    yield le, np.abs(true_slope[0, 0] - mean_slope[0, 0]), 5e-2
    yield le, np.abs(true_intercept[0, 0] - mean_intercept[0, 0]), 5e-2
Example #4
def test_sparse_conditioning():
    model = Graph()
    f = GP(EQ().stretch(3), graph=model)
    e = GP(1e-2 * Delta(), graph=model)
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)

    y = f(x).sample()

    # Test that noise matrix must indeed be diagonal.
    yield raises, RuntimeError, lambda: SparseObs(f(x), f, f(x), y).elbo

    # Test posterior.
    post_sparse = (f | SparseObs(f(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 1', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    post_sparse = (f | SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2))(x_new)
    post_ref = (f | ((2 * f + 2 + e)(x), 2 * y + 2))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 2', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    post_sparse = (f | SparseObs((2 * f + 2)(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 3', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test ELBO.
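    # With the inducing points placed at the observation inputs themselves,
    # the variational bound is tight, so the ELBO should match the exact
    # log-density of the noisy observations.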
    e = GP(1e-2 * Delta(), graph=model)
    yield assert_allclose, \
          SparseObs(f(x), e, f(x), y).elbo, \
          (f + e)(x).logpdf(y)
    yield assert_allclose, \
          SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2).elbo, \
          (2 * f + 2 + e)(x).logpdf(2 * y + 2)
    yield assert_allclose, \
          SparseObs((2 * f + 2)(x), e, f(x), y).elbo, \
          (f + e)(x).logpdf(y)

    # Test multiple observations.
    x1 = np.linspace(0, 5, 10)
    x2 = np.linspace(10, 15, 10)
    x_new = np.linspace(6, 9, 10)
    x_ind = np.concatenate((x1, x2, x_new), axis=0)
    y1, y2 = model.sample((f + e)(x1), (f + e)(x2))

    post_sparse = (f | SparseObs(f(x_ind),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    post_ref = (f | Obs(((f + e)(x1), y1), ((f + e)(x2), y2)))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test multiple observations and multiple inducing points.
    post_sparse = (f | SparseObs((f(x1), f(x2), f(x_new)),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 4', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test multiple inducing points.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)
    x_ind1 = x[:5]
    x_ind2 = x[5:]
    y = model.sample((f + e)(x))

    post_sparse = (f | SparseObs((f(x_ind1), f(x_ind2)), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 5', 1e-4, 1e-4
    yield assert_allclose, post_sparse.var, post_ref.var
Example #5
def test_sparse_conditioning():
    model = Graph()
    f = GP(EQ().stretch(3), graph=model)
    e = GP(1e-2 * Delta(), graph=model)
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)

    y = f(x).sample()

    # Test that noise matrix must indeed be diagonal.
    with pytest.raises(RuntimeError):
        SparseObs(f(x), f, f(x), y).elbo

    # Test posterior.
    post_sparse = (f | SparseObs(f(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 1', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    post_sparse = (f | SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2))(x_new)
    post_ref = (f | ((2 * f + 2 + e)(x), 2 * y + 2))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 2', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    post_sparse = (f | SparseObs((2 * f + 2)(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 3', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Test ELBO.
    e = GP(1e-2 * Delta(), graph=model)
    allclose(SparseObs(f(x), e, f(x), y).elbo, (f + e)(x).logpdf(y))
    allclose(SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2).elbo,
             (2 * f + 2 + e)(x).logpdf(2 * y + 2))
    allclose(SparseObs((2 * f + 2)(x), e, f(x), y).elbo, (f + e)(x).logpdf(y))

    # Test multiple observations.
    x1 = np.linspace(0, 5, 10)
    x2 = np.linspace(10, 15, 10)
    x_new = np.linspace(6, 9, 10)
    x_ind = np.concatenate((x1, x2, x_new), axis=0)
    y1, y2 = model.sample((f + e)(x1), (f + e)(x2))

    post_sparse = (f | SparseObs(f(x_ind),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    post_ref = (f | Obs(((f + e)(x1), y1), ((f + e)(x2), y2)))(x_new)
    allclose(post_sparse.mean, post_ref.mean)
    allclose(post_sparse.var, post_ref.var)

    # Test multiple observations and multiple inducing points.
    post_sparse = (f | SparseObs((f(x1), f(x2), f(x_new)),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 4', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Test multiple inducing points.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)
    x_ind1 = x[:5]
    x_ind2 = x[5:]
    y = model.sample((f + e)(x))

    post_sparse = (f | SparseObs((f(x_ind1), f(x_ind2)), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 5', atol=1e-4,
             rtol=1e-4)
    allclose(post_sparse.var, post_ref.var)

    # Test caching of mean.
    obs = SparseObs(f(x), e, f(x), y)
    mu = obs.mu
    allclose(mu, obs.mu)

    # Test caching of corrective kernel parameter.
    obs = SparseObs(f(x), e, f(x), y)
    A = obs.A
    allclose(A, obs.A)

    # Test caching of elbo.
    obs = SparseObs(f(x), e, f(x), y)
    elbo = obs.elbo
    allclose(elbo, obs.elbo)

    # Test that `Graph.logpdf` takes a `SparseObservations` object.
    obs = SparseObs(f(x), e, f(x), y)
    allclose(model.logpdf(obs), (f + e)(x).logpdf(y))
Example #6
def test_observations_and_conditioning():
    model = Graph()
    p1 = GP(EQ(), graph=model)
    p2 = GP(EQ(), graph=model)
    p = p1 + p2
    x = np.linspace(0, 5, 10)
    y = p(x).sample()
    y1 = p1(x).sample()

    # Test all ways of conditioning, including shorthands.
    obs1 = Obs(p(x), y)
    obs2 = Obs(x, y, ref=p)
    allclose(obs1.y, obs2.y)
    allclose(obs1.K_x, obs2.K_x)

    obs3 = Obs((p(x), y), (p1(x), y1))
    obs4 = Obs((x, y), (p1(x), y1), ref=p)
    allclose(obs3.y, obs4.y)
    allclose(obs3.K_x, obs4.K_x)

    def assert_equal_mean_var(x, *ys):
        for y in ys:
            allclose(x.mean, y.mean)
            allclose(x.var, y.var)

    assert_equal_mean_var(p.condition(x, y)(x),
                          p.condition(p(x), y)(x),
                          (p | (x, y))(x),
                          (p | (p(x), y))(x),
                          p.condition(obs1)(x),
                          p.condition(obs2)(x),
                          (p | obs1)(x),
                          (p | obs2)(x))

    assert_equal_mean_var(p.condition((x, y), (p1(x), y1))(x),
                          p.condition((p(x), y), (p1(x), y1))(x),
                          (p | [(x, y), (p1(x), y1)])(x),
                          (p | [(p(x), y), (p1(x), y1)])(x),
                          p.condition(obs3)(x),
                          p.condition(obs4)(x),
                          (p | obs3)(x),
                          (p | obs4)(x))

    # Check conditioning multiple processes at once.
    p1_post, p2_post, p_post = (p1, p2, p) | obs1
    p1_post, p2_post, p_post = p1_post(x), p2_post(x), p_post(x)
    p1_post2, p2_post2, p_post2 = (p1 | obs1)(x), (p2 | obs1)(x), (p | obs1)(x)

    allclose(p1_post.mean, p1_post2.mean)
    allclose(p1_post.var, p1_post2.var)
    allclose(p2_post.mean, p2_post2.mean)
    allclose(p2_post.var, p2_post2.var)
    allclose(p_post.mean, p_post2.mean)
    allclose(p_post.var, p_post2.var)

    # Test `At` check.
    with pytest.raises(ValueError):
        Obs(0, 0)
    with pytest.raises(ValueError):
        Obs((0, 0), (0, 0))
    with pytest.raises(ValueError):
        SparseObs(0, p, (0, 0))

    # Test that `Graph.logpdf` takes an `Observations` object.
    obs = Obs(p(x), y)
    assert model.logpdf(obs) == p(x).logpdf(y)