示例#1
0
def test_sparse_conditioning():
    """Nose-style tests: conditioning on a `SparseObs` (inducing-point)
    observation must reproduce the exact posterior, and its ELBO must equal
    the exact log-pdf, when the inducing points coincide with the
    observation inputs.
    """
    model = Graph()
    f = GP(EQ().stretch(3), graph=model)  # Latent process.
    e = GP(1e-2 * Delta(), graph=model)  # Observation noise process.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)

    # Noiseless sample of the latent process at the inputs.
    y = f(x).sample()

    # Test that noise matrix must indeed be diagonal: passing `f` itself as
    # the noise process must raise.
    yield raises, RuntimeError, lambda: SparseObs(f(x), f, f(x), y).elbo

    # Test posterior.
    post_sparse = (f | SparseObs(f(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 1', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Same check, but observing an affine transform of the latent process.
    post_sparse = (f | SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2))(x_new)
    post_ref = (f | ((2 * f + 2 + e)(x), 2 * y + 2))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 2', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Same check, but with the inducing points on the transformed process.
    post_sparse = (f | SparseObs((2 * f + 2)(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 3', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test ELBO.
    # NOTE(review): `e` is reconstructed identically here — presumably to
    # obtain a fresh noise process for the ELBO checks; confirm.
    e = GP(1e-2 * Delta(), graph=model)
    yield assert_allclose, \
          SparseObs(f(x), e, f(x), y).elbo, \
          (f + e)(x).logpdf(y)
    yield assert_allclose, \
          SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2).elbo, \
          (2 * f + 2 + e)(x).logpdf(2 * y + 2)
    yield assert_allclose, \
          SparseObs((2 * f + 2)(x), e, f(x), y).elbo, \
          (f + e)(x).logpdf(y)

    # Test multiple observations.
    x1 = np.linspace(0, 5, 10)
    x2 = np.linspace(10, 15, 10)
    x_new = np.linspace(6, 9, 10)
    x_ind = np.concatenate((x1, x2, x_new), axis=0)
    y1, y2 = model.sample((f + e)(x1), (f + e)(x2))

    post_sparse = (f | SparseObs(f(x_ind),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    post_ref = (f | Obs(((f + e)(x1), y1), ((f + e)(x2), y2)))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test multiple observations and multiple inducing points.
    # `post_ref` from the previous check is reused as the reference here.
    post_sparse = (f | SparseObs((f(x1), f(x2), f(x_new)),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 4', 1e-6, 1e-6
    yield assert_allclose, post_sparse.var, post_ref.var

    # Test multiple inducing points.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)
    x_ind1 = x[:5]
    x_ind2 = x[5:]
    y = model.sample((f + e)(x))

    post_sparse = (f | SparseObs((f(x_ind1), f(x_ind2)), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    yield assert_allclose, post_sparse.mean, post_ref.mean, \
          'means 5', 1e-4, 1e-4
    yield assert_allclose, post_sparse.var, post_ref.var
示例#2
0
def test_delta_evaluations(x1_x2):
    """The `Delta` kernel must act as the identity on a single input, as
    zero across distinct inputs, and must honour `Unique` wrappers, which
    compare inputs by object identity rather than by value.
    """
    x1, x2 = x1_x2
    kernel = Delta()

    def _rows(z):
        # Number of rows after promoting the input to at least rank two.
        return B.shape(B.uprank(z))[0]

    rows1 = _rows(x1)
    rows2 = _rows(x2)

    # Identity on a single input; zeros across two different inputs.
    allclose(kernel(x1), B.eye(rows1))
    allclose(kernel(x1, x2), B.zeros(rows1, rows2))

    # Run the generic battery of kernel tests.
    standard_kernel_tests(kernel)

    # `Unique`-wrapped inputs are considered equal only when they are the
    # very same object; a copy counts as different.
    assert isinstance(kernel(Unique(x1), Unique(x1.copy())), Zero)
    assert isinstance(kernel(Unique(x1), Unique(x1)), UniformlyDiagonal)
    assert isinstance(kernel(Unique(x1), x1), Zero)
    assert isinstance(kernel(x1, Unique(x1)), Zero)

    # The same identity semantics hold for element-wise evaluation.
    assert isinstance(kernel.elwise(Unique(x1), Unique(x1.copy())), Zero)
    assert isinstance(kernel.elwise(Unique(x1), Unique(x1)), One)
    assert isinstance(kernel.elwise(Unique(x1), x1), Zero)
    assert isinstance(kernel.elwise(x1, Unique(x1)), Zero)
示例#3
0
def test_delta_evaluations(x1, w1, x2, w2):
    """The `Delta` kernel must act as the identity on a single input, as
    zero across distinct inputs, and must honour `Unique` and
    `WeightedUnique` wrappers, which compare inputs by object identity.
    """
    k = Delta()
    n1 = num_elements(x1)
    n2 = num_elements(x2)

    # Check uniqueness checks: identity on one input, zeros across two.
    approx(k(x1), B.eye(n1))
    approx(k(x1, x2), B.zeros(n1, n2))

    # Standard tests:
    standard_kernel_tests(k)

    # Test `Unique` inputs: equality holds only for the same object, so a
    # copy evaluates to zero.
    assert isinstance(k(Unique(x1), Unique(x1.copy())), Zero)
    assert isinstance(k(Unique(x1), Unique(x1)), Diagonal)
    assert isinstance(k(Unique(x1), x1), Zero)
    assert isinstance(k(x1, Unique(x1)), Zero)

    approx(k.elwise(Unique(x1), Unique(x1.copy())), B.zeros(n1, 1))
    approx(k.elwise(Unique(x1), Unique(x1)), B.ones(n1, 1))
    approx(k.elwise(Unique(x1), x1), B.zeros(n1, 1))
    # Symmetric counterpart, mirroring the matrix-form check above.
    approx(k.elwise(x1, Unique(x1)), B.zeros(n1, 1))

    # Test `WeightedUnique` inputs.
    assert isinstance(k(WeightedUnique(x1, w1), WeightedUnique(x1.copy(), w1)),
                      Zero)
    assert isinstance(k(WeightedUnique(x1, w1), WeightedUnique(x1, w1)),
                      Diagonal)
    assert isinstance(k(WeightedUnique(x1, w1), x1), Zero)
    assert isinstance(k(x1, WeightedUnique(x1, w1)), Zero)

    approx(k.elwise(WeightedUnique(x1, w1), WeightedUnique(x1.copy(), w1)),
           B.zeros(n1, 1))
    approx(k.elwise(WeightedUnique(x1, w1), WeightedUnique(x1, w1)),
           B.ones(n1, 1))
    approx(k.elwise(WeightedUnique(x1, w1), x1), B.zeros(n1, 1))
    approx(k.elwise(x1, WeightedUnique(x1, w1)), B.zeros(n1, 1))
示例#4
0
def test_sparse_conditioning():
    """Pytest-style tests: conditioning on a `SparseObs` (inducing-point)
    observation must reproduce the exact posterior and log-pdf when the
    inducing points coincide with the observation inputs, and the derived
    quantities of a `SparseObs` must be stable across repeated accesses.
    """
    model = Graph()
    f = GP(EQ().stretch(3), graph=model)  # Latent process.
    e = GP(1e-2 * Delta(), graph=model)  # Observation noise process.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)

    # Noiseless sample of the latent process at the inputs.
    y = f(x).sample()

    # Test that noise matrix must indeed be diagonal: passing `f` itself as
    # the noise process must raise.
    with pytest.raises(RuntimeError):
        SparseObs(f(x), f, f(x), y).elbo

    # Test posterior.
    post_sparse = (f | SparseObs(f(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 1', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Same check, but observing an affine transform of the latent process.
    post_sparse = (f | SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2))(x_new)
    post_ref = (f | ((2 * f + 2 + e)(x), 2 * y + 2))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 2', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Same check, but with the inducing points on the transformed process.
    post_sparse = (f | SparseObs((2 * f + 2)(x), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 3', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Test ELBO.
    # NOTE(review): `e` is reconstructed identically here — presumably to
    # obtain a fresh noise process for the ELBO checks; confirm.
    e = GP(1e-2 * Delta(), graph=model)
    allclose(SparseObs(f(x), e, f(x), y).elbo, (f + e)(x).logpdf(y))
    allclose(SparseObs(f(x), e, (2 * f + 2)(x), 2 * y + 2).elbo,
             (2 * f + 2 + e)(x).logpdf(2 * y + 2))
    allclose(SparseObs((2 * f + 2)(x), e, f(x), y).elbo, (f + e)(x).logpdf(y))

    # Test multiple observations.
    x1 = np.linspace(0, 5, 10)
    x2 = np.linspace(10, 15, 10)
    x_new = np.linspace(6, 9, 10)
    x_ind = np.concatenate((x1, x2, x_new), axis=0)
    y1, y2 = model.sample((f + e)(x1), (f + e)(x2))

    post_sparse = (f | SparseObs(f(x_ind),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    post_ref = (f | Obs(((f + e)(x1), y1), ((f + e)(x2), y2)))(x_new)
    allclose(post_sparse.mean, post_ref.mean)
    allclose(post_sparse.var, post_ref.var)

    # Test multiple observations and multiple inducing points.
    # `post_ref` from the previous check is reused as the reference here.
    post_sparse = (f | SparseObs((f(x1), f(x2), f(x_new)),
                                 (e, f(Unique(x1)), y1),
                                 (e, f(Unique(x2)), y2)))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 4', atol=1e-6,
             rtol=1e-6)
    allclose(post_sparse.var, post_ref.var)

    # Test multiple inducing points.
    x = np.linspace(0, 5, 10)
    x_new = np.linspace(6, 10, 10)
    x_ind1 = x[:5]
    x_ind2 = x[5:]
    y = model.sample((f + e)(x))

    post_sparse = (f | SparseObs((f(x_ind1), f(x_ind2)), e, f(x), y))(x_new)
    post_ref = (f | ((f + e)(x), y))(x_new)
    allclose(post_sparse.mean, post_ref.mean, desc='means 5', atol=1e-4,
             rtol=1e-4)
    allclose(post_sparse.var, post_ref.var)

    # Test caching of mean: accessing the attribute twice must give the
    # same value.
    obs = SparseObs(f(x), e, f(x), y)
    mu = obs.mu
    allclose(mu, obs.mu)

    # Test caching of corrective kernel parameter.
    obs = SparseObs(f(x), e, f(x), y)
    A = obs.A
    allclose(A, obs.A)

    # Test caching of elbo.
    obs = SparseObs(f(x), e, f(x), y)
    elbo = obs.elbo
    allclose(elbo, obs.elbo)

    # Test that `Graph.logpdf` takes an `SparseObservations` object.
    obs = SparseObs(f(x), e, f(x), y)
    allclose(model.logpdf(obs), (f + e)(x).logpdf(y))
示例#5
0
def test_delta():
    """Nose-style tests for the properties of the `Delta` kernel: basic
    attributes, equality, evaluation, and `Unique`-wrapped inputs.
    """
    kernel = Delta()
    xs_a = np.random.randn(10, 2)
    xs_b = np.random.randn(5, 2)

    # Verify that the kernel has the right properties.
    yield eq, kernel.stationary, True
    yield eq, kernel.var, 1
    yield eq, kernel.length_scale, 0
    yield eq, kernel.period, np.inf
    yield eq, str(kernel), 'Delta()'

    # Equality is determined by the `epsilon` parameter and the type.
    yield eq, Delta(), Delta()
    yield neq, Delta(), Delta(epsilon=kernel.epsilon * 10)
    yield neq, Delta(), EQ()

    # Identity on a single input; zeros across two different inputs.
    yield ok, allclose(kernel(xs_a), np.eye(10)), 'same'
    yield ok, allclose(kernel(xs_a, xs_b), np.zeros((10, 5))), 'others'

    # Run the generic battery of kernel tests.
    for case in kernel_generator(kernel):
        yield case

    # `Unique`-wrapped inputs are considered equal only when they are the
    # very same object; a copy counts as different.
    yield assert_instance, kernel(Unique(xs_a), Unique(xs_a.copy())), Zero
    yield assert_instance, kernel(Unique(xs_a), Unique(xs_a)), \
          UniformlyDiagonal
    yield assert_instance, kernel(Unique(xs_a), xs_a), Zero
    yield assert_instance, kernel(xs_a, Unique(xs_a)), Zero

    # The same identity semantics hold for element-wise evaluation.
    yield assert_instance, kernel.elwise(Unique(xs_a), Unique(xs_a.copy())), \
          Zero
    yield assert_instance, kernel.elwise(Unique(xs_a), Unique(xs_a)), One
    yield assert_instance, kernel.elwise(Unique(xs_a), xs_a), Zero
    yield assert_instance, kernel.elwise(xs_a, Unique(xs_a)), Zero