Code Example #1
# Imports assumed from the top of the original test module; assert_equal is
# pyro's own test helper (tests/common.py in the pyro repository).
import torch

from pyro.contrib.gp.kernels import WhiteNoise
from pyro.contrib.gp.models import (SparseGPRegression, VariationalGP,
                                    VariationalSparseGP)
from tests.common import assert_equal


def test_forward(model_class, X, y, kernel, likelihood):
    if model_class is SparseGPRegression or model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X, likelihood)
    else:
        gp = model_class(X, y, kernel, likelihood)

    # test shape
    Xnew = torch.tensor([[2.0, 3.0, 1.0]])
    loc0, cov0 = gp(Xnew, full_cov=True)
    loc1, var1 = gp(Xnew, full_cov=False)
    assert loc0.dim() == y.dim()
    assert loc0.shape[-1] == Xnew.shape[0]
    # test latent shape
    assert loc0.shape[:-1] == y.shape[:-1]
    assert cov0.shape[:-2] == y.shape[:-1]
    assert cov0.shape[-1] == cov0.shape[-2]
    assert cov0.shape[-1] == Xnew.shape[0]
    assert_equal(loc0, loc1)
    n = Xnew.shape[0]
    cov0_diag = torch.stack([mat.diag() for mat in cov0.view(-1, n, n)
                             ]).reshape(var1.shape)
    assert_equal(cov0_diag, var1)

    # test trivial forward: Xnew = X
    loc, cov = gp(X, full_cov=True)
    if model_class is VariationalGP or model_class is VariationalSparseGP:
        assert_equal(loc.norm().item(), 0)
        assert_equal(cov, torch.eye(cov.shape[-1]).expand(cov.shape))
    else:
        assert_equal(loc, y)
        assert_equal(cov.norm().item(), 0)

    # test same input forward: Xnew[0,:] = Xnew[1,:] = ...
    Xnew = torch.tensor([[2.0, 3.0, 1.0]]).expand(10, 3)
    loc, cov = gp(Xnew, full_cov=True)
    loc_diff = loc - loc[..., :1].expand(y.shape[:-1] + (10, ))
    assert_equal(loc_diff.norm().item(), 0)
    cov_diff = cov - cov[..., :1, :1].expand(y.shape[:-1] + (10, 10))
    assert_equal(cov_diff.norm().item(), 0)

    # test noise kernel forward: kernel = WhiteNoise
    gp.kernel = WhiteNoise(input_dim=3, variance=torch.tensor(10.))
    loc, cov = gp(X, full_cov=True)
    assert_equal(loc.norm().item(), 0)
    assert_equal(cov, torch.eye(cov.shape[-1]).expand(cov.shape) * 10)
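test_forward receives model_class, X, y, kernel, and likelihood from a pytest parametrization elsewhere in the original file. Below is a minimal sketch of one way to drive it, assuming pyro.contrib.gp's public RBF kernel, Gaussian likelihood, and model classes; the project's actual test cases differ, and test_forward_smoke is a hypothetical wrapper name.

import pytest
import torch

from pyro.contrib.gp.kernels import RBF
from pyro.contrib.gp.likelihoods import Gaussian
from pyro.contrib.gp.models import (GPRegression, SparseGPRegression,
                                    VariationalGP, VariationalSparseGP)

X = torch.tensor([[1.0, 5.0, 3.0], [4.0, 3.0, 7.0]])
y = torch.tensor([2.0, 1.0])

# The "likelihood" slot is a noise tensor for the regression models and a
# Gaussian likelihood object for the variational ones.
@pytest.mark.parametrize("model_class, likelihood", [
    (GPRegression, torch.tensor(1e-7)),
    (SparseGPRegression, torch.tensor(1e-7)),
    (VariationalGP, Gaussian()),
    (VariationalSparseGP, Gaussian()),
])
def test_forward_smoke(model_class, likelihood):
    test_forward(model_class, X, y, RBF(input_dim=3), likelihood)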
Code Example #2
File: test_conditional.py Project: zippeurfou/pyro
# Assumed from the top of the original file: imports, the test-case record type,
# and the prediction inputs Xnew (two 2-D test points, matching the torch.zeros(2)
# and torch.eye(2) expectations below).
from collections import namedtuple

import pytest
import torch

import pyro
from pyro.contrib.gp.kernels import Matern52, WhiteNoise
from pyro.contrib.gp.util import conditional

T = namedtuple("TestConditional",
               ["Xnew", "X", "kernel", "f_loc", "f_scale_tril", "loc", "cov"])

Xnew = torch.tensor([[2., 3.], [4., 6.]])  # example values; the original's exact Xnew may differ
X = torch.tensor([[1., 5.], [2., 1.], [3., 2.]])
kernel = Matern52(input_dim=2)
Kff = kernel(X) + torch.eye(3) * 1e-6
Lff = torch.linalg.cholesky(Kff)  # lower Cholesky factor; the original used the
                                  # since-removed Kff.potrf(upper=False)
pyro.set_rng_seed(123)
f_loc = torch.rand(3)
f_scale_tril = torch.rand(3, 3).tril(-1) + torch.rand(3).exp().diag()
f_cov = f_scale_tril.matmul(f_scale_tril.t())

TEST_CASES = [
    T(Xnew, X, kernel, torch.zeros(3), Lff, torch.zeros(2), None),
    T(Xnew, X, kernel, torch.zeros(3), None, torch.zeros(2), None),
    T(Xnew, X, kernel, f_loc, Lff, None, kernel(Xnew)),
    T(X, X, kernel, f_loc, f_scale_tril, f_loc, f_cov),
    T(X, X, kernel, f_loc, None, f_loc, torch.zeros(3, 3)),
    T(Xnew, X, WhiteNoise(input_dim=2), f_loc, f_scale_tril, torch.zeros(2),
      torch.eye(2)),
    T(Xnew, X, WhiteNoise(input_dim=2), f_loc, None, torch.zeros(2),
      torch.eye(2)),
]

TEST_IDS = [str(i) for i in range(len(TEST_CASES))]


@pytest.mark.parametrize("Xnew, X, kernel, f_loc, f_scale_tril, loc, cov",
                         TEST_CASES,
                         ids=TEST_IDS)
def test_conditional(Xnew, X, kernel, f_loc, f_scale_tril, loc, cov):
    # The call is cut off here in the source; a plausible completion, assuming the
    # remaining arguments mirror the parametrized fields:
    loc0, cov0 = conditional(Xnew, X, kernel, f_loc, f_scale_tril, full_cov=True)
    # (the original test then compares loc0 and cov0 against the expected loc and cov)
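Each T(...) row pairs inputs to conditional with the expected posterior loc and cov. As a rough sketch of the math these cases exercise (assuming the standard GP conditional loc = Ksf Kff^{-1} f_loc and cov = Kss - Ksf Kff^{-1} Kfs when f_scale_tril is None; this is not pyro's implementation), the Xnew = X row with f_scale_tril=None collapses to loc ≈ f_loc and cov ≈ 0:

import torch

from pyro.contrib.gp.kernels import Matern52

X = torch.tensor([[1., 5.], [2., 1.], [3., 2.]])
kernel = Matern52(input_dim=2)
f_loc = torch.rand(3)

Kff = kernel(X) + torch.eye(3) * 1e-6   # jittered training covariance
Ksf = kernel(X)                         # Xnew == X, so Ksf == Kss == kernel(X)
A = Ksf.matmul(torch.inverse(Kff))      # ~ identity, up to the jitter
loc = A.matmul(f_loc)                   # ~ f_loc
cov = kernel(X) - A.matmul(Ksf.t())     # ~ torch.zeros(3, 3)
print(torch.allclose(loc, f_loc, atol=1e-3), cov.abs().max())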
Code Example #3
 # variance, lengthscale, X, Z and the T record type are defined earlier in the
 # original test_kernels.py; each entry records the expected sum of the Gram
 # matrix K(X, Z) for one kernel configuration.
 T(Cosine(3, variance, lengthscale), X=X, Z=Z, K_sum=-0.193233),
 T(Linear(3, variance), X=X, Z=Z, K_sum=291),
 T(Exponential(3, variance, lengthscale), X=X, Z=Z, K_sum=2.685679),
 T(Matern32(3, variance, lengthscale), X=X, Z=Z, K_sum=3.229314),
 T(Matern52(3, variance, lengthscale), X=X, Z=Z, K_sum=3.391847),
 T(Periodic(3, variance, lengthscale, period=torch.ones(1)),
   X=X,
   Z=Z,
   K_sum=18),
 T(Polynomial(3, variance, degree=2), X=X, Z=Z, K_sum=7017),
 T(RationalQuadratic(3, variance, lengthscale, scale_mixture=torch.ones(1)),
   X=X,
   Z=Z,
   K_sum=5.684670),
 T(RBF(3, variance, lengthscale), X=X, Z=Z, K_sum=3.681117),
 T(WhiteNoise(3, variance, lengthscale), X=X, Z=Z, K_sum=0),
 T(WhiteNoise(3, variance, lengthscale), X=X, Z=None, K_sum=6),
 T(
     Coregionalize(3, components=torch.eye(3, 3)),
     X=torch.tensor([[1., 0., 0.], [0.5, 0., 0.5]]),
     Z=torch.tensor([[1., 0., 0.], [0., 1., 0.]]),
     K_sum=2.25,
 ),
 T(
     Coregionalize(3, rank=2),
     X=torch.tensor([[1., 0., 0.], [0.5, 0., 0.5]]),
     Z=torch.tensor([[1., 0., 0.], [0., 1., 0.]]),
     K_sum=None,  # kernel is randomly initialized
 ),
 T(
     Coregionalize(3),
     # truncated here in the source; completion assumed from the rank=2 case
     # above (randomly initialized, so only the shape can be checked)
     X=torch.tensor([[1., 0., 0.], [0.5, 0., 0.5]]),
     Z=torch.tensor([[1., 0., 0.], [0., 1., 0.]]),
     K_sum=None,
 ),
Code Example #4
File: test_kernels.py Project: youngshingjun/pyro
 T(
     # opening lines assumed from the matching Periodic entry in the previous example
     Periodic(3, variance, lengthscale, period=torch.ones(1)),
     X=X, Z=Z, K_sum=18
 ),
 T(
     Polynomial(3, variance, degree=2),
     X=X, Z=Z, K_sum=7017
 ),
 T(
     RationalQuadratic(3, variance, lengthscale, scale_mixture=torch.ones(1)),
     X=X, Z=Z, K_sum=5.684670
 ),
 T(
     RBF(3, variance, lengthscale),
     X=X, Z=Z, K_sum=3.681117
 ),
 T(
     WhiteNoise(3, variance, lengthscale),
     X=X, Z=Z, K_sum=0
 ),
 T(
     WhiteNoise(3, variance, lengthscale),
     X=X, Z=None, K_sum=6
 ),
 T(
     Coregionalize(3, components=torch.eye(3, 3)),
     X=torch.tensor([[1., 0., 0.],
                     [0.5, 0., 0.5]]),
     Z=torch.tensor([[1., 0., 0.],
                     [0., 1., 0.]]),
     K_sum=2.25,
 ),
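Entries like the ones above are typically consumed by a parametrized test that evaluates the kernel on (X, Z) and compares the sum of the resulting Gram matrix against K_sum. Below is a hedged sketch, assuming the entries are collected into a TEST_CASES list as in the earlier example and that the T record's fields are (kernel, X, Z, K_sum); the assertion in the original test_kernels.py may differ in tolerance and shape checks.

import pytest


@pytest.mark.parametrize("kernel, X, Z, K_sum", TEST_CASES)
def test_kernel_gram_sum(kernel, X, Z, K_sum):
    K = kernel(X, Z)                    # kernel(X, None) == kernel(X, X)
    cols = X.shape[0] if Z is None else Z.shape[0]
    assert K.shape == (X.shape[0], cols)
    if K_sum is not None:               # None marks randomly initialized kernels
        assert K.sum().item() == pytest.approx(K_sum, rel=1e-3)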