Example #1
def test_inference_deepGP():
    # first layer: GP regression over X with latent outputs (y=None)
    gp1 = GPRegression(
        X, None,
        RBF(input_dim=3,
            variance=torch.tensor(3.),
            lengthscale=torch.tensor(2.)))
    # the first layer's sampled output serves as inputs and inducing
    # points for the second layer
    Z, _ = gp1.model()
    gp2 = VariationalSparseGP(Z, y2D, Matern32(input_dim=3), Z.clone(),
                              Gaussian(torch.tensor(1e-6)))

    class DeepGP(torch.nn.Module):
        def __init__(self, gp1, gp2):
            super().__init__()
            self.gp1 = gp1
            self.gp2 = gp2

        def model(self):
            # feed the first layer's sampled output into the second layer
            Z, _ = self.gp1.model()
            self.gp2.set_data(Z, y2D)
            self.gp2.model()

        def guide(self):
            self.gp1.guide()
            self.gp2.guide()

    deepgp = DeepGP(gp1, gp2)
    train(deepgp, num_steps=1)
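
All of these snippets come from the same test module and rely on module-level fixtures (X, y2D, and friends) and helpers that are not shown. A minimal set of imports that would make the names resolve, based on the Pyro GP API (the original file may organize them differently):

import torch
import pyro.distributions as dist
from pyro.contrib.gp.kernels import RBF, Matern32
from pyro.contrib.gp.likelihoods import Gaussian, MultiClass
from pyro.contrib.gp.models import (GPLVM, GPRegression, SparseGPRegression,
                                    VariationalGP, VariationalSparseGP)

The train helper is likewise not shown. A sketch of what it could look like, assuming the usual Adam-plus-ELBO setup (a guess consistent with the calls in these examples, not the original implementation):

import pyro.infer as infer

def train(gpmodule, optimizer=None, loss_fn=None, num_steps=1000):
    # take one gradient step per iteration on the differentiable ELBO loss
    # of the module's model/guide pair
    optimizer = optimizer or torch.optim.Adam(gpmodule.parameters(), lr=0.01)
    loss_fn = loss_fn or infer.TraceMeanField_ELBO().differentiable_loss
    for _ in range(num_steps):
        optimizer.zero_grad()
        loss = loss_fn(gpmodule.model, gpmodule.guide)
        loss.backward()
        optimizer.step()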
Example #2
def test_mean_function_VGP_whiten():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    likelihood = Gaussian()
    gpmodule = VariationalGP(X, y, kernel, likelihood, mean_function=mean_fn,
                             whiten=True)
    optimizer = torch.optim.Adam(gpmodule.parameters(), lr=0.1)
    train(gpmodule, optimizer)
    _post_test_mean_function(gpmodule, Xnew, ynew)
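
The mean-function examples (#2, #3, #6, #13, #14) share two helpers that are not shown either. A hypothetical reconstruction, assuming they build noisy data around a known mean function and then check predictions at held-out points; every name, shape, and tolerance below is illustrative:

def _pre_test_mean_function():
    # noisy observations of a known linear trend
    N = 100
    X = torch.linspace(-5, 5, N)
    y = 2 * X + 3 + dist.Normal(torch.zeros(N), torch.ones(N)).sample()
    Xnew = torch.linspace(-6, 6, 20)
    ynew = 2 * Xnew + 3
    kernel = RBF(input_dim=1)

    def mean_fn(x):
        return 2 * x + 3

    return X, y, Xnew, ynew, kernel, mean_fn

def _post_test_mean_function(gpmodule, Xnew, ynew):
    # with the trend absorbed by the mean function, predictions at Xnew
    # should track the true values
    loc, _ = gpmodule(Xnew)
    assert (loc - ynew).abs().mean().item() < 0.5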
Example #3
def test_mean_function_VSGP():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    Xu = X[::20].clone()
    likelihood = Gaussian()
    gpmodule = VariationalSparseGP(X, y, kernel, Xu, likelihood, mean_function=mean_fn)
    optimizer = torch.optim.Adam(gpmodule.parameters(), lr=0.02)
    train(gpmodule, optimizer)
    _post_test_mean_function(gpmodule, Xnew, ynew)
Example #4
def test_inference_with_whiten(model_class, X, y, kernel, likelihood):
    # regression models don't use whiten
    if model_class is GPRegression or model_class is SparseGPRegression:
        return
    elif model_class is VariationalGP:
        gp = model_class(X, y, kernel, likelihood, whiten=True)
    else:  # model_class is VariationalSparseGP
        gp = model_class(X, y, kernel, X.clone(), likelihood, whiten=True)

    train(gp, num_steps=1)
Example #5
def test_inference_with_empty_latent_shape(model_class, X, y, kernel, likelihood):
    # regression models don't use latent_shape (default=torch.Size([]))
    if model_class is GPRegression or model_class is SparseGPRegression:
        return
    elif model_class is VariationalGP:
        gp = model_class(X, y, kernel, likelihood, latent_shape=torch.Size([]))
    else:  # model_class is VariationalSparseGP
        gp = model_class(X, y, kernel, X.clone(), likelihood, latent_shape=torch.Size([]))

    train(gp, num_steps=1)
Example #6
def test_mean_function_SGPR_FITC():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    Xu = X[::20].clone()
    gpmodule = SparseGPRegression(X,
                                  y,
                                  kernel,
                                  Xu,
                                  mean_function=mean_fn,
                                  approx="FITC")
    train(gpmodule)
    _post_test_mean_function(gpmodule, Xnew, ynew)
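
For reference, approx selects the sparse approximation; Pyro's SparseGPRegression also accepts "DTC" and "VFE" (the default), so the corresponding VFE variant of this test only changes that flag:

gpmodule = SparseGPRegression(X, y, kernel, Xu,
                              mean_function=mean_fn,
                              approx="VFE")  # or "DTC"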
Example #7
def test_gplvm(model_class, X, y, kernel, likelihood):
    if model_class is SparseGPRegression or model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X.clone(), likelihood)
    else:
        gp = model_class(X, y, kernel, likelihood)

    gplvm = GPLVM(gp)
    # test inference
    train(gplvm, num_steps=1)
    # test forward
    gplvm(Xnew=X)
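
GPLVM wraps an existing GP model and treats its inputs X as latent variables to be inferred, which is why the same train call works unchanged. A usage sketch along the lines of the Pyro documentation; the shapes here are illustrative:

# learn a 2-D latent input for each of the 30 data points in a (5, 30) output y
y = torch.randn(5, 30)
X_init = torch.zeros(y.size(1), 2)    # prior mean of the latent inputs
Xu = torch.zeros(10, 2)               # inducing inputs
gp = SparseGPRegression(X_init, y, RBF(input_dim=2), Xu)
gplvm = GPLVM(gp)
train(gplvm, num_steps=1)
gplvm.mode = "guide"
X_learned = gplvm.X                   # posterior estimate of the latent inputs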
Example #8
def test_inference_with_empty_latent_shape(model_class, X, y, kernel, likelihood):
    if isinstance(likelihood, MultiClass):
        latent_shape = torch.Size([likelihood.num_classes])
    else:
        latent_shape = torch.Size([])
    if model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X, likelihood, latent_shape=latent_shape)
    else:
        gp = model_class(X, y, kernel, likelihood, latent_shape=latent_shape)

    train(gp, num_steps=1)
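
For context, latent_shape controls how many latent GP functions the model carries: a MultiClass likelihood needs one latent function per class, which is what the branch above encodes. A minimal concrete instantiation (data shapes are illustrative):

X = torch.randn(20, 3)                  # 20 points, 3 input features
y = torch.randint(0, 3, (20,)).float()  # integer class labels
likelihood = MultiClass(num_classes=3)
gp = VariationalGP(X, y, RBF(input_dim=3), likelihood,
                   latent_shape=torch.Size([3]))
train(gp, num_steps=1)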
Example #9
def test_inference(model_class, X, y, kernel, likelihood):
    if isinstance(likelihood, MultiClass):
        latent_shape = y.shape[:-1] + (likelihood.num_classes,)
    else:
        latent_shape = y.shape[:-1]
    if model_class is VariationalSparseGP:
        gp = model_class(X, y, kernel, X, likelihood, latent_shape=latent_shape)
    else:
        gp = model_class(X, y, kernel, likelihood, latent_shape=latent_shape)

    train(gp, num_steps=1)
Example #10
def test_inference_whiten_vsgp():
    N = 1000
    X = dist.Uniform(torch.zeros(N), torch.ones(N)*5).sample()
    y = 0.5 * torch.sin(3*X) + dist.Normal(torch.zeros(N), torch.ones(N)*0.5).sample()
    kernel = RBF(input_dim=1)
    Xu = torch.arange(0., 5.5, 0.5)

    vsgp = VariationalSparseGP(X, y, kernel, Xu, Gaussian(), whiten=True)
    train(vsgp)

    Xnew = torch.arange(0., 5.05, 0.05)
    loc, var = vsgp(Xnew, full_cov=False)
    target = 0.5 * torch.sin(3*Xnew)

    assert_equal((loc - target).abs().mean().item(), 0, prec=0.07)
Example #11
def test_inference_sgpr():
    N = 1000
    X = dist.Uniform(torch.zeros(N), torch.ones(N)*5).sample()
    y = 0.5 * torch.sin(3*X) + dist.Normal(torch.zeros(N), torch.ones(N)*0.5).sample()
    kernel = RBF(input_dim=1)
    Xu = torch.arange(0., 5.5, 0.5)

    sgpr = SparseGPRegression(X, y, kernel, Xu)
    train(sgpr)

    Xnew = torch.arange(0., 5.05, 0.05)
    loc, var = sgpr(Xnew, full_cov=False)
    target = 0.5 * torch.sin(3*Xnew)

    assert_equal((loc - target).abs().mean().item(), 0, prec=0.07)
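
Examples #10 and #11 end with the same closeness check. assert_equal is presumably a tolerance-based helper from the project's test suite; a minimal stand-in that covers both the scalar use here and the tensor use in Example #12:

def assert_equal(actual, expected, prec=1e-5):
    # elementwise closeness within prec (assumption: the real helper
    # lives in the test suite and may do more)
    actual = torch.as_tensor(actual, dtype=torch.float)
    expected = torch.as_tensor(expected, dtype=torch.float)
    assert torch.all((actual - expected).abs() <= prec)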
Example #12
def test_inference(model_class, X, y, kernel, likelihood):
    # skip variational GP models because the learned variance/lengthscale
    # depend heavily on the variational parameters
    if model_class is VariationalGP or model_class is VariationalSparseGP:
        return
    elif model_class is GPRegression:
        gp = model_class(X, y, RBF(input_dim=3), likelihood)
    else:  # model_class is SparseGPRegression
        gp = model_class(X, y, RBF(input_dim=3), X, likelihood)
        # fix the inducing points because variance/lengthscale depend heavily on them
        gp.Xu.requires_grad_(False)

    # draw fresh targets from a GP prior under the reference kernel, so
    # training should recover that kernel's hyperparameters
    generator = dist.MultivariateNormal(torch.zeros(X.shape[0]), kernel(X))
    target_y = generator(sample_shape=torch.Size([1000])).detach()
    gp.set_data(X, target_y)

    train(gp)

    y_cov = gp.kernel(X)
    target_y_cov = kernel(X)
    assert_equal(y_cov, target_y_cov, prec=0.1)
Example #13
def test_mean_function_VGP():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    likelihood = Gaussian()
    gpmodule = VariationalGP(X, y, kernel, likelihood, mean_function=mean_fn)
    train(gpmodule)
    _post_test_mean_function(gpmodule, Xnew, ynew)
Example #14
def test_mean_function_GPR():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    gpmodule = GPRegression(X, y, kernel, mean_function=mean_fn)
    train(gpmodule)
    _post_test_mean_function(gpmodule, Xnew, ynew)