def test_mean_function_SGPR_FITC():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    Xu = X[::20].clone()
    model = SparseGPRegression(X, y, kernel, Xu, mean_function=mean_fn, approx="FITC")
    model.optimize(optim.Adam({"lr": 0.01}))
    _post_test_mean_function(model, Xnew, ynew)
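
# A minimal sketch (not part of the original suite) of the analogous mean-function
# test for the VFE approximation; it assumes the same _pre_test_mean_function /
# _post_test_mean_function helpers and SparseGPRegression signature used above.
def test_mean_function_SGPR_VFE():
    X, y, Xnew, ynew, kernel, mean_fn = _pre_test_mean_function()
    Xu = X[::20].clone()
    model = SparseGPRegression(X, y, kernel, Xu, mean_function=mean_fn, approx="VFE")
    model.optimize(optim.Adam({"lr": 0.01}))
    _post_test_mean_function(model, Xnew, ynew)
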
def test_inference_sgpr():
    N = 1000
    X = dist.Uniform(torch.zeros(N), torch.ones(N) * 5).sample()
    y = 0.5 * torch.sin(3 * X) + dist.Normal(torch.zeros(N), torch.ones(N) * 0.5).sample()
    kernel = RBF(input_dim=1)
    Xu = torch.arange(0, 5.5, 0.5)
    sgpr = SparseGPRegression(X, y, kernel, Xu)
    sgpr.optimize(optim.Adam({"lr": 0.01}), num_steps=1000)
    Xnew = torch.arange(0, 5.05, 0.05)
    loc, var = sgpr(Xnew, full_cov=False)
    target = 0.5 * torch.sin(3 * Xnew)
    assert_equal((loc - target).abs().mean().item(), 0, prec=0.07)
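
# A minimal usage sketch (an assumption, not from the original tests): with the
# same data-generation pattern as test_inference_sgpr, querying the model with
# full_cov=True should return a square covariance matrix whose diagonal matches
# the marginal variances returned by full_cov=False, up to numerical precision.
def test_forward_sgpr_full_cov():
    N = 100
    X = dist.Uniform(torch.zeros(N), torch.ones(N) * 5).sample()
    y = 0.5 * torch.sin(3 * X) + dist.Normal(torch.zeros(N), torch.ones(N) * 0.5).sample()
    kernel = RBF(input_dim=1)
    Xu = torch.arange(0, 5.5, 0.5)
    sgpr = SparseGPRegression(X, y, kernel, Xu)
    Xnew = torch.arange(0, 5.05, 0.05)
    loc_full, cov = sgpr(Xnew, full_cov=True)
    loc_diag, var = sgpr(Xnew, full_cov=False)
    assert cov.shape == (Xnew.shape[0], Xnew.shape[0])
    assert_equal(loc_full, loc_diag)
    assert_equal(cov.diag(), var, prec=1e-5)
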