def test_posterior_with_exact_computations_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            with gpytorch.settings.fast_computations(covar_root_decomposition=False, log_prob=False):
                self.test_posterior_latent_gp_and_likelihood_with_optimization(cuda=True)

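# NOTE: These methods assume the imports of their originating test modules
# (warnings, torch, torch.optim as optim, gpytorch, GaussianLikelihood,
# FixedNoiseGaussianLikelihood, NumericalWarning) plus test-local helpers such
# as make_data / train_data and the model classes. Every method guards GPU work
# with least_used_cuda_device(). A minimal sketch of such a context manager
# follows -- an illustrative assumption (device chosen by torch-allocated
# memory), not the library's actual implementation:
import contextlib

@contextlib.contextmanager
def _least_used_cuda_device_sketch():
    # Pick the visible GPU on which torch currently has the least memory allocated.
    device_id = min(range(torch.cuda.device_count()), key=torch.cuda.memory_allocated)
    # Make that GPU the current device for the duration of the block.
    with torch.cuda.device(device_id):
        yield
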
def test_sgpr_mean_abs_error_cuda(self):
    # Suppress numerical warnings
    warnings.simplefilter("ignore", NumericalWarning)
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        self.test_sgpr_mean_abs_error(cuda=True)

def test_sgpr_mean_abs_error_cuda(self):
    # Suppress numerical warnings
    warnings.simplefilter("ignore", NumericalWarning)
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        train_x, train_y, test_x, test_y = make_data(cuda=True)
        likelihood = GaussianLikelihood().cuda()
        gp_model = GPRegressionModel(train_x, train_y, likelihood).cuda()
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)

        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(gp_model.parameters(), lr=0.1)
        optimizer.n_iter = 0
        for _ in range(25):
            optimizer.zero_grad()
            output = gp_model(train_x)
            loss = -mll(output, train_y)
            loss.backward()
            optimizer.n_iter += 1
            optimizer.step()

        for param in gp_model.parameters():
            self.assertTrue(param.grad is not None)
            self.assertGreater(param.grad.norm().item(), 0)

        # Test the model
        gp_model.eval()
        likelihood.eval()
        test_preds = likelihood(gp_model(test_x)).mean
        mean_abs_error = torch.mean(torch.abs(test_y - test_preds))
        self.assertLess(mean_abs_error.squeeze().item(), 0.02)

        # Test variances
        test_vars = likelihood(gp_model(test_x)).variance
        self.assertAllClose(
            test_vars,
            likelihood(gp_model(test_x)).covariance_matrix.diagonal(dim1=-1, dim2=-2),
        )
        self.assertGreater(test_vars.min().item() + 0.05, likelihood.noise.item())
        self.assertLess(
            test_vars.max().item() - 0.05,
            likelihood.noise.item() + gp_model.covar_module.base_kernel.outputscale.item(),
        )

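# GPRegressionModel above is defined in the originating SGPR test module. A
# minimal sketch of an SGPR-style model consistent with the assertions above
# (a ScaleKernel base inside an InducingPointKernel, so that
# covar_module.base_kernel.outputscale exists) -- an assumption, not the test's
# exact definition:
class _SGPRModelSketch(gpytorch.models.ExactGP):
    def __init__(self, train_x, train_y, likelihood):
        super().__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMean()
        base_covar = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel())
        # Sparse GP regression: learn a small set of inducing inputs.
        self.covar_module = gpytorch.kernels.InducingPointKernel(
            base_covar, inducing_points=train_x[:25].clone(), likelihood=likelihood
        )

    def forward(self, x):
        return gpytorch.distributions.MultivariateNormal(self.mean_module(x), self.covar_module(x))
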
def test_regression_error_cuda(self):
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        train_x, train_y = train_data(cuda=True)
        likelihood = GaussianLikelihood().cuda()
        inducing_points = torch.linspace(0, 1, 25).unsqueeze(-1).repeat(2, 1, 1)
        model = SVGPRegressionModel(inducing_points).cuda()
        mll = gpytorch.mlls.VariationalELBO(likelihood, model, num_data=train_y.size(-1))

        # Find optimal model hyperparameters
        model.train()
        likelihood.train()
        optimizer = optim.Adam(
            [{"params": model.parameters()}, {"params": likelihood.parameters()}],
            lr=0.01,
        )
        for _ in range(150):
            optimizer.zero_grad()
            output = model(train_x)
            loss = -mll(output, train_y)
            loss = loss.sum()
            loss.backward()
            optimizer.step()

        for param in model.parameters():
            self.assertTrue(param.grad is not None)
            self.assertGreater(param.grad.norm().item(), 0)
        for param in likelihood.parameters():
            self.assertTrue(param.grad is not None)
            self.assertGreater(param.grad.norm().item(), 0)

        # Set back to eval mode
        model.eval()
        likelihood.eval()
        test_preds = likelihood(model(train_x)).mean.squeeze()
        mean_abs_error = torch.mean(torch.abs(train_y[0, :] - test_preds[0, :]) / 2)
        mean_abs_error2 = torch.mean(torch.abs(train_y[1, :] - test_preds[1, :]) / 2)
        self.assertLess(mean_abs_error.item(), 1e-1)
        self.assertLess(mean_abs_error2.item(), 1e-1)

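# SVGPRegressionModel above lives in the originating test module. A minimal
# sketch of a batched variational GP compatible with the (2, 25, 1) inducing
# points used above -- an assumption, not the test's exact definition:
class _SVGPModelSketch(gpytorch.models.ApproximateGP):
    def __init__(self, inducing_points):
        batch_shape = inducing_points.shape[:-2]
        variational_distribution = gpytorch.variational.CholeskyVariationalDistribution(
            inducing_points.size(-2), batch_shape=batch_shape
        )
        variational_strategy = gpytorch.variational.VariationalStrategy(
            self, inducing_points, variational_distribution, learn_inducing_locations=True
        )
        super().__init__(variational_strategy)
        self.mean_module = gpytorch.means.ConstantMean(batch_shape=batch_shape)
        self.covar_module = gpytorch.kernels.ScaleKernel(
            gpytorch.kernels.RBFKernel(batch_shape=batch_shape), batch_shape=batch_shape
        )

    def forward(self, x):
        return gpytorch.distributions.MultivariateNormal(self.mean_module(x), self.covar_module(x))
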
def test_kissgp_gp_mean_abs_error_cuda(self):
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        train_x, train_y, test_x, test_y = make_data(cuda=True)
        likelihood = FixedNoiseGaussianLikelihood(torch.ones(100) * 0.001).cuda()
        gp_model = GPRegressionModel(train_x, train_y, likelihood).cuda()
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)

        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(
            list(gp_model.parameters()) + list(likelihood.parameters()), lr=0.1
        )
        optimizer.n_iter = 0
        with gpytorch.settings.debug(False):
            for _ in range(25):
                optimizer.zero_grad()
                output = gp_model(train_x)
                loss = -mll(output, train_y)
                loss.backward()
                optimizer.n_iter += 1
                optimizer.step()

            for param in gp_model.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)
            for param in likelihood.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)

            # Test the model
            gp_model.eval()
            likelihood.eval()
            test_preds = likelihood(gp_model(test_x)).mean
            mean_abs_error = torch.mean(torch.abs(test_y - test_preds))

        self.assertLess(mean_abs_error.squeeze().item(), 0.02)

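# The KISS-GP GPRegressionModel above is test-local. A minimal sketch using
# gpytorch's GridInterpolationKernel (structured kernel interpolation) -- an
# assumption about its shape, not the test's exact definition:
class _KISSGPModelSketch(gpytorch.models.ExactGP):
    def __init__(self, train_x, train_y, likelihood):
        super().__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMean()
        # Interpolate an RBF kernel onto a fixed 1D grid for fast matrix-vector multiplies.
        self.covar_module = gpytorch.kernels.GridInterpolationKernel(
            gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel()),
            grid_size=128,
            num_dims=1,
        )

    def forward(self, x):
        return gpytorch.distributions.MultivariateNormal(self.mean_module(x), self.covar_module(x))
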
def test_gp_posterior_mean_skip_variances_slow_cuda(self):
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        train_x, test_x, train_y, _ = self._get_data(cuda=True)
        likelihood = GaussianLikelihood()
        gp_model = ExactGPModel(train_x, train_y, likelihood)
        gp_model.cuda()
        likelihood.cuda()

        # Compute posterior distribution
        gp_model.eval()
        likelihood.eval()

        with gpytorch.settings.fast_pred_var(False):
            with gpytorch.settings.skip_posterior_variances(True):
                mean_skip_var = gp_model(test_x).mean
            mean = gp_model(test_x).mean
            likelihood_mean = likelihood(gp_model(test_x)).mean

        self.assertTrue(torch.allclose(mean_skip_var, mean))
        self.assertTrue(torch.allclose(mean_skip_var, likelihood_mean))

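# ExactGPModel above is also test-local; a standard minimal exact GP sketch
# (an assumption about its form, not the test's exact definition):
class _ExactGPModelSketch(gpytorch.models.ExactGP):
    def __init__(self, train_x, train_y, likelihood):
        super().__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMean()
        self.covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel())

    def forward(self, x):
        return gpytorch.distributions.MultivariateNormal(self.mean_module(x), self.covar_module(x))
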
def test_lkj_covariance_prior_batch_log_prob_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_lkj_covariance_prior_batch_log_prob(cuda=True)

def test_smoothed_box_prior_batch_log_prob_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_smoothed_box_prior_batch_log_prob(cuda=True)

def test_grid_gp_mean_abs_error_2d_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_grid_gp_mean_abs_error(cuda=True, num_dim=2)

def test_multivariate_normal_prior_log_prob_log_transform_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_multivariate_normal_prior_log_prob_log_transform(cuda=True)

def test_regression_error_skip_logdet_forward_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_regression_error(skip_logdet_forward=True, cuda=True)

def test_gauss_hermite_quadrature_1D_normal_batch_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            # Delegate to the batch variant (the original delegated to the
            # nonbatch test, which looks like a copy-paste slip).
            self.test_gauss_hermite_quadrature_1D_normal_batch(cuda=True)

def test_prior_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_prior(cuda=True)

def test_fantasy_updates_batch_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_fantasy_updates_batch(cuda=True)

def test_lkj_cholesky_factor_prior_log_prob_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_lkj_cholesky_factor_prior_log_prob(cuda=True)

def test_simple_model_list_gp_regression_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_simple_model_list_gp_regression(cuda=True)

def test_psd_safe_cholesky_pd_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_psd_safe_cholesky_pd(cuda=True)

def test_normal_prior_log_prob_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_normal_prior_log_prob(cuda=True)

def test_kl_divergence_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_kl_divergence(cuda=True)

def test_multivariate_normal_non_lazy_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_multivariate_normal_non_lazy(cuda=True)

def test_posterior_latent_gp_and_likelihood_with_optimization_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_posterior_latent_gp_and_likelihood_with_optimization(cuda=True)

def test_multivariate_normal_batch_correlated_samples_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_multivariate_normal_batch_correlated_samples(cuda=True)

def test_posterior_latent_gp_and_likelihood_fast_pred_var_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_posterior_latent_gp_and_likelihood_fast_pred_var(cuda=True)

def test_from_independent_mvns_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_from_independent_mvns(cuda=True)

def test_regression_error_cuda(self):
    if not torch.cuda.is_available():
        return
    with least_used_cuda_device():
        self.test_regression_error(cuda=True)

def test_multitask_multivariate_normal_exceptions_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_multitask_multivariate_normal_exceptions(cuda=True)

def test_multitask_gp_mean_abs_error_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_multitask_gp_mean_abs_error(cuda=True)

def test_half_cauchy_prior_log_prob_log_transform_cuda(self):
    if torch.cuda.is_available():
        with least_used_cuda_device():
            self.test_half_cauchy_prior_log_prob_log_transform(cuda=True)