def test_fit_gpytorch_model_singular(self): options = {"disp": False, "maxiter": 5} for dtype in (torch.float, torch.double): X_train = torch.ones(2, 2, device=self.device, dtype=dtype) Y_train = torch.zeros(2, 1, device=self.device, dtype=dtype) test_likelihood = GaussianLikelihood( noise_constraint=GreaterThan(-1e-7, transform=None, initial_value=0.0) ) gp = SingleTaskGP(X_train, Y_train, likelihood=test_likelihood) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) # this will do multiple retries (and emit warnings, which is desired) with warnings.catch_warnings(record=True) as ws, settings.debug(True): fit_gpytorch_model(mll, options=options, max_retries=2) self.assertTrue( any(issubclass(w.category, NumericalWarning) for w in ws) ) # ensure that we fail if noise ensures that jitter does not help gp.likelihood = GaussianLikelihood( noise_constraint=Interval(-2, -1, transform=None, initial_value=-1.5) ) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) with self.assertRaises(NotPSDError): fit_gpytorch_model(mll, options=options, max_retries=2) # ensure we can handle NaNErrors in the optimizer with mock.patch.object(SingleTaskGP, "__call__", side_effect=NanError): gp = SingleTaskGP(X_train, Y_train, likelihood=test_likelihood) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) fit_gpytorch_model( mll, options={"disp": False, "maxiter": 1}, max_retries=1 )
def test_fit_gpytorch_model_singular(self): options = {"disp": False, "maxiter": 5} for dtype in (torch.float, torch.double): X_train = torch.ones(2, 2, device=self.device, dtype=dtype) Y_train = torch.zeros(2, 1, device=self.device, dtype=dtype) test_likelihood = GaussianLikelihood( noise_constraint=GreaterThan(-1e-7, transform=None, initial_value=0.0) ) gp = SingleTaskGP(X_train, Y_train, likelihood=test_likelihood) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) # this will do multiple retries (and emit warnings, which is desired) with warnings.catch_warnings(record=True) as ws, settings.debug(True): fit_gpytorch_model(mll, options=options, max_retries=2) self.assertTrue( any(issubclass(w.category, NumericalWarning) for w in ws) ) # ensure that we fail if noise ensures that jitter does not help gp.likelihood = GaussianLikelihood( noise_constraint=Interval(-2, -1, transform=None, initial_value=-1.5) ) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) with self.assertLogs(level="DEBUG") as logs: fit_gpytorch_model(mll, options=options, max_retries=2) self.assertTrue(any("NotPSDError" in log for log in logs.output)) # ensure we can handle NaNErrors in the optimizer with mock.patch.object(SingleTaskGP, "__call__", side_effect=NanError): gp = SingleTaskGP(X_train, Y_train, likelihood=test_likelihood) mll = ExactMarginalLogLikelihood(gp.likelihood, gp) mll.to(device=self.device, dtype=dtype) fit_gpytorch_model( mll, options={"disp": False, "maxiter": 1}, max_retries=1 ) # ensure we catch NotPSDErrors with mock.patch.object(SingleTaskGP, "__call__", side_effect=NotPSDError): mll = self._getModel() with self.assertLogs(level="DEBUG") as logs: fit_gpytorch_model(mll, max_retries=2) for retry in [1, 2]: self.assertTrue( any( f"Fitting failed on try {retry} due to a NotPSDError." in log for log in logs.output ) ) # Failure due to optimization warning def optimize_w_warning(mll, **kwargs): warnings.warn("Dummy warning.", OptimizationWarning) return mll, None mll = self._getModel() with self.assertLogs(level="DEBUG") as logs, settings.debug(True): fit_gpytorch_model(mll, optimizer=optimize_w_warning, max_retries=2) self.assertTrue( any("Fitting failed on try 1." in log for log in logs.output) )