    def covar_cache(self):
        # Code adapted from the source of
        # gpytorch.models.exact_prediction_strategies.DefaultPredictionStrategy

        inputs_shape = self.train_prior_dist.mean.shape
        tm1_noise_shape = torch.Size(inputs_shape[:-1] +
                                     (inputs_shape[-1] - 1, ))
        noise_covar = self.likelihood._shaped_noise_covar(
            base_shape=tm1_noise_shape)
        train_train_nonoise_covar = self.train_prior_dist.lazy_covariance_matrix
        train_train_covar = BidiagonalQuadraticLazyTensor(
            bidiagonal_tensor=self.discount_tensor,
            center_tensor=train_train_nonoise_covar) + noise_covar
        # Equivalent explicit formulation of the same quadratic form, kept for
        # reference (superseded by the BidiagonalQuadraticLazyTensor above):
        # train_train_covar = self.discount_tensor.matmul(
        #     self.discount_tensor.transpose(-1, -2).left_matmul(
        #         train_train_nonoise_covar
        #     )
        # ) + noise_covar

        train_train_covar_inv_root = delazify(
            self.discount_tensor.transpose(-1, -2).matmul(
                train_train_covar.root_inv_decomposition().root))
        return self._exact_predictive_covar_inv_quad_form_cache(
            train_train_covar_inv_root, self._last_test_train_covar)
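
The method above assumes `torch`, `delazify`, and the project-specific `BidiagonalQuadraticLazyTensor` are already imported. Judging from the commented-out formulation kept for reference, that lazy tensor is assumed to represent the quadratic form `D @ K @ D^T` built from the discount tensor and the noise-free train covariance; a minimal dense sketch of that computation, with illustrative names only:

import torch


def dense_train_train_covar(discount, center, noise):
    # Dense reference for the assumed quadratic form D @ K @ D^T + noise;
    # mirrors the matmul chain commented out in covar_cache above.
    return discount @ center @ discount.transpose(-1, -2) + noise
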
Example #2
    def test_getitem_tensor_index(self):
        lazy_tensor = self.create_lazy_tensor()
        evaluated = self.evaluate_lazy_tensor(lazy_tensor)

        # Non-batch case
        if lazy_tensor.ndimension() == 2:
            index = (torch.tensor([0, 0, 1, 2]), torch.tensor([0, 1, 0, 2]))
            res, actual = lazy_tensor[index], evaluated[index]
            self.assertAllClose(res, actual)
            index = (torch.tensor([0, 0, 1, 2]), slice(None, None, None))
            res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
            self.assertAllClose(res, actual)
            index = (slice(None, None, None), torch.tensor([0, 0, 1, 2]))
            res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
            self.assertAllClose(res, actual)
            index = (torch.tensor([0, 0, 1, 2]), Ellipsis)
            res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
            self.assertAllClose(res, actual)
            index = (Ellipsis, torch.tensor([0, 0, 1, 2]))
            res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
            self.assertAllClose(res, actual)
            index = (Ellipsis, torch.tensor([0, 0, 1, 2]), torch.tensor([0, 1, 0, 2]))
            res, actual = lazy_tensor[index], evaluated[index]
            self.assertAllClose(res, actual)

        # Batch case
        else:
            for batch_index in product(
                [torch.tensor([0, 1, 1, 0]), slice(None, None, None)], repeat=(lazy_tensor.dim() - 2)
            ):
                index = (*batch_index, torch.tensor([0, 1, 0, 2]), torch.tensor([1, 2, 0, 1]))
                res, actual = lazy_tensor[index], evaluated[index]
                self.assertAllClose(res, actual)
                index = (*batch_index, torch.tensor([0, 1, 0, 2]), slice(None, None, None))
                res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
                self.assertAllClose(res, actual)
                index = (*batch_index, slice(None, None, None), torch.tensor([0, 1, 2, 1]))
                res, actual = gpytorch.delazify(lazy_tensor[index]), evaluated[index]
                self.assertAllClose(res, actual)
                index = (*batch_index, slice(None, None, None), slice(None, None, None))
                res, actual = lazy_tensor[index].evaluate(), evaluated[index]
                self.assertAllClose(res, actual)

            # Ellipsis
            res = lazy_tensor.__getitem__((Ellipsis, torch.tensor([0, 1, 0, 2]), torch.tensor([1, 2, 0, 1])))
            actual = evaluated.__getitem__((Ellipsis, torch.tensor([0, 1, 0, 2]), torch.tensor([1, 2, 0, 1])))
            self.assertAllClose(res, actual)
            res = gpytorch.delazify(
                lazy_tensor.__getitem__((torch.tensor([0, 1, 0, 1]), Ellipsis, torch.tensor([1, 2, 0, 1])))
            )
            actual = evaluated.__getitem__((torch.tensor([0, 1, 0, 1]), Ellipsis, torch.tensor([1, 2, 0, 1])))
            self.assertAllClose(res, actual)
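
The test above reads like a shared check from a LazyTensor test mixin: it assumes `torch`, `gpytorch`, and `itertools.product` are imported and that the concrete test case provides `create_lazy_tensor` and `evaluate_lazy_tensor`. A minimal illustration of those two hooks (the class name and the choice of `NonLazyTensor` are assumptions, not taken from the snippet):

import torch
import gpytorch


class ExampleNonLazyTensorCase:
    """Hypothetical concrete case supplying the two hooks the test relies on."""

    def create_lazy_tensor(self):
        # Wrap a dense SPD matrix so indexing can be checked against the
        # plain torch result returned by evaluate_lazy_tensor.
        mat = torch.randn(4, 4)
        return gpytorch.lazy.NonLazyTensor(mat @ mat.transpose(-1, -2))

    def evaluate_lazy_tensor(self, lazy_tensor):
        return lazy_tensor.evaluate()
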
    def test_getitem_tensor_index(self):
        lazy_tensor = self.create_lazy_tensor()
        evaluated = self.evaluate_lazy_tensor(lazy_tensor)

        # Non-batch case
        if lazy_tensor.ndimension() == 2:
            index = (torch.tensor([0, 0, 1, 2]), torch.tensor([0, 1, 0, 2]))
            res, actual = lazy_tensor[index], evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            index = (torch.tensor([0, 0, 1, 2]), slice(None, None, None))
            res, actual = gpytorch.delazify(
                lazy_tensor[index]), evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            index = (slice(None, None, None), torch.tensor([0, 0, 1, 2]))
            res, actual = gpytorch.delazify(
                lazy_tensor[index]), evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            index = (torch.tensor([0, 0, 1, 2]), Ellipsis)
            res, actual = gpytorch.delazify(
                lazy_tensor[index]), evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            index = (Ellipsis, torch.tensor([0, 0, 1, 2]))
            res, actual = gpytorch.delazify(
                lazy_tensor[index]), evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            index = (Ellipsis, torch.tensor([0, 0, 1, 2]),
                     torch.tensor([0, 1, 0, 2]))
            res, actual = lazy_tensor[index], evaluated[index]
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)

        # Batch case
        else:
            for batch_index in product(
                [torch.tensor([0, 1, 1, 0]), slice(None, None, None)],
                repeat=(lazy_tensor.dim() - 2)
            ):
                index = (*batch_index, torch.tensor([0, 1, 0, 2]),
                         torch.tensor([1, 2, 0, 1]))
                res, actual = lazy_tensor[index], evaluated[index]
                self.assertEqual(res.shape, actual.shape)
                self.assertLess(((res - actual).abs() /
                                 actual.abs().clamp(1, 1e5)).max().item(),
                                1e-1)
                index = (*batch_index, torch.tensor([0, 1, 0, 2]),
                         slice(None, None, None))
                res, actual = gpytorch.delazify(
                    lazy_tensor[index]), evaluated[index]
                self.assertEqual(res.shape, actual.shape)
                self.assertLess(((res - actual).abs() /
                                 actual.abs().clamp(1, 1e5)).max().item(),
                                1e-1)
                index = (*batch_index, slice(None, None, None),
                         torch.tensor([0, 1, 2, 1]))
                res, actual = gpytorch.delazify(
                    lazy_tensor[index]), evaluated[index]
                self.assertEqual(res.shape, actual.shape)
                self.assertLess(((res - actual).abs() /
                                 actual.abs().clamp(1, 1e5)).max().item(),
                                1e-1)
                index = (*batch_index, slice(None, None, None),
                         slice(None, None, None))
                res, actual = lazy_tensor[index].evaluate(), evaluated[index]
                self.assertEqual(res.shape, actual.shape)
                self.assertLess(((res - actual).abs() /
                                 actual.abs().clamp(1, 1e5)).max().item(),
                                1e-1)

            # Ellipsis
            res = lazy_tensor.__getitem__(
                (Ellipsis, torch.tensor([0, 1, 0, 2]),
                 torch.tensor([1, 2, 0, 1])))
            actual = evaluated.__getitem__(
                (Ellipsis, torch.tensor([0, 1, 0, 2]),
                 torch.tensor([1, 2, 0, 1])))
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
            res = gpytorch.delazify(
                lazy_tensor.__getitem__(
                    (torch.tensor([0, 1, 0, 1]), Ellipsis,
                     torch.tensor([1, 2, 0, 1]))))
            actual = evaluated.__getitem__(
                (torch.tensor([0, 1, 0, 1]), Ellipsis,
                 torch.tensor([1, 2, 0, 1])))
            self.assertEqual(res.shape, actual.shape)
            self.assertLess(((res - actual).abs() /
                             actual.abs().clamp(1, 1e5)).max().item(), 1e-1)
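
This second variant replaces `assertAllClose` with an explicit shape check plus a relative-error bound: the denominator is `|actual|` clamped to `[1, 1e5]`, so entries smaller than one are effectively compared by absolute error. The repeated expression, pulled out as a standalone helper for clarity (the function name is illustrative):

import torch


def max_relative_error(res, actual):
    # The quantity the assertions above bound by 1e-1: elementwise absolute
    # error over |actual| clamped to [1, 1e5], reduced with max().
    return ((res - actual).abs() / actual.abs().clamp(1, 1e5)).max().item()
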