Example #1
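Both examples are methods of a BoTorch-style test case (they call self.assertTrue, and Example #1 also uses self.device, which BoTorch's BotorchTestCase provides). To run them outside the original test module, roughly the following imports would be needed; the paths below assume BoTorch's usual layout plus GPyTorch's distributions module, and are a sketch rather than a copy of the original test header.

import torch
from botorch.acquisition.analytic import ExpectedImprovement
from botorch.acquisition.objective import ScalarizedPosteriorTransform
from botorch.exceptions import UnsupportedError
from botorch.posteriors import GPyTorchPosterior
from botorch.utils.testing import MockModel, MockPosterior
from gpytorch.distributions import MultitaskMultivariateNormal, MultivariateNormal
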
    def test_expected_improvement(self):
        for dtype in (torch.float, torch.double):
            mean = torch.tensor([[-0.5]], device=self.device, dtype=dtype)
            variance = torch.ones(1, 1, device=self.device, dtype=dtype)
            mm = MockModel(MockPosterior(mean=mean, variance=variance))

            # basic test
            module = ExpectedImprovement(model=mm, best_f=0.0)
            X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected = torch.tensor(0.19780, device=self.device, dtype=dtype)
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))

            # test maximize
            module = ExpectedImprovement(model=mm, best_f=0.0, maximize=False)
            X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected = torch.tensor(0.6978, device=self.device, dtype=dtype)
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))
            with self.assertRaises(UnsupportedError):
                module.set_X_pending(None)

            # test posterior transform (single-output)
            mean = torch.tensor([0.5], device=self.device, dtype=dtype)
            covar = torch.tensor([[0.16]], device=self.device, dtype=dtype)
            mvn = MultivariateNormal(mean, covar)
            p = GPyTorchPosterior(mvn)
            mm = MockModel(p)
            weights = torch.tensor([0.5], device=self.device, dtype=dtype)
            transform = ScalarizedPosteriorTransform(weights)
            ei = ExpectedImprovement(
                model=mm, best_f=0.0, posterior_transform=transform
            )
            X = torch.rand(1, 2, device=self.device, dtype=dtype)
            ei_expected = torch.tensor(0.2601, device=self.device, dtype=dtype)
            self.assertTrue(torch.allclose(ei(X), ei_expected, atol=1e-4))

            # test posterior transform (multi-output)
            mean = torch.tensor([[-0.25, 0.5]], device=self.device, dtype=dtype)
            covar = torch.tensor(
                [[[0.5, 0.125], [0.125, 0.5]]], device=self.device, dtype=dtype
            )
            mvn = MultitaskMultivariateNormal(mean, covar)
            p = GPyTorchPosterior(mvn)
            mm = MockModel(p)
            weights = torch.tensor([2.0, 1.0], device=self.device, dtype=dtype)
            transform = ScalarizedPosteriorTransform(weights)
            ei = ExpectedImprovement(
                model=mm, best_f=0.0, posterior_transform=transform
            )
            X = torch.rand(1, 2, device=self.device, dtype=dtype)
            ei_expected = torch.tensor(0.6910, device=self.device, dtype=dtype)
            self.assertTrue(torch.allclose(ei(X), ei_expected, atol=1e-4))
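
The hard-coded expected values (0.19780, 0.6978, 0.2601, 0.6910) come from the closed-form expected improvement of a scalar Gaussian posterior, EI = sigma * (u * Phi(u) + phi(u)) with u = (mu - best_f) / sigma (the sign of u flips when maximize=False), applied after scalarizing the posterior with the given weights. A minimal standalone sketch, with no BoTorch dependency, that reproduces all four constants:

import math

def normal_pdf(u):
    return math.exp(-0.5 * u * u) / math.sqrt(2.0 * math.pi)

def normal_cdf(u):
    return 0.5 * (1.0 + math.erf(u / math.sqrt(2.0)))

def analytic_ei(mu, sigma, best_f, maximize=True):
    # Closed-form EI for a scalar Gaussian posterior N(mu, sigma^2).
    u = (mu - best_f) / sigma if maximize else (best_f - mu) / sigma
    return sigma * (u * normal_cdf(u) + normal_pdf(u))

# basic test: posterior N(-0.5, 1), best_f = 0
print(analytic_ei(-0.5, 1.0, 0.0))                  # ~0.19780
# maximize=False flips the sign of u
print(analytic_ei(-0.5, 1.0, 0.0, maximize=False))  # ~0.6978

# single-output transform: weight 0.5 scales both the mean and the std deviation
print(analytic_ei(0.5 * 0.5, 0.5 * math.sqrt(0.16), 0.0))  # ~0.2601

# multi-output transform: scalarized mean w @ mu and variance w @ Sigma @ w
mu, w = [-0.25, 0.5], [2.0, 1.0]
Sigma = [[0.5, 0.125], [0.125, 0.5]]
m = sum(wi * mi for wi, mi in zip(w, mu))
v = sum(w[i] * Sigma[i][j] * w[j] for i in range(2) for j in range(2))
print(analytic_ei(m, math.sqrt(v), 0.0))            # ~0.6910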
Example #2
    def test_expected_improvement(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        for dtype in (torch.float, torch.double):
            mean = torch.tensor([[-0.5]], device=device, dtype=dtype)
            variance = torch.ones(1, 1, device=device, dtype=dtype)
            mm = MockModel(MockPosterior(mean=mean, variance=variance))

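            # basic test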
            module = ExpectedImprovement(model=mm, best_f=0.0)
            X = torch.empty(1, 1, device=device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected = torch.tensor(0.19780, device=device, dtype=dtype)
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))

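            # test maximize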
            module = ExpectedImprovement(model=mm, best_f=0.0, maximize=False)
            X = torch.empty(1, 1, device=device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected = torch.tensor(0.6978, device=device, dtype=dtype)
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))
            with self.assertRaises(UnsupportedError):
                module.set_X_pending(None)
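
Example #2 is an older variant of the same test: it takes a cuda flag instead of relying on self.device. Test classes written in this style are typically paired with a thin CUDA wrapper along the lines of the sketch below; the wrapper's name and the skip-when-no-GPU behaviour are assumptions, not part of the snippet above.

    def test_expected_improvement_cuda(self):
        # Re-run the same assertions on the GPU; do nothing when CUDA is unavailable.
        if torch.cuda.is_available():
            self.test_expected_improvement(cuda=True)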