Code example #1
    def test_constrained_expected_improvement_batch(self):
        for dtype in (torch.float, torch.double):
            mean = torch.tensor(
                [[-0.5, 0.0, 5.0, 0.0], [0.0, 0.0, 5.0, 0.0],
                 [0.5, 0.0, 5.0, 0.0]],
                device=self.device,
                dtype=dtype,
            ).unsqueeze(dim=-2)
            variance = torch.ones(3, 4, device=self.device,
                                  dtype=dtype).unsqueeze(dim=-2)
            N = torch.distributions.Normal(loc=0.0, scale=1.0)
            a = N.icdf(
                torch.tensor(0.75))  # get a so that P(-a <= N <= a) = 0.5
            mm = MockModel(MockPosterior(mean=mean, variance=variance))
            module = ConstrainedExpectedImprovement(
                model=mm,
                best_f=0.0,
                objective_index=0,
                constraints={
                    1: [None, 0],
                    2: [5.0, None],
                    3: [-a, a]
                },
            )
            X = torch.empty(3, 1, 1, device=self.device, dtype=dtype)  # dummy
            ei = module(X)
            self.assertTrue(ei.shape == torch.Size([3]))
            ei_expected_unconstrained = torch.tensor(
                [0.19780, 0.39894, 0.69780], device=self.device, dtype=dtype)
            ei_expected = ei_expected_unconstrained * 0.5 * 0.5 * 0.5
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))
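The hard-coded expectations above can be reproduced from the closed-form expected improvement and the constraint-satisfaction probabilities. Below is a minimal verification sketch in plain torch (not part of the original test): each objective mean mu in {-0.5, 0.0, 0.5} with unit variance and best_f = 0 gives the unconstrained EI values 0.19780, 0.39894, 0.69780, and each of the three constraints is satisfied with probability 0.5 at the posterior used in the test, hence the 0.5 ** 3 scaling.

    import torch
    from torch.distributions import Normal

    std_normal = Normal(0.0, 1.0)

    def analytic_ei(mu, sigma, best_f):
        # Closed-form EI for maximization: (mu - f*) * Phi(z) + sigma * phi(z),
        # with z = (mu - f*) / sigma.
        z = (mu - best_f) / sigma
        return (mu - best_f) * std_normal.cdf(z) + sigma * std_normal.log_prob(z).exp()

    mu = torch.tensor([-0.5, 0.0, 0.5])    # objective means of the three batch points
    ei_unconstrained = analytic_ei(mu, torch.ones(3), best_f=0.0)
    print(ei_unconstrained)                # ~[0.1978, 0.3989, 0.6978]
    print(ei_unconstrained * 0.5 ** 3)     # expected constrained EI asserted in the test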
Code example #2

    def __init__(self, model_list: List[Model], constraints,
                 options: dict) -> None:

        # best_f = torch.min(model_list.models[0].train_targets)

        # Initialize parent classes in the following order:
        ConstrainedExpectedImprovement.__init__(self,
                                                model=model_list,
                                                best_f=0.0,
                                                objective_index=0,
                                                constraints=constraints,
                                                maximize=False)

        AcquisitionBaseToolsConstrained.__init__(
            self,
            model_list=model_list,
            Nrestarts_eta_c=options.optimization.Nrestarts)

        logger.info("Starting EIC ...")

        # idxm is assumed to be a module-level dict mapping model roles
        # (e.g. 'obj', 'cons') to their indices in model_list.
        self.dim = model_list.models[idxm['cons']].dim
        self.Nrestarts = options.optimization.Nrestarts
        self.algo_name = options.optimization.algo_name
        self.constrained_opt = OptimizationNonLinear(
            dim=self.dim,
            fun_obj=self.forward,
            algo_str=self.algo_name,
            bounds=[[0.0] * self.dim, [1.0] * self.dim],
            minimize=False,
            what2optimize_str="EIC acquisition")
        # self.use_nlopt = False
        self.disp_info_scipy_opti = options.optimization.disp_info_scipy_opti

        self._rho_conserv = options.prob_satisfaction
        self.x_next, self.alpha_next = None, None
        self.only_prob = False
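The constructor reads only a handful of attributes from options (optimization.Nrestarts, optimization.algo_name, optimization.disp_info_scipy_opti, and prob_satisfaction). A hypothetical stand-in built with SimpleNamespace, shown purely to illustrate the expected structure (the original repository presumably uses its own config object, and the names of solvers and numeric values below are placeholders), could look like:

    from types import SimpleNamespace

    # Hypothetical options object: only the attributes read by the constructor
    # above are provided; solver name and numeric values are placeholders.
    options = SimpleNamespace(
        optimization=SimpleNamespace(
            Nrestarts=10,                # restarts when optimizing the acquisition
            algo_name="LN_BOBYQA",       # solver identifier passed to OptimizationNonLinear
            disp_info_scipy_opti=False,  # verbosity flag for the scipy-based optimizer
        ),
        prob_satisfaction=0.99,          # target probability of constraint satisfaction
    )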
Code example #3
    def test_constrained_expected_improvement(self):
        for dtype in (torch.float, torch.double):
            # one constraint
            mean = torch.tensor([[-0.5, 0.0]], device=self.device,
                                dtype=dtype).unsqueeze(dim=-2)
            variance = torch.ones(1, 2, device=self.device,
                                  dtype=dtype).unsqueeze(dim=-2)
            mm = MockModel(MockPosterior(mean=mean, variance=variance))
            module = ConstrainedExpectedImprovement(model=mm,
                                                    best_f=0.0,
                                                    objective_index=0,
                                                    constraints={1: [None, 0]})
            X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected_unconstrained = torch.tensor(0.19780,
                                                     device=self.device,
                                                     dtype=dtype)
            ei_expected = ei_expected_unconstrained * 0.5
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))

            # check that error raised if no constraints
            with self.assertRaises(ValueError):
                module = ConstrainedExpectedImprovement(model=mm,
                                                        best_f=0.0,
                                                        objective_index=0,
                                                        constraints={})

            # check that error raised if objective is a constraint
            with self.assertRaises(ValueError):
                module = ConstrainedExpectedImprovement(
                    model=mm,
                    best_f=0.0,
                    objective_index=0,
                    constraints={0: [None, 0]})

            # check that error raised if constraint lower > upper
            with self.assertRaises(ValueError):
                module = ConstrainedExpectedImprovement(
                    model=mm,
                    best_f=0.0,
                    objective_index=0,
                    constraints={0: [1, 0]})

            # three constraints
            N = torch.distributions.Normal(loc=0.0, scale=1.0)
            a = N.icdf(
                torch.tensor(0.75))  # get a so that P(-a <= N <= a) = 0.5
            mean = torch.tensor([[-0.5, 0.0, 5.0, 0.0]],
                                device=self.device,
                                dtype=dtype).unsqueeze(dim=-2)
            variance = torch.ones(1, 4, device=self.device,
                                  dtype=dtype).unsqueeze(dim=-2)
            mm = MockModel(MockPosterior(mean=mean, variance=variance))
            module = ConstrainedExpectedImprovement(
                model=mm,
                best_f=0.0,
                objective_index=0,
                constraints={
                    1: [None, 0],
                    2: [5.0, None],
                    3: [-a, a]
                },
            )
            X = torch.empty(1, 1, device=self.device, dtype=dtype)  # dummy
            ei = module(X)
            ei_expected_unconstrained = torch.tensor(0.19780,
                                                     device=self.device,
                                                     dtype=dtype)
            ei_expected = ei_expected_unconstrained * 0.5 * 0.5 * 0.5
            self.assertTrue(torch.allclose(ei, ei_expected, atol=1e-4))
            # test maximize=False (minimization of the objective)
            module_min = ConstrainedExpectedImprovement(
                model=mm,
                best_f=0.0,
                objective_index=0,
                constraints={1: [None, 0]},
                maximize=False,
            )
            ei_min = module_min(X)
            ei_expected_unconstrained_min = torch.tensor(0.6978,
                                                         device=self.device,
                                                         dtype=dtype)
            ei_expected_min = ei_expected_unconstrained_min * 0.5
            self.assertTrue(torch.allclose(ei_min, ei_expected_min, atol=1e-4))
            # test invalid constraints
            with self.assertRaises(ValueError):
                ConstrainedExpectedImprovement(
                    model=mm,
                    best_f=0.0,
                    objective_index=0,
                    constraints={1: [1.0, -1.0]},
                )
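The maximize=False expectation (0.6978 before the constraint factor) follows the same pattern, with improvement measured below best_f instead of above it. A short check in plain torch (again not part of the original test):

    import torch
    from torch.distributions import Normal

    std_normal = Normal(0.0, 1.0)

    # Closed-form EI for minimization: (f* - mu) * Phi(z) + sigma * phi(z),
    # with z = (f* - mu) / sigma; here mu = -0.5, sigma = 1, f* = 0.
    mu, sigma, best_f = torch.tensor(-0.5), torch.tensor(1.0), torch.tensor(0.0)
    z = (best_f - mu) / sigma
    ei_min = (best_f - mu) * std_normal.cdf(z) + sigma * std_normal.log_prob(z).exp()
    print(ei_min)        # ~0.6978
    print(ei_min * 0.5)  # times P(constraint satisfied) = 0.5, as asserted above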