Example #1
 def test_list_surrogate_choice(self, _, mock_extract_training_data):
     model = BoTorchModel()
     model.fit(
         Xs=self.Xs,
         Ys=self.Ys,
         Yvars=self.Yvars,
         search_space_digest=self.search_space_digest,
         metric_names=self.metric_names_for_list_surrogate,
         candidate_metadata=self.candidate_metadata,
     )
     # A list surrogate should be chosen, since Xs are not all the same.
     self.assertIsInstance(model.surrogate.model, ModelListGP)
     for submodel in model.surrogate.model.models:
         # There are fidelity features and nonempty Yvars, so
         # fixed noise MFGP should be chosen.
         self.assertIsInstance(submodel, FixedNoiseMultiFidelityGP)
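     # Generate a candidate to exercise the gen path on top of the list surrogate.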
     model.gen(
         n=1,
         bounds=self.bounds,
         objective_weights=self.objective_weights,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         pending_observations=self.pending_observations,
         model_gen_options=self.model_gen_options,
         rounding_func=self.rounding_func,
         target_fidelities=self.search_space_digest.target_fidelities,
     )
     mock_extract_training_data.assert_called_once()
     self.assertIsInstance(
         mock_extract_training_data.call_args[1]["surrogate"],
         ListSurrogate)
Example #2
 def test_list_surrogate_choice(self, _, mock_extract_training_data):
     model = BoTorchModel()
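     # Fit with per-metric training data; these metric names should lead to a ListSurrogate being chosen.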
     model.fit(
         Xs=self.Xs,
         Ys=self.Ys,
         Yvars=self.Yvars,
         bounds=self.bounds,
         task_features=self.task_features,
         feature_names=self.feature_names,
         metric_names=self.metric_names_for_list_surrogate,
         fidelity_features=self.fidelity_features,
         target_fidelities=self.target_fidelities,
         candidate_metadata=self.candidate_metadata,
     )
     model.gen(
         n=1,
         bounds=self.bounds,
         objective_weights=self.objective_weights,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         pending_observations=self.pending_observations,
         model_gen_options=self.model_gen_options,
         rounding_func=self.rounding_func,
         target_fidelities=self.target_fidelities,
     )
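     # Training data should have been extracted exactly once, for a ListSurrogate.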
     mock_extract_training_data.assert_called_once()
     self.assertIsInstance(
         mock_extract_training_data.call_args[1]["surrogate"], ListSurrogate
     )
Example #3
File: test_model.py  Project: viotemp1/Ax
 def test_gen(
     self,
     mock_choose_botorch_acqf_class,
     mock_inequality_constraints,
     mock_rounding,
     mock_kg,
     mock_construct_options,
 ):
     mock_kg.return_value.optimize.return_value = (
         torch.tensor([1.0]),
         torch.tensor([2.0]),
     )
     model = BoTorchModel(
         surrogate=self.surrogate,
         acquisition_class=KnowledgeGradient,
         acquisition_options=self.acquisition_options,
     )
     model.surrogate.construct(training_data=self.training_data,
                               fidelity_features=self.fidelity_features)
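     # Clear the acquisition function class so `gen` has to pick one via `choose_botorch_acqf_class`.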
     model._botorch_acqf_class = None
     model.gen(
         n=1,
         bounds=self.bounds,
         objective_weights=self.objective_weights,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         pending_observations=self.pending_observations,
         model_gen_options=self.model_gen_options,
         rounding_func=self.rounding_func,
         target_fidelities=self.target_fidelities,
     )
     # Assert `construct_acquisition_and_optimizer_options` called with kwargs
     mock_construct_options.assert_called_with(
         acqf_options=self.acquisition_options,
         model_gen_options=self.model_gen_options,
     )
     # Assert `choose_botorch_acqf_class` is called
     mock_choose_botorch_acqf_class.assert_called_once()
     self.assertEqual(model._botorch_acqf_class, qKnowledgeGradient)
     # Assert `acquisition_class` called with kwargs
     mock_kg.assert_called_with(
         surrogate=self.surrogate,
         botorch_acqf_class=model.botorch_acqf_class,
         bounds=self.bounds,
         objective_weights=self.objective_weights,
         outcome_constraints=self.outcome_constraints,
         linear_constraints=self.linear_constraints,
         fixed_features=self.fixed_features,
         pending_observations=self.pending_observations,
         target_fidelities=self.target_fidelities,
         options=self.acquisition_options,
     )
     # Assert `optimize` called with kwargs
     mock_kg.return_value.optimize.assert_called_with(
         bounds=ANY,
         n=1,
         inequality_constraints=[],
         fixed_features=self.fixed_features,
         rounding_func="func",
         optimizer_options=self.optimizer_options,
     )
Example #4
    def test_gen(
        self,
        mock_choose_botorch_acqf_class,
        mock_inequality_constraints,
        mock_rounding,
        mock_kg,
        mock_construct_options,
    ):
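        # Have the mocked acquisition's `optimize` return dummy candidate and value tensors.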
        mock_kg.return_value.optimize.return_value = (
            torch.tensor([1.0]),
            torch.tensor([2.0]),
        )
        model = BoTorchModel(
            surrogate=self.surrogate,
            acquisition_class=KnowledgeGradient,
            acquisition_options=self.acquisition_options,
        )
        model.surrogate.construct(
            training_data=self.training_data,
            fidelity_features=self.search_space_digest.fidelity_features,
        )
        model._botorch_acqf_class = None
        # Assert that error is raised if we haven't fit the model
        with self.assertRaises(RuntimeError):
            model.gen(
                n=1,
                bounds=self.search_space_digest.bounds,
                objective_weights=self.objective_weights,
                outcome_constraints=self.outcome_constraints,
                linear_constraints=self.linear_constraints,
                fixed_features=self.fixed_features,
                pending_observations=self.pending_observations,
                model_gen_options=self.model_gen_options,
                rounding_func=self.rounding_func,
                target_fidelities=self.search_space_digest.target_fidelities,
            )
        # Add search space digest reference to make the model think it's been fit
        model._search_space_digest = self.search_space_digest
        model.gen(
            n=1,
            bounds=self.search_space_digest.bounds,
            objective_weights=self.objective_weights,
            outcome_constraints=self.outcome_constraints,
            linear_constraints=self.linear_constraints,
            fixed_features=self.fixed_features,
            pending_observations=self.pending_observations,
            model_gen_options=self.model_gen_options,
            rounding_func=self.rounding_func,
            target_fidelities=self.search_space_digest.target_fidelities,
        )

        # Assert `construct_acquisition_and_optimizer_options` called with kwargs
        mock_construct_options.assert_called_with(
            acqf_options=self.acquisition_options,
            model_gen_options=self.model_gen_options,
        )
        # Assert `choose_botorch_acqf_class` is called
        mock_choose_botorch_acqf_class.assert_called_once()
        self.assertEqual(model._botorch_acqf_class, qKnowledgeGradient)
        # Assert `acquisition_class` called with kwargs
        mock_kg.assert_called_with(
            surrogate=self.surrogate,
            botorch_acqf_class=model.botorch_acqf_class,
            search_space_digest=self.search_space_digest,
            objective_weights=self.objective_weights,
            objective_thresholds=self.objective_thresholds,
            outcome_constraints=self.outcome_constraints,
            linear_constraints=self.linear_constraints,
            fixed_features=self.fixed_features,
            pending_observations=self.pending_observations,
            options=self.acquisition_options,
        )
        # Assert `optimize` called with kwargs
        mock_kg.return_value.optimize.assert_called_with(
            n=1,
            search_space_digest=self.search_space_digest,
            inequality_constraints=[],
            fixed_features=self.fixed_features,
            rounding_func="func",
            optimizer_options=self.optimizer_options,
        )
Example #5
    def test_MOO(self, _):
        # Add mock for qNEHVI input constructor to catch arguments passed to it.
        qNEHVI_input_constructor = get_acqf_input_constructor(
            qNoisyExpectedHypervolumeImprovement)
        mock_input_constructor = mock.MagicMock(
            qNEHVI_input_constructor, side_effect=qNEHVI_input_constructor)
        _register_acqf_input_constructor(
            acqf_cls=qNoisyExpectedHypervolumeImprovement,
            input_constructor=mock_input_constructor,
        )

        model = BoTorchModel()
        model.fit(
            Xs=self.moo_training_data.Xs,
            Ys=self.moo_training_data.Ys,
            Yvars=self.moo_training_data.Yvars,
            search_space_digest=self.search_space_digest,
            metric_names=self.moo_metric_names,
            candidate_metadata=self.candidate_metadata,
        )
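        # The MOO training data should be fit with a single multi-output FixedNoiseGP.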
        self.assertIsInstance(model.surrogate.model, FixedNoiseGP)
        _, _, gen_metadata, _ = model.gen(
            n=1,
            bounds=self.search_space_digest.bounds,
            objective_weights=self.moo_objective_weights,
            objective_thresholds=self.moo_objective_thresholds,
            outcome_constraints=self.outcome_constraints,
            linear_constraints=self.linear_constraints,
            fixed_features=self.fixed_features,
            pending_observations=self.pending_observations,
            model_gen_options=self.model_gen_options,
            rounding_func=self.rounding_func,
            target_fidelities=self.mf_search_space_digest.target_fidelities,
        )
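        # Inspect the kwargs that were passed to the mocked qNEHVI input constructor.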
        ckwargs = mock_input_constructor.call_args[1]
        self.assertIs(model.botorch_acqf_class,
                      qNoisyExpectedHypervolumeImprovement)
        mock_input_constructor.assert_called_once()
        m = ckwargs["model"]
        self.assertIsInstance(m, FixedNoiseGP)
        self.assertEqual(m.num_outputs, 2)
        training_data = ckwargs["training_data"]
        for attr in ("Xs", "Ys", "Yvars"):
            self.assertTrue(
                all(
                    torch.equal(x1, x2) for x1, x2 in zip(
                        getattr(training_data, attr),
                        getattr(self.moo_training_data, attr),
                    )))
        self.assertTrue(
            torch.equal(ckwargs["objective_thresholds"],
                        self.moo_objective_thresholds[:2]))
        self.assertIsNone(ckwargs["outcome_constraints"], )
        self.assertIsNone(ckwargs["X_pending"], )
        obj_t = gen_metadata["objective_thresholds"]
        self.assertTrue(
            torch.equal(obj_t[:2], self.moo_objective_thresholds[:2]))
        self.assertTrue(np.isnan(obj_t[2].item()))

        self.assertIsInstance(
            ckwargs.get("objective"),
            WeightedMCMultiOutputObjective,
        )
        self.assertTrue(
            torch.equal(
                mock_input_constructor.call_args[1].get("objective").weights,
                self.moo_objective_weights[:2],
            ))
        expected_X_baseline = _filter_X_observed(
            Xs=self.moo_training_data.Xs,
            objective_weights=self.moo_objective_weights,
            outcome_constraints=self.outcome_constraints,
            bounds=self.search_space_digest.bounds,
            linear_constraints=self.linear_constraints,
            fixed_features=self.fixed_features,
        )
        self.assertTrue(
            torch.equal(
                mock_input_constructor.call_args[1].get("X_baseline"),
                expected_X_baseline,
            ))
        # test inferred objective_thresholds
        with ExitStack() as es:
            _mock_model_infer_objective_thresholds = es.enter_context(
                mock.patch(
                    "ax.models.torch.botorch_modular.acquisition."
                    "infer_objective_thresholds",
                    return_value=torch.tensor([9.9, 3.3, float("nan")]),
                ))

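            # Zero weight on the third outcome means only the first two are treated as objectives.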
            objective_weights = torch.tensor([-1.0, -1.0, 0.0])
            outcome_constraints = (
                torch.tensor([[1.0, 0.0, 0.0]]),
                torch.tensor([[10.0]]),
            )
            linear_constraints = (
                torch.tensor([[1.0, 0.0, 0.0]]),
                torch.tensor([[2.0]]),
            )
            _, _, gen_metadata, _ = model.gen(
                n=1,
                bounds=self.search_space_digest.bounds,
                objective_weights=objective_weights,
                outcome_constraints=outcome_constraints,
                linear_constraints=linear_constraints,
                fixed_features=self.fixed_features,
                pending_observations=self.pending_observations,
                model_gen_options=self.model_gen_options,
                rounding_func=self.rounding_func,
                target_fidelities=self.mf_search_space_digest.target_fidelities,
            )
            expected_X_baseline = _filter_X_observed(
                Xs=self.moo_training_data.Xs,
                objective_weights=objective_weights,
                outcome_constraints=outcome_constraints,
                bounds=self.search_space_digest.bounds,
                linear_constraints=linear_constraints,
                fixed_features=self.fixed_features,
            )
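            # Verify the arguments forwarded to `infer_objective_thresholds`.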
            ckwargs = _mock_model_infer_objective_thresholds.call_args[1]
            self.assertTrue(
                torch.equal(
                    ckwargs["objective_weights"],
                    objective_weights,
                ))
            oc = ckwargs["outcome_constraints"]
            self.assertTrue(torch.equal(oc[0], outcome_constraints[0]))
            self.assertTrue(torch.equal(oc[1], outcome_constraints[1]))
            m = ckwargs["model"]
            self.assertIsInstance(m, FixedNoiseGP)
            self.assertEqual(m.num_outputs, 2)
            self.assertIn("objective_thresholds", gen_metadata)
            obj_t = gen_metadata["objective_thresholds"]
            self.assertTrue(torch.equal(obj_t[:2], torch.tensor([9.9, 3.3])))
            self.assertTrue(np.isnan(obj_t[2].item()))

        # Avoid polluting the registry for other tests; re-register correct input
        # constructor for qNEHVI.
        _register_acqf_input_constructor(
            acqf_cls=qNoisyExpectedHypervolumeImprovement,
            input_constructor=qNEHVI_input_constructor,
        )