Example #1
0
 def test_choose_model_class_task_features(self):
     """Tests model-class selection when the search space has task features."""
     # Only a single task feature can be used.
     with self.assertRaisesRegex(NotImplementedError,
                                 "Only a single task feature"):
         choose_model_class(
             Yvars=self.Yvars,
             search_space_digest=SearchSpaceDigest(feature_names=[],
                                                   bounds=[],
                                                   task_features=[1, 2]),
         )
     # With a task feature and unknown variances, use MultiTaskGP.
     self.assertEqual(
         MultiTaskGP,
         choose_model_class(
             Yvars=self.none_Yvars,
             search_space_digest=SearchSpaceDigest(feature_names=[],
                                                   bounds=[],
                                                   task_features=[1]),
         ),
     )
     # With a task feature and known variances, use FixedNoiseMultiTaskGP.
     self.assertEqual(
         FixedNoiseMultiTaskGP,
         choose_model_class(
             Yvars=self.Yvars,
             search_space_digest=SearchSpaceDigest(feature_names=[],
                                                   bounds=[],
                                                   task_features=[1]),
         ),
     )
Example #2
0
 def test_choose_model_class(self):
     """Tests model-class selection from the Yvar specification alone
     (no task, fidelity, or categorical features in the digest).
     """

     def _empty_digest():
         # Fresh digest per call: no features, no bounds.
         return SearchSpaceDigest(feature_names=[], bounds=[])

     # Mixing known and unknown variances is an error.
     with self.assertRaisesRegex(
             ValueError,
             "Variances should all be specified, or none should be."):
         choose_model_class(
             Yvars=[torch.tensor([[0.0], [np.nan]])],
             search_space_digest=_empty_digest(),
         )
     # Known variances (and no fidelity/task features) select FixedNoiseGP.
     self.assertEqual(
         FixedNoiseGP,
         choose_model_class(
             Yvars=self.Yvars,
             search_space_digest=_empty_digest(),
         ),
     )
     # All-NaN (unknown) variances select SingleTaskGP.
     unknown_variances = [torch.tensor([[float("nan")], [float("nan")]])]
     self.assertEqual(
         SingleTaskGP,
         choose_model_class(
             Yvars=unknown_variances,
             search_space_digest=_empty_digest(),
         ),
     )
 def setUp(self):
     """Builds shared multi-task fixtures for the ListSurrogate tests."""
     # Two outcomes: the list surrogate keeps one submodel per outcome.
     self.outcomes = ["outcome_1", "outcome_2"]
     self.mll_class = SumMarginalLogLikelihood
     self.dtype = torch.float
     # Digest with a single task feature (index 0), so model-class
     # selection below goes down the multi-task path.
     self.search_space_digest = SearchSpaceDigest(feature_names=[],
                                                  bounds=[],
                                                  task_features=[0])
     self.task_features = [0]
     # Two independent synthetic datasets, one per outcome.
     Xs1, Ys1, Yvars1, bounds, _, _, _ = get_torch_test_data(
         dtype=self.dtype,
         task_features=self.search_space_digest.task_features)
     Xs2, Ys2, Yvars2, _, _, _, _ = get_torch_test_data(
         dtype=self.dtype,
         task_features=self.search_space_digest.task_features)
     # Pick a submodel class per outcome from its Yvar specification.
     self.botorch_submodel_class_per_outcome = {
         self.outcomes[0]:
         choose_model_class(Yvars=Yvars1,
                            search_space_digest=self.search_space_digest),
         self.outcomes[1]:
         choose_model_class(Yvars=Yvars2,
                            search_space_digest=self.search_space_digest),
     }
     self.expected_submodel_type = FixedNoiseMultiTaskGP
     # Sanity-check the fixture itself: known variances plus a task
     # feature should always yield FixedNoiseMultiTaskGP.
     for submodel_cls in self.botorch_submodel_class_per_outcome.values():
         self.assertEqual(submodel_cls, FixedNoiseMultiTaskGP)
     # Concatenate the per-outcome datasets into flat lists.
     self.Xs = Xs1 + Xs2
     self.Ys = Ys1 + Ys2
     self.Yvars = Yvars1 + Yvars2
     self.training_data = [
         TrainingData(X=X, Y=Y, Yvar=Yvar)
         for X, Y, Yvar in zip(self.Xs, self.Ys, self.Yvars)
     ]
     # Distinct ranks per outcome, so tests can verify that per-outcome
     # options are routed to the right submodel.
     self.submodel_options_per_outcome = {
         self.outcomes[0]: {
             RANK: 1
         },
         self.outcomes[1]: {
             RANK: 2
         },
     }
     self.surrogate = ListSurrogate(
         botorch_submodel_class_per_outcome=self.
         botorch_submodel_class_per_outcome,
         mll_class=self.mll_class,
         submodel_options_per_outcome=self.submodel_options_per_outcome,
     )
     # NOTE(review): these bounds/feature names are hard-coded and do not
     # come from `get_torch_test_data`'s `bounds` — presumably intentional
     # for the tests that consume them; confirm against usages.
     self.bounds = [(0.0, 1.0), (1.0, 4.0)]
     self.feature_names = ["x1", "x2"]
Example #4
0
 def _autoset_surrogate(
     self,
     Xs: List[Tensor],
     Ys: List[Tensor],
     Yvars: List[Tensor],
     task_features: List[int],
     fidelity_features: List[int],
     metric_names: List[str],
 ) -> None:
     """Sets a default surrogate on this model if one was not explicitly
     provided.
     """
     # First see which model class the data as a whole would get;
     # `use_model_list` needs it to decide between the single-model
     # `Surrogate` and the per-outcome `ListSurrogate`.
     default_model_class = choose_model_class(
         Yvars=Yvars,
         task_features=task_features,
         fidelity_features=fidelity_features,
     )
     if not use_model_list(Xs=Xs, botorch_model_class=default_model_class):
         # A single (possibly batched) model suffices; use the class
         # chosen above directly.
         self._surrogate = Surrogate(
             botorch_model_class=default_model_class,
             **self.surrogate_options,
         )
         return
     # `ListSurrogate` / `ModelListGP` case: each outcome may have a
     # different Yvar specification, so pick a submodel class per outcome.
     submodel_class_per_outcome = {}
     for Yvar, metric_name in zip(Yvars, metric_names):
         submodel_class_per_outcome[metric_name] = choose_model_class(
             Yvars=[Yvar],
             task_features=task_features,
             fidelity_features=fidelity_features,
         )
     self._surrogate = ListSurrogate(
         botorch_submodel_class_per_outcome=submodel_class_per_outcome,
         **self.surrogate_options,
     )
Example #5
0
 def test_choose_model_class_fidelity_features(self):
     """Tests model-class selection when fidelity features are present."""

     def _choose(Yvars, task_features, fidelity_features):
         # Thin wrapper to keep each case on one line.
         return choose_model_class(Yvars=Yvars,
                                   task_features=task_features,
                                   fidelity_features=fidelity_features)

     # Only a single fidelity feature can be used.
     with self.assertRaisesRegex(NotImplementedError,
                                 "Only a single fidelity feature"):
         _choose(self.Yvars, [], [1, 2])
     # Combining task and fidelity features is not supported yet.
     with self.assertRaisesRegex(NotImplementedError,
                                 "Multi-task multi-fidelity"):
         _choose(self.Yvars, [1], [1])
     # Fidelity feature + unknown variances -> SingleTaskMultiFidelityGP.
     self.assertEqual(
         SingleTaskMultiFidelityGP,
         _choose(self.none_Yvars, [], [2]),
     )
     # Fidelity feature + known variances -> FixedNoiseMultiFidelityGP.
     self.assertEqual(
         FixedNoiseMultiFidelityGP,
         _choose(self.Yvars, [], [2]),
     )
Example #6
0
 def test_choose_model_class_discrete_features(self):
     """Tests model-class selection with categorical (discrete) features."""
     # With discrete features, use MixedSingleTaskGP.
     self.assertEqual(
         MixedSingleTaskGP,
         choose_model_class(
             Yvars=self.none_Yvars,
             search_space_digest=SearchSpaceDigest(
                 feature_names=[],
                 bounds=[],
                 task_features=[],
                 categorical_features=[1],
             ),
         ),
     )
Example #7
0
 def test_choose_model_class(self):
     """Tests model-class selection through the Xs/Ys/Yvars signature."""

     def _choose(Yvars, task_features, fidelity_features):
         # All cases share the same Xs/Ys fixtures; only the variance
         # spec and feature indices vary.
         return choose_model_class(
             Xs=self.Xs,
             Ys=self.Ys,
             Yvars=Yvars,
             task_features=task_features,
             fidelity_features=fidelity_features,
         )

     # Task features are not implemented yet.
     with self.assertRaisesRegex(
         NotImplementedError, "do not support `task_features`"
     ):
         _choose(self.Yvars, [1], [])
     # At most one fidelity feature is supported.
     with self.assertRaisesRegex(
         NotImplementedError, "only a single fidelity parameter"
     ):
         _choose(self.Yvars, self.task_features, [1, 2])
     # A fidelity feature selects SingleTaskMultiFidelityGP.
     self.assertEqual(
         SingleTaskMultiFidelityGP,
         _choose(self.Yvars, self.task_features, [2]),
     )
     # Known variances, no fidelity features: FixedNoiseGP.
     self.assertEqual(
         FixedNoiseGP,
         _choose(self.Yvars, self.task_features, []),
     )
     # All-NaN (unknown) variances, no fidelity features: SingleTaskGP.
     unknown_variances = [torch.tensor([[float("nan")], [float("nan")]])]
     self.assertEqual(
         SingleTaskGP,
         _choose(unknown_variances, self.task_features, []),
     )
     # Mixing known and unknown variances is an error.
     with self.assertRaisesRegex(
         ValueError, "Variances should all be specified, or none should be."
     ):
         _choose([torch.tensor([[0.0], [float("nan")]])],
                 self.task_features, [])
Example #8
0
    def fit(
        self,
        Xs: List[Tensor],
        Ys: List[Tensor],
        Yvars: List[Tensor],
        bounds: List[Tuple[float, float]],
        task_features: List[int],
        feature_names: List[str],
        metric_names: List[str],
        fidelity_features: List[int],
        target_fidelities: Optional[Dict[int, float]] = None,
        candidate_metadata: Optional[List[List[TCandidateMetadata]]] = None,
    ) -> None:
        """Fits the surrogate to the given data, auto-selecting the
        surrogate's model and MLL classes if none was set beforehand.

        Args:
            Xs: One tensor of input features per outcome.
            Ys: One tensor of observed values per outcome.
            Yvars: One tensor of observation variances per outcome (NaN
                entries denote unknown variance).
            bounds: Per-feature (lower, upper) bounds of the search space.
            task_features: Indices of the task features in the inputs.
            feature_names: Names of the input features.
            metric_names: Names of the outcomes, aligned with Xs/Ys/Yvars.
            fidelity_features: Indices of the fidelity features.
            target_fidelities: Optional mapping of fidelity-feature index
                to its target value.
            candidate_metadata: Optional per-outcome, per-point metadata.
        """
        # Ensure that parts of data all have equal lengths.
        validate_data_format(Xs=Xs,
                             Ys=Ys,
                             Yvars=Yvars,
                             metric_names=metric_names)

        # Choose `Surrogate` and underlying `Model` based on properties of data.
        if not self._surrogate:
            model_class = choose_model_class(
                Xs=Xs,
                Ys=Ys,
                Yvars=Yvars,
                task_features=task_features,
                fidelity_features=fidelity_features,
            )
            mll_class = choose_mll_class(
                model_class=model_class,
                state_dict=self.surrogate_fit_options.get(
                    Keys.STATE_DICT, None),
                refit=self.surrogate_fit_options.get(Keys.REFIT_ON_UPDATE,
                                                     True),
            )
            self._surrogate = Surrogate(botorch_model_class=model_class,
                                        mll_class=mll_class)

        # Construct `TrainingData` based on properties of data and type of `Model`.
        training_data = construct_training_data(
            Xs=Xs,
            Ys=Ys,
            Yvars=Yvars,
            model_class=self.surrogate.botorch_model_class)

        # Fit the model. When refitting from scratch (refit-on-update
        # without warm starting), drop any stored state dict first so the
        # fit below does not warm-start from it.
        if self.surrogate_fit_options.get(
                Keys.REFIT_ON_UPDATE,
                True) and not self.surrogate_fit_options.get(
                    Keys.WARM_START_REFITTING, True):
            self.surrogate_fit_options[Keys.STATE_DICT] = None
        self.surrogate.fit(
            training_data=training_data,
            bounds=bounds,
            task_features=task_features,
            feature_names=feature_names,
            fidelity_features=fidelity_features,
            target_fidelities=target_fidelities,
            metric_names=metric_names,
            candidate_metadata=candidate_metadata,
            state_dict=self.surrogate_fit_options.get(Keys.STATE_DICT, None),
            refit=self.surrogate_fit_options.get(Keys.REFIT_ON_UPDATE, True),
        )