def _fit(
    self,
    model: Any,
    search_space: SearchSpace,
    observation_features: List[ObservationFeatures],
    observation_data: List[ObservationData],
) -> None:
    """Fit the underlying model to observed data.

    Records parameter and outcome orderings on ``self``, converts the
    observations into array form, and delegates to ``self._model_fit``.

    Args:
        model: The underlying model object to fit.
        search_space: Search space defining parameter names and bounds.
        observation_features: Features (parameterizations) of observations.
        observation_data: Metric values/variances of observations.
    """
    self.parameters = list(search_space.parameters.keys())
    # Collect the union of metric names seen across all observations.
    all_metric_names: Set[str] = set()
    for od in observation_data:
        all_metric_names.update(od.metric_names)
    self.outcomes = list(all_metric_names)
    # Convert observations to arrays.
    Xs_array, Ys_array, Yvars_array, in_design = _convert_observations(
        observation_data=observation_data,
        observation_features=observation_features,
        outcomes=self.outcomes,
        parameters=self.parameters,
    )
    self.training_in_design = in_design
    # Extract bounds and task features. NOTE(review): get_bounds_and_task
    # returns a 3-tuple (bounds, task_features, target_fidelities) at every
    # other call site in this file; unpacking only two values here would
    # raise a ValueError at runtime, so the third element is discarded.
    bounds, task_features, _ = get_bounds_and_task(search_space, self.parameters)
    # Fit.
    self._model_fit(
        model=model,
        Xs=Xs_array,
        Ys=Ys_array,
        Yvars=Yvars_array,
        bounds=bounds,
        task_features=task_features,
        feature_names=self.parameters,
    )
def _gen(
    self,
    n: int,
    search_space: SearchSpace,
    pending_observations: Dict[str, List[ObservationFeatures]],
    fixed_features: ObservationFeatures,
    optimization_config: Optional[OptimizationConfig],
    model_gen_options: Optional[TConfig],
) -> Tuple[
    List[ObservationFeatures],
    List[float],
    Optional[ObservationFeatures],
    TGenMetadata,
]:
    """Generate new candidates according to a search_space."""
    # Bounds for each tunable parameter; task/fidelity info is unused here.
    bounds, _, _ = get_bounds_and_task(search_space, self.parameters)
    # Parameters pinned to fixed values for this generation call.
    fixed_features_dict = get_fixed_features(fixed_features, self.parameters)
    # Linear parameter constraints in array form.
    linear_constraints = extract_parameter_constraints(
        search_space.parameter_constraints, self.parameters
    )
    # Candidates are rounded back into the search space by this callback.
    rounding = transform_callback(self.parameters, self.transforms)
    X, w = self.model.gen(
        n=n,
        bounds=bounds,
        linear_constraints=linear_constraints,
        fixed_features=fixed_features_dict,
        model_gen_options=model_gen_options,
        rounding_func=rounding,
    )
    # Convert the candidate array back into ObservationFeatures.
    obs_feats = parse_observation_features(X, self.parameters)
    return obs_feats, w.tolist(), None, {}
def _update(
    self,
    search_space: SearchSpace,
    observation_features: List[ObservationFeatures],
    observation_data: List[ObservationData],
) -> None:
    """Apply terminal transform for update data, and pass along to model."""
    # Array-ify the new observations in the established parameter/outcome order.
    Xs, Ys, Yvars, metadata = _convert_observations(
        observation_data=observation_data,
        observation_features=observation_features,
        outcomes=self.outcomes,
        parameters=self.parameters,
    )
    bounds, task_features, target_fidelities = get_bounds_and_task(
        search_space=search_space, param_names=self.parameters
    )
    # Update in-design status for these new points.
    self._model_update(
        Xs=Xs,
        Ys=Ys,
        Yvars=Yvars,
        candidate_metadata=metadata,
        bounds=bounds,
        task_features=task_features,
        feature_names=self.parameters,
        metric_names=self.outcomes,
        fidelity_features=list(target_fidelities.keys()),
        target_fidelities=target_fidelities,
    )
def testGetBoundsAndTask(self):
    """Check bounds/task/fidelity extraction and input validation."""
    # Full parameter list: index 1 carries the target fidelity value 2.0.
    bounds, tasks, fidelities = get_bounds_and_task(
        self.search_space, ["x", "y", "z"]
    )
    self.assertEqual(bounds, [(0.0, 1.0), (1.0, 2.0), (0.0, 5.0)])
    self.assertEqual(tasks, [])
    self.assertEqual(fidelities, {1: 2.0})
    # Dropping the fidelity parameter empties the fidelity map.
    bounds, tasks, fidelities = get_bounds_and_task(self.search_space, ["x", "z"])
    self.assertEqual(fidelities, {})
    # Test that Int param is treated as task feature
    search_space = SearchSpace(self.parameters)
    search_space._parameters["x"] = RangeParameter(
        "x", ParameterType.INT, lower=1, upper=4
    )
    bounds, tasks, fidelities = get_bounds_and_task(search_space, ["x", "y", "z"])
    self.assertEqual(tasks, [0])
    # Test validation
    search_space._parameters["x"] = ChoiceParameter(
        "x", ParameterType.FLOAT, [0.1, 0.4]
    )
    with self.assertRaises(ValueError):
        get_bounds_and_task(search_space, ["x", "y", "z"])
    search_space._parameters["x"] = RangeParameter(
        "x", ParameterType.FLOAT, lower=1.0, upper=4.0, log_scale=True
    )
    with self.assertRaises(ValueError):
        get_bounds_and_task(search_space, ["x", "y", "z"])
def _fit(
    self,
    model: Any,
    search_space: SearchSpace,
    observation_features: List[ObservationFeatures],
    observation_data: List[ObservationData],
) -> None:
    """Convert observations to arrays and fit the underlying model.

    Fidelity parameters are moved to the trailing columns of the feature
    matrix before fitting.
    """
    # Partition parameter names so all fidelity parameters come last,
    # each group preserving the search-space key order.
    params = search_space.parameters
    regular = [name for name in params if not params[name].is_fidelity]
    fidelity = [name for name in params if params[name].is_fidelity]
    self.parameters = regular + fidelity
    # Union of metric names over all observations; sorted for a
    # deterministic outcome ordering.
    metric_names: Set[str] = set()
    for datum in observation_data:
        metric_names |= set(datum.metric_names)
    self.outcomes = sorted(metric_names)
    # Convert observations to arrays.
    Xs_array, Ys_array, Yvars_array = _convert_observations(
        observation_data=observation_data,
        observation_features=observation_features,
        outcomes=self.outcomes,
        parameters=self.parameters,
    )
    # Extract bounds and task features.
    bounds, task_features, fidelity_features = get_bounds_and_task(
        search_space, self.parameters
    )
    # Fit.
    self._model_fit(
        model=model,
        Xs=Xs_array,
        Ys=Ys_array,
        Yvars=Yvars_array,
        bounds=bounds,
        task_features=task_features,
        feature_names=self.parameters,
        fidelity_features=fidelity_features,
    )
def _fit(
    self,
    model: Any,
    search_space: SearchSpace,
    observation_features: List[ObservationFeatures],
    observation_data: List[ObservationData],
) -> None:
    """Convert observations to arrays and fit the underlying model."""
    self.parameters = list(search_space.parameters.keys())
    # Deterministic outcome ordering: sorted union of all metric names.
    names: Set[str] = set()
    for datum in observation_data:
        names |= set(datum.metric_names)
    self.outcomes = sorted(names)
    # Convert observations to arrays.
    Xs, Ys, Yvars, metadata = _convert_observations(
        observation_data=observation_data,
        observation_features=observation_features,
        outcomes=self.outcomes,
        parameters=self.parameters,
    )
    # Extract bounds and task features.
    bounds, task_features, target_fidelities = get_bounds_and_task(
        search_space=search_space, param_names=self.parameters
    )
    # Fit.
    self._model_fit(
        model=model,
        Xs=Xs,
        Ys=Ys,
        Yvars=Yvars,
        bounds=bounds,
        task_features=task_features,
        feature_names=self.parameters,
        metric_names=self.outcomes,
        fidelity_features=list(target_fidelities.keys()),
        candidate_metadata=metadata,
    )
def _cross_validate(
    self,
    search_space: SearchSpace,
    obs_feats: List[ObservationFeatures],
    obs_data: List[ObservationData],
    cv_test_points: List[ObservationFeatures],
) -> List[ObservationData]:
    """Make predictions at cv_test_points using only the data in obs_feats
    and obs_data.
    """
    # Training set in array form (candidate metadata is not needed here).
    Xs, Ys, Yvars, _metadata = _convert_observations(
        observation_data=obs_data,
        observation_features=obs_feats,
        outcomes=self.outcomes,
        parameters=self.parameters,
    )
    bounds, task_features, target_fidelities = get_bounds_and_task(
        search_space=search_space, param_names=self.parameters
    )
    # Test matrix: one row per held-out point, columns in parameter order.
    rows = [
        [point.parameters[name] for name in self.parameters]
        for point in cv_test_points
    ]
    X_test = np.array(rows)
    # Use the model to do the cross validation.
    f_test, cov_test = self._model_cross_validate(
        Xs_train=Xs,
        Ys_train=Ys,
        Yvars_train=Yvars,
        X_test=X_test,
        bounds=bounds,
        task_features=task_features,
        feature_names=self.parameters,
        metric_names=self.outcomes,
        fidelity_features=list(target_fidelities.keys()),
    )
    # Convert arrays back to ObservationData.
    return array_to_observation_data(f=f_test, cov=cov_test, outcomes=self.outcomes)
def _gen(
    self,
    n: int,
    search_space: SearchSpace,
    pending_observations: Dict[str, List[ObservationFeatures]],
    fixed_features: ObservationFeatures,
    model_gen_options: Optional[TConfig] = None,
    optimization_config: Optional[OptimizationConfig] = None,
) -> Tuple[
    List[ObservationFeatures],
    List[float],
    Optional[ObservationFeatures],
    TGenMetadata,
]:
    """Generate new candidates according to search_space and
    optimization_config.

    The outcome constraints should be transformed to no longer be relative.
    """
    # Validation: the model must have been fit before generation.
    if not self.parameters:  # pragma: no cover
        raise ValueError(FIT_MODEL_ERROR.format(action="_gen"))
    # Extract bounds; task features are not needed for generation.
    bounds, _, target_fidelities = get_bounds_and_task(
        search_space=search_space, param_names=self.parameters
    )
    target_fidelities = {
        i: float(v) for i, v in target_fidelities.items()  # pyre-ignore [6]
    }
    if optimization_config is None:
        raise ValueError(
            "ArrayModelBridge requires an OptimizationConfig to be specified"
        )
    if not self.outcomes:  # pragma: no cover
        raise ValueError("No outcomes found during model fit--data are missing.")
    validate_optimization_config(optimization_config, self.outcomes)
    # Objective and constraints in array form.
    obj_weights = extract_objective_weights(
        objective=optimization_config.objective, outcomes=self.outcomes
    )
    out_constraints = extract_outcome_constraints(
        outcome_constraints=optimization_config.outcome_constraints,
        outcomes=self.outcomes,
    )
    extra_gen_kwargs = self._get_extra_model_gen_kwargs(
        optimization_config=optimization_config
    )
    lin_constraints = extract_parameter_constraints(
        search_space.parameter_constraints, self.parameters
    )
    fixed_features_dict = get_fixed_features(fixed_features, self.parameters)
    pending_array = pending_observations_as_array(
        pending_observations, self.outcomes, self.parameters
    )
    # Generate the candidates.
    X, w, gen_metadata, candidate_metadata = self._model_gen(
        n=n,
        bounds=bounds,
        objective_weights=obj_weights,
        outcome_constraints=out_constraints,
        linear_constraints=lin_constraints,
        fixed_features=fixed_features_dict,
        pending_observations=pending_array,
        model_gen_options=model_gen_options,
        rounding_func=transform_callback(self.parameters, self.transforms),
        target_fidelities=target_fidelities,
        **extra_gen_kwargs,
    )
    # Transform the candidate array back to observation features.
    observation_features = parse_observation_features(
        X=X, param_names=self.parameters, candidate_metadata=candidate_metadata
    )
    # Best-point recommendation from the fitted model (None if unavailable).
    xbest = self._model_best_point(
        bounds=bounds,
        objective_weights=obj_weights,
        outcome_constraints=out_constraints,
        linear_constraints=lin_constraints,
        fixed_features=fixed_features_dict,
        model_gen_options=model_gen_options,
        target_fidelities=target_fidelities,
    )
    if xbest is None:
        best_obsf = None
    else:
        best_obsf = ObservationFeatures(
            parameters={p: float(xbest[i]) for i, p in enumerate(self.parameters)}
        )
    return observation_features, w.tolist(), best_obsf, gen_metadata