def _gen(
    self,
    n: int,
    search_space: SearchSpace,
    pending_observations: Dict[str, List[ObservationFeatures]],
    fixed_features: ObservationFeatures,
    optimization_config: Optional[OptimizationConfig] = None,
    model_gen_options: Optional[TConfig] = None,
) -> Tuple[
    List[ObservationFeatures],
    List[float],
    Optional[ObservationFeatures],
    TGenMetadata,
]:
    """Generate new candidates according to a search_space.

    Note: ``pending_observations`` and ``optimization_config`` are accepted
    for interface compatibility with the other ``_gen`` implementations but
    are not used by this bridge. Defaults of ``None`` are added to
    ``optimization_config`` and ``model_gen_options`` for consistency with
    the sibling ``_gen`` signatures (backward-compatible).

    Returns a tuple of generated observation features, their weights, no
    best-point prediction (always ``None``), and empty gen metadata.
    """
    # Extract parameter box bounds; task features are unused here.
    bounds, _, _ = get_bounds_and_task(search_space, self.parameters)
    # Map fixed feature values onto parameter indices for the array model.
    fixed_features_dict = get_fixed_features(fixed_features, self.parameters)
    # Convert Ax parameter constraints into linear (A, b) form.
    linear_constraints = extract_parameter_constraints(
        search_space.parameter_constraints, self.parameters
    )
    # Generate the candidate points and their weights.
    X, w = self.model.gen(
        n=n,
        bounds=bounds,
        linear_constraints=linear_constraints,
        fixed_features=fixed_features_dict,
        model_gen_options=model_gen_options,
        rounding_func=transform_callback(self.parameters, self.transforms),
    )
    observation_features = parse_observation_features(X, self.parameters)
    # This bridge produces no best-point prediction and no gen metadata.
    return observation_features, w.tolist(), None, {}
def _gen(
    self,
    n: int,
    search_space: SearchSpace,
    pending_observations: Dict[str, List[ObservationFeatures]],
    fixed_features: ObservationFeatures,
    model_gen_options: Optional[TConfig] = None,
    optimization_config: Optional[OptimizationConfig] = None,
) -> Tuple[
    List[ObservationFeatures],
    List[float],
    Optional[ObservationFeatures],
    TGenMetadata,
]:
    """Generate new candidates according to search_space and optimization_config.

    The outcome constraints should be transformed to no longer be relative.
    """
    # Convert everything to array representation up front, in one place.
    gen_args = self._get_transformed_model_gen_args(
        search_space=search_space,
        pending_observations=pending_observations,
        fixed_features=fixed_features,
        model_gen_options=model_gen_options,
        optimization_config=optimization_config,
    )
    digest = gen_args.search_space_digest
    # TODO: pass gen_args to _model_gen
    X, w, gen_metadata, candidate_metadata = self._model_gen(
        n=n,
        bounds=digest.bounds,
        objective_weights=gen_args.objective_weights,
        outcome_constraints=gen_args.outcome_constraints,
        linear_constraints=gen_args.linear_constraints,
        fixed_features=gen_args.fixed_features,
        pending_observations=gen_args.pending_observations,
        model_gen_options=model_gen_options,
        rounding_func=gen_args.rounding_func,
        target_fidelities=digest.target_fidelities,
        **gen_args.extra_model_gen_kwargs,
    )
    # Back from array representation to observation features.
    observation_features = parse_observation_features(
        X=X, param_names=self.parameters, candidate_metadata=candidate_metadata
    )
    # Ask the underlying model for its current best point, if it has one.
    best_x = self._model_best_point(
        bounds=digest.bounds,
        objective_weights=gen_args.objective_weights,
        outcome_constraints=gen_args.outcome_constraints,
        linear_constraints=gen_args.linear_constraints,
        fixed_features=gen_args.fixed_features,
        model_gen_options=model_gen_options,
        target_fidelities=digest.target_fidelities,
    )
    if best_x is None:
        best_obsf = None
    else:
        best_obsf = ObservationFeatures(
            parameters={
                name: float(best_x[i]) for i, name in enumerate(self.parameters)
            }
        )
    return observation_features, w.tolist(), best_obsf, gen_metadata
def untransform_objective_thresholds(
    self,
    objective_thresholds: Tensor,
    objective_weights: Tensor,
    bounds: List[Tuple[Union[int, float], Union[int, float]]],
    fixed_features: Optional[Dict[int, float]],
) -> List[ObjectiveThreshold]:
    """Map objective thresholds from the transformed space back to raw space."""
    thresholds_np = objective_thresholds.cpu().numpy()
    # Indices and names of the outcomes that carry nonzero objective weight.
    # pyre-ignore [16]
    obj_indices = objective_weights.nonzero().view(-1).tolist()
    obj_names = [self.outcomes[i] for i in obj_indices]
    # Wrap the thresholds in an ObservationData so the transforms can undo
    # them; covariance is irrelevant here, so zeros suffice.
    obs_data = [
        ObservationData(
            metric_names=obj_names,
            means=thresholds_np[obj_indices].copy(),
            covariance=np.zeros((len(obj_indices), len(obj_indices))),
        )
    ]
    # Untransform objective thresholds. Note: there is one objective
    # threshold for every outcome.
    # Dummy observation features: lower bounds, overridden by fixed features.
    point = [lower for lower, _ in bounds]
    for idx, value in (fixed_features or {}).items():
        point[idx] = value
    obs_features = parse_observation_features(
        X=np.array([point]),
        param_names=self.parameters,
    )
    # Undo the transforms, last-applied first.
    for transform in reversed(self.transforms.values()):
        obs_data = transform.untransform_observation_data(
            observation_data=obs_data,
            observation_features=obs_features,
        )
        obs_features = transform.untransform_observation_features(
            observation_features=obs_features,
        )
    untransformed = obs_data[0]
    opt_config = not_none(self._optimization_config)
    name_to_metric = opt_config.metrics
    obj_thresholds = []
    for pos, (metric_name, bound_val) in enumerate(
        zip(untransformed.metric_names, untransformed.means)
    ):
        # NaN means "no threshold" for that objective.
        if np.isnan(bound_val):
            continue
        weight = objective_weights[obj_indices[pos]]
        # Negative weight => minimization => upper bound (LEQ).
        if torch.sign(weight) == -1.0:
            op = ComparisonOp.LEQ
        else:
            op = ComparisonOp.GEQ
        obj_thresholds.append(
            ObjectiveThreshold(
                metric=name_to_metric[metric_name],
                bound=bound_val,
                relative=False,
                op=op,
            )
        )
    return obj_thresholds
def _gen(
    self,
    n: int,
    search_space: SearchSpace,
    pending_observations: Dict[str, List[ObservationFeatures]],
    fixed_features: ObservationFeatures,
    model_gen_options: Optional[TConfig] = None,
    optimization_config: Optional[OptimizationConfig] = None,
) -> Tuple[
    List[ObservationFeatures],
    List[float],
    Optional[ObservationFeatures],
    TGenMetadata,
]:
    """Generate new candidates according to search_space and optimization_config.

    The outcome constraints should be transformed to no longer be relative.
    """
    # The model must have been fit before candidates can be generated.
    if not self.parameters:  # pragma: no cover
        raise ValueError(FIT_MODEL_ERROR.format(action="_gen"))
    # Box bounds and raw target fidelities for the search space.
    bounds, _, raw_target_fidelities = get_bounds_and_task(
        search_space=search_space, param_names=self.parameters
    )
    target_fidelities = {
        idx: float(fidelity)
        for idx, fidelity in raw_target_fidelities.items()  # pyre-ignore [6]
    }
    if optimization_config is None:
        raise ValueError(
            "ArrayModelBridge requires an OptimizationConfig to be specified"
        )
    if self.outcomes is None or len(self.outcomes) == 0:  # pragma: no cover
        raise ValueError("No outcomes found during model fit--data are missing.")
    validate_optimization_config(optimization_config, self.outcomes)
    # Convert the optimization config into array representation.
    objective_weights = extract_objective_weights(
        objective=optimization_config.objective, outcomes=self.outcomes
    )
    outcome_constraints = extract_outcome_constraints(
        outcome_constraints=optimization_config.outcome_constraints,
        outcomes=self.outcomes,
    )
    extra_model_gen_kwargs = self._get_extra_model_gen_kwargs(
        optimization_config=optimization_config
    )
    linear_constraints = extract_parameter_constraints(
        search_space.parameter_constraints, self.parameters
    )
    fixed_features_dict = get_fixed_features(fixed_features, self.parameters)
    pending_array = pending_observations_as_array(
        pending_observations, self.outcomes, self.parameters
    )
    # Generate candidate points in array representation.
    X, w, gen_metadata, candidate_metadata = self._model_gen(
        n=n,
        bounds=bounds,
        objective_weights=objective_weights,
        outcome_constraints=outcome_constraints,
        linear_constraints=linear_constraints,
        fixed_features=fixed_features_dict,
        pending_observations=pending_array,
        model_gen_options=model_gen_options,
        rounding_func=transform_callback(self.parameters, self.transforms),
        target_fidelities=target_fidelities,
        **extra_model_gen_kwargs,
    )
    # Convert the candidate array back into observation features.
    observation_features = parse_observation_features(
        X=X, param_names=self.parameters, candidate_metadata=candidate_metadata
    )
    # Ask the model for its current best point, if it can provide one.
    best_x = self._model_best_point(
        bounds=bounds,
        objective_weights=objective_weights,
        outcome_constraints=outcome_constraints,
        linear_constraints=linear_constraints,
        fixed_features=fixed_features_dict,
        model_gen_options=model_gen_options,
        target_fidelities=target_fidelities,
    )
    if best_x is None:
        best_obsf = None
    else:
        best_obsf = ObservationFeatures(
            parameters={
                name: float(best_x[i]) for i, name in enumerate(self.parameters)
            }
        )
    return observation_features, w.tolist(), best_obsf, gen_metadata