def is_noiseless(model: Model) -> bool:
    """Check if a given (single-task) botorch model is noiseless."""
    if isinstance(model, ModelListGP):
        raise ModelError(
            "Checking for noiseless models only applies to sub-models of ModelListGP"
        )
    return model.__class__ in NOISELESS_MODELS
def _generate_weights(
    self,
    objective_weights: np.ndarray,
    outcome_constraints: Optional[Tuple[np.ndarray, np.ndarray]] = None,
) -> List[float]:
    samples, fraction_all_infeasible = self._produce_samples(
        num_samples=self.num_samples,
        objective_weights=objective_weights,
        outcome_constraints=outcome_constraints,
    )
    if fraction_all_infeasible > 0.99:
        raise ModelError(TS_NO_FEASIBLE_ARMS_ERROR)

    num_valid_samples = samples.shape[1]
    while num_valid_samples < self.num_samples:
        # Scale up the request to compensate for the expected fraction of
        # all-infeasible draws, and ask for at least 100 new samples.
        num_additional_samples = (self.num_samples - num_valid_samples) / (
            1 - fraction_all_infeasible
        )
        num_additional_samples = int(np.maximum(num_additional_samples, 100))
        new_samples, _ = self._produce_samples(
            num_samples=num_additional_samples,
            objective_weights=objective_weights,
            outcome_constraints=outcome_constraints,
        )
        samples = np.concatenate([samples, new_samples], axis=1)
        num_valid_samples = samples.shape[1]

    # Each column of `samples` is one joint draw over the k arms; the arm with
    # the largest value "wins" that draw, and its win frequency becomes its weight.
    winner_indices = np.argmax(samples, axis=0)  # (num_samples,)
    winner_counts = np.zeros(len(self.X))  # (k,)
    for index in winner_indices:
        winner_counts[index] += 1
    weights = winner_counts / winner_counts.sum()
    return weights.tolist()
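The tail of _generate_weights turns posterior draws into arm weights by counting, per joint sample, which arm achieves the best value. A minimal self-contained sketch of that step, using made-up numbers in place of the draws produced by _produce_samples:

import numpy as np

# Illustrative only: stand-in posterior draws for k=3 arms and 5 samples,
# arranged (k, num_samples) as in _generate_weights.
samples = np.array(
    [
        [0.2, 0.9, 0.4, 0.1, 0.3],
        [0.7, 0.1, 0.5, 0.8, 0.2],
        [0.5, 0.3, 0.6, 0.2, 0.9],
    ]
)

winner_indices = np.argmax(samples, axis=0)    # best arm in each joint draw
winner_counts = np.bincount(winner_indices, minlength=samples.shape[0])
weights = winner_counts / winner_counts.sum()  # estimated P(arm is best)
print(weights.tolist())                        # [0.2, 0.4, 0.4]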
def gen(
    self,
    n: int,
    parameter_values: List[TParamValueList],
    objective_weights: Optional[np.ndarray],
    outcome_constraints: Optional[Tuple[np.ndarray, np.ndarray]] = None,
    fixed_features: Optional[Dict[int, TParamValue]] = None,
    pending_observations: Optional[List[List[TParamValueList]]] = None,
    model_gen_options: Optional[TConfig] = None,
) -> Tuple[List[TParamValueList], List[float], TGenMetadata]:
    if objective_weights is None:
        raise ValueError("ThompsonSampler requires objective weights.")

    arms = self.X
    k = len(arms)

    weights = self._generate_weights(
        objective_weights=objective_weights, outcome_constraints=outcome_constraints
    )
    min_weight = self.min_weight if self.min_weight is not None else 2.0 / k

    # Second entry is used for tie-breaking
    weighted_arms = [
        (weights[i], np.random.random(), arms[i])
        for i in range(k)
        # pyre-fixme[58]: `>` is not supported for operand types `float` and
        #  `Optional[float]`.
        if weights[i] > min_weight
    ]

    if len(weighted_arms) == 0:
        raise ModelError(
            TS_MIN_WEIGHT_ERROR.format(min_weight=min_weight, max_weight=max(weights))
        )

    weighted_arms.sort(reverse=True)
    top_weighted_arms = weighted_arms[:n] if n > 0 else weighted_arms
    top_arms = [arm for _, _, arm in top_weighted_arms]
    top_weights = [weight for weight, _, _ in top_weighted_arms]

    # N TS arms should have total weight N
    if self.uniform_weights:
        top_weights = [1.0 for _ in top_weights]
    else:
        top_weights = [(x * len(top_weights)) / sum(top_weights) for x in top_weights]
    return top_arms, top_weights, {"arms_to_weights": list(zip(arms, weights))}
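When uniform_weights is False, the selected arms' weights are rescaled so that the n returned arms carry total weight n (one unit of allocation per arm on average). A quick worked example with hypothetical winner frequencies:

# Illustrative only: the rescaling done at the end of gen, with ad hoc numbers.
top_weights = [0.50, 0.30, 0.20]  # hypothetical frequencies for n=3 surviving arms
rescaled = [(w * len(top_weights)) / sum(top_weights) for w in top_weights]
print(rescaled)  # [1.5, 0.9, 0.6], which sums to 3.0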
def gen(
    self,
    n: int,
    parameter_values: List[TParamValueList],
    objective_weights: Optional[np.ndarray],
    outcome_constraints: Optional[Tuple[np.ndarray, np.ndarray]] = None,
    fixed_features: Optional[Dict[int, TParamValue]] = None,
    pending_observations: Optional[List[List[TParamValueList]]] = None,
    model_gen_options: Optional[TConfig] = None,
) -> Tuple[List[TParamValueList], List[float], TGenMetadata]:
    if objective_weights is None:
        raise ValueError("ThompsonSampler requires objective weights.")

    arms = self.X
    # pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
    k = len(arms)

    weights = self._generate_weights(
        objective_weights=objective_weights, outcome_constraints=outcome_constraints
    )
    min_weight = self.min_weight if self.min_weight is not None else 2.0 / k

    # Second entry is used for tie-breaking
    weighted_arms = [
        # pyre-fixme[16]: `None` has no attribute `__getitem__`.
        (weights[i], np.random.random(), arms[i])
        for i in range(k)
        # pyre-fixme[6]: Expected `float` for 1st param but got `Optional[float]`.
        if weights[i] > min_weight
    ]

    if len(weighted_arms) == 0:
        raise ModelError(
            TS_MIN_WEIGHT_ERROR.format(min_weight=min_weight, max_weight=max(weights))
        )

    weighted_arms.sort(reverse=True)
    top_weighted_arms = weighted_arms[:n] if n > 0 else weighted_arms
    top_arms = [arm for _, _, arm in top_weighted_arms]
    top_weights = [weight for weight, _, _ in top_weighted_arms]

    if self.uniform_weights:
        top_weights = [1 / len(top_arms) for _ in top_arms]
    return top_arms, [x / sum(top_weights) for x in top_weights], {}