Code example #1
 def _validate_range_param(
     self,
     lower: TParamValue,
     upper: TParamValue,
     log_scale: bool,
     logit_scale: bool = False,
     parameter_type: Optional[ParameterType] = None,
 ) -> None:
     if parameter_type and parameter_type not in (
         ParameterType.INT,
         ParameterType.FLOAT,
     ):
         raise UserInputError("RangeParameter type must be int or float.")
     # pyre-fixme[58]: `>=` is not supported for operand types `Union[None, bool,
     #  float, int, str]` and `Union[None, bool, float, int, str]`.
     if lower >= upper:
         raise UserInputError(
             f"Upper bound of {self.name} must be strictly larger than lower."
             f"Got: ({lower}, {upper})."
         )
     # pyre-fixme[58]: `<=` is not supported for operand types `Union[None, bool,
     #  float, int, str]` and `int`.
     if log_scale and lower <= 0:
         raise UserInputError("Cannot take log when min <= 0.")
     # Mirrors the `logit_scale` argument passed by `RangeParameter.__init__`.
     if logit_scale and (lower <= 0 or upper >= 1):
         raise UserInputError("Logit scale requires lower > 0 and upper < 1.")
     if not (self.is_valid_type(lower)) or not (self.is_valid_type(upper)):
         raise UserInputError(
             f"[{lower}, {upper}] is an invalid range for this parameter."
         )
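A minimal usage sketch, assuming a recent Ax release in which `RangeParameter.__init__` runs this validation: passing `lower >= upper` surfaces as a `UserInputError`.

from ax.core.parameter import ParameterType, RangeParameter
from ax.exceptions.core import UserInputError

try:
    # Invalid: lower must be strictly less than upper.
    RangeParameter(
        name="lr", parameter_type=ParameterType.FLOAT, lower=1.0, upper=0.1
    )
except UserInputError as err:
    print(err)  # Upper bound of lr must be strictly larger than lower. ...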
Code example #2
 def __init__(self,
              steps: List[GenerationStep],
              name: Optional[str] = None) -> None:
     assert isinstance(steps, list) and all(
         isinstance(s, GenerationStep)
         for s in steps), "Steps must be a GenerationStep list."
     self._name = name
     self._steps = steps
     self._uses_registered_models = True
     self._generator_runs = []
     for idx, step in enumerate(self._steps):
         if step.num_trials == -1:
             if idx < len(self._steps) - 1:
                 raise UserInputError(  # pragma: no cover
                     "Only last step in generation strategy can have `num_trials` "
                     "set to -1 to indicate that the model in the step should "
                     "be used to generate new trials indefinitely.")
         elif step.num_trials < 1:  # pragma: no cover
             raise UserInputError(
                 "`num_trials` must be positive or -1 (indicating unlimited) "
                 "for all generation steps.")
         if step.max_parallelism is not None and step.max_parallelism < 1:
             raise UserInputError(
                 "Maximum parallelism should be None (if no limit) or a positive"
                 f" number. Got: {step.max_parallelism} for step {step.model_name}."
             )
         step.index = idx
         if not isinstance(step.model, ModelRegistryBase):
             self._uses_registered_models = False
     if not self._uses_registered_models:
         logger.info("Using model via callable function, "
                     "so optimization is not resumable if interrupted.")
     self._curr = steps[0]
     self._seen_trial_indices_by_status = None
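A sketch of the `num_trials=-1` rule above, assuming the standard `Models` registry enum: only the final step may run indefinitely.

from ax.exceptions.core import UserInputError
from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
from ax.modelbridge.registry import Models

try:
    GenerationStrategy(steps=[
        # Invalid: -1 (unlimited trials) is only allowed on the last step.
        GenerationStep(model=Models.SOBOL, num_trials=-1),
        GenerationStep(model=Models.GPEI, num_trials=10),
    ])
except UserInputError as err:
    print(err)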
Code example #3
    def __init__(
        self,
        name: str,
        parameter_type: ParameterType,
        values: List[TParamValue],
        is_ordered: Optional[bool] = None,
        is_task: bool = False,
        is_fidelity: bool = False,
        target_value: Optional[TParamValue] = None,
        sort_values: Optional[bool] = None,
        dependents: Optional[Dict[TParamValue, List[str]]] = None,
    ) -> None:
        if is_fidelity and (target_value is None):
            raise UserInputError(
                "`target_value` should not be None for the fidelity parameter: "
                "{}".format(name)
            )

        self._name = name
        self._parameter_type = parameter_type
        self._is_task = is_task
        self._is_fidelity = is_fidelity
        self._target_value = self.cast(target_value)
        # A choice parameter with only one value is a FixedParameter.
        if not len(values) > 1:
            raise UserInputError(f"{self._name}({values}): {FIXED_CHOICE_PARAM_ERROR}")
        self._values = self._cast_values(values)
        self._is_ordered = (
            is_ordered
            if is_ordered is not None
            else self._get_default_bool_and_warn(param_string="is_ordered")
        )
        # sort_values defaults to True if the parameter is not a string
        self._sort_values = (
            sort_values
            if sort_values is not None
            else self._get_default_bool_and_warn(param_string="sort_values")
        )
        if self.sort_values:
            # pyre-ignore[6]: values/self._values expects List[Union[None, bool, float,
            # int, str]] but sorted() takes/returns
            # List[Variable[_typeshed.SupportsLessThanT (bound to
            # _typeshed.SupportsLessThan)]]
            self._values = self._cast_values(sorted(values))
        if dependents:
            for value in dependents:
                if value not in self.values:
                    raise UserInputError(
                        f"Value {value} in `dependents` "
                        f"argument is not among the parameter values: {self.values}."
                    )
        # NOTE: We don't need to check that dependent parameters actually exist as
        # that is done in `HierarchicalSearchSpace` constructor.
        self._dependents = dependents
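For illustration, a single-value `ChoiceParameter` is rejected up front; a sketch, assuming current Ax import paths:

from ax.core.parameter import ChoiceParameter, ParameterType
from ax.exceptions.core import UserInputError

try:
    # Invalid: one allowed value should be expressed as a FixedParameter.
    ChoiceParameter(
        name="optimizer", parameter_type=ParameterType.STRING, values=["adam"]
    )
except UserInputError as err:
    print(err)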
Code example #4
    def _validate_hierarchical_structure(self) -> None:
        """Validate the structure of this hierarchical search space, ensuring that all
        subtrees are independent (not sharing any parameters) and that all parameters
        are reachable and part of the tree.
        """
        def _check_subtree(root: Parameter) -> Set[str]:
            logger.debug(f"Verifying subtree with root {root}...")
            visited = {root.name}
            # Base case: validate leaf node.
            if not root.is_hierarchical:
                return visited  # TODO: Should there be other validation?

            # Recursive case: validate each subtree.
            visited_in_subtrees = (  # Generator of sets of visited parameter names.
                _check_subtree(root=self[param_name])
                for deps in root.dependents.values() for param_name in deps)
            # Check that subtrees are disjoint and return names of visited params.
            # NOTE: Arguments evaluate left to right, so `next` pops the first
            # subtree's set to seed the reduction and `reduce` folds in the rest.
            visited.update(
                reduce(
                    lambda set1, set2: _disjoint_union(set1=set1, set2=set2),
                    visited_in_subtrees,
                    next(visited_in_subtrees),
                ))
            logger.debug(f"Visited parameters {visited} in subtree.")
            return visited

        # Verify that all nodes have been reached.
        visited = _check_subtree(root=self._root)
        if len(self._all_parameter_names - visited) != 0:
            raise UserInputError(
                f"Parameters {self._all_parameter_names - visited} are not reachable "
                "from the root. Please check that the hierachical search space provided"
                " is represented as a valid tree with a single root.")
        logger.debug(f"Visited all parameters in the tree: {visited}.")
Code example #5
def _disjoint_union(set1: Set[str], set2: Set[str]) -> Set[str]:
    if not set1.isdisjoint(set2):
        raise UserInputError(
            "Two subtrees in the search space contain the same parameters: "
            f"{set1.intersection(set2)}.")
    logger.debug(f"Subtrees {set1} and {set2} are disjoint.")
    return set1.union(set2)
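In isolation the helper behaves like a checked set union. `_disjoint_union` is a private helper, so the import path below is an assumption.

from ax.core.search_space import _disjoint_union  # assumed import path

_disjoint_union(set1={"a", "b"}, set2={"c"})       # returns {"a", "b", "c"}
_disjoint_union(set1={"a", "b"}, set2={"b", "c"})  # raises UserInputError on {"b"}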
Code example #6
File: model_spec.py Project: facebook/Ax
 def __post_init__(self) -> None:
     if self.model_enum is not None:
         raise UserInputError(
             "Use regular `ModelSpec` when it's possible to describe the "
             "model as `ModelRegistryBase` subclass enum member."
         )
     if self.factory_function is None:
         raise UserInputError(
             "Please specify a valid function returning a `ModelBridge` instance "
             "as the required `factory_function` argument to "
             "`FactoryFunctionModelSpec`."
         )
     warnings.warn(
         "Using a factory function to describe the model, so optimization state "
         "cannot be stored and optimization is not resumable if interrupted."
     )
Code example #7
    def __init__(
        self,
        name: str,
        parameter_type: ParameterType,
        value: TParamValue,
        is_fidelity: bool = False,
        target_value: Optional[TParamValue] = None,
    ) -> None:
        """Initialize FixedParameter

        Args:
            name: Name of the parameter.
            parameter_type: Enum indicating the type of parameter
                value (e.g. string, int).
            value: The fixed value of the parameter.
            is_fidelity: Whether this parameter is a fidelity parameter.
            target_value: Target value of this parameter if it is a fidelity.
        """
        if is_fidelity and (target_value is None):
            raise UserInputError(
                "`target_value` should not be None for the fidelity parameter: "
                "{}".format(name))

        self._name = name
        self._parameter_type = parameter_type
        self._value = self.cast(value)
        self._is_fidelity = is_fidelity
        self._target_value = self.cast(target_value)
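Usage sketch: marking a parameter as a fidelity without a `target_value` fails fast (assuming current Ax import paths).

from ax.core.parameter import FixedParameter, ParameterType
from ax.exceptions.core import UserInputError

try:
    # Invalid: is_fidelity=True requires target_value.
    FixedParameter(
        name="epochs", parameter_type=ParameterType.INT, value=10, is_fidelity=True
    )
except UserInputError as err:
    print(err)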
Code example #8
    def __init__(
        self,
        search_space: SearchSpace,
        observation_features: List[ObservationFeatures],
        observation_data: List[ObservationData],
        modelbridge: Optional["modelbridge_module.base.ModelBridge"] = None,
        config: Optional[TConfig] = None,
    ) -> None:
        if len(observation_data) == 0:
            raise ValueError(
                "Winsorize transform requires non-empty observation data.")
        if config is None:
            raise ValueError(
                "Transform config for `Winsorize` transform must be specified and "
                "non-empty when using winsorization.")
        all_metric_values = get_data(observation_data=observation_data)

        # Check for legacy config
        use_legacy = False
        old_present = set(OLD_KEYS).intersection(config.keys())
        if old_present:
            warnings.warn(
                "Winsorization received an out-of-date `transform_config`, containing "
                f"the following deprecated keys: {old_present}. Please update the "
                "config according to the docs of "
                "`ax.modelbridge.transforms.winsorize.Winsorize`.",
                DeprecationWarning,
            )
            use_legacy = True

        # Get winsorization and optimization configs
        winsorization_config = config.get("winsorization_config", {})
        opt_config = config.get("optimization_config", {})
        if "optimization_config" in config:
            if not isinstance(opt_config, OptimizationConfig):
                raise UserInputError(
                    "Expected `optimization_config` of type `OptimizationConfig` but "
                    f"got type `{type(opt_config)}.")
            opt_config = checked_cast(OptimizationConfig, opt_config)

        self.cutoffs = {}
        for metric_name, metric_values in all_metric_values.items():
            if use_legacy:
                self.cutoffs[metric_name] = _get_cutoffs_from_legacy_transform_config(
                    metric_name=metric_name,
                    metric_values=metric_values,
                    transform_config=config,
                )
            else:
                self.cutoffs[metric_name] = _get_cutoffs_from_transform_config(
                    metric_name=metric_name,
                    metric_values=metric_values,
                    winsorization_config=winsorization_config,  # pyre-ignore[6]
                    optimization_config=opt_config,  # pyre-ignore[6]
                )
Code example #9
File: managed_loop.py Project: facebook/Ax
 def _get_weights_by_arm(
         self, trial: BaseTrial) -> Iterable[Tuple[Arm, Optional[float]]]:
     if isinstance(trial, Trial):
         if trial.arm is not None:
             return [(not_none(trial.arm), None)]
         return []
     elif isinstance(trial, BatchTrial):
         return trial.normalized_arm_weights().items()
     else:
         raise UserInputError(f"Invalid trial type: {type(trial)}")
Code example #10
    def __init__(
        self,
        name: str,
        parameter_type: ParameterType,
        value: TParamValue,
        is_fidelity: bool = False,
        target_value: Optional[TParamValue] = None,
        dependents: Optional[Dict[TParamValue, List[str]]] = None,
    ) -> None:
        """Initialize FixedParameter

        Args:
            name: Name of the parameter.
            parameter_type: Enum indicating the type of parameter
                value (e.g. string, int).
            value: The fixed value of the parameter.
            is_fidelity: Whether this parameter is a fidelity parameter.
            target_value: Target value of this parameter if it is a fidelity.
            dependents: Optional mapping for parameters in hierarchical search
                spaces; format is { value -> list of dependent parameter names }.
        """
        if is_fidelity and (target_value is None):
            raise UserInputError(
                "`target_value` should not be None for the fidelity parameter: "
                "{}".format(name)
            )

        self._name = name
        self._parameter_type = parameter_type
        self._value = self.cast(value)
        self._is_fidelity = is_fidelity
        self._target_value = self.cast(target_value)
        # NOTE: We don't need to check that dependent parameters actually exist as
        # that is done in `HierarchicalSearchSpace` constructor.
        if dependents:
            if len(dependents) > 1 or next(iter(dependents.keys())) != self.value:
                raise UserInputError(
                    "The only expected key in `dependents` for fixed parameter "
                    f"{self.name}: {self.value}; got: {dependents}."
                )
        self._dependents = dependents
Code example #11
    def set_values(self, values: List[TParamValue]) -> "ChoiceParameter":
        """Set the list of allowed values for parameter.

        Cast all input values to the parameter type.

        Args:
            values: New list of allowed values.
        """
        # A choice parameter with only one value is a FixedParameter.
        if not len(values) > 1:
            raise UserInputError(FIXED_CHOICE_PARAM_ERROR)
        self._values = self._cast_values(values)
        return self
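A short sketch of the same invariant at mutation time: shrinking the value list to a single entry raises, since that shape belongs to `FixedParameter`.

from ax.core.parameter import ChoiceParameter, ParameterType

param = ChoiceParameter(
    name="optimizer",
    parameter_type=ParameterType.STRING,
    values=["adam", "sgd"],
    is_ordered=False,
    sort_values=False,
)
param.set_values(["adam", "sgd", "rmsprop"])  # OK: values are cast and stored.
param.set_values(["adam"])                    # Raises UserInputError.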
Code example #12
    def __init__(
        self,
        name: str,
        parameter_type: ParameterType,
        lower: float,
        upper: float,
        log_scale: bool = False,
        logit_scale: bool = False,
        digits: Optional[int] = None,
        is_fidelity: bool = False,
        target_value: Optional[TParamValue] = None,
    ) -> None:
        """Initialize RangeParameter

        Args:
            name: Name of the parameter.
            parameter_type: Enum indicating the type of parameter
                value (e.g. string, int).
            lower: Lower bound of the parameter range (inclusive).
            upper: Upper bound of the parameter range (inclusive).
            log_scale: Whether to sample in the log space when drawing
                random values of the parameter.
            logit_scale: Whether to sample in logit space when drawing
                random values of the parameter.
            digits: Number of digits to round values to for float type.
            is_fidelity: Whether this parameter is a fidelity parameter.
            target_value: Target value of this parameter if it is a fidelity.
        """
        if is_fidelity and (target_value is None):
            raise UserInputError(
                "`target_value` should not be None for the fidelity parameter: "
                "{}".format(name)
            )

        self._name = name
        self._parameter_type = parameter_type
        self._digits = digits
        self._lower = self.cast(lower)
        self._upper = self.cast(upper)
        self._log_scale = log_scale
        self._logit_scale = logit_scale
        self._is_fidelity = is_fidelity
        self._target_value = self.cast(target_value)

        self._validate_range_param(
            parameter_type=parameter_type,
            lower=lower,
            upper=upper,
            log_scale=log_scale,
            logit_scale=logit_scale,
        )
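Usage sketch: a log-scaled range must be strictly positive, so a zero lower bound is rejected by `_validate_range_param` (assuming current Ax import paths).

from ax.core.parameter import ParameterType, RangeParameter
from ax.exceptions.core import UserInputError

try:
    # Invalid: log_scale requires lower > 0.
    RangeParameter(
        name="lr",
        parameter_type=ParameterType.FLOAT,
        lower=0.0,
        upper=1.0,
        log_scale=True,
    )
except UserInputError as err:
    print(err)  # Cannot take log when min <= 0.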
Code example #13
File: parameter.py Project: viotemp1/Ax
    def __init__(
        self,
        name: str,
        parameter_type: ParameterType,
        values: List[TParamValue],
        is_ordered: bool = False,
        is_task: bool = False,
        is_fidelity: bool = False,
        target_value: Optional[TParamValue] = None,
    ) -> None:
        """Initialize ChoiceParameter.

        Args:
            name: Name of the parameter.
            parameter_type: Enum indicating the type of parameter
                value (e.g. string, int).
            values: List of allowed values for the parameter.
            is_ordered: If False, the parameter is a categorical variable.
            is_task: Treat the parameter as a task parameter for modeling.
            is_fidelity: Whether this parameter is a fidelity parameter.
            target_value: Target value of this parameter if it's fidelity.
        """
        if is_fidelity and (target_value is None):
            raise UserInputError(
                "`target_value` should not be None for the fidelity parameter: "
                "{}".format(name)
            )

        self._name = name
        self._parameter_type = parameter_type
        self._is_ordered = is_ordered
        self._is_task = is_task
        self._is_fidelity = is_fidelity
        self._target_value = self.cast(target_value)
        # A choice parameter with only one value is a FixedParameter.
        if not len(values) > 1:
            raise UserInputError(FIXED_CHOICE_PARAM_ERROR)
        self._values = self._cast_values(values)
Code example #14
def check_objective_thresholds_match_objectives(
    objectives_by_name: Dict[str, Objective],
    objective_thresholds: List[ObjectiveThreshold],
) -> None:
    """Error if thresholds on objective_metrics bound from the wrong direction or
    if there is a mismatch between objective thresholds and objectives.
    """
    obj_thresh_metrics = set()
    for threshold in objective_thresholds:
        metric_name = threshold.metric.name
        if metric_name not in objectives_by_name:
            raise UserInputError(
                f"Objective threshold {threshold} is on metric '{metric_name}', "
                f"but that metric is not among the objectives.")
        if metric_name in obj_thresh_metrics:
            raise UserInputError(
                "More than one objective threshold specified for metric "
                f"{metric_name}.")
        obj_thresh_metrics.add(metric_name)

        if metric_name in objectives_by_name:
            minimize = objectives_by_name[metric_name].minimize
            bounded_above = threshold.op == ComparisonOp.LEQ
            is_aligned = minimize == bounded_above
            if not is_aligned:
                raise UserInputError(
                    f"Objective threshold on {metric_name} bounds from "
                    f"{'above' if bounded_above else 'below'} "
                    f"but {metric_name} is being "
                    f"{'minimized' if minimize else 'maximized'}.")

    obj_metrics = set(objectives_by_name.keys())
    if objective_thresholds and obj_thresh_metrics.symmetric_difference(
            obj_metrics):
        raise UserInputError(
            f"Objective thresholds: {obj_thresh_metrics} do not match objectives: "
            f"{obj_metrics}.")
Code example #15
File: managed_loop.py Project: facebook/Ax
 def _get_new_trial(self) -> BaseTrial:
     if self.arms_per_trial == 1:
         return self.experiment.new_trial(
             generator_run=self.generation_strategy.gen(
                 experiment=self.experiment,
                 pending_observations=get_pending_observation_features(
                     experiment=self.experiment),
             ))
     elif self.arms_per_trial > 1:
         return self.experiment.new_batch_trial(
             generator_run=self.generation_strategy.gen(
                 experiment=self.experiment, n=self.arms_per_trial))
     else:
         raise UserInputError(
             f"Invalid number of arms per trial: {self.arms_per_trial}")
Code example #16
    def set_digits(self, digits: int) -> "RangeParameter":
        self._digits = digits

        # Re-scale min and max to new digits definition
        cast_lower = self.cast(self._lower)
        cast_upper = self.cast(self._upper)
        # pyre-fixme[58]: `>=` is not supported for operand types `Union[None, bool,
        #  float, int, str]` and `Union[None, bool, float, int, str]`.
        if cast_lower >= cast_upper:
            raise UserInputError(
                f"Lower bound {cast_lower} is >= upper bound {cast_upper}.")

        self._lower = cast_lower
        self._upper = cast_upper
        return self
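Usage sketch, assuming `cast` rounds float values to the new `digits`: a rounding that collapses the bounds is rejected.

from ax.core.parameter import ParameterType, RangeParameter

param = RangeParameter(
    name="x", parameter_type=ParameterType.FLOAT, lower=0.001, upper=0.002
)
param.set_digits(2)  # Both bounds round to 0.0 -> raises UserInputError.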
Code example #17
File: pareto_frontier.py Project: facebook/Ax
def _validate_and_maybe_get_default_metric_names(
    metric_names: Optional[Tuple[str, str]],
    optimization_config: Optional[OptimizationConfig],
) -> Tuple[str, str]:
    # Default metric_names to all objective metrics; error below unless exactly 2.
    if metric_names is None:
        if not_none(optimization_config).is_moo_problem:
            multi_objective = checked_cast(
                MultiObjective,
                not_none(optimization_config).objective)
            metric_names = tuple(obj.metric.name
                                 for obj in multi_objective.objectives)
        else:
            raise UserInputError(
                "Inference of `metric_names` failed. Expected `MultiObjective` but "
                f"got {not_none(optimization_config).objective}. Please specify "
                "`metric_names` of length 2 or provide an experiment whose "
                "`optimization_config` has 2 objective metrics.")
    if metric_names is not None and len(metric_names) == 2:
        return metric_names
    raise UserInputError(
        f"Expected 2 metrics but got {len(metric_names or [])}: {metric_names}. "
        "Please specify `metric_names` of length 2 or provide an experiment whose "
        "`optimization_config` has 2 objective metrics.")
Code example #18
File: managed_loop.py Project: facebook/Ax
    def _call_evaluation_function(
            self,
            parameterization: TParameterization,
            weight: Optional[float] = None) -> TEvaluationOutcome:
        signature = inspect.signature(self.evaluation_function)
        num_evaluation_function_params = len(signature.parameters.items())
        if num_evaluation_function_params == 1:
            # pyre-fixme[20]: Anonymous call expects argument `$1`.
            evaluation = self.evaluation_function(parameterization)
        elif num_evaluation_function_params == 2:
            evaluation = self.evaluation_function(parameterization, weight)
        else:
            raise UserInputError(
                "Evaluation function must take either one parameter "
                "(parameterization) or two parameters (parameterization and weight)."
            )

        return evaluation
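The dispatch above is purely arity-based; a minimal sketch of the same `inspect` logic:

import inspect

def eval_one(parameterization):          # 1 parameter
    return {"objective": (0.0, 0.0)}

def eval_two(parameterization, weight):  # 2 parameters
    return {"objective": (0.0, 0.0)}

print(len(inspect.signature(eval_one).parameters))  # 1 -> f(parameterization)
print(len(inspect.signature(eval_two).parameters))  # 2 -> f(parameterization, weight)
# Any other arity raises UserInputError.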
Code example #19
 def __post_init__(self) -> None:
     if not isinstance(self.model, ModelRegistryBase):
         if not callable(self.model):
             raise UserInputError(
                 "`model` in generation step must be either a `ModelRegistryBase` "
                 "enum subclass entry or a callable factory function returning a "
                 "model bridge instance.")
         model_spec = FactoryFunctionModelSpec(
             factory_function=self.model,
             model_kwargs=self.model_kwargs,
             model_gen_kwargs=self.model_gen_kwargs,
         )
     else:
         model_spec = ModelSpec(
             model_enum=self.model,
             model_kwargs=self.model_kwargs,
             model_gen_kwargs=self.model_gen_kwargs,
         )
     super().__init__(model_specs=[model_spec])
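A sketch of the two accepted forms of `model` (the factory function here is hypothetical; a real one must return a `ModelBridge` instance):

from ax.modelbridge.generation_strategy import GenerationStep
from ax.modelbridge.registry import Models

# Registered model: optimization state is storable and resumable.
step_a = GenerationStep(model=Models.SOBOL, num_trials=5)

def my_factory(experiment, data):  # hypothetical factory function
    raise NotImplementedError  # must return a ModelBridge instance

# Callable factory: accepted, but triggers the non-resumability warning above.
step_b = GenerationStep(model=my_factory, num_trials=5)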
Code example #20
File: pareto_frontier.py Project: facebook/Ax
def _validate_experiment_and_get_optimization_config(
    experiment: Experiment,
    metric_names: Optional[Tuple[str, str]] = None,
    reference_point: Optional[Tuple[float, float]] = None,
    minimize: Optional[Union[bool, Tuple[bool, bool]]] = None,
) -> Optional[OptimizationConfig]:
    # If `optimization_config` is unspecified, check what inputs are missing and
    # error/warn accordingly
    if experiment.optimization_config is None:
        if metric_names is None:
            raise UserInputError(
                "Inference of defaults failed. Please either specify `metric_names` "
                "(and optionally `minimize` and `reference_point`) or provide an "
                "experiment with an `optimization_config`.")
        if reference_point is None or minimize is None:
            warnings.warn(
                "Inference of defaults failed. Please specify `minimize` and "
                "`reference_point` if available, or provide an experiment with an "
                "`optimization_config` that contains an `objective` and "
                "`objective_threshold` corresponding to each of `metric_names`: "
                f"{metric_names}.")
        return None
    return not_none(experiment.optimization_config)
Code example #21
File: model_spec.py Project: facebook/Ax
 def _assert_fitted(self) -> None:
     """Helper that verifies a model was fitted, raising an error if not"""
     if self._fitted_model is None:
         raise UserInputError("No fitted model found. Call fit() to generate one")
Code example #22
def _get_cutoffs_from_transform_config(
    metric_name: str,
    metric_values: List[float],
    winsorization_config: Union[WinsorizationConfig,
                                Dict[str, WinsorizationConfig]],
    optimization_config: Optional[OptimizationConfig],
) -> Tuple[float, float]:
    # (1) Use the same config for all metrics if one WinsorizationConfig was specified
    if isinstance(winsorization_config, WinsorizationConfig):
        return _quantiles_to_cutoffs(
            metric_name=metric_name,
            metric_values=metric_values,
            metric_config=winsorization_config,
        )

    # (2) If `winsorization_config` is a dict, use it if `metric_name` is a key,
    # and the corresponding value is a WinsorizationConfig.
    if isinstance(winsorization_config, dict) and metric_name in winsorization_config:
        metric_config = winsorization_config[metric_name]
        if not isinstance(metric_config, WinsorizationConfig):
            raise UserInputError(
                "Expected winsorization config of type "
                f"`WinsorizationConfig` but got {metric_config} of type "
                f"{type(metric_config)} for metric {metric_name}.")
        return _quantiles_to_cutoffs(
            metric_name=metric_name,
            metric_values=metric_values,
            metric_config=metric_config,
        )

    # (3) For constraints and objectives that don't have a pre-specified config we
    # choose the cutoffs automatically using the optimization config (if supplied).
    # We ignore ScalarizedOutcomeConstraint and ScalarizedObjective for now. An
    # exception is raised if we encounter relative constraints.
    if optimization_config:
        if metric_name in optimization_config.objective.metric_names:
            if isinstance(optimization_config.objective, ScalarizedObjective):
                warnings.warn(
                    "Automatic winsorization isn't supported for ScalarizedObjective. "
                    "Specify the winsorization settings manually if you want to "
                    f"winsorize metric {metric_name}.")
                return DEFAULT_CUTOFFS  # Don't winsorize a ScalarizedObjective
            elif optimization_config.is_moo_problem:
                # We deal with a multi-objective function the same way as we deal
                # with an output constraint. It may be worth investigating setting
                # the winsorization cutoffs based on the Pareto frontier in the future.
                optimization_config = checked_cast(
                    MultiObjectiveOptimizationConfig, optimization_config)
                objective_threshold = _get_objective_threshold_from_moo_config(
                    optimization_config=optimization_config,
                    metric_name=metric_name)
                if objective_threshold:
                    return _get_auto_winsorization_cutoffs_outcome_constraint(
                        metric_values=metric_values,
                        outcome_constraints=objective_threshold,
                    )
                warnings.warn(
                    "Automatic winsorization isn't supported for an objective in "
                    "`MultiObjective` without objective thresholds. Specify the "
                    "winsorization settings manually if you want to winsorize "
                    f"metric {metric_name}.")
                return DEFAULT_CUTOFFS  # Don't winsorize if there is no threshold
            else:  # Single objective
                return _get_auto_winsorization_cutoffs_single_objective(
                    metric_values=metric_values,
                    minimize=optimization_config.objective.minimize,
                )
        # Get all outcome constraints for metric_name that aren't relative or scalarized
        outcome_constraints = _get_outcome_constraints_from_config(
            optimization_config=optimization_config, metric_name=metric_name)
        if outcome_constraints:
            return _get_auto_winsorization_cutoffs_outcome_constraint(
                metric_values=metric_values,
                outcome_constraints=outcome_constraints,
            )

    # If none of the above, we don't winsorize.
    return DEFAULT_CUTOFFS
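A sketch of branch (2), calling the function above directly; the `WinsorizationConfig` import path and field name are assumptions:

from ax.models.winsorization_config import WinsorizationConfig  # assumed path

cutoffs = _get_cutoffs_from_transform_config(
    metric_name="loss",
    metric_values=[0.1, 0.2, 0.3, 50.0],
    # Winsorize the top 10% of "loss" values; metrics absent from the dict
    # fall through to branch (3) and, with no optimization config, get
    # DEFAULT_CUTOFFS.
    winsorization_config={"loss": WinsorizationConfig(upper_quantile_margin=0.1)},
    optimization_config=None,
)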