Example #1
File: ax.py Project: zivzone/ray
    def convert_search_space(spec: Dict):
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to an Ax search space.")

        # Flatten and resolve again after checking for grid search.
        spec = flatten_dict(spec, prevent_delimiter=True)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        def resolve_value(par, domain):
            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                logger.warning("AxSearch does not support quantization. "
                               "Dropped quantization.")
                sampler = sampler.sampler

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    return {
                        "name": par,
                        "type": "range",
                        "bounds": [domain.lower, domain.upper],
                        "value_type": "float",
                        "log_scale": True
                    }
                elif isinstance(sampler, Uniform):
                    return {
                        "name": par,
                        "type": "range",
                        "bounds": [domain.lower, domain.upper],
                        "value_type": "float",
                        "log_scale": False
                    }
            elif isinstance(domain, Integer):
                if isinstance(sampler, LogUniform):
                    return {
                        "name": par,
                        "type": "range",
                        "bounds": [domain.lower, domain.upper],
                        "value_type": "int",
                        "log_scale": True
                    }
                elif isinstance(sampler, Uniform):
                    return {
                        "name": par,
                        "type": "range",
                        "bounds": [domain.lower, domain.upper],
                        "value_type": "int",
                        "log_scale": False
                    }
            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return {
                        "name": par,
                        "type": "choice",
                        "values": domain.categories
                    }

            raise ValueError("AxSearch does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))

        # Fixed vars
        fixed_values = [{
            "name": "/".join(path),
            "type": "fixed",
            "value": val
        } for path, val in resolved_vars]

        # Parameter name is e.g. "a/b/c" for nested dicts
        resolved_values = [
            resolve_value("/".join(path), domain)
            for path, domain in domain_vars
        ]

        return fixed_values + resolved_values
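
A minimal usage sketch for this converter; the import path assumes the Ray 1.x layout (ray.tune.suggest.ax) plus ax-platform, and the config is a made-up example:

    from ray import tune
    from ray.tune.suggest.ax import AxSearch

    config = {
        "lr": tune.loguniform(1e-4, 1e-1),            # Float + LogUniform -> log-scale range
        "layers": tune.randint(1, 8),                 # Integer + Uniform -> int range
        "activation": tune.choice(["relu", "tanh"]),  # Categorical -> choice
    }
    # Yields a list of Ax parameter dicts, e.g. {"name": "lr", "type": "range",
    # "bounds": [0.0001, 0.1], "value_type": "float", "log_scale": True}
    ax_params = AxSearch.convert_search_space(config)
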
Example #2
    def convert_search_space(spec: Dict) -> ConfigSpace.ConfigurationSpace:
        spec = flatten_dict(spec, prevent_delimiter=True)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to a TuneBOHB search space.")

        def resolve_value(
                par: str,
                domain: Domain) -> ConfigSpace.hyperparameters.Hyperparameter:
            quantize = None

            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    lower = domain.lower
                    upper = domain.upper
                    if quantize:
                        lower = math.ceil(domain.lower / quantize) * quantize
                        upper = math.floor(domain.upper / quantize) * quantize
                    return ConfigSpace.UniformFloatHyperparameter(par,
                                                                  lower=lower,
                                                                  upper=upper,
                                                                  q=quantize,
                                                                  log=True)
                elif isinstance(sampler, Uniform):
                    lower = domain.lower
                    upper = domain.upper
                    if quantize:
                        lower = math.ceil(domain.lower / quantize) * quantize
                        upper = math.floor(domain.upper / quantize) * quantize
                    return ConfigSpace.UniformFloatHyperparameter(par,
                                                                  lower=lower,
                                                                  upper=upper,
                                                                  q=quantize,
                                                                  log=False)
                elif isinstance(sampler, Normal):
                    return ConfigSpace.NormalFloatHyperparameter(
                        par,
                        mu=sampler.mean,
                        sigma=sampler.sd,
                        q=quantize,
                        log=False)

            elif isinstance(domain, Integer):
                if isinstance(sampler, Uniform):
                    lower = domain.lower
                    upper = domain.upper
                    if quantize:
                        lower = math.ceil(domain.lower / quantize) * quantize
                        upper = math.floor(domain.upper / quantize) * quantize
                    return ConfigSpace.UniformIntegerHyperparameter(
                        par, lower=lower, upper=upper, q=quantize, log=False)

            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return ConfigSpace.CategoricalHyperparameter(
                        par, choices=domain.categories)

            raise ValueError("TuneBOHB does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))

        cs = ConfigSpace.ConfigurationSpace()
        for path, domain in domain_vars:
            par = "/".join(path)
            value = resolve_value(par, domain)
            cs.add_hyperparameter(value)

        return cs
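
Under the same assumptions (Ray 1.x layout, ConfigSpace installed), a sketch showing how a Quantized sampler's q is forwarded into the resulting hyperparameters:

    from ray import tune
    from ray.tune.suggest.bohb import TuneBOHB

    config = {
        "lr": tune.qloguniform(1e-4, 1e-1, 5e-5),  # Quantized(LogUniform): q=5e-5 forwarded
        "momentum": tune.uniform(0.1, 0.9),        # plain UniformFloatHyperparameter
        "units": tune.randint(32, 512),            # UniformIntegerHyperparameter
    }
    cs = TuneBOHB.convert_search_space(config)  # a ConfigSpace.ConfigurationSpace
    print(cs.get_hyperparameter("lr"))
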
Example #3
    def convert_search_space(spec: Dict):
        spec = flatten_dict(spec, prevent_delimiter=True)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if not domain_vars and not grid_vars:
            return []

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to an Optuna search space.")

        def resolve_value(par, domain):
            quantize = None

            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    if quantize:
                        logger.warning(
                            "Optuna does not support both quantization and "
                            "sampling from LogUniform. Dropped quantization.")
                    return param.suggest_loguniform(par, domain.lower,
                                                    domain.upper)
                elif isinstance(sampler, Uniform):
                    if quantize:
                        return param.suggest_discrete_uniform(
                            par, domain.lower, domain.upper, quantize)
                    return param.suggest_uniform(par, domain.lower,
                                                 domain.upper)
            elif isinstance(domain, Integer):
                if isinstance(sampler, LogUniform):
                    if quantize:
                        logger.warning(
                            "Optuna does not support both quantization and "
                            "sampling from LogUniform. Dropped quantization.")
                    return param.suggest_int(par,
                                             domain.lower,
                                             domain.upper,
                                             log=True)
                elif isinstance(sampler, Uniform):
                    return param.suggest_int(par,
                                             domain.lower,
                                             domain.upper,
                                             step=quantize or 1)
            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return param.suggest_categorical(par, domain.categories)

            raise ValueError(
                "Optuna search does not support parameters of type "
                "`{}` with samplers of type `{}`".format(
                    type(domain).__name__,
                    type(domain.sampler).__name__))

        # Parameter name is e.g. "a/b/c" for nested dicts
        values = [
            resolve_value("/".join(path), domain)
            for path, domain in domain_vars
        ]

        return values
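
This revision returns a list of param.suggest_* definitions (param here is the helper module in ray.tune.suggest.optuna), the list-style API that Example #13 below flags as deprecated. A sketch, assuming a Ray version contemporary with this snippet:

    from ray import tune
    from ray.tune.suggest.optuna import OptunaSearch

    config = {
        "lr": tune.loguniform(1e-4, 1e-1),        # -> param.suggest_loguniform
        "dropout": tune.quniform(0.1, 0.5, 0.1),  # -> param.suggest_discrete_uniform
    }
    values = OptunaSearch.convert_search_space(config)  # list of suggest_* definitions
    searcher = OptunaSearch(values, metric="loss", mode="min")
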
Example #4
    def convert_search_space(spec: Dict, prefix: str = "") -> Dict:
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        params = []

        if not domain_vars and not grid_vars:
            return {}

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to a HEBO search space.")

        def resolve_value(par: str, domain: Domain):
            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                logger.warning("HEBO search does not support quantization. "
                               "Dropped quantization.")
                sampler = sampler.get_sampler()

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    return {
                        "name": par,
                        "type": "pow",
                        "lb": domain.lower,
                        "ub": domain.upper,
                        "base": sampler.base
                    }
                elif isinstance(sampler, Uniform):
                    return {
                        "name": par,
                        "type": "num",
                        "lb": domain.lower,
                        "ub": domain.upper
                    }

            elif isinstance(domain, Integer):
                if isinstance(sampler, LogUniform):
                    return {
                        "name": par,
                        "type": "pow_int",
                        "lb": domain.lower,
                        "ub": domain.upper - 1,  # Upper bound exclusive
                        "base": sampler.base
                    }
                elif isinstance(sampler, Uniform):
                    return {
                        "name": par,
                        "type": "int",
                        "lb": domain.lower,
                        "ub": domain.upper - 1,  # Upper bound exclusive
                    }
            elif isinstance(domain, Categorical):
                return {
                    "name": par,
                    "type": "cat",
                    "categories": list(domain.categories)
                }

            raise ValueError("HEBO does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))

        for path, domain in domain_vars:
            par = "/".join(
                [str(p) for p in ((prefix, ) + path if prefix else path)])
            value = resolve_value(par, domain)
            params.append(value)

        return hebo.design_space.design_space.DesignSpace().parse(params)
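
A sketch, assuming Ray 1.x with HEBO installed; the comments note the mappings this converter performs (tune.loguniform defaults to base 10):

    from ray import tune
    from ray.tune.suggest.hebo import HEBOSearch

    config = {
        "lr": tune.loguniform(1e-4, 1e-1),     # -> {"type": "pow", "base": 10, ...}
        "layers": tune.randint(1, 8),          # -> {"type": "int", "lb": 1, "ub": 7}
        "act": tune.choice(["relu", "tanh"]),  # -> {"type": "cat", ...}
    }
    design_space = HEBOSearch.convert_search_space(config)  # a HEBO DesignSpace
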
Example #5
    def __init__(self,
                 space: Optional[Dict] = None,
                 metric: Optional[str] = None,
                 mode: Optional[str] = None,
                 points_to_evaluate: Optional[List[Dict]] = None,
                 utility_kwargs: Optional[Dict] = None,
                 random_state: int = 42,
                 random_search_steps: int = 10,
                 verbose: int = 0,
                 patience: int = 5,
                 skip_duplicate: bool = True,
                 analysis: Optional[ExperimentAnalysis] = None,
                 max_concurrent: Optional[int] = None,
                 use_early_stopped_trials: Optional[bool] = None):
        assert byo is not None, (
            "BayesOpt must be installed!. You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        if mode:
            assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
        self.max_concurrent = max_concurrent
        self._config_counter = defaultdict(int)
        self._patience = patience
        # int: Precision at which to hash values.
        self.repeat_float_precision = 5
        if self._patience <= 0:
            raise ValueError("patience must be set to a value greater than 0!")
        self._skip_duplicate = skip_duplicate
        super(BayesOptSearch, self).__init__(
            metric=metric,
            mode=mode,
            max_concurrent=max_concurrent,
            use_early_stopped_trials=use_early_stopped_trials)

        if utility_kwargs is None:
            # The default arguments are the same
            # as in the package BayesianOptimization
            utility_kwargs = dict(
                kind="ucb",
                kappa=2.576,
                xi=0.0,
            )

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.

        self._points_to_evaluate = points_to_evaluate

        self._live_trial_mapping = {}
        self._buffered_trial_results = []
        self.random_search_trials = random_search_steps
        self._total_random_search_trials = 0

        self.utility = byo.UtilityFunction(**utility_kwargs)

        self._analysis = analysis

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(
                        par="space", cls=type(self)))
                space = self.convert_search_space(space, join=True)

        self._space = space
        self._verbose = verbose
        self._random_state = random_state

        self.optimizer = None
        if space:
            self._setup_optimizer()
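
A constructor sketch for this revision, which accepts warm-start configurations via points_to_evaluate; the parameter name "w" is made up:

    from ray.tune.suggest.bayesopt import BayesOptSearch

    searcher = BayesOptSearch(
        metric="loss", mode="min",
        points_to_evaluate=[{"w": 0.2}, {"w": 0.8}],  # tried before Bayesian suggestions
        random_search_steps=10)
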
Example #6
    def convert_search_space(spec: Dict) -> Dict[str, Any]:
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if not domain_vars and not grid_vars:
            return {}

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to an Optuna search space.")

        # Flatten and resolve again after checking for grid search.
        spec = flatten_dict(spec, prevent_delimiter=True)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        def resolve_value(domain: Domain) -> ot.distributions.BaseDistribution:
            quantize = None

            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler
                if isinstance(sampler, LogUniform):
                    logger.warning(
                        "Optuna does not handle quantization in loguniform "
                        "sampling. The parameter will be passed but it will "
                        "probably be ignored.")

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    if quantize:
                        logger.warning(
                            "Optuna does not support both quantization and "
                            "sampling from LogUniform. Dropped quantization.")
                    return ot.distributions.LogUniformDistribution(
                        domain.lower, domain.upper)

                elif isinstance(sampler, Uniform):
                    if quantize:
                        return ot.distributions.DiscreteUniformDistribution(
                            domain.lower, domain.upper, quantize)
                    return ot.distributions.UniformDistribution(
                        domain.lower, domain.upper)

            elif isinstance(domain, Integer):
                if isinstance(sampler, LogUniform):
                    return ot.distributions.IntLogUniformDistribution(
                        domain.lower, domain.upper - 1, step=quantize or 1)
                elif isinstance(sampler, Uniform):
                    # Upper bound should be inclusive for quantization and
                    # exclusive otherwise
                    return ot.distributions.IntUniformDistribution(
                        domain.lower,
                        domain.upper - int(bool(not quantize)),
                        step=quantize or 1,
                    )
            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return ot.distributions.CategoricalDistribution(
                        domain.categories)

            raise ValueError(
                "Optuna search does not support parameters of type "
                "`{}` with samplers of type `{}`".format(
                    type(domain).__name__,
                    type(domain.sampler).__name__))

        # Parameter name is e.g. "a/b/c" for nested dicts
        values = {
            "/".join(path): resolve_value(domain)
            for path, domain in domain_vars
        }

        return values
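
A sketch of this dict-returning variant; nested keys are joined with "/" as the comment above notes:

    from ray import tune
    from ray.tune.suggest.optuna import OptunaSearch

    config = {
        "model": {"lr": tune.loguniform(1e-4, 1e-1)},  # nested dict -> key "model/lr"
        "batch_size": tune.qrandint(16, 256, 16),      # quantized: upper bound kept inclusive
    }
    values = OptunaSearch.convert_search_space(config)
    # {"model/lr": LogUniformDistribution(0.0001, 0.1),
    #  "batch_size": IntUniformDistribution(16, 256, step=16)}
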
Example #7
    def __init__(self,
                 space: Optional[Dict] = None,
                 metric: Optional[str] = None,
                 mode: Optional[str] = None,
                 utility_kwargs: Optional[Dict] = None,
                 random_state: int = 42,
                 random_search_steps: int = 10,
                 verbose: int = 0,
                 patience: int = 5,
                 skip_duplicate: bool = True,
                 analysis: Optional[ExperimentAnalysis] = None,
                 max_concurrent: Optional[int] = None,
                 use_early_stopped_trials: Optional[bool] = None):
        """Instantiate new BayesOptSearch object.

        Args:
            space (dict): Continuous search space.
                Parameters will be sampled from
                this space which will be used to run trials.
            metric (str): The training result objective value attribute.
            mode (str): One of {min, max}. Determines whether objective is
                minimizing or maximizing the metric attribute.
            utility_kwargs (dict): Parameters to define the utility function.
                Must provide values for the keys `kind`, `kappa`, and `xi`.
            random_state (int): Used to initialize BayesOpt.
            random_search_steps (int): Number of initial random searches.
                This is necessary to avoid initial local overfitting
                of the Bayesian process.
            patience (int): Must be > 0. If the optimizer suggests a set of
                hyperparameters more than 'patience' times,
                then the whole experiment will stop.
            skip_duplicate (bool): If true, BayesOptSearch will not create
                a trial with a previously seen set of hyperparameters. By
                default, floating values will be reduced to a digit precision
                of 5. You can override this by setting
                ``searcher.repeat_float_precision``.
            analysis (ExperimentAnalysis): Optionally, the previous analysis
                to integrate.
            verbose (int): Sets verbosity level for BayesOpt packages.
            max_concurrent: Deprecated.
            use_early_stopped_trials: Deprecated.
        """
        assert byo is not None, (
            "BayesOpt must be installed!. You can install BayesOpt with"
            " the command: `pip install bayesian-optimization`.")
        if mode:
            assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
        self.max_concurrent = max_concurrent
        self._config_counter = defaultdict(int)
        self._patience = patience
        # int: Precision at which to hash values.
        self.repeat_float_precision = 5
        if self._patience <= 0:
            raise ValueError("patience must be set to a value greater than 0!")
        self._skip_duplicate = skip_duplicate
        super(BayesOptSearch,
              self).__init__(metric=metric,
                             mode=mode,
                             max_concurrent=max_concurrent,
                             use_early_stopped_trials=use_early_stopped_trials)

        if utility_kwargs is None:
            # The default arguments are the same
            # as in the package BayesianOptimization
            utility_kwargs = dict(
                kind="ucb",
                kappa=2.576,
                xi=0.0,
            )

        if mode == "max":
            self._metric_op = 1.
        elif mode == "min":
            self._metric_op = -1.

        self._live_trial_mapping = {}
        self._buffered_trial_results = []
        self.random_search_trials = random_search_steps
        self._total_random_search_trials = 0

        self.utility = byo.UtilityFunction(**utility_kwargs)

        # Registering the provided analysis, if given
        if analysis is not None:
            self.register_analysis(analysis)

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(par="space",
                                                   cls=type(self)))
                space = self.convert_search_space(space, join=True)

        self._space = space
        self._verbose = verbose
        self._random_state = random_state

        self.optimizer = None
        if space:
            self._setup_optimizer()
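
A usage sketch matching the docstring above; the trainable and its "w" parameter are hypothetical, and BayesOpt handles continuous (Float) domains:

    from ray import tune
    from ray.tune.suggest.bayesopt import BayesOptSearch

    def trainable(config):  # hypothetical objective
        tune.report(mean_loss=(config["w"] - 0.5) ** 2)

    searcher = BayesOptSearch(
        metric="mean_loss", mode="min",
        utility_kwargs={"kind": "ucb", "kappa": 2.576, "xi": 0.0},
        random_search_steps=10)
    tune.run(trainable, config={"w": tune.uniform(0, 1)},
             search_alg=searcher, num_samples=20)
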
Example #8
    def convert_search_space(spec: Dict, prefix: str = "") -> Dict:
        spec = copy.deepcopy(spec)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if not domain_vars and not grid_vars:
            return {}

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to a HyperOpt search space.")

        def resolve_value(par: str, domain: Domain) -> Any:
            quantize = None

            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    if quantize:
                        return hpo.hp.qloguniform(par, domain.lower,
                                                  domain.upper, quantize)
                    return hpo.hp.loguniform(par, np.log(domain.lower),
                                             np.log(domain.upper))
                elif isinstance(sampler, Uniform):
                    if quantize:
                        return hpo.hp.quniform(par, domain.lower, domain.upper,
                                               quantize)
                    return hpo.hp.uniform(par, domain.lower, domain.upper)
                elif isinstance(sampler, Normal):
                    if quantize:
                        return hpo.hp.qnormal(par, sampler.mean, sampler.sd,
                                              quantize)
                    return hpo.hp.normal(par, sampler.mean, sampler.sd)

            elif isinstance(domain, Integer):
                if isinstance(sampler, Uniform):
                    if quantize:
                        logger.warning(
                            "HyperOpt does not support quantization for "
                            "integer values. Reverting back to 'randint'.")
                    return hpo.hp.randint(par, domain.lower, high=domain.upper)
            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return hpo.hp.choice(par, [
                        HyperOptSearch.convert_search_space(category,
                                                            prefix=par)
                        if isinstance(category, dict) else
                        HyperOptSearch.convert_search_space(
                            dict(enumerate(category)), prefix=f"{par}/{i}")
                        if isinstance(category, list) else
                        resolve_value(f"{par}/{i}", category) if isinstance(
                            category, Domain) else category
                        for i, category in enumerate(domain.categories)
                    ])

            raise ValueError("HyperOpt does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))

        for path, domain in domain_vars:
            par = "/".join(
                [str(p) for p in ((prefix, ) + path if prefix else path)])
            value = resolve_value(par, domain)
            assign_value(spec, path, value)

        return spec
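
The Categorical branch above recurses into nested dicts and lists, so conditional sub-spaces inside tune.choice work; a sketch under the same Ray 1.x assumptions:

    from ray import tune
    from ray.tune.suggest.hyperopt import HyperOptSearch

    config = {
        "optimizer": tune.choice([
            {"name": "sgd", "lr": tune.loguniform(1e-3, 1e-1)},   # dict: recursed with prefix
            {"name": "adam", "lr": tune.loguniform(1e-5, 1e-2)},
        ])
    }
    space = HyperOptSearch.convert_search_space(config)  # hyperopt expression via hp.choice
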
Example #9
    def __init__(self,
                 optimizer: Union[None, Optimizer, Type[Optimizer],
                                  ConfiguredOptimizer] = None,
                 space: Optional[Union[Dict, Parameter]] = None,
                 metric: Optional[str] = None,
                 mode: Optional[str] = None,
                 points_to_evaluate: Optional[List[Dict]] = None,
                 max_concurrent: Optional[int] = None,
                 **kwargs):
        assert ng is not None, """Nevergrad must be installed!
            You can install Nevergrad with the command:
            `pip install nevergrad`."""
        if mode:
            assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."

        super(NevergradSearch, self).__init__(metric=metric,
                                              mode=mode,
                                              max_concurrent=max_concurrent,
                                              **kwargs)

        self._space = None
        self._opt_factory = None
        self._nevergrad_opt = None

        if points_to_evaluate is None:
            self._points_to_evaluate = None
        elif not isinstance(points_to_evaluate, Sequence):
            raise ValueError(
                "Invalid object type passed for `points_to_evaluate`: "
                f"{type(points_to_evaluate)}. "
                "Please pass a list of points (dictionaries) instead.")
        else:
            self._points_to_evaluate = list(points_to_evaluate)

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(par="space",
                                                   cls=type(self)))
                space = self.convert_search_space(space)

        if isinstance(optimizer, Optimizer):
            if space is not None and not isinstance(space, list):
                raise ValueError(
                    "If you pass a configured optimizer to Nevergrad, either "
                    "pass a list of parameter names or None as the `space` "
                    "parameter.")
            self._parameters = space
            self._nevergrad_opt = optimizer
        elif (inspect.isclass(optimizer)
              and issubclass(optimizer, Optimizer)) or isinstance(
                  optimizer, ConfiguredOptimizer):
            self._opt_factory = optimizer
            self._parameters = None
            self._space = space
        else:
            raise ValueError(
                "The `optimizer` argument passed to NevergradSearch must be "
                "either an `Optimizer` or a `ConfiguredOptimizer`.")

        self._live_trial_mapping = {}
        self.max_concurrent = max_concurrent

        if self._nevergrad_opt or self._space:
            self._setup_nevergrad()
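
A sketch, assuming nevergrad is installed; passing an optimizer factory (rather than an instantiated Optimizer) lets the converted dict space be used, and the trainable is hypothetical:

    import nevergrad as ng
    from ray import tune
    from ray.tune.suggest.nevergrad import NevergradSearch

    def trainable(config):  # hypothetical objective
        tune.report(loss=config["lr"] ** 2)

    searcher = NevergradSearch(
        optimizer=ng.optimizers.OnePlusOne,  # factory, so a dict space is accepted
        metric="loss", mode="min")
    tune.run(trainable, config={"lr": tune.loguniform(1e-4, 1e-1)},
             search_alg=searcher, num_samples=20)
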
Example #10
    def convert_search_space(spec: Dict):
        spec = copy.deepcopy(spec)
        resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)

        if not domain_vars and not grid_vars:
            return []

        if grid_vars:
            raise ValueError(
                "Grid search parameters cannot be automatically converted "
                "to a HyperOpt search space.")

        def resolve_value(par, domain):
            quantize = None

            sampler = domain.get_sampler()
            if isinstance(sampler, Quantized):
                quantize = sampler.q
                sampler = sampler.sampler

            if isinstance(domain, Float):
                if isinstance(sampler, LogUniform):
                    if quantize:
                        return hpo.hp.qloguniform(par, domain.lower,
                                                  domain.upper, quantize)
                    return hpo.hp.loguniform(par, np.log(domain.lower),
                                             np.log(domain.upper))
                elif isinstance(sampler, Uniform):
                    if quantize:
                        return hpo.hp.quniform(par, domain.lower, domain.upper,
                                               quantize)
                    return hpo.hp.uniform(par, domain.lower, domain.upper)
                elif isinstance(sampler, Normal):
                    if quantize:
                        return hpo.hp.qnormal(par, sampler.mean, sampler.sd,
                                              quantize)
                    return hpo.hp.normal(par, sampler.mean, sampler.sd)

            elif isinstance(domain, Integer):
                if isinstance(sampler, Uniform):
                    if quantize:
                        logger.warning(
                            "HyperOpt does not support quantization for "
                            "integer values. Reverting back to 'randint'.")
                    if domain.lower != 0:
                        raise ValueError(
                            "HyperOpt only allows integer sampling with "
                            f"lower bound 0. Got: {domain.lower}.")
                    if domain.upper < 1:
                        raise ValueError(
                            "HyperOpt does not support integer sampling "
                            "of values lower than 0. Set your maximum range "
                            "to something above 0 (currently {})".format(
                                domain.upper))
                    return hpo.hp.randint(par, domain.upper)
            elif isinstance(domain, Categorical):
                if isinstance(sampler, Uniform):
                    return hpo.hp.choice(par, domain.categories)

            raise ValueError("HyperOpt does not support parameters of type "
                             "`{}` with samplers of type `{}`".format(
                                 type(domain).__name__,
                                 type(domain.sampler).__name__))

        for path, domain in domain_vars:
            par = "/".join(path)
            value = resolve_value(par, domain)
            assign_value(spec, path, value)

        return spec
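
Unlike Example #8, this older revision restricts integer domains to a lower bound of 0; a sketch of both sides of that check:

    from ray import tune
    from ray.tune.suggest.hyperopt import HyperOptSearch

    HyperOptSearch.convert_search_space({"n": tune.randint(0, 10)})    # ok: hp.randint("n", 10)
    # HyperOptSearch.convert_search_space({"n": tune.randint(1, 10)})  # ValueError in this revision
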
Example #11
    def __init__(self,
                 optimizer: Optional["sko.optimizer.Optimizer"] = None,
                 space: Optional[Union[List[str], Dict[str, Union[Tuple, List]]]] = None,
                 metric: Optional[str] = None,
                 mode: Optional[str] = None,
                 points_to_evaluate: Optional[List[Dict]] = None,
                 evaluated_rewards: Optional[List] = None,
                 convert_to_python: bool = True,
                 max_concurrent: Optional[int] = None,
                 use_early_stopped_trials: Optional[bool] = None):
        assert sko is not None, ("skopt must be installed! "
                                 "You can install Skopt with the command: "
                                 "`pip install scikit-optimize`.")

        if mode:
            assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
        self.max_concurrent = max_concurrent
        super(SkOptSearch,
              self).__init__(metric=metric,
                             mode=mode,
                             max_concurrent=max_concurrent,
                             use_early_stopped_trials=use_early_stopped_trials)

        self._initial_points = []
        self._parameters = None
        self._parameter_names = None
        self._parameter_ranges = None

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(par="space",
                                                   cls=type(self)))
                space = self.convert_search_space(space, join=True)

        self._space = space

        if self._space:
            if isinstance(optimizer, sko.Optimizer):
                if not isinstance(space, list):
                    raise ValueError(
                        "You passed an optimizer instance to SkOpt. Your "
                        "`space` parameter should be a list of parameter"
                        "names.")
                self._parameter_names = space
            else:
                self._parameter_names = list(space.keys())
                self._parameter_ranges = list(space.values())

        self._points_to_evaluate = copy.deepcopy(points_to_evaluate)

        self._evaluated_rewards = evaluated_rewards

        self._convert_to_python = convert_to_python

        self._skopt_opt = optimizer
        if self._skopt_opt or self._space:
            self._setup_skopt()

        self._live_trial_mapping = {}
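
A constructor sketch; when no optimizer instance is passed, space may be a dict mapping names to ranges (tuples) or categories (lists), and the values here are made up:

    from ray.tune.suggest.skopt import SkOptSearch

    searcher = SkOptSearch(
        metric="loss", mode="min",
        space={"lr": (1e-4, 1e-1),               # tuple -> real interval
               "activation": ["relu", "tanh"]},  # list -> categorical
        points_to_evaluate=[{"lr": 1e-2, "activation": "relu"}])
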
Example #12
    def __init__(
        self,
        space: Optional[
            Union[Dict, "hebo.design_space.design_space.DesignSpace"]
        ] = None,
        metric: Optional[str] = None,
        mode: Optional[str] = None,
        points_to_evaluate: Optional[List[Dict]] = None,
        evaluated_rewards: Optional[List] = None,
        random_state_seed: Optional[int] = None,
        max_concurrent: int = 8,
        **kwargs,
    ):
        assert hebo is not None, (
            "HEBO must be installed! You can install HEBO with"
            " the command: `pip install 'HEBO>=0.2.0'`."
            "This error may also be caused if HEBO"
            " dependencies have bad versions. Try updating HEBO"
            " first."
        )
        if mode:
            assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
        assert (
            isinstance(max_concurrent, int) and max_concurrent >= 1
        ), "`max_concurrent` must be an integer and at least 1."
        if random_state_seed is not None:
            assert isinstance(
                random_state_seed, int
            ), "random_state_seed must be None or int, got '{}'.".format(
                type(random_state_seed)
            )
        super(HEBOSearch, self).__init__(metric=metric, mode=mode)

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if resolved_vars:
                raise TypeError(SPACE_ERROR_MESSAGE)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(par="space", cls=type(self))
                )
                space = self.convert_search_space(space)
        elif space is not None and not isinstance(
            space, hebo.design_space.design_space.DesignSpace
        ):
            raise TypeError(SPACE_ERROR_MESSAGE + " Got {}.".format(type(space)))

        self._hebo_config = kwargs
        self._random_state_seed = random_state_seed
        self._space = space
        self._points_to_evaluate = points_to_evaluate
        self._evaluated_rewards = evaluated_rewards
        self._initial_points = []
        self._live_trial_mapping = {}

        self._max_concurrent = max_concurrent
        self._suggestions_cache = []
        self._batch_filled = False

        self._opt = None
        if space:
            self._setup_optimizer()
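
A constructor sketch; the trainable is hypothetical, the space is taken from the tune.run config, and random_state_seed makes suggestions reproducible:

    from ray import tune
    from ray.tune.suggest.hebo import HEBOSearch

    def trainable(config):  # hypothetical objective
        tune.report(loss=config["lr"] ** 2)

    searcher = HEBOSearch(metric="loss", mode="min",
                          random_state_seed=42, max_concurrent=8)
    tune.run(trainable, config={"lr": tune.loguniform(1e-4, 1e-1)},
             search_alg=searcher, num_samples=32)
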
Example #13
    def __init__(self,
                 space: Optional[Union[
                     Dict[str, "OptunaDistribution"],
                     List[Tuple],
                     Callable[["OptunaTrial"], Optional[Dict[str, Any]]],
                 ]] = None,
                 metric: Optional[str] = None,
                 mode: Optional[str] = None,
                 points_to_evaluate: Optional[List[Dict]] = None,
                 sampler: Optional["BaseSampler"] = None,
                 seed: Optional[int] = None,
                 evaluated_rewards: Optional[List] = None):
        assert ot is not None, (
            "Optuna must be installed! Run `pip install optuna`.")
        super(OptunaSearch, self).__init__(metric=metric,
                                           mode=mode,
                                           max_concurrent=None,
                                           use_early_stopped_trials=None)

        if isinstance(space, dict) and space:
            resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
            if domain_vars or grid_vars:
                logger.warning(
                    UNRESOLVED_SEARCH_SPACE.format(par="space",
                                                   cls=type(self).__name__))
                space = self.convert_search_space(space)
            else:
                # Flatten to support nested dicts
                space = flatten_dict(space, "/")

        # Deprecate: 1.5
        if isinstance(space, list):
            logger.warning(
                "Passing lists of `param.suggest_*()` calls to OptunaSearch "
                "as a search space is deprecated and will be removed in "
                "a future release of Ray. Please pass a dict mapping "
                "to `optuna.distributions` objects instead.")

        self._space = space

        self._points_to_evaluate = points_to_evaluate or []
        self._evaluated_rewards = evaluated_rewards

        self._study_name = "optuna"  # Fixed study name for in-memory storage

        if sampler and seed:
            logger.warning(
                "You passed an initialized sampler to `OptunaSearch`. The "
                "`seed` parameter has to be passed to the sampler directly "
                "and will be ignored.")

        self._sampler = sampler or ot.samplers.TPESampler(seed=seed)

        assert isinstance(self._sampler, BaseSampler), \
            "You can only pass an instance of `optuna.samplers.BaseSampler` " \
            "as a sampler to `OptunaSearcher`."

        self._ot_trials = {}
        self._ot_study = None
        if self._space:
            self._setup_study(mode)
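
Per the deprecation warning above, the preferred space format here is a dict of optuna.distributions objects; a sketch, assuming an Optuna version contemporary with this snippet (pre-3.0, where these distribution classes exist):

    import optuna
    from ray.tune.suggest.optuna import OptunaSearch

    space = {
        "lr": optuna.distributions.LogUniformDistribution(1e-4, 1e-1),
        "units": optuna.distributions.IntUniformDistribution(32, 512),
    }
    searcher = OptunaSearch(space, metric="loss", mode="min", seed=42)
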