Example #1
    def _internal_tell_not_asked(self, candidate: p.Parameter,
                                 loss: float) -> None:
        next_id = self.trials.new_trial_ids(1)
        new_trial = hyperopt.rand.suggest(next_id, self.domain, self.trials,
                                          self._rng.randint(2**31 - 1))
        self.trials.insert_trial_docs(new_trial)
        self.trials.refresh()
        tid = next_id[0]

        if self._transform:
            data = candidate.get_standardized_data(
                reference=self.parametrization)
            data = self._transform.forward(data)
            self.trials._dynamic_trials[tid]["misc"]["vals"] = {
                f"x_{i}": [data[i]]
                for i in range(len(data))
            }
        else:
            null_config: dict = {
                k: []
                for k in self.trials._dynamic_trials[tid]["misc"]
                ["vals"].keys()
            }
            new_vals: dict = _hp_parametrization_to_dict(candidate,
                                                         default=null_config)
            self.trials._dynamic_trials[tid]["misc"]["vals"] = new_vals

        self.trials.refresh()
        candidate._meta["trial_id"] = tid
        self._internal_tell_candidate(candidate, loss)
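
For context, `_internal_tell_not_asked` is only reached when the user tells a candidate that did not come from `ask()`. A minimal sketch of that flow from the caller's side, assuming the standard Nevergrad API (`OnePlusOne` here stands in for any registered optimizer, including the hyperopt wrapper above):

import nevergrad as ng

param = ng.p.Array(shape=(2,))
optimizer = ng.optimizers.OnePlusOne(parametrization=param, budget=50)

# a candidate the optimizer never produced via ask(): telling it is routed
# through _internal_tell_not_asked, as in the method above
candidate = optimizer.parametrization.spawn_child(new_value=[0.5, -0.5])
optimizer.tell(candidate, 1.23)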
Example #2
def _constraint_solver(parameter: p.Parameter, budget: int) -> p.Parameter:
    """Runs a suboptimization to solve the parameter constraints"""
    parameter_without_constraint = parameter.copy()
    parameter_without_constraint._constraint_checkers.clear()
    opt = registry["OnePlusOne"](parameter_without_constraint,
                                 num_workers=1,
                                 budget=budget)
    for _ in range(budget):
        cand = opt.ask()
        # Our objective function is minimal for the point closest to
        # the original candidate that satisfies the constraints.
        penalty = sum(
            utils._float_penalty(func(cand.value))
            for func in parameter._constraint_checkers)

        # TODO: this may not scale well with dimension
        distance = np.tanh(
            np.sum(cand.get_standardized_data(reference=parameter)**2))
        # TODO: because of the return whenever constraints are satisfied, the first case never arises
        loss = distance if penalty <= 0 else penalty + distance + 1.0
        opt.tell(cand, loss)
        if penalty <= 0:  # constraints are satisfied
            break
    data = opt.recommend().get_standardized_data(
        reference=parameter_without_constraint)
    return parameter.spawn_child().set_standardized_data(data)
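
This suboptimization kicks in when a parametrization carries cheap constraints. A hedged sketch of how such a constraint is registered on the user side with the public `register_cheap_constraint` API; whether violations are repaired via a suboptimization like the one above or via penalties is an internal, version-dependent detail:

import nevergrad as ng

param = ng.p.Array(shape=(2,))
# cheap constraint: returns True when satisfied, False otherwise
param.register_cheap_constraint(lambda x: float(x[0] + x[1]) <= 1.0)

optimizer = ng.optimizers.OnePlusOne(parametrization=param, budget=100)
recommendation = optimizer.minimize(lambda x: float(((x - 0.3) ** 2).sum()))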
Example #3
    def tell(self, candidate: p.Parameter, value: float) -> None:
        """Provides the optimizer with the evaluation of a fitness value for a candidate.

        Parameters
        ----------
        candidate: p.Parameter
            the candidate (point) that was evaluated
        value: float
            value of the function at this candidate

        Note
        ----
        The candidate should generally be one provided by `ask()`, but can also be
        a non-asked candidate. To create a p.Parameter instance from args and kwargs,
        you can use `optimizer.create_candidate.from_call(*args, **kwargs)`.
        """
        if not isinstance(candidate, p.Parameter):
            raise TypeError(
                "'tell' must be provided with the candidate (use optimizer.create_candidate.from_call(*args, **kwargs)) "
                "if you want to inoculate a point that as not been asked for"
            )
        candidate.freeze()  # make sure it is not modified somewhere
        # call callbacks for logging etc...
        for callback in self._callbacks.get("tell", []):
            callback(self, candidate, value)
        data = candidate.get_standardized_data(reference=self.parametrization)
        self._update_archive_and_bests(data, value)
        if candidate.uid in self._asked:
            self._internal_tell_candidate(candidate, value)
            self._asked.remove(candidate.uid)
        else:
            self._internal_tell_not_asked(candidate, value)
            self._num_tell_not_asked += 1
        self._num_tell += 1
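
The usual path is the `ask`/`tell` loop, where candidates come from `ask()` and are dispatched to `_internal_tell_candidate`. A minimal sketch with a simple quadratic objective:

import nevergrad as ng

def objective(x):
    return float(((x - 0.5) ** 2).sum())

optimizer = ng.optimizers.OnePlusOne(parametrization=ng.p.Array(shape=(3,)), budget=100)
for _ in range(optimizer.budget):
    candidate = optimizer.ask()        # uid is recorded in optimizer._asked
    loss = objective(*candidate.args)  # for an Array parametrization, args == (value,)
    optimizer.tell(candidate, loss)    # dispatches to _internal_tell_candidate
recommendation = optimizer.recommend()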
Example #4
 def __init__(self, parameter: p.Parameter, y: float, *,
              reference: p.Parameter) -> None:
     self.count = 1
     self.mean = y
     self.square = y * y
     # TODO May be safer to use a default variance which depends on y for scale invariance?
     self.variance = 1.e6
     parameter.freeze()
     self.parameter = parameter
     self._ref = reference
 def _internal_tell_not_asked(self, candidate: p.Parameter, value: float) -> None:
     candidate._meta["value"] = value
     worst: tp.Optional[p.Parameter] = None
      if len(self.population) >= self.llambda:
         worst = max(self.population.values(), key=lambda p: p._meta.get("value", float("inf")))
         if worst._meta.get("value", float("inf")) < value:
             return  # no need to update
         else:
             uid = worst.heritage["lineage"]
             del self.population[uid]
             self._uid_queue.discard(uid)
     candidate.heritage["lineage"] = candidate.uid  # new lineage
     self.population[candidate.uid] = candidate
     self._uid_queue.tell(candidate.uid)
Example #6
 def _internal_tell_candidate(self, candidate: p.Parameter,
                              value: float) -> None:
     candidate._meta["value"] = value
     if self._config.offsprings is None:
         uid = candidate.heritage["lineage"]
         self._uid_queue.tell(uid)
          parent_value = (float('inf') if uid not in self._population
                          else self._population[uid]._meta["value"])
         if value < parent_value:
             self._population[uid] = candidate
     else:
          if (candidate.parents_uids[0] not in self._population
                  and len(self._population) < self._config.popsize):
             self._population[candidate.uid] = candidate
             self._uid_queue.tell(candidate.uid)
         else:
             self._waiting.append(candidate)
         if len(self._waiting) >= self._config.offsprings:
              choices = self._waiting + ([] if self._config.only_offsprings else list(self._population.values()))
             choices.sort(key=lambda x: x._meta["value"])
              self._population = {x.uid: x for x in choices[:self._config.popsize]}
             self._uid_queue.clear()
             self._waiting.clear()
             for uid in self._population:
                 self._uid_queue.tell(uid)
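
The selection step above is plain truncation selection over parents and offsprings. A standalone sketch of the same rule with hypothetical stand-in names (no Nevergrad internals involved), to make the (mu, lambda) vs (mu + lambda) distinction explicit:

from typing import Dict, List, Tuple

def truncation_select(population: Dict[str, float], waiting: List[Tuple[str, float]],
                      popsize: int, only_offsprings: bool) -> Dict[str, float]:
    # candidates are (uid, loss) pairs; keep the popsize best, optionally
    # discarding the parents entirely ("comma" selection) as in the code above
    choices = list(waiting) + ([] if only_offsprings else list(population.items()))
    choices.sort(key=lambda item: item[1])
    return dict(choices[:popsize])

# (mu, lambda): parents are dropped, only offsprings compete
print(truncation_select({"p1": 0.5}, [("c1", 0.2), ("c2", 0.9)], popsize=2, only_offsprings=True))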
Example #7
 def _internal_tell_not_asked(self, candidate: p.Parameter,
                              loss: tp.FloatLoss) -> None:
     discardable: tp.Optional[str] = None
     if len(self.population) >= self.llambda:
         if self.num_objectives == 1:  # monoobjective: replace if better
             worst = max(self.population.values(), key=base._loss)
             if loss < base._loss(worst):
                 discardable = worst.heritage["lineage"]
         else:  # multiobjective: replace if in pareto and some parents are not
             pareto_uids = {c.uid for c in self.pareto_front()}
             if candidate.uid in pareto_uids:
                  non_pareto_pop = {c.uid for c in self.population.values()} - pareto_uids
                  if non_pareto_pop:
                      nonpareto = {c.uid: c for c in self.population.values()}[list(non_pareto_pop)[0]]
                     discardable = nonpareto.heritage["lineage"]
     if discardable is not None:  # if we found a point to kick, kick it
         del self.population[discardable]
         self._uid_queue.discard(discardable)
      if len(self.population) < self.llambda:  # if there is space, add the new point
         candidate.heritage["lineage"] = candidate.uid  # new lineage
         self.population[candidate.uid] = candidate
         self._uid_queue.tell(candidate.uid)
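
The multiobjective branch relies on the current Pareto front, which is also exposed to users. A hedged sketch of a two-objective run that reads the front back afterwards, assuming a DE-style optimizer with the behaviour above:

import nevergrad as ng

optimizer = ng.optimizers.DE(parametrization=ng.p.Array(shape=(2,)), budget=100)
for _ in range(optimizer.budget):
    candidate = optimizer.ask()
    x = candidate.value
    # two competing objectives -> the optimizer maintains a Pareto front
    optimizer.tell(candidate, [float((x ** 2).sum()), float(((x - 1.0) ** 2).sum())])
pareto = optimizer.pareto_front()  # list of parameters, each carrying its losses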
Example #8
    def tell(self, candidate: p.Parameter, value: float) -> None:
        """Provides the optimizer with the evaluation of a fitness value for a candidate.

        Parameters
        ----------
        candidate: p.Parameter
            the candidate (point) that was evaluated
        value: float
            value of the function at this candidate

        Note
        ----
        The candidate should generally be one provided by :code:`ask()`, but can also be
        a non-asked candidate. To create a p.Parameter instance from args and kwargs,
        you can use :code:`candidate = optimizer.parametrization.spawn_child(new_value=your_value)`:

        - for an :code:`Array(shape=(2,))`: :code:`optimizer.parametrization.spawn_child(new_value=[12, 12])`

        - for an :code:`Instrumentation`: :code:`optimizer.parametrization.spawn_child(new_value=(args, kwargs))`

        Alternatively, you can provide a suggestion with :code:`optimizer.suggest(*args, **kwargs)`, the next :code:`ask`
        will use this suggestion.
        """
        if not isinstance(candidate, p.Parameter):
            raise TypeError(
                "'tell' must be provided with the candidate.\n"
                "Use optimizer.parametrization.spawn_child(new_value)) if you want to "
                "create a candidate that as not been asked for, "
                "or optimizer.suggest(*args, **kwargs) to suggest a point that should be used for "
                "the next ask"
            )
        candidate.loss = value
        candidate.freeze()  # make sure it is not modified somewhere
        # call callbacks for logging etc...
        for callback in self._callbacks.get("tell", []):
            callback(self, candidate, value)
        self._update_archive_and_bests(candidate, value)
        if candidate.uid in self._asked:
            self._internal_tell_candidate(candidate, value)
            self._asked.remove(candidate.uid)
        else:
            self._internal_tell_not_asked(candidate, value)
            self._num_tell_not_asked += 1
        self._num_tell += 1
 def _internal_tell_candidate(self, candidate: p.Parameter, value: float) -> None:
     uid = candidate.heritage["lineage"]
     self._uid_queue.tell(uid)
     candidate._meta["value"] = value
     if uid not in self.population:
         self._internal_tell_not_asked(candidate, value)
         return
     parent_value = self.population[uid]._meta.get("value", float("inf"))
     if value <= parent_value:
         self.population[uid] = candidate
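
The docstring above mentions `optimizer.suggest(*args, **kwargs)` as the alternative to spawning children by hand; a minimal sketch of that route:

import nevergrad as ng

optimizer = ng.optimizers.OnePlusOne(parametrization=ng.p.Array(shape=(2,)), budget=20)
optimizer.suggest([0.1, 0.2])   # the next ask() will produce this point
candidate = optimizer.ask()
optimizer.tell(candidate, 0.5)  # counted as an asked candidate, not a "not asked" one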
Example #10
 def _internal_tell_candidate(self, candidate: p.Parameter, value: float) -> None:
     """Returns value for a point which was "asked"
     (none asked point cannot be "tell")
     """
     x = candidate.get_standardized_data(reference=self.parametrization)
     assert self._messaging_thread is not None, 'Start by using "ask" method, instead of "tell" method'
     if not self._messaging_thread.is_alive():  # optimizer is done
         self._check_error()
         return
     messages = [m for m in self._messaging_thread.messages if m.meta.get("asked", False) and not m.done]
     messages = [m for m in messages if m.meta["uid"] == candidate.uid]
     if not messages:
         raise RuntimeError(f"No message for evaluated point {x}: {self._messaging_thread.messages}")
     messages[0].result = value  # post the value, and the thread will deal with it
 def _internal_tell_not_asked(self, candidate: p.Parameter,
                              loss: tp.FloatLoss) -> None:
     worst: tp.Optional[p.Parameter] = None
     if len(self.population) >= self.llambda:
         worst = max(self.population.values(), key=base._loss)
         if worst.loss < loss:  # type: ignore
             return  # no need to update
         else:
             uid = worst.heritage["lineage"]
             del self.population[uid]
             self._uid_queue.discard(uid)
     candidate.heritage["lineage"] = candidate.uid  # new lineage
     self.population[candidate.uid] = candidate
     self._uid_queue.tell(candidate.uid)
Example #12
 def _internal_tell_candidate(self, candidate: p.Parameter, value: tp.FloatLoss) -> None:
     candidate._meta["value"] = value
     if self._config.offsprings is None:
         uid = candidate.heritage["lineage"]
         self._uid_queue.tell(uid)
         parent_value = float('inf') if uid not in self._population else self._population[uid]._meta["value"]
         if value < parent_value:
             self._population[uid] = candidate
     else:
         if candidate.parents_uids[0] not in self._population and len(self._population) < self._config.popsize:
             self._population[candidate.uid] = candidate
             self._uid_queue.tell(candidate.uid)
         else:
             self._waiting.append(candidate)
         if len(self._waiting) >= self._config.offsprings:
             self._select()
Example #13
    def _update_archive_and_bests(self, candidate: p.Parameter,
                                  loss: tp.FloatLoss) -> None:
        x = candidate.get_standardized_data(reference=self.parametrization)
        # using "float" along with "Real" because mypy does not understand "Real" for now (Issue #3186)
        if not isinstance(loss, (Real, float)):
            raise TypeError(
                f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}).'
            )
        if np.isnan(loss) or loss == np.inf:
            warnings.warn(f"Updating fitness with {loss} value",
                          errors.BadLossWarning)
        mvalue: tp.Optional[utils.MultiValue] = None
        if x not in self.archive:
            self.archive[x] = utils.MultiValue(candidate,
                                               loss,
                                               reference=self.parametrization)
        else:
            mvalue = self.archive[x]
            mvalue.add_evaluation(loss)
            # both parameters should be non-None
            if mvalue.parameter.loss > candidate.loss:  # type: ignore
                mvalue.parameter = candidate  # keep best candidate
        # update current best records
        # this may have to be improved if we want to keep more kinds of best losses

        for name in self.current_bests:
            if mvalue is self.current_bests[name]:  # reboot
                best = min(self.archive.values(),
                           key=lambda mv, n=name: mv.get_estimation(n))  # type: ignore
                # rebuild: the best point may have changed, and its recorded value did not track the update anyway
                self.current_bests[name] = best
            else:
                if self.archive[x].get_estimation(name) <= self.current_bests[name].get_estimation(name):
                    self.current_bests[name] = self.archive[x]
                # deactivated checks
                # if not (np.isnan(loss) or loss == np.inf):
                #     if not self.current_bests[name].x in self.archive:
                #         bval = self.current_bests[name].get_estimation(name)
                #         avals = (min(v.get_estimation(name) for v in self.archive.values()),
                #                  max(v.get_estimation(name) for v in self.archive.values()))
                #         raise RuntimeError(f"Best value should exist in the archive at num_tell={self.num_tell})\n"
                #                            f"Best value is {bval} and archive is within range {avals} for {name}")
        if self.pruning is not None:
            self.archive = self.pruning(self.archive)
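
A consequence of this archive bookkeeping is that repeated evaluations of the same point are aggregated into one `MultiValue` (the `__init__` shown in Example #4, updated via `add_evaluation`). A hedged sketch of what this looks like from the outside, assuming the default pruning has not dropped the entry:

import nevergrad as ng

optimizer = ng.optimizers.OnePlusOne(parametrization=ng.p.Array(shape=(2,)), budget=10)
candidate = optimizer.ask()
optimizer.tell(candidate, 1.0)
optimizer.tell(candidate, 3.0)  # same point told again, e.g. a noisy re-evaluation
mv = next(iter(optimizer.archive.values()))
print(mv.count, mv.mean)        # 2 evaluations aggregated, mean 2.0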
Example #14
 def __init__(self, reference: p.Parameter) -> None:
     self.reference = reference.spawn_child()
     self.reference.freeze()
     # initial check
     parameter = self.reference.spawn_child()
     parameter.set_standardized_data(
         np.linspace(-1, 1, self.reference.dimension))
     expected = parameter.get_standardized_data(reference=self.reference)
     self._ref_arrays = self.list_arrays(self.reference)
     arrays = self.list_arrays(parameter)
      check = np.concatenate(
          [x.get_standardized_data(reference=y) for x, y in zip(arrays, self._ref_arrays)],
          axis=0)
     self.working = True
     if not np.allclose(check, expected):
         self.working = False
         self._warn()
Example #15
def _reset_copy(obj: p.Parameter) -> p.Parameter:
    """Copy a parameter and resets its random state to obtain variability"""
    out = obj.copy()
    out._set_random_state(None)  # propagates None to sub-parameters
    return out
Example #16
def _reset_copy(obj: p.Parameter) -> p.Parameter:
    """Copy a parameter and resets its random state to obtain variability"""
    out = obj.copy()
    out.random_state = None
    return out
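
Both variants exist because a plain copy would reuse the parent's random state, so sampling from it would not add variability. A small hedged sketch of the reset, using only the public attributes shown in Example #16:

import nevergrad as ng

param = ng.p.Array(shape=(3,))
clone = param.copy()
clone.random_state = None   # drops the copied state; a fresh one is created lazily on next use
sample = clone.sample()     # drawn with the new, independent random state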
Example #17
 def _internal_tell_candidate(self, candidate: p.Parameter,
                              loss: tp.FloatLoss) -> None:
     """Called whenever calling :code:`tell` on a candidate that was "asked"."""
     data = candidate.get_standardized_data(reference=self.parametrization)
     self._internal_tell(data, loss)
Example #18
    def tell(self, candidate: p.Parameter, loss: tp.Loss) -> None:
        """Provides the optimizer with the evaluation of a fitness value for a candidate.

        Parameters
        ----------
        candidate: p.Parameter
            the candidate (point) that was evaluated
        loss: float/list/np.ndarray
            loss of the function (or losses of the multi-objective function)

        Note
        ----
        The candidate should generally be one provided by :code:`ask()`, but can also be
        a non-asked candidate. To create a p.Parameter instance from args and kwargs,
        you can use :code:`candidate = optimizer.parametrization.spawn_child(new_value=your_value)`:

        - for an :code:`Array(shape=(2,))`: :code:`optimizer.parametrization.spawn_child(new_value=[12, 12])`

        - for an :code:`Instrumentation`: :code:`optimizer.parametrization.spawn_child(new_value=(args, kwargs))`

        Alternatively, you can provide a suggestion with :code:`optimizer.suggest(*args, **kwargs)`, the next :code:`ask`
        will use this suggestion.
        """
        # Check loss type
        if isinstance(loss, (Real, float)):
            # using "float" along "Real" because mypy does not understand "Real" for now Issue #3186
            loss = float(loss)
            # Non-sense values including NaNs should not be accepted.
            # We do not use max-float as various later transformations could lead to greater values.
            if not loss < 5.0e20:  # pylint: disable=unneeded-not
                warnings.warn(
                    f"Clipping very high value {loss} in tell (rescale the cost function?).",
                    errors.LossTooLargeWarning,
                )
                loss = 5.0e20  # sys.float_info.max leads to numerical problems so let us do this.
        elif isinstance(loss, (tuple, list, np.ndarray)):
            loss = np.array(loss, copy=False, dtype=float).ravel() if len(loss) != 1 else loss[0]
        elif not isinstance(loss, np.ndarray):
            raise TypeError(
                f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}.'
            )
        # check Parameter
        if not isinstance(candidate, p.Parameter):
            raise TypeError(
                "'tell' must be provided with the candidate.\n"
                "Use optimizer.parametrization.spawn_child(new_value)) if you want to "
                "create a candidate that as not been asked for, "
                "or optimizer.suggest(*args, **kwargs) to suggest a point that should be used for "
                "the next ask")
        # check loss length
        self.num_objectives = 1 if isinstance(loss, float) else loss.size
        # checks are done, start processing
        candidate.freeze()  # make sure it is not modified somewhere
        # add reference if provided
        if isinstance(candidate, p.MultiobjectiveReference):
            if self._hypervolume_pareto is not None:
                raise RuntimeError(
                    "MultiobjectiveReference can only be provided before the first tell."
                )
            if not isinstance(loss, np.ndarray):
                raise RuntimeError(
                    "MultiobjectiveReference must only be used for multiobjective losses"
                )
            self._hypervolume_pareto = mobj.HypervolumePareto(
                upper_bounds=loss, seed=self._rng)
            if candidate.value is None:
                return  # no value, so stopping processing there
            candidate = candidate.value
        # preprocess multiobjective loss
        if isinstance(loss, np.ndarray):
            candidate._losses = loss
        if not isinstance(loss, float):
            loss = self._preprocess_multiobjective(candidate)
        # call callbacks for logging etc...
        candidate.loss = loss
        assert isinstance(loss, float)
        for callback in self._callbacks.get("tell", []):
            # multiobjective reference is not handled :s
            # but this allows obtaining both scalar and multiobjective loss (through losses)
            callback(self, candidate, loss)
        if not candidate.satisfies_constraints() and self.budget is not None:
            penalty = self._constraints_manager.penalty(
                candidate, self.num_ask, self.budget)
            loss = loss + penalty
        if isinstance(loss, float):
            self._update_archive_and_bests(candidate, loss)
        if candidate.uid in self._asked:
            self._internal_tell_candidate(candidate, loss)
            self._asked.remove(candidate.uid)
        else:
            self._internal_tell_not_asked(candidate, loss)
            self._num_tell_not_asked += 1
        self._num_tell += 1
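
The `MultiobjectiveReference` branch lets the caller fix the hypervolume upper bounds before the first regular tell. A hedged usage sketch (the reference carries no candidate here, so the method returns right after storing the bounds):

import nevergrad as ng

optimizer = ng.optimizers.DE(parametrization=ng.p.Array(shape=(2,)), budget=100)
# fix the hypervolume upper bounds for both objectives before the first regular tell
optimizer.tell(ng.p.MultiobjectiveReference(), [10.0, 10.0])
for _ in range(optimizer.budget):
    candidate = optimizer.ask()
    x = candidate.value
    optimizer.tell(candidate, [float((x ** 2).sum()), float(((x - 1.0) ** 2).sum())])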
Example #19
 def _internal_tell_candidate(self, candidate: p.Parameter, value: float) -> None:
     """Called whenever calling "tell" on a candidate that was "asked".
     """
     data = candidate.get_standardized_data(reference=self.parametrization)
     self._internal_tell(data, value)
Example #20
    def tell(self, candidate: p.Parameter, loss: tp.Loss) -> None:
        """Provides the optimizer with the evaluation of a fitness value for a candidate.

        Parameters
        ----------
        candidate: p.Parameter
            the candidate (point) that was evaluated
        loss: float/list/np.ndarray
            loss of the function (or losses of the multi-objective function)

        Note
        ----
        The candidate should generally be one provided by :code:`ask()`, but can also be
        a non-asked candidate. To create a p.Parameter instance from args and kwargs,
        you can use :code:`candidate = optimizer.parametrization.spawn_child(new_value=your_value)`:

        - for an :code:`Array(shape=(2,))`: :code:`optimizer.parametrization.spawn_child(new_value=[12, 12])`

        - for an :code:`Instrumentation`: :code:`optimizer.parametrization.spawn_child(new_value=(args, kwargs))`

        Alternatively, you can provide a suggestion with :code:`optimizer.suggest(*args, **kwargs)`, the next :code:`ask`
        will use this suggestion.
        """
        # Check loss type
        if isinstance(loss, (Real, float)):
            # using "float" along "Real" because mypy does not understand "Real" for now Issue #3186
            loss = float(loss)
        elif isinstance(loss, (tuple, list, np.ndarray)):
            loss = np.array(loss, copy=False, dtype=float).ravel() if len(loss) != 1 else loss[0]
        elif not isinstance(loss, np.ndarray):
            raise TypeError(
                f'"tell" method only supports float values but the passed loss was: {loss} (type: {type(loss)}.'
            )
        # check loss length
        if self.num_tell:
            expected = self.num_objectives
            actual = 1 if isinstance(loss, float) else loss.size
            if actual != expected:
                raise ValueError(f"Expected {expected} loss(es) (like previous ones) but received {actual}.")
        # check Parameter
        if not isinstance(candidate, p.Parameter):
            raise TypeError(
                "'tell' must be provided with the candidate.\n"
                "Use optimizer.parametrization.spawn_child(new_value)) if you want to "
                "create a candidate that as not been asked for, "
                "or optimizer.suggest(*args, **kwargs) to suggest a point that should be used for "
                "the next ask"
            )
        # checks are done, start processing
        candidate.freeze()  # make sure it is not modified somewhere
        self._first_tell_done = True
        # add reference if provided
        if isinstance(candidate, p.MultiobjectiveReference):
            if self._hypervolume_pareto is not None:
                raise RuntimeError("MultiobjectiveReference can only be provided before the first tell.")
            if not isinstance(loss, np.ndarray):
                raise RuntimeError("MultiobjectiveReference must only be used for multiobjective losses")
            self._hypervolume_pareto = mobj.HypervolumePareto(upper_bounds=loss)
            if candidate.value is None:
                return
            candidate = candidate.value
        # preprocess multiobjective loss
        if isinstance(loss, np.ndarray):
            candidate._losses = loss
        if not isinstance(loss, float):
            loss = self._preprocess_multiobjective(candidate)
        # call callbacks for logging etc...
        candidate.loss = loss
        assert isinstance(loss, float)
        for callback in self._callbacks.get("tell", []):
            # multiobjective reference is not handled :s
            # but this allows obtaining both scalar and multiobjective loss (through losses)
            callback(self, candidate, loss)
        if isinstance(loss, float):
            self._update_archive_and_bests(candidate, loss)
        if candidate.uid in self._asked:
            self._internal_tell_candidate(candidate, loss)
            self._asked.remove(candidate.uid)
        else:
            self._internal_tell_not_asked(candidate, loss)
            self._num_tell_not_asked += 1
        self._num_tell += 1
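
Both `tell` variants invoke the registered "tell" callbacks before dispatching to the internal methods. A minimal sketch of hooking one in via the public `register_callback` API; a plain function with the `(optimizer, candidate, loss)` signature shown above is enough:

import nevergrad as ng

def log_tell(opt, candidate, loss):
    # invoked on every tell, once the loss has been preprocessed to a scalar
    print(f"tell #{opt.num_tell}: loss={loss:.4f}")

optimizer = ng.optimizers.OnePlusOne(parametrization=ng.p.Array(shape=(2,)), budget=20)
optimizer.register_callback("tell", log_tell)
optimizer.minimize(lambda x: float((x ** 2).sum()))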