def _find_similar_experiments(self):
        """After locating similar experiments by way of the parent's :meth:`_find_similar_experiments`, fit :attr:`optimizer`
        with the hyperparameters and results of each located experiment"""
        super()._find_similar_experiments()

        for _i, _experiment in enumerate(self.similar_experiments[::-1]):
            _hyperparameters = dimension_subset(
                _experiment[0], self.hyperparameter_space.get_names())
            _evaluation = _experiment[1]
            _experiment_id = _experiment[2] if len(_experiment) > 2 else None
            self.logger.print_result(_hyperparameters,
                                     _evaluation,
                                     experiment_id=_experiment_id)

            # FLAG: Resolve switching between below options depending on `target_metric`
            # self.optimizer_result = self.optimizer.tell(_hyperparameters, _evaluation)
            self.optimizer_result = self.optimizer.tell(
                _hyperparameters, -_evaluation)
            # FLAG: Resolve switching between above options depending on `target_metric`

            # self.optimizer_result = self.optimizer.tell(
            #     _hyperparameters, _evaluation, fit=(_i == len(self.similar_experiments) - 1))

            if eval_callbacks(self.callbacks, self.optimizer_result):
                return self.optimizer_result
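
The FLAG comments above mark an unresolved choice: skopt's `Optimizer.tell` minimizes its objective, so an evaluation of a metric that should be maximized has to be negated before it is fed back. A minimal sketch of one way to centralize that decision, assuming a hypothetical `direction` attribute ("max" or "min") derived from `target_metric`:

    # Hedged sketch, not from the source: skopt's `Optimizer.tell` minimizes, so a
    # metric meant to be maximized must have its sign flipped before `tell`.
    # `direction` is an assumed attribute ("max"/"min") derived from `target_metric`.
    def _signed_evaluation(self, evaluation):
        """Return `evaluation` with the sign expected by a minimizing optimizer"""
        return -evaluation if self.direction == "max" else evaluation
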
    def _execute_experiment(self):
        """After executing parent's :meth:`_execute_experiment`, fit :attr:`optimizer` with the set
        of hyperparameters that were used, and the utility of those hyperparameters"""
        super()._execute_experiment()
        self._update_optimizer(self.current_hyperparameters_list, self.current_score)
        if eval_callbacks(self.callbacks, self.optimizer_result):
            return
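
The `_update_optimizer` helper called above is not shown on this page. A plausible sketch of what it does, assuming it simply wraps `Optimizer.tell` with the same sign handling as the previous sketch and stores the returned result:

    # Hedged sketch of the `_update_optimizer` helper referenced above; not taken
    # from the source. Reuses the assumed `_signed_evaluation` sketch and skopt's
    # `fit` keyword, which controls whether the surrogate model is refit.
    def _update_optimizer(self, hyperparameters, score, fit=True):
        """Pass one (hyperparameters, score) pair to `optimizer` and keep the result"""
        self.optimizer_result = self.optimizer.tell(
            hyperparameters, self._signed_evaluation(score), fit=fit)
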
    def _execute_experiment(self):
        """After executing parent's :meth:`_execute_experiment`, fit :attr:`optimizer` with the set of hyperparameters that
        were used, and the utility of those hyperparameters"""
        super()._execute_experiment()

        # FLAG: Resolve switching between below options depending on `target_metric`
        # self.optimizer_result = self.optimizer.tell(self.current_hyperparameters_list, self.current_score, fit=True)
        self.optimizer_result = self.optimizer.tell(
            self.current_hyperparameters_list, -self.current_score, fit=True)
        # FLAG: Resolve switching between above options depending on `target_metric`

        if eval_callbacks(self.callbacks, self.optimizer_result):
            return
    def _find_similar_experiments(self):
        """After locating similar experiments by way of the parent's
        :meth:`_find_similar_experiments`, fit :attr:`optimizer` with the hyperparameters and
        results of each located experiment"""
        super()._find_similar_experiments()

        # TODO: Remove below reversal of `similar_experiments` when `result_reader.ResultFinder.sort` finished
        for _i, _experiment in enumerate(self.similar_experiments[::-1]):
            _hyperparameters = dimension_subset(_experiment[0], self.space.names())
            _evaluation = _experiment[1]
            _experiment_id = _experiment[2] if len(_experiment) > 2 else None
            self.logger.print_result(_hyperparameters, _evaluation, experiment_id=_experiment_id)
            self._update_optimizer(_hyperparameters, _evaluation)

            # self.optimizer_result = self.optimizer.tell(
            #     _hyperparameters, _evaluation, fit=(_i == len(self.similar_experiments) - 1))

            if eval_callbacks(self.callbacks, self.optimizer_result):
                return self.optimizer_result
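
The commented-out `fit=(_i == len(self.similar_experiments) - 1)` variant above defers refitting the surrogate model until the final recorded experiment instead of refitting once per historical result. A small sketch of that replay pattern against a bare skopt `Optimizer`; `observations` here is an illustrative list of (params, loss) pairs, not part of the original code:

# Hedged sketch of the deferred-fit replay hinted at by the commented-out lines above.
from skopt import Optimizer

opt = Optimizer(dimensions=[(0.001, 1.0)], random_state=0)
observations = [([0.1], 0.42), ([0.5], 0.31), ([0.9], 0.55)]
for i, (params, loss) in enumerate(observations):
    # Only refit the surrogate model on the last historical observation
    result = opt.tell(params, loss, fit=(i == len(observations) - 1))
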
Example 5
    def fit(self, X, y=None, groups=None, callback=None):
        """Run fit on the estimator with randomly drawn parameters.

        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The training input samples.

        y : array-like, shape = [n_samples] or [n_samples, n_output]
            Target relative to X for classification or regression (class
            labels should be integers or strings).

        groups : array-like, with shape (n_samples,), optional
            Group labels for the samples used while splitting the dataset into
            train/test set.

        callback : callable, or list of callables, optional
            If callable then `callback(res)` is called after each parameter
            combination tested. If list of callables, then each callable in
            the list is called.
        """

        # check if space is a single dict, convert to list if so
        search_spaces = self.search_spaces
        if isinstance(search_spaces, dict):
            search_spaces = [search_spaces]

        callbacks = check_callback(callback)

        if self.optimizer_kwargs is None:
            self.optimizer_kwargs_ = {}
        else:
            self.optimizer_kwargs_ = dict(self.optimizer_kwargs)
        random_state = check_random_state(self.random_state)
        self.optimizer_kwargs_['random_state'] = random_state

        # Instantiate optimizers for all the search spaces.
        optimizers = []
        for search_space in search_spaces:
            if isinstance(search_space, tuple):
                search_space = search_space[0]
            optimizers.append(self._make_optimizer(search_space))
        self.optimizers_ = optimizers  # will save the states of the optimizers

        self.cv_results_ = defaultdict(list)
        self.best_index_ = None
        self.multimetric_ = False

        n_points = self.n_points

        for search_space, optimizer in zip(search_spaces, optimizers):
            # if n_iter was not provided along with the search subspace,
            # fall back to self.n_iter
            if isinstance(search_space, tuple):
                search_space, n_iter = search_space
            else:
                n_iter = self.n_iter

            # do the optimization for particular search space
            while n_iter > 0:
                # when fewer than n_points evaluations remain, only request the remainder
                n_points_adjusted = min(n_iter, n_points)

                optim_result = self._step(X,
                                          y,
                                          search_space,
                                          optimizer,
                                          groups=groups,
                                          n_points=n_points_adjusted)
                n_iter -= n_points

                if eval_callbacks(callbacks, optim_result):
                    break

        # Refit the best model on the whole dataset
        if self.refit:
            self._fit_best_model(X, y)

        return self
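
For context, a hedged usage sketch of the `fit` method above, assuming it belongs to a `BayesSearchCV`-style estimator as in scikit-optimize; the dataset, search space, and callback here are illustrative only:

from sklearn.datasets import load_iris
from sklearn.svm import SVC
from skopt import BayesSearchCV
from skopt.space import Categorical, Real

X, y = load_iris(return_X_y=True)

search = BayesSearchCV(
    estimator=SVC(),
    search_spaces={
        "C": Real(1e-3, 1e3, prior="log-uniform"),
        "gamma": Real(1e-4, 1e1, prior="log-uniform"),
        "kernel": Categorical(["rbf", "poly"]),
    },
    n_iter=16,
    cv=3,
    random_state=0,
)

# `callback(res)` runs after each batch of evaluated points; returning True
# stops the search early, mirroring the `eval_callbacks` check in `fit` above.
def good_enough(result):
    return result.fun < -0.98  # the value told to the optimizer is the negated CV score

search.fit(X, y, callback=good_enough)
print(search.best_params_, search.best_score_)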