Example #1
    def _get_current_hyperparameters(self):
        """Ask :attr:`optimizer` for the upcoming set of hyperparameters that should be searched,
        then format them to be used in the next Experiment

        Returns
        -------
        current_hyperparameters: Dict
            The next set of hyperparameters that will be searched"""
        _current_hyperparameters = self.optimizer.ask()

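        # If the optimizer re-suggests its previous point, replace it with a
        #   random draw from the space so the search does not stall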
        if _current_hyperparameters == self.current_hyperparameters_list:
            new_parameters = self.hyperparameter_space.rvs(random_state=None)[0]
            G.debug_("REPEATED  asked={}  new={}".format(_current_hyperparameters, new_parameters))
            _current_hyperparameters = new_parameters

        self.current_hyperparameters_list = _current_hyperparameters

        current_hyperparameters = dict(
            zip(
                self.hyperparameter_space.names(use_location=False),
                self.current_hyperparameters_list,
            )
        )

        return current_hyperparameters
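The ask-then-guard pattern above maps cleanly onto scikit-optimize, which the `optimizer.ask()` and `rvs()` calls suggest is the backend. Below is a minimal standalone sketch of the same idea; the dimensions and the `next_hyperparameters` helper are illustrative, not part of the library:

from skopt import Optimizer
from skopt.space import Integer, Real

# Illustrative search space; any skopt dimensions work here
dimensions = [Real(0.01, 0.3, name="learning_rate"), Integer(2, 10, name="max_depth")]
opt = Optimizer(dimensions)
last_suggestion = None

def next_hyperparameters():
    global last_suggestion
    suggestion = opt.ask()
    # Same guard as above: if the optimizer repeats its previous
    # suggestion, fall back to a random draw from the space
    if suggestion == last_suggestion:
        suggestion = opt.space.rvs(random_state=None)[0]
    last_suggestion = suggestion
    # Zip dimension names onto raw values, as `_get_current_hyperparameters` does
    return dict(zip([dim.name for dim in dimensions], suggestion))

print(next_hyperparameters())  # e.g. {'learning_rate': 0.17, 'max_depth': 6}

The guard matters because an optimizer asked again without an intervening `tell` can return the same point, which would waste an entire Experiment on a duplicate evaluation.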
Example #2
    def find(self):
        """Execute full result-finding workflow, populating :attr:`similar_experiments`

        See Also
        --------
        :func:`update_match_status`
            Used to decorate "does_match..." methods in order to keep a detailed record of the full
            pool of candidate Experiments in :attr:`match_status`. Aside from being used to compile
            the list of finalist :attr:`similar_experiments`, :attr:`match_status` is helpful for
            debugging purposes, specifically figuring out which aspects of a candidate are
            incompatible with the template
        :meth:`does_match_feature_engineer`
            Performs special functionality beyond that of the other "does_match..." methods, namely
            providing an updated "feature_engineer" value for compatible candidates to use.
            Specifics are documented in :meth:`does_match_feature_engineer`"""
        for exp_id in self.experiment_ids:
            description_path = f"{self.descriptions_dir}/{exp_id}.json"
            # TODO: Get `description` from `get_scored_params` - Take whatever value `sort` needs
            params, score = get_scored_params(description_path, self.target_metric)

            #################### Match Init Params ####################
            self.does_match_init_params_space(exp_id, params["model_init_params"], score)

            multi_targets = [("model_init_params", "compile_params", "optimizer")]
            if self.module_name == "keras" and multi_targets[0] in self.space.names():
                self.does_match_init_params_guidelines_multi(
                    exp_id, params["model_init_params"], score, multi_targets[0][1:]
                )
            else:
                self.does_match_init_params_guidelines(exp_id, params["model_init_params"], score)

            #################### Match Extra Params ####################
            self.does_match_extra_params_space(exp_id, params["model_extra_params"], score)
            self.does_match_extra_params_guidelines(exp_id, params["model_extra_params"], score)

            #################### Match Feature Engineer ####################
            # NOTE: Matching "feature_engineer" is critically different from the other "does_match"
            #   methods. `does_match_feature_engineer` builds on the straightforward compatibility
            #   checks of the others by returning an updated "feature_engineer" for the candidate
            #   if compatible. See :meth:`does_match_feature_engineer` for details
            params["feature_engineer"] = self.does_match_feature_engineer(
                exp_id, params["feature_engineer"], score
            )

            # Since updated "feature_engineer" is saved in `params`, clean up `match_status` entry
            if self.match_status[exp_id]["does_match_feature_engineer"] is not False:
                self.match_status[exp_id]["does_match_feature_engineer"] = True

            #################### Determine Overall Match ####################
            if all(v for k, v in self.match_status[exp_id].items() if k.startswith("does_match")):
                self.similar_experiments.append((params, score, exp_id))

        G.debug_(
            "Matching Experiments:  {}  (Candidates:  {})".format(
                len(self.similar_experiments), len(self.experiment_ids)
            )
        )
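The docstring leans on `update_match_status`, which is not shown in this example. Here is a hedged sketch of how such a decorator could keep the per-candidate record that `find` reads back out of `self.match_status`; the wrapper logic and dict layout are assumptions inferred from the usage above, not the library's actual implementation:

from functools import wraps

def update_match_status(func):
    """Record the outcome of a "does_match..." check in `self.match_status`"""
    @wraps(func)
    def wrapper(self, exp_id, *args, **kwargs):
        result = func(self, exp_id, *args, **kwargs)
        # Keep the full pool of candidates on record, so the aspects of a
        # candidate that are incompatible with the template can be inspected
        self.match_status.setdefault(exp_id, {})[func.__name__] = result
        return result
    return wrapper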
Example #3
    def find(self):
        """Execute full result-finding workflow"""
        self._get_ids()
        G.debug_(
            f"Experiments matching cross-experiment key/algorithm: {len(self.experiment_ids)}"
        )
        self._get_scored_params()
        self._filter_by_space()
        G.debug_(
            f"Experiments fitting in the given space: {len(self.hyperparameters_and_scores)}"
        )

        if self.module_name == "keras":
            multi_targets = [("model_init_params", "compile_params", "optimizer")]
            if multi_targets[0] in self.space.names():
                self._filter_by_guidelines_multi(multi_targets[0])
            else:
                self._filter_by_guidelines()
        else:
            self._filter_by_guidelines()

        #################### Post-Process Similar Experiments ####################
        self._reinitialize_similar_experiments()
        G.debug_(
            f"Experiments matching current guidelines: {len(self.similar_experiments)}"
        )
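The Keras branch hinges on one tuple path being present in `space.names()`. A small illustration with made-up space contents (the real names depend on the experiment's search space); tuples compare element-wise, so the whole nested path is matched in a single membership test:

# Hypothetical output of `space.names()` for a Keras search space
space_names = [
    ("model_init_params", "compile_params", "optimizer"),
    ("model_init_params", "layers", 0, "units"),
]

multi_target = ("model_init_params", "compile_params", "optimizer")
# A single `in` test matches the full nested path, routing `find` to
# `_filter_by_guidelines_multi` instead of the plain guideline filter
print(multi_target in space_names)  # True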
Example #4
    def find(self):
        """Execute full result-finding workflow"""
        self._get_ids()
        G.debug_(
            f"Experiments found with matching cross-experiment key and algorithm: {len(self.experiment_ids)}"
        )
        self._get_scored_params()
        self._filter_by_space()
        G.debug_(
            f"Experiments whose hyperparameters fit in the currently defined space: {len(self.hyperparameters_and_scores)}"
        )

        if self.module_name == "keras":
            multi_target = ("model_init_params", "compile_params", "optimizer")
            if multi_target in self.hyperparameter_space.get_names():
                self._filter_by_guidelines_multi(multi_target)
            else:
                self._filter_by_guidelines()
        else:
            self._filter_by_guidelines()
        G.debug_(
            f"Experiments whose hyperparameters match the current guidelines: {len(self.similar_experiments)}"
        )
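All four examples log through `G.debug_`. A minimal stand-in, assuming `G` is a global settings object whose `debug_` simply forwards to the standard `logging` module (the logger name and signature here are guesses for illustration):

import logging

class G:
    """Illustrative stand-in for the library's global settings/logging object"""
    logger = logging.getLogger("hyperparameter_hunter")

    @classmethod
    def debug_(cls, content):
        # Forward debug messages to the standard logging machinery
        cls.logger.debug(content)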