Example #1
    def create_trial_if_possible(self, experiment_spec: Dict,
                                 output_path: str) -> Optional[Trial]:
        logger.debug("creating trial")
        trial_id = Trial.generate_id()
        suggested_config = self.searcher.suggest(trial_id)
        if suggested_config == Searcher.FINISHED:
            self._finished = True
            logger.debug("Searcher has finished.")
            return

        if suggested_config is None:
            return
        spec = copy.deepcopy(experiment_spec)
        spec["config"] = merge_dicts(spec["config"],
                                     copy.deepcopy(suggested_config))

        # Create a new trial_id if duplicate trial is created
        flattened_config = resolve_nested_dict(spec["config"])
        self._counter += 1
        tag = "{0}_{1}".format(str(self._counter),
                               format_vars(flattened_config))
        trial = create_trial_from_spec(
            spec,
            output_path,
            self._parser,
            evaluated_params=flatten_dict(suggested_config),
            experiment_tag=tag,
            trial_id=trial_id,
        )
        return trial
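
A minimal, self-contained sketch of the merge step above. merge_dicts, resolve_nested_dict and format_vars are Ray Tune internals; merge_dicts_sketch below is a hypothetical stand-in written only to illustrate how the searcher's suggested config is layered onto the experiment spec's config:

    import copy

    def merge_dicts_sketch(base, overrides):
        # Recursively merge `overrides` into a copy of `base`
        # (hypothetical stand-in for Ray Tune's merge_dicts helper).
        merged = copy.deepcopy(base)
        for key, value in overrides.items():
            if isinstance(value, dict) and isinstance(merged.get(key), dict):
                merged[key] = merge_dicts_sketch(merged[key], value)
            else:
                merged[key] = copy.deepcopy(value)
        return merged

    spec = {"config": {"lr": 0.01, "model": {"layers": 2}}}
    suggested_config = {"lr": 0.1, "model": {"layers": 4}}
    spec["config"] = merge_dicts_sketch(spec["config"], suggested_config)
    print(spec["config"])  # {'lr': 0.1, 'model': {'layers': 4}}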
Example #2
    def _generate_next_trials(self):
        self._next_trials = []

        if self._unfinished_count > 0:
            # Last round not finished
            return

        trials = []
        raw_param_list, extra_arg_list = self._select()
        if not extra_arg_list:
            extra_arg_list = [None] * len(raw_param_list)

        for exp in self.experiment_list:
            for param_config, extra_arg in zip(raw_param_list, extra_arg_list):
                tag = ""
                new_spec = copy.deepcopy(exp.spec)
                for path, value in param_config.items():
                    tag += "%s=%s-" % (path.split(".")[-1], value)
                    deep_insert(path.split("."), value, new_spec["config"])

                trial = create_trial_from_spec(new_spec,
                                               exp.dir_name,
                                               self._parser,
                                               experiment_tag=tag)

                # AutoML specific fields set in Trial
                trial.results = []
                trial.best_result = None
                trial.param_config = param_config
                trial.extra_arg = extra_arg

                trial.invalidate_json_state()

                trials.append(trial)
                self._running_trials[trial.trial_id] = trial

        ntrial = len(trials)
        self._iteration += 1
        self._unfinished_count = ntrial
        self._total_trial_num += ntrial
        self._start_ts = time.time()
        logger.info(
            "=========== BEGIN Experiment-Round: %(round)s "
            "[%(new)s NEW | %(total)s TOTAL] ===========",
            {
                "round": self._iteration,
                "new": ntrial,
                "total": self._total_trial_num
            },
        )
        self._next_trials = trials
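
The inner loop above builds the experiment tag and writes each dotted parameter path into the nested config before calling create_trial_from_spec. deep_insert is an internal helper; the sketch below uses a hypothetical deep_insert_sketch to show the intended effect on a plain dict:

    def deep_insert_sketch(path_parts, value, config):
        # Walk (or create) nested dicts along `path_parts` and set the leaf value
        # (hypothetical stand-in for the deep_insert helper used above).
        node = config
        for key in path_parts[:-1]:
            node = node.setdefault(key, {})
        node[path_parts[-1]] = value

    config = {}
    param_config = {"train.lr": 0.05, "train.batch_size": 64}
    tag = ""
    for path, value in param_config.items():
        tag += "%s=%s-" % (path.split(".")[-1], value)
        deep_insert_sketch(path.split("."), value, config)
    print(tag)     # lr=0.05-batch_size=64-
    print(config)  # {'train': {'lr': 0.05, 'batch_size': 64}}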
Example #3
    def create_trial(self, resolved_vars, spec):
        trial_id = self.uuid_prefix + ("%05d" % self.counter)
        experiment_tag = str(self.counter)
        # Always append resolved vars to experiment tag?
        if resolved_vars:
            experiment_tag += "_{}".format(format_vars(resolved_vars))
        self.counter += 1
        return create_trial_from_spec(
            spec,
            self.output_path,
            self.parser,
            evaluated_params=flatten_resolved_vars(resolved_vars),
            trial_id=trial_id,
            experiment_tag=experiment_tag,
        )
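
Here the trial id is the generator's uuid_prefix plus a zero-padded counter, and the tag appends the resolved variables via format_vars. A rough, hedged illustration of that formatting follows; the prefix construction and the key=value rendering are assumptions for demonstration, not the exact Ray Tune output:

    import uuid

    counter = 7
    uuid_prefix = uuid.uuid4().hex[:8] + "_"   # assumed prefix format, for illustration only
    resolved_vars = {("config", "lr"): 0.001}  # resolved vars keyed by nested path tuples

    trial_id = uuid_prefix + ("%05d" % counter)   # e.g. '3f2a1b4c_00007'
    experiment_tag = str(counter)
    if resolved_vars:
        # format_vars renders resolved variables as key=value pairs; this
        # stand-in just joins the last path component with its value.
        experiment_tag += "_" + ",".join(
            "%s=%s" % (path[-1], value) for path, value in resolved_vars.items())
    print(trial_id)        # something like '3f2a1b4c_00007'
    print(experiment_tag)  # '7_lr=0.001'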