def missing_search_dim_error(flag_vals):
    """Log an error explaining that no search dimensions were found and exit.

    Raises SystemExit(1) unconditionally after logging.
    """
    desc = op_util.flags_desc(flag_vals)
    log.error(
        "flags for batch (%s) do not contain any search dimensions\n"
        "Try specifying a range for one or more flags as NAME=[MIN:MAX].",
        desc,
    )
    raise SystemExit(1)
def _check_state_dims(state):
    """Stop the batch with an error when the state has no search dimensions.

    Raises batch_util.StopBatch(error=True) if `state.dim_names` is empty;
    otherwise returns None.
    """
    if state.dim_names:
        return
    log.error(
        "flags for batch (%s) do not contain any search "
        "dimension - quitting",
        op_util.flags_desc(state.proto_flags),
    )
    raise batch_util.StopBatch(error=True)
def __call__(self, trial, state):
    """Return (flags, attrs) for the next trial, avoiding repeated flags.

    Flags proposed by the sequential trial callback are compared against
    every prior run in the batch; on a match we warn and substitute random
    flags.  The loop deliberately keeps checking subsequent runs against the
    replacement as well.
    """
    _check_state_dims(state)
    flags = self.seq_trial_cb(trial, state)
    for prev_run in trial.batch.seq_trial_runs():
        if flags == prev_run.get("flags"):
            log.warning(
                "optimizer repeated trial (%s) - using random",
                op_util.flags_desc(flags),
            )
            flags = self._random_trial_flags(state)
    return flags, {}
def __call__(self):
    """Run every generated trial in sequence and return (runs, results)."""
    runs, results = [], []
    # gen_trials receives `runs` so it can observe completed runs as
    # trials execute.
    for trial_flags, trial_opts in self.gen_trials(self.flags, runs, **self.opts):
        print(
            "Running %s (%s):"
            % (self.op.__name__, op_util.flags_desc(trial_flags))
        )
        run, result = _run(self.op, trial_flags, trial_opts)
        runs.append(run)
        results.append(result)
    return runs, results
def _row_for_print_run(run, flags, labels, status):
    """Build a printable row dict for `run`.

    `flags`, `labels` and `status` are booleans selecting optional columns;
    the "opspec" column is always present.
    """
    from guild.commands import runs_impl

    row = {"opspec": runs_impl.format_run(run)["op_desc"]}
    if flags:
        row["flags"] = op_util.flags_desc(run.get("flags") or {}, delim=" ")
    if labels:
        row["label"] = run.get("label", "")
    if status:
        row["status"] = run.status
    return row
def __call__(self):
    """Run generated trials and return (runs, results).

    The generator receives a zero-arg callback returning the runs and
    results accumulated so far, letting it adapt to previous outcomes.
    """
    runs = []
    results = []
    trials = self.gen_trials(
        self.flag_vals, lambda: (runs, results), **self.opts
    )
    for trial in trials:
        flag_vals, attrs = _split_gen_trial(trial)
        print(
            "Running %s (%s):"
            % (self.op.__name__, op_util.flags_desc(flag_vals))
        )
        run, result = _run(self.op, flag_vals, self.opts, attrs)
        runs.append(run)
        results.append(result)
    return runs, results
def gen_trials(flags, _runs, max_trials=None, random_seed=None, label=None, **kw):
    """Public interface for ipy.

    Returns a list of (trial_flags, trial_opts) tuples sampled randomly
    from the search dimensions derived from `flags`.  Raises
    batch_util.StopBatch(error=True) if `flags` yields no dimensions.
    Unrecognized keyword config is warned about and ignored.
    """
    if kw:
        log.warning("ignoring configuration %s", kw)
    dim_names, dims, _initials = skopt_util.flag_dims(flags)
    if not dim_names:
        log.error(
            "flags for batch (%s) do not contain any search "
            "dimension - quitting",
            op_util.flags_desc(flags))
        raise batch_util.StopBatch(error=True)
    trial_vals = _gen_trial_vals(dims, max_trials or DEFAULT_TRIALS, random_seed)
    opts = {"label": label or "random"}
    return [
        (_trial_flags(dim_names, vals, flags), opts)
        for vals in trial_vals
    ]
def _trial_flags_desc(run):
    """Return a description of `run`'s flags, omitting None-valued flags."""
    flag_vals = run.get("flags") or {}
    defined = {
        name: val
        for name, val in flag_vals.items()
        if val is not None
    }
    return op_util.flags_desc(defined)
def _flags_desc(self):
    """Return a formatted description of this object's flags."""
    return op_util.flags_desc(self.flags)