Exemplo n.º 1
0
def _batch_trials(batch_run):
    """Return randomly generated trials for *batch_run*.

    Reads the proto run's flag definitions plus the batch's
    ``max_trials`` / ``random_seed`` attributes and delegates trial
    generation to ``skopt_util.random_trials_for_flags``.

    Search-dimension problems are reported via the skopt_util error
    helpers (which terminate the run) rather than propagated.
    """
    # BUG FIX: the original reassigned `batch_run = batch_util.batch_run()`
    # here, silently discarding the run the caller passed in (see the
    # companion main() that calls `_batch_trials(batch_run)`). Use the
    # parameter throughout instead.
    proto_flag_vals = batch_run.batch_proto.get("flags")
    max_trials = batch_run.get("max_trials") or DEFAULT_MAX_TRIALS
    random_seed = batch_run.get("random_seed")
    try:
        return skopt_util.random_trials_for_flags(
            proto_flag_vals, max_trials, random_seed
        )
    except skopt_util.MissingSearchDimension:
        # Error helper reports the problem and exits; no value to return.
        skopt_util.missing_search_dim_error(proto_flag_vals)
    except skopt_util.InvalidSearchDimension as e:
        _search_dim_error(e)
Exemplo n.º 2
0
def main():
    """Run TPE optimization over the batch's flag space with hyperopt.

    Initializes batch logging, then drives ``hyperopt.fmin`` for up to
    ``max_trials`` evaluations (falling back to DEFAULT_MAX_TRIALS when
    the run doesn't specify one), seeding the sampler from the batch's
    ``random_seed``. If every trial fails, exits quietly; otherwise the
    best trial is labeled via ``_label_best``.
    """
    batch_util.init_logging()
    batch = batch_util.batch_run()
    # NOTE: the original also read batch.get("flags") into an unused
    # local; _space_for_flags(batch) derives the search space itself.
    max_trials = batch.get("max_trials") or DEFAULT_MAX_TRIALS
    random_seed = batch.get("random_seed")
    trials = hyperopt.Trials()
    try:
        hyperopt.fmin(
            fn=_objective_fn(batch),
            space=_space_for_flags(batch),
            algo=_tpe_suggest,
            max_evals=max_trials,
            show_progressbar=False,
            rstate=np.random.RandomState(random_seed),
            trials=trials,
        )
    except hyperopt.exceptions.AllTrialsFailed:
        # Nothing succeeded - there is no best trial to label.
        pass
    else:
        _label_best(trials.best_trial)
Exemplo n.º 3
0
def main():
    """Entry point: hand sequential trial generation to skopt_util.

    Sets up batch logging, then lets ``skopt_util.handle_seq_trials``
    drive the current batch run using ``_suggest_x`` as the suggestion
    callback.
    """
    batch_util.init_logging()
    current_batch = batch_util.batch_run()
    skopt_util.handle_seq_trials(current_batch, _suggest_x)
Exemplo n.º 4
0
def main():
    """Entry point: generate random trials for the batch and run them.

    Initializes batch logging, builds the trial list via
    ``_batch_trials``, and passes it to ``batch_util.handle_trials``
    for execution.
    """
    batch_util.init_logging()
    run = batch_util.batch_run()
    batch_util.handle_trials(run, _batch_trials(run))
Exemplo n.º 5
0
from guild import batch_util

def _show_proto_flags(run):
    # Report the proto run's flag values in sorted order.
    flags = run.batch_proto.get("flags", {})
    print("Tune using proto flags: %s" % sorted(flags.items()))

# Resolve the current run as a batch; a non-batch run is reported
# rather than raised to the user.
try:
    _current = batch_util.batch_run()
except batch_util.CurrentRunNotBatchError:
    print("This script must be run as a Guild optimizer")
else:
    _show_proto_flags(_current)