Ejemplo n.º 1
0
 def _get_search_algorithm(
     self, search_algorithm, config_space, metric, mode, max_concurrent):
     if search_algorithm == "BO":
         algo = BayesOptSearch(
             utility_kwargs={
             "kind": "ucb",
             "kappa": 2.5,
             "xi": 0.0
         })
         algo = ConcurrencyLimiter(algo, max_concurrent=max_concurrent)
         scheduler = AsyncHyperBandScheduler()
     elif search_algorithm == "BOHB":
         experiment_metrics = dict(metric=metric, mode=mode)
         algo = TuneBOHB(
             config_space, max_concurrent=max_concurrent, **experiment_metrics)
         scheduler = HyperBandForBOHB(
             time_attr="training_iteration",
             reduction_factor=4)
     elif search_algorithm == "PBT":
         # Problem of PBT: It mutates the param value, so sometimes, it generates unacceptable values
         algo = None
         scheduler = PopulationBasedTraining(
             time_attr='training_iteration',
             perturbation_interval=2,  # Every N time_attr units, "perturb" the parameters.
             hyperparam_mutations=config_space)
     elif search_algorithm == "GRID" or search_algorithm == "RANDOM":
         algo = None
         scheduler = None
     else:
         raise Exception(search_algorithm, "is not available yet")
     return algo, scheduler
Ejemplo n.º 2
0
def main(args):
    """Launch a BOHB hyperparameter search for the re-ID training job."""
    cfg = setup(args)

    # Search space: optimizer, batch composition, loss and augmentation knobs.
    space = CS.ConfigurationSpace()
    space.add_hyperparameters([
        CS.UniformFloatHyperparameter(name="lr", lower=1e-6, upper=1e-3),
        CS.UniformFloatHyperparameter(name="wd", lower=0, upper=1e-3),
        CS.UniformFloatHyperparameter(name="wd_bias", lower=0, upper=1e-3),
        CS.CategoricalHyperparameter(
            name="bsz", choices=[64, 96, 128, 160, 224, 256]),
        CS.CategoricalHyperparameter(
            name="num_inst", choices=[2, 4, 8, 16, 32]),
        CS.UniformIntegerHyperparameter(name="delay_iters", lower=20, upper=60),
        CS.UniformFloatHyperparameter(name="ce_scale", lower=0.1, upper=1.0),
        CS.UniformIntegerHyperparameter(name="circle_scale", lower=8, upper=256),
        CS.UniformFloatHyperparameter(name="circle_margin", lower=0.1, upper=0.5),
        CS.CategoricalHyperparameter(name="autoaug_enabled", choices=[True, False]),
        CS.CategoricalHyperparameter(name="cj_enabled", choices=[True, False]),
    ])

    # Scheduler and searcher both maximize the reported "score".
    metric_kwargs = {"metric": "score", "mode": "max"}
    scheduler = HyperBandForBOHB(
        time_attr="training_iteration",
        max_t=7,
        **metric_kwargs,
    )
    searcher = TuneBOHB(space, max_concurrent=4, **metric_kwargs)

    progress = CLIReporter(
        parameter_columns=["bsz", "num_inst", "lr"],
        metric_columns=["r1", "map", "training_iteration"])

    analysis = tune.run(
        partial(train_reid_tune, cfg),
        resources_per_trial={"cpu": 10, "gpu": 1},
        search_alg=searcher,
        num_samples=args.num_samples,
        scheduler=scheduler,
        progress_reporter=progress,
        local_dir=cfg.OUTPUT_DIR,
        keep_checkpoints_num=4,
        name="bohb")

    # Summarize the trial with the best last-reported mAP.
    best_trial = analysis.get_best_trial("map", "max", "last")
    logger.info("Best trial config: {}".format(best_trial.config))
    logger.info("Best trial final validation mAP: {}, Rank-1: {}".format(
        best_trial.last_result["map"], best_trial.last_result["r1"]))
Ejemplo n.º 3
0
def setup_tune_scheduler():
    """Assemble the BOHB scheduler/searcher kwargs for ``tune.run``.

    Returns a dict with ``scheduler``, ``search_alg`` and
    ``resources_per_trial`` keys, ready to be splatted into ``tune.run``.
    """
    space = workload.create_ch()
    metrics = workload.exp_metric()

    scheduler = HyperBandForBOHB(
        time_attr="training_iteration",
        max_t=243,
        reduction_factor=3,
        **metrics,
    )
    searcher = TuneBOHB(space, **metrics)

    return {
        "scheduler": scheduler,
        "search_alg": searcher,
        "resources_per_trial": com.detect_baseline_resource(),
    }
Ejemplo n.º 4
0
        "height": tune.uniform(-100, 100),
        "activation": tune.choice(["relu", "tanh"])
    }

    # Optional: Pass the parameter space yourself
    # config_space = CS.ConfigurationSpace()
    # config_space.add_hyperparameter(
    #     CS.UniformFloatHyperparameter("width", lower=0, upper=20))
    # config_space.add_hyperparameter(
    #     CS.UniformFloatHyperparameter("height", lower=-100, upper=100))
    # config_space.add_hyperparameter(
    #     CS.CategoricalHyperparameter(
    #         "activation", choices=["relu", "tanh"]))

    bohb_hyperband = HyperBandForBOHB(time_attr="training_iteration",
                                      max_t=100,
                                      reduction_factor=4)

    bohb_search = TuneBOHB(
        # space=config_space,  # If you want to set the space manually
        max_concurrent=4)

    analysis = tune.run(MyTrainableClass,
                        name="bohb_test",
                        config=config,
                        scheduler=bohb_hyperband,
                        search_alg=bohb_search,
                        num_samples=10,
                        stop={"training_iteration": 100},
                        metric="episode_reward_mean",
                        mode="max")
Ejemplo n.º 5
0
def main(args):
    """Run a Ray Tune search (HyperOpt or BOHB) over re-ID training knobs.

    The chosen searcher/scheduler depends on ``args.srch_algo``; the best
    trial's metrics and config are logged and dumped to ``best_config.yaml``
    under ``cfg.OUTPUT_DIR``.
    """
    cfg = setup(args)

    # Both schedulers and searchers maximize the reported "score".
    metric_kwargs = {"metric": "score", "mode": "max"}

    if args.srch_algo == "hyperopt":
        # HyperOpt space: batch size and instances-per-identity only.
        space = {
            "bsz": hp.choice("bsz", [64, 96, 128, 160, 224, 256]),
            "num_inst": hp.choice("num_inst", [2, 4, 8, 16, 32]),
        }

        # Warm-start point; values are *indices* into the hp.choice lists.
        warm_start = [{
            "bsz": 0,
            "num_inst": 3,
        }]

        search_algo = HyperOptSearch(space,
                                     points_to_evaluate=warm_start,
                                     **metric_kwargs)

        if args.pbt:
            scheduler = PopulationBasedTraining(
                time_attr="training_iteration",
                perturbation_interval=2,
                hyperparam_mutations={
                    "bsz": [64, 96, 128, 160, 224, 256],
                    "num_inst": [2, 4, 8, 16, 32],
                },
                **metric_kwargs)
        else:
            scheduler = ASHAScheduler(grace_period=2,
                                      reduction_factor=3,
                                      max_t=7,
                                      **metric_kwargs)

    elif args.srch_algo == "bohb":
        # Equivalent ConfigSpace definition for BOHB.
        space = CS.ConfigurationSpace()
        space.add_hyperparameters([
            CS.CategoricalHyperparameter(
                name="bsz", choices=[64, 96, 128, 160, 224, 256]),
            CS.CategoricalHyperparameter(
                name="num_inst", choices=[2, 4, 8, 16, 32]),
        ])

        search_algo = TuneBOHB(space, max_concurrent=4, **metric_kwargs)

        scheduler = HyperBandForBOHB(
            time_attr="training_iteration",
            reduction_factor=3,
            max_t=7,
            **metric_kwargs,
        )

    else:
        raise ValueError(
            "Search algorithm must be chosen from [hyperopt, bohb], but got {}"
            .format(args.srch_algo))

    progress = CLIReporter(
        parameter_columns=["bsz", "num_inst"],
        metric_columns=["r1", "map", "training_iteration"])

    analysis = tune.run(
        partial(train_tuner, cfg=cfg),
        resources_per_trial={"cpu": 4, "gpu": 1},
        search_alg=search_algo,
        num_samples=args.num_trials,
        scheduler=scheduler,
        progress_reporter=progress,
        local_dir=cfg.OUTPUT_DIR,
        keep_checkpoints_num=10,
        name=args.srch_algo)

    best_trial = analysis.get_best_trial("score", "max", "last")
    logger.info("Best trial config: {}".format(best_trial.config))
    logger.info("Best trial final validation mAP: {}, Rank-1: {}".format(
        best_trial.last_result["map"], best_trial.last_result["r1"]))

    # Persist the winning metrics + config as YAML next to the run output.
    summary = {
        "R1": best_trial.last_result["r1"].item(),
        "mAP": best_trial.last_result["map"].item(),
    }
    summary.update(best_trial.config)
    out_path = os.path.join(cfg.OUTPUT_DIR, "best_config.yaml")
    with PathManager.open(out_path, "w") as f:
        f.write(CfgNode(summary).dump())
    logger.info("Best config saved to {}".format(os.path.abspath(out_path)))
Ejemplo n.º 6
0
    }

    # Optional: Pass the parameter space yourself
    # import ConfigSpace as CS
    # config_space = CS.ConfigurationSpace()
    # config_space.add_hyperparameter(
    #     CS.UniformFloatHyperparameter("width", lower=0, upper=20))
    # config_space.add_hyperparameter(
    #     CS.UniformFloatHyperparameter("height", lower=-100, upper=100))
    # config_space.add_hyperparameter(
    #     CS.CategoricalHyperparameter(
    #         "activation", choices=["relu", "tanh"]))

    bohb_hyperband = HyperBandForBOHB(
        time_attr="training_iteration",
        max_t=100,
        reduction_factor=4,
        stop_last_trials=False,
    )

    bohb_search = TuneBOHB(
        # space=config_space,  # If you want to set the space manually
    )
    bohb_search = tune.search.ConcurrencyLimiter(bohb_search, max_concurrent=4)

    analysis = tune.run(
        MyTrainableClass,
        name="bohb_test",
        config=config,
        scheduler=bohb_hyperband,
        search_alg=bohb_search,
        num_samples=10,
Ejemplo n.º 7
0
 def get_scheduler(self):
     """Return a HyperBandForBOHB scheduler that minimizes ``loss`` (max_t=10)."""
     scheduler_kwargs = {"max_t": 10, "metric": "loss", "mode": "min"}
     return HyperBandForBOHB(**scheduler_kwargs)
Ejemplo n.º 8
0
def main(args):
    """Grid-search ``divergence_scale`` / ``reward_loss_scale`` with Ray Tune.

    Each trial patches the sampled values into ``args.params``, runs one
    training experiment, and reports ``mean_score``; an ASHA scheduler
    (maximizing ``mean_score``) prunes trials. The best config is printed.
    """
    def trainable(config):
        # One Tune trial: inject the sampled hyperparameters and run the
        # experiment, streaming test scores back to Tune.
        print('begin a trial')
        args.params = tools.AttrDict(yaml.safe_load(args.params.replace('#', ',')))
        args.logdir = args.logdir and os.path.expanduser(args.logdir)
        print('debug ', config["divergence_scale"], config["reward_loss_scale"])
        with args.params.unlocked:
            args.params.divergence_scale = config["divergence_scale"]
            args.params.reward_loss_scale = config["reward_loss_scale"]
            args.params.test_steps = 50
            args.params.test_traj = 5
        training.utility.set_up_logging()
        experiment = training.Experiment(
            args.logdir,
            process_fn=functools.partial(process, args=args),
            num_runs=args.num_runs,
            ping_every=args.ping_every,
            resume_runs=args.resume_runs)
        for run in experiment:
            for test_score in run:
                # Only report scores above the threshold; others are skipped.
                if test_score > 1.0:
                    tune.report(mean_score=test_score)
            break

    import ConfigSpace as CS
    import ConfigSpace.hyperparameters as CSH

    # Grid actually consumed by tune.run below (30 combinations).
    search = {
        "divergence_scale": tune.grid_search([0.1, 1, 2, 3, 5, 10]),
        "reward_loss_scale": tune.grid_search([1, 2, 5, 10, 20]),
    }

    # BOHB config space — only used if bohb_search is wired into tune.run.
    config_space = CS.ConfigurationSpace(seed=1234)
    config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name="divergence_scale", lower=1, upper=30))
    config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name="reward_loss_scale", lower=1, upper=50))
    config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter("main_learning_rate", lower=1, upper=500, log=True))
    config_space.add_hyperparameter(
        CSH.UniformIntegerHyperparameter(name="num_units", lower=150, upper=400, q=50))

    # Alternative searchers/schedulers kept for experimentation; only `asha`
    # is passed to tune.run below. (Fix: removed the dead duplicate
    # BayesOptSearch(metric="mean_loss", ...) assignment that was immediately
    # overwritten and never used.)
    bohb_hyperband = HyperBandForBOHB(metric="mean_score", mode="max", time_attr="training_iteration", max_t=30,
                                      reduction_factor=3)
    bohb_search = TuneBOHB(space=config_space, max_concurrent=1, metric="mean_score", mode="max")
    bayesopt = BayesOptSearch(max_concurrent=3, metric="mean_score", mode="max")
    asha = ASHAScheduler(metric="mean_score", mode="max", grace_period=6, reduction_factor=3)

    analysis = tune.run(
        trainable,
        config=search,
        num_samples=3,
        scheduler=asha,
        resources_per_trial={"cpu": 16, "gpu": 1},
        stop={"training_iteration": 13},
        # search_alg=bayesopt,
        log_to_file=True
    )
    df = analysis.results_df
    # Fix: report the best (maximum) mean_score config — the scheduler
    # maximizes mean_score, so mode="min" returned the *worst* trial.
    print("Best config: ", analysis.get_best_config(
        metric="mean_score", mode="max"))
    print(df)
Ejemplo n.º 9
0
Archivo: bohb.py Proyecto: AJSVB/GPBT
             for k, v in result.items() if k in self._csv_out.fieldnames})
        self._file.flush()


from ray.tune.logger import *
from ray import tune
from ray.tune.schedulers.hb_bohb import HyperBandForBOHB
from ray.tune.suggest.bohb import *

for _ in range(1):
    ray.shutdown()
    ray.init()
    start_time = time.time()
    algo = TuneBOHB(metric="loss", mode="min")
    bohb = HyperBandForBOHB(time_attr="training_iteration",
                            metric="loss",
                            mode="min",
                            max_t=46)

    analysis = tune.run(
        train_mnist_pb2,
        scheduler=bohb,
        reuse_actors=True,
        search_alg=algo,
        verbose=2,
        checkpoint_at_end=True,
        num_samples=36,
        # export_formats=[ExportFormat.MODEL],
        config={
            "lr": tune.loguniform(1e-8, .36),
            "drp": tune.uniform(.05, .15),
            "weight_decay": tune.loguniform(1e-4, 1e-2),
                # losses.OpticalLoss(),
                # losses.BinHeights(),
                # losses.AC(),
                # losses.ACLower(),
                # losses.Motication(),
                # losses.GaussACDC(),
            ],
        ))

    add(cat("max_validation_steps", [100]))
    add(cat("batches_per_step", [10]))


# Shared objective for scheduler and searcher: minimize validation loss.
metric = {"metric": "validation_loss", "mode": "min"}

scheduling = HyperBandForBOHB(
    time_attr="training_iteration",
    max_t=500,
    reduction_factor=3,
    **metric,
)
search = TuneBOHB(config_space, **metric)

if __name__ == "__main__":
    #sys.exit(10)
    # ray.init(local_mode=True)  # use only one thread to make debugging easier
    analysis = tune.run(
        trainable.MultipackTrainable,
        name="MPT_long_run",
        scheduler=scheduling,
        search_alg=search,
        num_samples=1000,
        stop={"training_iteration": 500},
        max_failures=0,  # for debugging purposes
        resources_per_trial={