def _test_roberta(method='BlendSearch'):
    max_num_epoch = 100
    num_samples = -1
    time_budget_s = 3600

    search_space = {
        # You can mix constants with search space objects.
        "num_train_epochs": flaml.tune.loguniform(1, max_num_epoch),
        "learning_rate": flaml.tune.loguniform(1e-5, 3e-5),
        "weight_decay": flaml.tune.uniform(0, 0.3),
        "per_device_train_batch_size": flaml.tune.choice([16, 32, 64, 128]),
        "seed": flaml.tune.choice([12, 22, 33, 42]),
    }

    start_time = time.time()
    ray.init(num_cpus=4, num_gpus=4)
    if 'ASHA' == method:
        algo = None
    elif 'BOHB' == method:
        from ray.tune.schedulers import HyperBandForBOHB
        from ray.tune.suggest.bohb import TuneBOHB
        algo = TuneBOHB(max_concurrent=4)
        scheduler = HyperBandForBOHB(max_t=max_num_epoch)
    elif 'Optuna' == method:
        from ray.tune.suggest.optuna import OptunaSearch
        algo = OptunaSearch()
    elif 'CFO' == method:
        from flaml import CFO
        algo = CFO(points_to_evaluate=[{
            "num_train_epochs": 1,
            "per_device_train_batch_size": 128,
        }])
    elif 'BlendSearch' == method:
        from flaml import BlendSearch
        algo = BlendSearch(points_to_evaluate=[{
            "num_train_epochs": 1,
            "per_device_train_batch_size": 128,
        }])
    elif 'Dragonfly' == method:
        from ray.tune.suggest.dragonfly import DragonflySearch
        algo = DragonflySearch()
    elif 'SkOpt' == method:
        from ray.tune.suggest.skopt import SkOptSearch
        algo = SkOptSearch()
    elif 'Nevergrad' == method:
        from ray.tune.suggest.nevergrad import NevergradSearch
        import nevergrad as ng
        algo = NevergradSearch(optimizer=ng.optimizers.OnePlusOne)
    elif 'ZOOpt' == method:
        from ray.tune.suggest.zoopt import ZOOptSearch
        # NOTE: ZOOpt needs a finite, positive budget; with
        # num_samples=-1 (unlimited) this value must be overridden.
        algo = ZOOptSearch(budget=num_samples)
    elif 'Ax' == method:
        from ray.tune.suggest.ax import AxSearch
        algo = AxSearch(max_concurrent=3)
    elif 'HyperOpt' == method:
        from ray.tune.suggest.hyperopt import HyperOptSearch
        algo = HyperOptSearch()

    if method != 'BOHB':
        # BOHB pairs with its own HyperBandForBOHB scheduler (set above);
        # every other method uses ASHA for early stopping.
        from ray.tune.schedulers import ASHAScheduler
        scheduler = ASHAScheduler(max_t=max_num_epoch, grace_period=1)

    analysis = ray.tune.run(
        train_roberta,
        metric=HP_METRIC,
        mode=MODE,
        resources_per_trial={"gpu": 4, "cpu": 4},
        config=search_space,
        local_dir='logs/',
        num_samples=num_samples,
        time_budget_s=time_budget_s,
        keep_checkpoints_num=1,
        checkpoint_score_attr=HP_METRIC,
        scheduler=scheduler,
        search_alg=algo)
    ray.shutdown()

    best_trial = analysis.get_best_trial(HP_METRIC, MODE, "all")
    metric = best_trial.metric_analysis[HP_METRIC][MODE]
    logger.info(f"method={method}")
    logger.info(f"n_trials={len(analysis.trials)}")
    logger.info(f"time={time.time() - start_time}")
    logger.info(f"Best model eval {HP_METRIC}: {metric:.4f}")
    logger.info(f"Best model parameters: {best_trial.config}")
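

# A minimal sketch of the trainable shape that `ray.tune.run` expects
# above. The real `train_roberta`/`train_distilbert` are defined elsewhere
# in this module (as are HP_METRIC, MODE, logger, and the time/ray/flaml
# imports); the per-epoch "training" below is a dummy placeholder, not the
# actual fine-tuning.
def _train_sketch(config):
    from ray import tune

    # `config` is one sampled point from `search_space`, e.g.
    # {"num_train_epochs": 3.2, "learning_rate": 2e-5, ...}
    eval_metric = 0.0
    for _ in range(int(round(config["num_train_epochs"]))):
        eval_metric += 0.1  # placeholder for one epoch of training + eval
        # Report under the HP_METRIC key after every epoch so ASHA (or
        # HyperBandForBOHB) can stop unpromising trials early.
        tune.report(**{HP_METRIC: eval_metric})
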
def _test_distillbert(method='BlendSearch'):
    max_num_epoch = 64
    num_samples = -1
    time_budget_s = 10800

    search_space = {
        # You can mix constants with search space objects.
        "num_train_epochs": flaml.tune.loguniform(1, max_num_epoch),
        "learning_rate": flaml.tune.loguniform(1e-6, 1e-4),
        "adam_beta1": flaml.tune.uniform(0.8, 0.99),
        "adam_beta2": flaml.tune.loguniform(0.98, 0.9999),
        "adam_epsilon": flaml.tune.loguniform(1e-9, 1e-7),
    }

    start_time = time.time()
    ray.init(num_cpus=4, num_gpus=4)
    if 'ASHA' == method:
        algo = None
    elif 'BOHB' == method:
        from ray.tune.schedulers import HyperBandForBOHB
        from ray.tune.suggest.bohb import TuneBOHB
        algo = TuneBOHB(max_concurrent=4)
        scheduler = HyperBandForBOHB(max_t=max_num_epoch)
    elif 'Optuna' == method:
        from ray.tune.suggest.optuna import OptunaSearch
        algo = OptunaSearch()
    elif 'CFO' == method:
        from flaml import CFO
        algo = CFO(points_to_evaluate=[{
            "num_train_epochs": 1,
        }])
    elif 'BlendSearch' == method:
        from flaml import BlendSearch
        algo = BlendSearch(points_to_evaluate=[{
            "num_train_epochs": 1,
        }])
    elif 'Dragonfly' == method:
        from ray.tune.suggest.dragonfly import DragonflySearch
        algo = DragonflySearch()
    elif 'SkOpt' == method:
        from ray.tune.suggest.skopt import SkOptSearch
        algo = SkOptSearch()
    elif 'Nevergrad' == method:
        from ray.tune.suggest.nevergrad import NevergradSearch
        import nevergrad as ng
        algo = NevergradSearch(optimizer=ng.optimizers.OnePlusOne)
    elif 'ZOOpt' == method:
        from ray.tune.suggest.zoopt import ZOOptSearch
        # NOTE: ZOOpt needs a finite, positive budget; with
        # num_samples=-1 (unlimited) this value must be overridden.
        algo = ZOOptSearch(budget=num_samples)
    elif 'Ax' == method:
        from ray.tune.suggest.ax import AxSearch
        algo = AxSearch()
    elif 'HyperOpt' == method:
        from ray.tune.suggest.hyperopt import HyperOptSearch
        algo = HyperOptSearch()

    if method != 'BOHB':
        # BOHB pairs with its own HyperBandForBOHB scheduler (set above);
        # every other method uses ASHA for early stopping.
        from ray.tune.schedulers import ASHAScheduler
        scheduler = ASHAScheduler(max_t=max_num_epoch, grace_period=1)

    analysis = ray.tune.run(
        train_distilbert,
        metric=HP_METRIC,
        mode=MODE,
        # Allocate one GPU per trial.
        resources_per_trial={"gpu": 1},
        config=search_space,
        local_dir='test/logs/',
        num_samples=num_samples,
        time_budget_s=time_budget_s,
        keep_checkpoints_num=1,
        checkpoint_score_attr=HP_METRIC,
        scheduler=scheduler,
        search_alg=algo)
    ray.shutdown()

    best_trial = analysis.get_best_trial(HP_METRIC, MODE, "all")
    metric = best_trial.metric_analysis[HP_METRIC][MODE]
    logger.info(f"method={method}")
    logger.info(f"n_trials={len(analysis.trials)}")
    logger.info(f"time={time.time() - start_time}")
    logger.info(f"Best model eval {HP_METRIC}: {metric:.4f}")
    logger.info(f"Best model parameters: {best_trial.config}")
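

# A minimal entry point for running one of the tests standalone; the
# method name can be any of the branches handled above ('ASHA', 'BOHB',
# 'Optuna', 'CFO', 'BlendSearch', 'Dragonfly', 'SkOpt', 'Nevergrad',
# 'ZOOpt', 'Ax', 'HyperOpt').
if __name__ == "__main__":
    _test_distillbert(method='BlendSearch')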