from deepobs.tuner import RandomSearch
from deepobs.pytorch.runners import StandardRunner, LearningRateScheduleRunner

# `config` is assumed to be a project-extended DeepOBS config module
# (set_early_stopping is not part of stock DeepOBS); OPTIM_MAP,
# OPTIM_PARAMETERS, OPTIM_SAMPLERS_TUNED_PRIOR, parse_arguments and
# global_config are project-local and defined elsewhere in the repo.


def main(early_stopping, optimizer_class_o, problem_o, num_evals_o,
         random_seed_o, path_o):
    """Run a random search for one optimizer/test-problem combination.

    Variant of main() that takes its settings as arguments instead of
    parsing them from the command line.
    """
    config.set_early_stopping(early_stopping)
    optimizer_class = OPTIM_MAP[optimizer_class_o]
    hyperparams = OPTIM_PARAMETERS[optimizer_class_o]
    sampler = OPTIM_SAMPLERS_TUNED_PRIOR[optimizer_class_o]

    # Cyclical-learning-rate optimizers need the epoch budget up front and a
    # runner that applies a learning rate schedule.
    runner = StandardRunner
    if optimizer_class_o in ['sgdmcwclr', 'adamwclrdecay']:
        optimizer_class.set_max_epochs(
            config.get_testproblem_default_setting(problem_o)['num_epochs'])
        runner = LearningRateScheduleRunner

    # Note: 'ressources' is the keyword's spelling in the DeepOBS tuner API.
    tuner = RandomSearch(optimizer_class, hyperparams, sampler,
                         runner=runner, ressources=num_evals_o)
    tuner.tune(problem_o, rerun_best_setting=False, output_dir=path_o,
               random_seed=random_seed_o, weight_decay=0)
def main():
    """CLI entry point: parse arguments, then run the same random search."""
    args = parse_arguments()
    # config.set_framework('pytorch')
    config.set_early_stopping(args.early_stopping)
    optimizer_class = OPTIM_MAP[args.optim]
    hyperparams = OPTIM_PARAMETERS[args.optim]
    sampler = OPTIM_SAMPLERS_TUNED_PRIOR[args.optim]

    # As above: cyclical-learning-rate optimizers need the epoch budget and
    # a schedule-aware runner.
    runner = StandardRunner
    if args.optim in ['sgdmcwclr', 'adamwclrdecay']:
        optimizer_class.set_max_epochs(
            config.get_testproblem_default_setting(args.problem)['num_epochs'])
        runner = LearningRateScheduleRunner

    tuner = RandomSearch(optimizer_class, hyperparams, sampler,
                         runner=runner, ressources=args.num_evals)
    tuner.tune(args.problem, rerun_best_setting=False,
               output_dir=args.log_path, random_seed=args.random_seed,
               weight_decay=0)
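# For context, a minimal sketch of what the three registries consumed above
# might look like, assuming the DeepOBS RandomSearch API (a dict of
# hyperparameter names/types plus a dict of scipy.stats distributions to
# sample from). The key, optimizer class, and distributions below are
# illustrative placeholders, not the repo's actual entries; the _SKETCH
# suffix marks them as hypothetical.
from scipy.stats import loguniform, uniform
from torch.optim import SGD

OPTIM_MAP_SKETCH = {'sgd': SGD}
OPTIM_PARAMETERS_SKETCH = {
    'sgd': {'lr': {'type': float},
            'momentum': {'type': float}},
}
OPTIM_SAMPLERS_TUNED_PRIOR_SKETCH = {
    # A log-uniform prior is a common choice for learning rates.
    'sgd': {'lr': loguniform(1e-4, 1e0),
            'momentum': uniform(loc=0.5, scale=0.5)},
}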
def init_default_problem_params(testproblem):
    """Return a dict mapping the test problem name to its DeepOBS defaults."""
    testproblem_default = config.get_testproblem_default_setting(testproblem)
    return {testproblem: testproblem_default}
def _use_default(testproblem, key):
    """Look up a single DeepOBS default setting (e.g. 'num_epochs')."""
    return global_config.get_testproblem_default_setting(testproblem)[key]
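# Usage sketch for the two helpers above, assuming a standard DeepOBS test
# problem name such as 'cifar10_3c3d' whose defaults include 'num_epochs';
# the printed values depend on the installed DeepOBS version.
if __name__ == '__main__':
    defaults = init_default_problem_params('cifar10_3c3d')
    print(defaults['cifar10_3c3d'])                      # full settings dict
    print(_use_default('cifar10_3c3d', 'num_epochs'))    # single setting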