def set_basic_conf(self):
    """Build a basic Optuna search configuration for testing.

    Returns:
        Tuple of (search_alg, cost): an ``OptunaSearch`` instance seeded
        for reproducibility, and a trainable that reports a loss with a
        known optimum (height=14 maximizes the quadratic term's pull,
        width far from 3 lowers the loss).
    """
    from optuna.samplers import TPESampler

    # Two continuous dimensions defined via Optuna's suggest API.
    config_space = [
        ot_param.suggest_uniform("width", 0, 20),
        ot_param.suggest_uniform("height", -100, 100),
    ]

    def cost(space, reporter):
        loss = (space["height"] - 14)**2 - abs(space["width"] - 3)
        reporter(loss=loss)

    # Seeded sampler keeps the suggested trials deterministic across runs.
    sampler = TPESampler(seed=10)
    search_alg = OptunaSearch(
        config_space, sampler=sampler, metric="loss", mode="min")
    return search_alg, cost
def _get_optuna_params(self):
    """Translate ``self.param_distributions`` into an Optuna search space.

    Each entry in ``self.param_distributions`` is converted as follows:
      * 2-tuple ``(low, high)`` -> ``suggest_uniform``
      * 3-tuple ``(low, high, prior)`` where prior is ``"uniform"`` or
        ``"log-uniform"`` -> ``suggest_uniform`` / ``suggest_loguniform``
      * list -> ``suggest_categorical``
      * anything else is passed through unchanged.

    Returns:
        list: Optuna parameter suggestions (or passthrough values).

    Raises:
        ValueError: if tuple bounds are not float-convertible, or the
            prior string is not one of the two supported values.
    """
    from ray.tune.suggest.optuna import param

    config_space = []
    for param_name, space in self.param_distributions.items():
        prior = "uniform"
        param_name = str(param_name)
        if isinstance(space, tuple) and 2 <= len(space) <= 3:
            try:
                low = float(space[0])
                high = float(space[1])
            # float() raises exactly these two; a bare Exception would
            # also mask unrelated bugs.
            except (TypeError, ValueError):
                # BUG FIX: the original message referenced type(low) /
                # type(high), which are unbound when float() raises,
                # turning the intended ValueError into a NameError.
                raise ValueError(
                    "low and high need to be of type float, "
                    f"are of type {type(space[0])} and {type(space[1])}"
                ) from None
            if len(space) == 3:
                prior = space[2]
                if prior not in ["uniform", "log-uniform"]:
                    raise ValueError(
                        "prior needs to be either "
                        f"'uniform' or 'log-uniform', was {prior}")
            if prior == "log-uniform":
                config_space.append(
                    param.suggest_loguniform(param_name, low, high))
            else:
                config_space.append(
                    param.suggest_uniform(param_name, low, high))
        elif isinstance(space, list):
            config_space.append(
                param.suggest_categorical(param_name, space))
        else:
            # Assume the value is already an Optuna suggestion; pass through.
            config_space.append(space)
    return config_space
# Feed the score back back to Tune. tune.report(iterations=step, mean_loss=intermediate_score) time.sleep(0.1) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument( "--smoke-test", action="store_true", help="Finish quickly for testing") args, _ = parser.parse_known_args() ray.init(configure_logging=False) space = [ param.suggest_uniform("width", 0, 20), param.suggest_uniform("height", -100, 100), # This is an ignored parameter. param.suggest_categorical("activation", ["relu", "tanh"]) ] config = { "num_samples": 10 if args.smoke_test else 100, "config": { "steps": 100, } } algo = OptunaSearch(space, metric="mean_loss", mode="min") scheduler = AsyncHyperBandScheduler(metric="mean_loss", mode="min") tune.run(easy_objective, search_alg=algo, scheduler=scheduler, **config)