def testCFO(self):
    """Round-trip a CFO searcher through save and restore."""
    from ray.tune.suggest.flaml import CFO

    # Save a freshly constructed searcher, then restore into a new one
    # built with the same arguments.
    algo = CFO(space=self.config, metric=self.metric_name, mode="max")
    self._save(algo)

    algo = CFO(space=self.config, metric=self.metric_name, mode="max")
    self._restore(algo)
def testCFO(self):
    """Check CFO copes with an objective that reports invalid values.

    Currently skipped: the scenario is broken upstream in FLAML.
    """
    self.skipTest(
        "Broken in FLAML, reenable once "
        "https://github.com/microsoft/FLAML/pull/263 is merged"
    )
    from ray.tune.suggest.flaml import CFO

    # Seed the search with points spanning valid and invalid regions.
    initial_points = [
        {"report": 1.0},
        {"report": 2.1},
        {"report": 3.1},
        {"report": 4.1},
    ]
    results = tune.run(
        _invalid_objective,
        search_alg=CFO(points_to_evaluate=initial_points),
        config=self.config,
        metric="_metric",
        mode="max",
        num_samples=16,
        reuse_actors=False,
    )

    # The best trial must land in the valid region (report <= 2.0).
    self.assertLessEqual(results.best_trial.config["report"], 2.0)
def testConvergenceCFO(self):
    """CFO should converge to x == 0 within tolerance on the test objective."""
    from ray.tune.suggest.flaml import CFO

    np.random.seed(0)  # deterministic run for reproducibility
    algo = CFO()
    analysis = self._testConvergence(algo, patience=200)
    assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-2)
def testCFO(self):
    """Check CFO copes with an objective that reports invalid values."""
    from ray.tune.suggest.flaml import CFO

    results = tune.run(
        _invalid_objective,
        search_alg=CFO(),
        config=self.config,
        metric="_metric",
        mode="max",
        num_samples=16,
        reuse_actors=False,
    )

    # The best trial must land in the valid region (report <= 2.0).
    self.assertLessEqual(results.best_trial.config["report"], 2.0)
def set_basic_conf(self):
    """Build a seeded CFO searcher and a toy objective for the test fixture.

    Returns:
        Tuple of (search_alg, cost) where cost reports a loss minimized
        near height == 14 with a width-dependent offset.
    """
    search_space = {
        "height": tune.uniform(-100, 100),
        "width": tune.randint(0, 100),
    }

    def objective(param, reporter):
        # Quadratic bowl in height, shifted down by distance of width from 3.
        reporter(loss=(param["height"] - 14)**2 - abs(param["width"] - 3))

    searcher = CFO(
        space=search_space,
        metric="loss",
        mode="min",
        seed=20,  # fixed seed keeps the fixture deterministic
    )
    return searcher, objective
def run_cfo_tune(smoke_test=False):
    """Run a CFO-driven hyperparameter search on ``easy_objective``.

    Args:
        smoke_test: If True, run a short 10-sample search instead of 100.
    """
    # Cap parallelism at 4 concurrent trials.
    searcher = ConcurrencyLimiter(CFO(), max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()

    search_space = {
        "steps": 100,
        "width": tune.uniform(0, 20),
        "height": tune.uniform(-100, 100),
        # This is an ignored parameter.
        "activation": tune.choice(["relu", "tanh"]),
    }

    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=searcher,
        scheduler=scheduler,
        num_samples=10 if smoke_test else 100,
        config=search_space,
    )
    print("Best hyperparameters found were: ", analysis.best_config)