def test_convergence_gaussian_process(self):
    """Check that Bayesian optimization converges on a 1-D objective.

    Runs ``BayesOptSearch`` over ``x`` in [0, 20] (serialized through a
    ``ConcurrencyLimiter`` so each suggestion sees all prior results) and
    asserts the search settles near the known optimum at ``x == 0``.
    """
    np.random.seed(0)
    ray.init(local_mode=True, num_cpus=1, num_gpus=1)
    try:
        # This is the space of parameters to explore.
        space = {"x": tune.uniform(0, 20)}
        resources_per_trial = {"cpu": 1, "gpu": 0}

        # Following bayesian optimization.
        gp = BayesOptSearch(random_search_steps=10)
        gp.repeat_float_precision = 5
        # Limit to one concurrent trial so the searcher is strictly
        # sequential and the run is reproducible under the fixed seed.
        gp = ConcurrencyLimiter(gp, 1)

        # Execution of the BO.
        analysis = tune.run(
            loss,
            metric="loss",
            mode="min",
            search_alg=gp,
            config=space,
            num_samples=100,  # Number of iterations
            resources_per_trial=resources_per_trial,
            raise_on_failed_trial=False,
            fail_fast=True,
            verbose=1)
        # NOTE(review): the exact stopping point is platform-dependent,
        # hence the set of accepted trial counts.
        assert len(analysis.trials) in {13, 40, 43}  # it is 43 on the cluster?
        assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-5)
    finally:
        # Bug fix: the original never called ray.shutdown(), leaking the
        # local-mode cluster into subsequent tests (the sibling variant of
        # this test does shut down). try/finally guarantees cleanup even
        # when an assertion fails.
        ray.shutdown()
def test_convergence_gaussian_process(self):
    """Run BayesOpt over ``x`` in (0, 20) and check the final trial count.

    The search space is handed to ``BayesOptSearch`` directly (native
    tuple bounds), so ``tune.run`` receives an empty ``config``.
    """
    np.random.seed(0)
    ray.init(local_mode=True, num_cpus=1, num_gpus=1)

    search_space = {
        "x": (0, 20)  # This is the space of parameters to explore
    }
    trial_resources = {"cpu": 1, "gpu": 0}

    # Following bayesian optimization
    searcher = BayesOptSearch(
        search_space, metric="loss", mode="min", random_search_steps=10)
    searcher.repeat_float_precision = 5
    # One trial at a time: the searcher must observe every result before
    # proposing the next sample.
    searcher = ConcurrencyLimiter(searcher, 1)

    # Execution of the BO.
    analysis = tune.run(
        loss,
        # stop=EarlyStopping("loss", mode="min", patience=5),
        search_alg=searcher,
        config={},
        num_samples=100,  # Number of iterations
        resources_per_trial=trial_resources,
        raise_on_failed_trial=False,
        fail_fast=True,
        verbose=1)

    assert len(analysis.trials) == 41
    ray.shutdown()
def testConvergenceBayesOpt(self):
    """BayesOpt should reach ``x == 0`` in well under 50 trials."""
    from ray.tune.suggest.bayesopt import BayesOptSearch

    np.random.seed(0)

    # Following bayesian optimization
    algo = BayesOptSearch(random_search_steps=10)
    algo.repeat_float_precision = 5
    # Serialize suggestions through a concurrency limit of one.
    limited = ConcurrencyLimiter(algo, 1)

    analysis = self._testConvergence(limited, patience=100)

    assert len(analysis.trials) < 50
    assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-5)