def testMaxConcurrentSuggestions(self):
    """Checks that next_trials() supports throttling."""
    experiment_spec = {
        "run": "PPO",
        "num_samples": 6,
    }
    experiments = [Experiment.from_json("test", experiment_spec)]

    searcher = _MockSuggestionAlgorithm(max_concurrent=4)
    searcher.add_configurations(experiments)

    # Only max_concurrent (4) trials may be handed out at once;
    # a further request yields nothing.
    trials = searcher.next_trials()
    self.assertEqual(len(trials), 4)
    self.assertEqual(searcher.next_trials(), [])

    # Completing one trial frees exactly one concurrency slot.
    finished_trial = trials.pop()
    searcher.on_trial_complete(finished_trial.trial_id)
    self.assertEqual(len(searcher.next_trials()), 1)

    # Drain the three remaining in-flight trials.
    for _ in range(3):
        finished_trial = trials.pop()
        searcher.on_trial_complete(finished_trial.trial_id)

    # One of the 6 samples is still unserved; after that the
    # searcher has nothing left to suggest.
    self.assertEqual(len(searcher.next_trials()), 1)
    self.assertEqual(len(searcher.next_trials()), 0)
def testSearchAlgSchedulerEarlyStop(self):
    """Early termination notif to Searcher can be turned off."""

    class _MockScheduler(FIFOScheduler):
        # Kill every trial on its first reported result.
        def on_trial_result(self, *args, **kwargs):
            return TrialScheduler.STOP

    ray.init(num_cpus=4, num_gpus=2)
    experiment_spec = {"run": "__fake", "stop": {"training_iteration": 2}}
    experiments = [Experiment.from_json("test", experiment_spec)]

    def final_result_count(use_early_stopped):
        # Run one experiment to completion under the stop-everything
        # scheduler and report how many results reached the searcher.
        searcher = _MockSuggestionAlgorithm(
            use_early_stopped_trials=use_early_stopped)
        searcher.add_configurations(experiments)
        runner = TrialRunner(search_alg=searcher, scheduler=_MockScheduler())
        runner.step()
        runner.step()
        return len(searcher.final_results)

    # Early-stopped results are forwarded when the flag is on ...
    self.assertEqual(final_result_count(True), 1)
    # ... and suppressed when it is off.
    self.assertEqual(final_result_count(False), 0)
def testSearchAlgStalled(self):
    """Checks that runner and searcher state is maintained when stalled."""
    ray.init(num_cpus=4, num_gpus=2)
    experiment_spec = {
        "run": "__fake",
        "num_samples": 3,
        "stop": {
            "training_iteration": 1
        }
    }
    experiments = [Experiment.from_json("test", experiment_spec)]
    search_alg = _MockSuggestionAlgorithm(max_concurrent=1)
    search_alg.add_configurations(experiments)
    searcher = search_alg.searcher
    runner = TrialRunner(search_alg=search_alg)

    # First sample starts, then terminates after one iteration.
    runner.step()
    trials = runner.get_trials()
    self.assertEqual(trials[0].status, Trial.RUNNING)
    runner.step()
    self.assertEqual(trials[0].status, Trial.TERMINATED)

    # Second sample starts; the searcher tracks it as live.
    trials = runner.get_trials()
    runner.step()
    self.assertEqual(trials[1].status, Trial.RUNNING)
    self.assertEqual(len(searcher.live_trials), 1)

    # Stall the searcher: the running trial finishes, but no new
    # trial is produced — neither side may report itself finished.
    searcher.stall = True
    runner.step()
    self.assertEqual(trials[1].status, Trial.TERMINATED)
    self.assertEqual(len(searcher.live_trials), 0)
    self.assertTrue(all(trial.is_finished() for trial in trials))
    self.assertFalse(search_alg.is_finished())
    self.assertFalse(runner.is_finished())

    # Un-stall: the third (final) sample is suggested and runs.
    searcher.stall = False
    runner.step()
    trials = runner.get_trials()
    self.assertEqual(trials[2].status, Trial.RUNNING)
    self.assertEqual(len(searcher.live_trials), 1)

    # After the last trial terminates, everything is finished.
    runner.step()
    self.assertEqual(trials[2].status, Trial.TERMINATED)
    self.assertEqual(len(searcher.live_trials), 0)
    self.assertTrue(search_alg.is_finished())
    self.assertTrue(runner.is_finished())
def testNestedResults(self):
    """Checks that nested result dicts are flattened for stopping
    criteria, schedulers, and search algorithms.

    Fix: the first `assertRaises` case now unpacks
    `tune.run(...).trials` like the parallel case below it —
    previously it unpacked the return of `tune.run` directly, which
    was inconsistent with its sibling assertion.
    """

    def create_result(i):
        # Nested payload; flattened form has keys like "test/1/2/3".
        return {"test": {"1": {"2": {"3": i, "4": False}}}}

    flattened_keys = list(flatten_dict(create_result(0)))

    class _MockScheduler(FIFOScheduler):
        results = []

        def on_trial_result(self, trial_runner, trial, result):
            self.results += [result]
            return TrialScheduler.CONTINUE

        def on_trial_complete(self, trial_runner, trial, result):
            self.complete_result = result

    def train(config, reporter):
        for i in range(100):
            reporter(**create_result(i))

    algo = _MockSuggestionAlgorithm()
    scheduler = _MockScheduler()
    # Stop when the flattened metric "test/1/2/3" reaches 20.
    [trial] = tune.run(
        train,
        scheduler=scheduler,
        search_alg=algo,
        stop={
            "test/1/2/3": 20
        }).trials
    self.assertEqual(trial.status, Trial.TERMINATED)
    # Nested structure is preserved in last_result.
    self.assertEqual(trial.last_result["test"]["1"]["2"]["3"], 20)
    self.assertEqual(trial.last_result["test"]["1"]["2"]["4"], False)
    # Iterations 0..20 ran, so the counter reads 21.
    self.assertEqual(trial.last_result[TRAINING_ITERATION], 21)

    # The scheduler saw the 20 results preceding the stopping one,
    # each carrying at least the flattened keys.
    self.assertEqual(len(scheduler.results), 20)
    self.assertTrue(
        all(
            set(result) >= set(flattened_keys)
            for result in scheduler.results))
    self.assertTrue(set(scheduler.complete_result) >= set(flattened_keys))

    # The search algorithm saw the same stream of results.
    self.assertEqual(len(algo.results), 20)
    self.assertTrue(
        all(set(result) >= set(flattened_keys) for result in algo.results))

    # Stopping criteria that do not match any reported (flattened or
    # top-level) key must raise.
    with self.assertRaises(TuneError):
        [trial] = tune.run(train, stop={"1/2/3": 20}).trials
    with self.assertRaises(TuneError):
        [trial] = tune.run(train, stop={"test": 1}).trials
def testSearchAlgFinished(self):
    """Checks that SearchAlg is Finished before all trials are done."""
    ray.init(num_cpus=4, num_gpus=2)
    spec = {"run": "__fake", "stop": {"training_iteration": 1}}
    searcher = _MockSuggestionAlgorithm(
        [Experiment.from_json("test", spec)], max_concurrent=10)
    runner = TrialRunner(search_alg=searcher)

    # The single sample starts running; with nothing left to suggest
    # the searcher is finished even though the runner is not.
    runner.step()
    trials = runner.get_trials()
    self.assertEqual(trials[0].status, Trial.RUNNING)
    self.assertTrue(searcher.is_finished())
    self.assertFalse(runner.is_finished())

    # Once the trial terminates, both sides are finished.
    runner.step()
    self.assertEqual(trials[0].status, Trial.TERMINATED)
    self.assertEqual(len(searcher.live_trials), 0)
    self.assertTrue(searcher.is_finished())
    self.assertTrue(runner.is_finished())
def testSearchAlgNotification(self):
    """Checks notification of trial to the Search Algorithm."""
    ray.init(num_cpus=4, num_gpus=2)
    spec = {"run": "__fake", "stop": {"training_iteration": 2}}
    searcher = _MockSuggestionAlgorithm(
        [Experiment.from_json("test", spec)], max_concurrent=10)
    runner = TrialRunner(search_alg=searcher)

    runner.step()
    trials = runner.get_trials()
    self.assertEqual(trials[0].status, Trial.RUNNING)

    # Intermediate result: trial keeps running.
    runner.step()
    self.assertEqual(trials[0].status, Trial.RUNNING)

    # Stopping criterion reached on the next step.
    runner.step()
    self.assertEqual(trials[0].status, Trial.TERMINATED)

    # The searcher was notified of exactly one intermediate result
    # and one completion.
    self.assertEqual(searcher.counter["result"], 1)
    self.assertEqual(searcher.counter["complete"], 1)
def testSearchAlgSchedulerInteraction(self):
    """Checks that TrialScheduler killing trial will notify SearchAlg."""

    class _MockScheduler(FIFOScheduler):
        # Stop every trial on its first reported result.
        def on_trial_result(self, *args, **kwargs):
            return TrialScheduler.STOP

    ray.init(num_cpus=4, num_gpus=2)
    spec = {"run": "__fake", "stop": {"training_iteration": 2}}
    searcher = _MockSuggestionAlgorithm(
        [Experiment.from_json("test", spec)], max_concurrent=10)
    runner = TrialRunner(search_alg=searcher, scheduler=_MockScheduler())

    # Trial starts; the searcher is out of suggestions already.
    runner.step()
    trials = runner.get_trials()
    self.assertEqual(trials[0].status, Trial.RUNNING)
    self.assertTrue(searcher.is_finished())
    self.assertFalse(runner.is_finished())

    # The scheduler kills the trial before its own stopping criterion
    # (training_iteration=2); the searcher must be told regardless.
    runner.step()
    self.assertEqual(trials[0].status, Trial.TERMINATED)
    self.assertEqual(len(searcher.live_trials), 0)
    self.assertTrue(searcher.is_finished())
    self.assertTrue(runner.is_finished())