def test_get_ordered_experiments_by_metric(self, _):
    """Results come back ordered by the requested metric's optimization."""
    group = ExperimentGroupFactory()
    # No experiment ids -> empty result.
    assert len(  # pylint:disable=len-as-condition
        group.get_ordered_experiments_by_metric(
            experiment_ids=[],
            metric='precision',
            optimization='maximize'
        )) == 0

    created = []
    ids = []
    for _i in range(5):
        xp = ExperimentFactory(experiment_group=group)
        created.append(xp)
        ids.append(xp.id)
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'precision': random.random()})
    # The first 3 experiments get a `loss` metric as their latest metric.
    for xp in created[:3]:
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'loss': random.random()})

    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='precision',
        optimization='maximize'
    )
    assert len(results) == 5
    values = [r.precision for r in results if r.precision is not None]
    assert len(values) == 2
    assert sorted(values, reverse=True) == values

    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='loss',
        optimization='minimize'
    )
    assert len(results) == 5
    values = [r.loss for r in results if r.loss is not None]
    assert len(values) == 3
    assert sorted(values) == values

    # A metric nobody reported: all values are None.
    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='accuracy',
        optimization='maximize'
    )
    assert len(results) == 5
    assert len(  # pylint:disable=len-as-condition
        [r for r in results if r.accuracy is not None]) == 0
def test_experiment_group_deletion_triggers_experiments_deletion(self):
    """Deleting a group stops and removes every experiment it owns."""
    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory()

    assert mock_fct.call_count == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 2

    with patch('scheduler.experiment_scheduler.stop_experiment') as stop_mock:
        group.delete()

    # One stop call per experiment in the group.
    assert stop_mock.call_count == 2
    assert Experiment.objects.filter(experiment_group=group).count() == 0
def test_get_experiments_metrics(self, _):
    """Metric pairs are returned for all ids; missing metrics come back as None."""
    group = ExperimentGroupFactory()
    assert len(group.get_experiments_metrics(  # pylint:disable=len-as-condition
        experiment_ids=[],
        metric='precision'
    )) == 0

    created = []
    ids = []
    for _i in range(5):
        xp = ExperimentFactory(experiment_group=group)
        created.append(xp)
        ids.append(xp.id)
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'precision': random.random()})
    # The first 3 experiments get a `loss` metric as their latest metric.
    for xp in created[:3]:
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'loss': random.random()})

    results = group.get_experiments_metrics(
        experiment_ids=ids,
        metric='precision'
    )
    assert len(results) == 5
    values = [pair[1] for pair in results if pair[1] is not None]
    assert len(values) == 2

    results = group.get_experiments_metrics(
        experiment_ids=ids,
        metric='loss'
    )
    assert len(results) == 5
    values = [pair[1] for pair in results if pair[1] is not None]
    assert len(values) == 3

    # A metric nobody reported: all values are None.
    results = group.get_experiments_metrics(
        experiment_ids=ids,
        metric='accuracy'
    )
    assert len(results) == 5
    assert len(  # pylint:disable=len-as-condition
        [pair for pair in results if pair[1] is not None]) == 0
def test_experiment_group_deletion_triggers_stopping_for_running_experiment(self):
    """Only the scheduled experiment triggers a stop task on group deletion."""
    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory()

    assert mock_fct.call_count == 1

    experiment = ExperimentFactory(project=group.project, experiment_group=group)
    # Mark this experiment as scheduled and give it a job.
    experiment.set_status(ExperimentLifeCycle.SCHEDULED)
    ExperimentJobFactory(experiment=experiment)

    assert Experiment.objects.filter(experiment_group=group).count() == 3

    with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as stop_mock:
        group.delete()

    # Only one experiment was stopped.
    assert stop_mock.call_count == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 0
def test_status_update_results_in_new_updated_at_datetime(self):
    """Every status creation bumps the group's `updated_at` timestamp."""
    group = ExperimentGroupFactory()
    last_updated = group.updated_at

    # Create a new status through the factory.
    ExperimentGroupStatusFactory(experiment_group=group,
                                 status=ExperimentGroupLifeCycle.RUNNING)
    group.refresh_from_db()
    assert last_updated < group.updated_at
    last_updated = group.updated_at

    # Create a status using set_status.
    # NOTE(review): this passes ExperimentLifeCycle.FAILED to a *group*;
    # ExperimentGroupLifeCycle.FAILED was probably intended — confirm.
    group.set_status(ExperimentLifeCycle.FAILED)
    group.refresh_from_db()
    assert last_updated < group.updated_at
def test_experiment_create_a_max_of_experiments(self):
    """The first iteration only suggests up to the concurrency limit."""
    assert ExperimentGroupIteration.objects.count() == 0

    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_fct.call_count == 2
    # Matrix space is 3, but only 2 experiments are created in this round.
    assert group.specification.matrix_space == 3
    assert group.experiments.count() == 2
    assert ExperimentGroupIteration.objects.count() == 1
    assert ExperimentGroupIteration.objects.last().data['num_suggestions'] == 2
def setUp(self):
    """Create a group and wire up all event backends for the tests."""
    super().setUp()
    self.experiment_group = ExperimentGroupFactory()
    # validate() then setup(), in dependency order.
    for backend in (auditor, tracker, activitylogs, notifier):
        backend.validate()
        backend.setup()
def test_stop_pending_experiments(self):
    """Stopping with pending=True clears the pending queue."""
    with patch('runner.hp_search.random.hp_random_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_fct.call_count == 1
    assert group.pending_experiments.count() == 2

    stop_group_experiments(experiment_group_id=group.id, pending=True)

    assert group.pending_experiments.count() == 0
def test_bo_rescheduling(self):
    """BO scheduling iterates while experiments finish, then creates a new round."""
    with patch('runner.hp_search.bo.hp_bo_start.apply_async') as start_mock:
        ExperimentGroupFactory(content=experiment_group_spec_content_bo)
    assert start_mock.call_count == 1

    with patch('runner.hp_search.bo.hp_bo_iterate.delay') as iterate_mock:
        with patch('runner.tasks.experiments.build_experiment.delay') as build_mock:
            ExperimentGroupFactory(content=experiment_group_spec_content_bo)
    assert iterate_mock.call_count == 1
    assert build_mock.call_count == 2

    # Fake
    with patch('runner.hp_search.bo.hp_bo_start.apply_async') as start_mock:
        group = ExperimentGroupFactory(content=experiment_group_spec_content_bo)
    assert start_mock.call_count == 1
    assert group.non_done_experiments.count() == 2

    # Mark all experiments as done -> hp_bo_start should iterate.
    with patch('runner.schedulers.experiment_scheduler.stop_experiment') as _:  # noqa
        for xp in group.experiments.all():
            ExperimentStatusFactory(experiment=xp,
                                    status=ExperimentLifeCycle.SUCCEEDED)
    with patch('runner.hp_search.bo.hp_bo_iterate.delay') as iterate_mock:
        hp_bo_start(group.id)
    assert iterate_mock.call_count == 1

    # Mark all experiments as done again -> hp_bo_start should create.
    with patch('runner.schedulers.experiment_scheduler.stop_experiment') as _:  # noqa
        for xp in group.experiments.all():
            ExperimentStatusFactory(experiment=xp,
                                    status=ExperimentLifeCycle.SUCCEEDED)
    with patch('runner.hp_search.bo.hp_bo_create.delay') as create_mock:
        hp_bo_start(group.id)
    assert create_mock.call_count == 1
def test_non_independent_experiment_creation_doesnt_trigger_start(self):
    """Experiments created inside a group are not started independently."""
    with patch('runner.hp_search.grid.hp_grid_search_start.apply_async') as _:  # noqa
        group = ExperimentGroupFactory()

    with patch('runner.tasks.experiments.start_experiment.delay') as start_mock:
        with patch.object(Experiment, 'set_status') as status_mock:
            ExperimentFactory(experiment_group=group)

    assert start_mock.call_count == 0
    assert status_mock.call_count == 1
def test_get_search_iteration_manager(self):
    """Only hyperband and BO searches have an iteration manager."""
    # Grid search.
    group = ExperimentGroupFactory()
    assert get_search_iteration_manager(group) is None

    # Random search.
    group = ExperimentGroupFactory(
        content=experiment_group_spec_content_early_stopping)
    assert get_search_iteration_manager(group) is None

    # Hyperband.
    group = ExperimentGroupFactory(
        content=experiment_group_spec_content_hyperband)
    assert isinstance(get_search_iteration_manager(group),
                      HyperbandIterationManager)

    # BO.
    group = ExperimentGroupFactory(content=experiment_group_spec_content_bo)
    assert isinstance(get_search_iteration_manager(group),
                      BOIterationManager)
def test_non_independent_experiment_creation_doesnt_trigger_start(self):
    """Group-owned experiments don't get the independent start task."""
    with patch('hpsearch.tasks.hp_create.apply_async') as mock_fct:
        group = ExperimentGroupFactory()
    assert mock_fct.call_count == 1

    with patch('scheduler.tasks.experiments.experiments_start.apply_async') as start_mock:
        with patch.object(Experiment, 'set_status') as status_mock:
            ExperimentFactory(experiment_group=group)

    assert start_mock.call_count == 0
    assert status_mock.call_count == 1
def setUp(self):
    """Build a chart-view detail URL for one object in a group."""
    super().setUp()
    self.project = ProjectFactory(user=self.auth_client.user)
    self.group = ExperimentGroupFactory(project=self.project)
    self.object = self.factory_class(experiment_group=self.group)
    self.url = '/{}/{}/{}/groups/{}/chartviews/{}/'.format(
        API_V1,
        self.group.project.user.username,
        self.group.project.name,
        self.group.id,
        self.object.id)
    self.queryset = self.model_class.objects.all()
def test_spec_creation_triggers_experiments_creations_and_scheduling(self):
    """Creating a group creates pending experiments and schedules the start task."""
    with patch('experiment_groups.tasks.start_group_experiments.apply_async') as mock_fct:
        group = ExperimentGroupFactory()

    assert Experiment.objects.filter(experiment_group=group).count() == 2
    assert mock_fct.call_count == 1
    assert group.pending_experiments.count() == 2
    assert group.running_experiments.count() == 0

    # Move one experiment to RUNNING and check the querysets shift.
    experiment = Experiment.objects.filter(experiment_group=group).first()
    ExperimentStatusFactory(experiment=experiment,
                            status=ExperimentLifeCycle.RUNNING)
    assert group.pending_experiments.count() == 1
    assert group.running_experiments.count() == 1
def setUp(self):
    """Build the chart-views list URL and seed a few named views."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.group = ExperimentGroupFactory(project=project)
    self.url = '/{}/{}/{}/groups/{}/chartviews/'.format(
        API_V1, project.user.username, project.name, self.group.id)
    self.objects = [self.factory_class(experiment_group=self.group,
                                       name='view{}'.format(i))
                    for i in range(self.num_objects)]
    self.queryset = self.model_class.objects.all().order_by('created_at')
def test_archive(self):
    """Archiving hides the group and its experiments; unarchiving restores them."""
    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory()

    assert mock_fct.call_count == 2
    assert group.deleted is False
    assert Experiment.objects.filter(experiment_group=group).count() == 2
    assert ExperimentGroup.objects.count() == 1
    assert ExperimentGroup.all.count() == 1

    group.archive()

    # Default managers hide archived rows; `all` managers still see them.
    assert group.deleted is True
    assert ExperimentGroup.objects.count() == 0
    assert ExperimentGroup.all.count() == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 0
    assert Experiment.all.filter(experiment_group=group).count() == 2
    assert group.experiments.count() == 0
    assert group.all_experiments.count() == 2

    group.unarchive()

    assert group.deleted is False
    assert ExperimentGroup.objects.count() == 1
    assert ExperimentGroup.all.count() == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 2
    assert Experiment.all.filter(experiment_group=group).count() == 2
    assert group.experiments.count() == 2
    assert group.all_experiments.count() == 2
def test_archive(self, create_build_job):
    """Archiving hides the group/experiments; restore() brings them back."""
    build = BuildJobFactory()
    BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED, job=build)
    create_build_job.return_value = build, True, True

    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory()

    assert mock_fct.call_count == 1
    assert group.deleted is False
    assert Experiment.objects.filter(experiment_group=group).count() == 2
    assert ExperimentGroup.objects.count() == 1
    assert ExperimentGroup.all.count() == 1

    group.archive()

    # Default managers hide archived rows; `all` managers still see them.
    assert group.deleted is True
    assert ExperimentGroup.objects.count() == 0
    assert ExperimentGroup.all.count() == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 0
    assert Experiment.all.filter(experiment_group=group).count() == 2
    assert group.experiments.count() == 0
    assert group.all_experiments.count() == 2

    group.restore()

    assert group.deleted is False
    assert ExperimentGroup.objects.count() == 1
    assert ExperimentGroup.all.count() == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 2
    assert Experiment.all.filter(experiment_group=group).count() == 2
    assert group.experiments.count() == 2
    assert group.all_experiments.count() == 2
def test_experiment_group_should_stop_early(self):
    """Early stopping reacts to metrics being created and deleted."""
    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_fct.call_count == 1
    assert group.should_stop_early() is False
    assert group.pending_experiments.count() == 2

    experiment1, experiment2 = list(group.experiments.all())

    # A precision metric above the threshold triggers early stopping.
    metric1 = ExperimentMetric.objects.create(experiment=experiment1,
                                              values={'precision': 0.99})
    assert group.should_stop_early() is True

    # A low loss metric keeps early stopping triggered.
    metric2 = ExperimentMetric.objects.create(experiment=experiment2,
                                              values={'loss': 0.01})
    assert group.should_stop_early() is True

    # Removing the precision metric still leaves the loss trigger.
    metric1.delete()
    assert group.should_stop_early() is True

    # With both metrics gone, early stopping no longer applies.
    metric2.delete()
    assert group.should_stop_early() is False
def test_should_stop_early(self, _):
    """Early stopping fires only once a metric crosses the configured value."""
    # A group without an early-stopping policy never stops early.
    group = ExperimentGroupFactory()
    assert group.should_stop_early() is False

    # A group with an early-stopping policy on `precision`.
    group = ExperimentGroupFactory(
        content=None,
        hptuning={
            'concurrency': 2,
            'random_search': {'n_experiments': 10},
            'early_stopping': [
                {'metric': 'precision', 'value': 0.9, 'optimization': 'maximize'}
            ],
            'matrix': {'lr': {'values': [1, 2, 3]}}
        })
    assert group.should_stop_early() is False

    # A metric below the threshold does not trigger it.
    experiments = [ExperimentFactory(experiment_group=group) for _i in range(2)]
    ExperimentMetric.objects.create(experiment=experiments[0],
                                    values={'precision': 0.8})
    assert group.should_stop_early() is False

    # Crossing the 0.9 threshold triggers early stopping.
    ExperimentMetric.objects.create(experiment=experiments[0],
                                    values={'precision': 0.91})
    assert group.should_stop_early() is True
def setUp(self):
    """Seed a group and a selection group with metrics, plus an outsider."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.group = ExperimentGroupFactory(project=project)
    self.selection = ExperimentGroupFactory(project=project, content=None)
    self.experiment1 = ExperimentFactory(project=project,
                                         experiment_group=self.group)
    self.experiment2 = ExperimentFactory(project=project,
                                         experiment_group=self.group)
    self.experiment3 = ExperimentFactory(project=project)
    self.selection.selection_experiments.set([self.experiment3])
    self.group_url = '/{}/{}/{}/groups/{}/metrics/'.format(
        API_V1, project.user.username, project.name, self.group.id)
    self.url = self.group_url
    self.selection_url = '/{}/{}/{}/groups/{}/metrics/'.format(
        API_V1, project.user.username, project.name, self.selection.id)
    self.objects1 = [self.factory_class(experiment=self.experiment1,
                                        values={'accuracy': i / 10})
                     for i in range(self.num_objects)]
    self.objects2 = [self.factory_class(experiment=self.experiment2,
                                        values={'accuracy': i / 10})
                     for i in range(self.num_objects)]
    self.objects3 = [self.factory_class(experiment=self.experiment3,
                                        values={'accuracy': i / 10})
                     for i in range(self.num_objects)]
    # Add a random experiment and metric outside the group and selection.
    self.experiment4 = ExperimentFactory(project=project)
    self.factory_class(experiment=self.experiment4, values={'accuracy': 0.9})
    self.group_queryset = self.model_class.objects.filter(
        experiment__experiment_group=self.group).order_by('created_at')
    self.selection_queryset = self.model_class.objects.filter(
        experiment__selections=self.selection).order_by('created_at')
def test_get_search_manager(self):
    """Each hptuning config maps to its dedicated search manager class."""
    # Grid search.
    group = ExperimentGroupFactory()
    assert isinstance(get_search_algorithm_manager(group.hptuning_config),
                      GridSearchManager)

    # Random search.
    group = ExperimentGroupFactory(
        content=experiment_group_spec_content_early_stopping)
    assert isinstance(get_search_algorithm_manager(group.hptuning_config),
                      RandomSearchManager)

    # Hyperband.
    group = ExperimentGroupFactory(
        content=experiment_group_spec_content_hyperband)
    assert isinstance(get_search_algorithm_manager(group.hptuning_config),
                      HyperbandSearchManager)

    # BO.
    group = ExperimentGroupFactory(content=experiment_group_spec_content_bo)
    assert isinstance(get_search_algorithm_manager(group.hptuning_config),
                      BOSearchManager)
def setUp(self):
    """Create a group (with set_status muted) and one status per lifecycle choice."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    with patch.object(ExperimentGroup, 'set_status') as _:  # noqa
        self.experiment_group = ExperimentGroupFactory(project=project)
    self.url = '/{}/{}/{}/groups/{}/statuses/'.format(
        API_V1, project.user.username, project.name, self.experiment_group.id)
    self.objects = [
        self.factory_class(experiment_group=self.experiment_group,
                           status=ExperimentGroupLifeCycle.CHOICES[i][0])
        for i in range(self.num_objects)]
    self.queryset = self.model_class.objects.filter(
        experiment_group=self.experiment_group).order_by('created_at')
def test_project_requests_tensorboard_url(self):
    """Fetching the group view resolves the running tensorboard's URL."""
    project = ProjectFactory(user=self.auth_client.user)
    with patch('scheduler.tasks.experiment_groups.'
               'experiments_group_create.apply_async') as _:  # noqa
        group = ExperimentGroupFactory(project=project)

    tensorboard = TensorboardJobFactory(project=project, experiment_group=group)
    tensorboard.set_status(status=JobLifeCycle.RUNNING)

    with patch('scheduler.tensorboard_scheduler.get_tensorboard_url') as mock_fct:
        response = self.auth_client.get(self._get_url(project, group))

    assert mock_fct.call_count == 1
    assert response.status_code == 200
def setUp(self):
    """Create a BO group with three rounds of two experiments each."""
    super().setUp()
    self.experiment_group = ExperimentGroupFactory(
        content=experiment_group_spec_content_bo)

    def make_round():
        # Two experiments with declarations {'i': 0} and {'i': 1}.
        return [ExperimentFactory(experiment_group=self.experiment_group,
                                  declarations={'i': i})
                for i in range(2)]

    self.experiments_iter1 = make_round()
    self.experiments_iter2 = make_round()
    self.experiments_iter3 = make_round()
    self.iteration_manager = BOIterationManager(
        experiment_group=self.experiment_group)
def setUp(self):
    """Seed group experiments plus one outsider that must be filtered out."""
    super().setUp()
    self.experiment_group = ExperimentGroupFactory()
    self.objects = [self.factory_class(experiment_group=self.experiment_group)
                    for _ in range(self.num_objects)]
    self.url = '/{}/{}/{}/groups/{}/experiments/'.format(
        API_V1,
        self.experiment_group.project.user,
        self.experiment_group.project.name,
        self.experiment_group.sequence)
    # One object that does not belong to the filter.
    self.factory_class()
    self.queryset = self.model_class.objects.filter(
        experiment_group=self.experiment_group)
def test_stop_pending_experiments(self):
    """Stopping with pending=True leaves running experiments untouched."""
    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)
        experiment = ExperimentFactory(experiment_group=group)
        ExperimentStatusFactory(experiment=experiment,
                                status=ExperimentLifeCycle.RUNNING)

    assert mock_fct.call_count == 1
    assert group.pending_experiments.count() == 2
    assert group.running_experiments.count() == 1

    experiments_group_stop_experiments(experiment_group_id=group.id, pending=True)

    # Only pending experiments were stopped.
    assert group.pending_experiments.count() == 0
    assert group.running_experiments.count() == 1
def test_experiment_create_a_max_of_experiments(self, create_build_job):
    """The first iteration only suggests up to the concurrency limit."""
    build = BuildJobFactory()
    BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED, job=build)
    create_build_job.return_value = build, True, True

    assert ExperimentGroupIteration.objects.count() == 0

    with patch('hpsearch.tasks.random.hp_random_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory(
            content=experiment_group_spec_content_early_stopping)

    assert mock_fct.call_count == 1
    # Matrix space is 3, but only 2 experiments are created in this round.
    assert group.specification.matrix_space == 3
    assert group.experiments.count() == 2
    assert ExperimentGroupIteration.objects.count() == 1
    assert ExperimentGroupIteration.objects.last().data['num_suggestions'] == 2
def test_experiment_group_deletion_triggers_stopping_for_running_experiment(self, create_build_job):
    """Only the scheduled experiment triggers a stop task on group deletion."""
    build = BuildJobFactory()
    BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED, job=build)
    create_build_job.return_value = build, True, True

    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        group = ExperimentGroupFactory()
    assert mock_fct.call_count == 1

    experiment = ExperimentFactory(project=group.project, experiment_group=group)
    # Mark this experiment as scheduled and give it a job.
    experiment.set_status(ExperimentLifeCycle.SCHEDULED)
    ExperimentJobFactory(experiment=experiment)

    assert Experiment.objects.filter(experiment_group=group).count() == 3

    with patch('scheduler.tasks.experiments.experiments_stop.apply_async') as stop_mock:
        group.delete()

    # Only one experiment was stopped.
    assert stop_mock.call_count == 1
    assert Experiment.objects.filter(experiment_group=group).count() == 0
def test_non_independent_experiment_creation_doesnt_trigger_start(self, create_build_job):
    """Group-owned experiments don't get the independent start task."""
    build = BuildJobFactory()
    BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED, job=build)
    create_build_job.return_value = build, True, True

    with patch('hpsearch.tasks.hp_create.apply_async') as mock_fct:
        group = ExperimentGroupFactory()
    assert mock_fct.call_count == 1

    with patch('scheduler.tasks.experiments.experiments_start.apply_async') as start_mock:
        with patch.object(Experiment, 'set_status') as status_mock:
            ExperimentFactory(experiment_group=group)

    assert start_mock.call_count == 0
    assert status_mock.call_count == 1
def test_should_stop_early(self, _):
    """Early stopping fires only once a metric crosses the configured value."""
    # A group without an early-stopping policy never stops early.
    group = ExperimentGroupFactory()
    assert group.should_stop_early() is False

    # A group with an early-stopping policy on `precision`.
    group = ExperimentGroupFactory(
        content=None,
        hptuning={
            'concurrency': 2,
            'random_search': {'n_experiments': 10},
            'early_stopping': [
                {'metric': 'precision', 'value': 0.9, 'optimization': 'maximize'}
            ],
            'matrix': {'lr': {'values': [1, 2, 3]}}
        })
    assert group.should_stop_early() is False

    # A metric below the threshold does not trigger it.
    experiments = [ExperimentFactory(experiment_group=group) for _i in range(2)]
    ExperimentMetric.objects.create(experiment=experiments[0],
                                    values={'precision': 0.8})
    assert group.should_stop_early() is False

    # Crossing the 0.9 threshold triggers early stopping.
    ExperimentMetric.objects.create(experiment=experiments[0],
                                    values={'precision': 0.91})
    assert group.should_stop_early() is True
def test_delete_remove_paths(self, delete_outputs_path, delete_logs_path):
    """Project deletion removes the repo plus outputs/logs for all children."""
    project = ProjectFactory()
    for _ in range(2):
        ExperimentGroupFactory(project=project)
        ExperimentFactory(project=project)
    assert ExperimentGroup.objects.count() == 2
    assert Experiment.objects.count() == 2

    with patch('libs.paths.projects.delete_path') as delete_path_project_mock_stop:
        project.delete()

    # 1 repo
    assert delete_path_project_mock_stop.call_count == 1
    # 1 project + 2 * groups + 2 experiments
    assert delete_outputs_path.call_count == 5
    assert delete_logs_path.call_count == 5
def test_iteration(self, _):
    """`iteration` / `iteration_data` track the group's latest iteration row."""
    group = ExperimentGroupFactory()
    assert group.iteration is None
    assert group.iteration_data is None

    # Add an iteration.
    iteration = ExperimentGroupIteration.objects.create(
        experiment_group=group, data={'dummy': 10})
    assert group.iteration == iteration
    assert group.iteration_data == {'dummy': 10}

    # Mutating and saving the iteration data is visible through the group.
    iteration.data['foo'] = 'bar'
    iteration.save()
    assert group.iteration.data == {'dummy': 10, 'foo': 'bar'}
def test_delete_remove_paths(self):
    """Project deletion removes repo/project paths plus group and experiment paths.

    Bug fix: the original asserted the bare ``delete_path_group_mock_stop.call_count``
    attribute, which is truthy as soon as the mock is called at least once and
    therefore never checked the expected count. The trailing comment
    (``2 * 2 * groups``) shows ``== 4`` was intended, matching the experiment assert.
    """
    project = ProjectFactory()
    for _ in range(2):
        ExperimentGroupFactory(project=project)
        ExperimentFactory(project=project)
    assert ExperimentGroup.objects.count() == 2
    assert Experiment.objects.count() == 2

    with patch('libs.paths.projects.delete_path') as delete_path_project_mock_stop:
        with patch('libs.paths.experiment_groups.delete_path') as delete_path_group_mock_stop:
            with patch('libs.paths.experiments.delete_path') as delete_path_xp_mock_stop:
                project.delete()

    # 2 * project + 1 repo
    assert delete_path_project_mock_stop.call_count == 3
    # 2 * 2 * groups
    assert delete_path_group_mock_stop.call_count == 4
    # 2 * 2 * experiments
    assert delete_path_xp_mock_stop.call_count == 4
def test_archive(self):
    """Archiving hides children from default managers; `all_*` managers still see them."""
    project = ProjectFactory()
    ExperimentGroupFactory(project=project)
    ExperimentFactory(project=project)
    JobFactory(project=project)
    BuildJobFactory(project=project)
    NotebookJobFactory(project=project)
    TensorboardJobFactory(project=project)

    def assert_counts(visible):
        # Default managers see `visible` rows; `all_*` managers always see 1.
        assert project.experiments.count() == visible
        assert project.experiment_groups.count() == visible
        assert project.jobs.count() == visible
        assert project.build_jobs.count() == visible
        assert project.notebook_jobs.count() == visible
        assert project.tensorboard_jobs.count() == visible
        assert project.all_experiments.count() == 1
        assert project.all_experiment_groups.count() == 1
        assert project.all_notebook_jobs.count() == 1
        assert project.all_tensorboard_jobs.count() == 1

    assert project.deleted is False
    assert_counts(1)

    project.archive()
    assert project.deleted is True
    assert_counts(0)

    project.unarchive()
    assert project.deleted is False
    assert_counts(1)
def test_get_ordered_experiments_by_metric(self):
    """Results come back ordered by the requested metric's optimization."""
    group = ExperimentGroupFactory()
    # No experiment ids -> empty result.
    assert len(  # pylint:disable=len-as-condition
        group.get_ordered_experiments_by_metric(
            experiment_ids=[],
            metric='precision',
            optimization='maximize'
        )) == 0

    created = []
    ids = []
    for _ in range(5):
        xp = ExperimentFactory(experiment_group=group)
        created.append(xp)
        ids.append(xp.id)
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'precision': random.random()})
    # The first 3 experiments get a `loss` metric as their latest metric.
    for xp in created[:3]:
        ExperimentMetric.objects.create(experiment=xp,
                                        values={'loss': random.random()})

    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='precision',
        optimization='maximize'
    )
    assert len(results) == 5
    values = [r.precision for r in results if r.precision is not None]
    assert len(values) == 2
    assert sorted(values, reverse=True) == values

    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='loss',
        optimization='minimize'
    )
    assert len(results) == 5
    values = [r.loss for r in results if r.loss is not None]
    assert len(values) == 3
    assert sorted(values) == values

    # A metric nobody reported: all values are None.
    results = group.get_ordered_experiments_by_metric(
        experiment_ids=ids,
        metric='accuracy',
        optimization='maximize'
    )
    assert len(results) == 5
    assert len(  # pylint:disable=len-as-condition
        [r for r in results if r.accuracy is not None]) == 0
def test_experiment_group_deletion_deletes_old_data(self, delete_path, _):
    """Group creation and deletion each remove outputs + logs paths."""
    group = ExperimentGroupFactory()
    # Creation cleans stale paths: outputs + logs.
    assert delete_path.call_count == 2

    group.delete()
    # Deletion removes them again: outputs + logs.
    assert delete_path.call_count == 2 + 2
def test_managers(self, _):
    """search_manager/iteration_manager follow the group's hptuning config."""
    group = ExperimentGroupFactory(content=None, hptuning=None)
    assert group.search_manager is None

    def save_and_reload(hptuning):
        group.hptuning = hptuning
        group.save()
        return ExperimentGroup.objects.get(id=group.id)

    # Grid search: search manager but no iteration manager.
    group = save_and_reload({
        'concurrency': 2,
        'grid_search': {'n_experiments': 10},
        'matrix': {'lr': {'values': [1, 2, 3]}}
    })
    assert isinstance(group.search_manager, GridSearchManager)
    assert group.iteration_manager is None

    # Random search: search manager but no iteration manager.
    group = save_and_reload({
        'concurrency': 2,
        'random_search': {'n_experiments': 10},
        'matrix': {'lr': {'values': [1, 2, 3]}}
    })
    assert isinstance(group.search_manager, RandomSearchManager)
    assert group.iteration_manager is None

    # Hyperband: both managers.
    group = save_and_reload({
        'concurrency': 2,
        'hyperband': {
            'max_iter': 10,
            'eta': 3,
            'resource': {'name': 'steps', 'type': 'int'},
            'resume': False,
            'metric': {'name': 'loss', 'optimization': 'minimize'}
        },
        'matrix': {'lr': {'values': [1, 2, 3]}}
    })
    assert isinstance(group.search_manager, HyperbandSearchManager)
    assert isinstance(group.iteration_manager, HyperbandIterationManager)

    # BO: both managers.
    group = save_and_reload({
        'concurrency': 2,
        'bo': {
            'n_iterations': 4,
            'n_initial_trials': 4,
            'metric': {
                'name': 'loss',
                'optimization': 'minimize'
            },
            'utility_function': {
                'acquisition_function': 'ei',
                'eps': 1.2,
                'gaussian_process': {
                    'kernel': 'matern',
                    'length_scale': 1.0,
                    'nu': 1.9,
                    'n_restarts_optimizer': 0
                }
            }
        },
        'matrix': {'lr': {'values': [1, 2, 3]}}
    })
    assert isinstance(group.search_manager, BOSearchManager)
    assert isinstance(group.iteration_manager, BOIterationManager)