def test_project_deletion_cascade_to_notebook_job(self):
    """Deleting a project must cascade-delete its notebook jobs."""
    assert NotebookJob.objects.count() == 0
    project = ProjectFactory()
    NotebookJobFactory(project=project)
    assert NotebookJob.objects.count() == 1
    # The delete cascade notifies the schedulers; stub both calls out.
    with patch('scheduler.tensorboard_scheduler.stop_tensorboard') as _:  # noqa
        with patch('scheduler.notebook_scheduler.stop_notebook') as _:  # noqa
            project.delete()
            assert NotebookJob.objects.count() == 0
def setUp(self):
    """Seed one job plus a status per lifecycle choice for the list endpoint."""
    super().setUp()
    # Mute status side effects and the async docker build while seeding.
    with patch.object(Job, 'set_status') as _:
        with patch('scheduler.tasks.jobs.jobs_build.apply_async') as _:  # noqa
            project = ProjectFactory(user=self.auth_client.user)
            self.job = JobFactory(project=project)
            self.url = '/{}/{}/{}/jobs/{}/statuses/'.format(
                API_V1, project.user.username, project.name, self.job.id)
            self.objects = [
                self.factory_class(job=self.job,
                                   status=JobLifeCycle.CHOICES[i][0])
                for i in range(self.num_objects)
            ]
            self.queryset = self.model_class.objects.all()
def test_experiment_group_with_spec_create_hptuning(self, _):
    """A group created from a spec derives all its hptuning settings."""
    project = ProjectFactory()
    experiment_group = ExperimentGroup.objects.create(
        user=project.user,
        project=project,
        content=experiment_group_spec_content_early_stopping)
    # The stored hptuning dict must mirror the parsed specification.
    assert isinstance(experiment_group.specification, GroupSpecification)
    assert experiment_group.hptuning == (
        experiment_group.specification.hptuning.to_dict())
    assert isinstance(experiment_group.hptuning_config, HPTuningConfig)
    assert experiment_group.concurrency == 2
    assert experiment_group.search_algorithm == SearchAlgorithms.RANDOM
    assert len(experiment_group.early_stopping) == 2
    assert experiment_group.group_type == GroupTypes.STUDY
def test_experiment_group_without_spec_and_hptuning(self, _):
    """A group created without params or spec has empty tuning fields."""
    project = ProjectFactory()
    experiment_group = ExperimentGroup.objects.create(
        user=project.user, project=project)
    # Every derived attribute stays unset; the group is a plain selection.
    assert experiment_group.specification is None
    assert experiment_group.hptuning is None
    assert experiment_group.hptuning_config is None
    assert experiment_group.concurrency is None
    assert experiment_group.search_algorithm is None
    assert experiment_group.early_stopping is None
    assert experiment_group.code_reference is None
    assert experiment_group.group_type == GroupTypes.SELECTION
def setUp(self):
    """Create one experiment-job status object for the detail endpoint."""
    super().setUp()
    # Block the experiment start task and status side effects while seeding.
    with patch('experiments.tasks.start_experiment.delay') as _:
        with patch.object(ExperimentJob, 'set_status') as _:
            project = ProjectFactory(user=self.auth_client.user)
            experiment = ExperimentFactory(project=project)
            self.experiment_job = ExperimentJobFactory(experiment=experiment)
            self.object = self.factory_class(job=self.experiment_job)
            self.url = '/{}/{}/{}/experiments/{}/jobs/{}/statuses/{}'.format(
                API_V1,
                project.user.username,
                project.name,
                experiment.sequence,
                self.experiment_job.sequence,
                self.object.uuid.hex)
            self.queryset = self.model_class.objects.filter(
                job=self.experiment_job)
def setUp(self):
    """Create a group with a running tensorboard for the stop endpoint."""
    super().setUp()
    self.project = ProjectFactory(user=self.auth_client.user)
    # Keep the async group-creation task from running while seeding.
    with patch('scheduler.tasks.experiment_groups.'
               'experiments_group_create.apply_async') as _:  # noqa
        self.object = self.factory_class(user=self.auth_client.user,
                                         project=self.project)
        tensorboard = TensorboardJobFactory(project=self.project,
                                            experiment_group=self.object)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        self.url = '/{}/{}/{}/groups/{}/tensorboard/stop'.format(
            API_V1,
            self.project.user.username,
            self.project.name,
            self.object.id)
        self.queryset = TensorboardJob.objects.all()
def test_project_requests_tensorboard_url(self):
    """Requesting a running group tensorboard resolves its service URL once."""
    project = ProjectFactory(user=self.auth_client.user)
    # Keep the async group-creation task from running while seeding.
    with patch('scheduler.tasks.experiment_groups.'
               'experiments_group_create.apply_async') as _:  # noqa
        group = ExperimentGroupFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project,
                                            experiment_group=group)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        with patch('scheduler.tensorboard_scheduler.get_tensorboard_url'
                   ) as mock_fct:
            response = self.auth_client.get(self._get_url(project, group))
        assert mock_fct.call_count == 1
        assert response.status_code == 200
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The view answers with an nginx redirect header to the tensorboard URL."""
    project = ProjectFactory(user=self.auth_client.user)
    tensorboard = TensorboardJobFactory(project=project)
    tensorboard.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = DEPLOYMENT_NAME.format(
        job_uuid=tensorboard.uuid.hex, name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    # The spawner is mocked; make it hand back our fake service URL.
    spawner_mock.return_value.get_tensorboard_url.return_value = service_url
    response = self.auth_client.get(self._get_url(project))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/'.format(service_url)
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def setUp(self):
    """Create a group with one running experiment for the stop endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        self.object = self.factory_class(project=project)
        assert mock_fct.call_count == 2
        # Attach a running experiment so the stop endpoint has work to do.
        experiment = ExperimentFactory(experiment_group=self.object)
        ExperimentStatusFactory(experiment=experiment,
                                status=ExperimentLifeCycle.RUNNING)
        self.url = '/{}/{}/{}/groups/{}/stop'.format(
            API_V1, project.user.username, project.name, self.object.id)
def test_status_update_results_in_new_updated_at_datetime_tensorboard(
        self):
    """Each new status bumps the tensorboard job's updated_at timestamp."""
    project = ProjectFactory()
    job = TensorboardJobFactory(project=project)
    last_updated = job.updated_at
    # A status row created directly must refresh the job timestamp.
    TensorboardJobStatus.objects.create(job=job,
                                        status=JobLifeCycle.BUILDING)
    job.refresh_from_db()
    assert last_updated < job.updated_at
    last_updated = job.updated_at
    # The same holds when going through the model's set_status API.
    job.set_status(JobLifeCycle.RUNNING)
    job.refresh_from_db()
    assert last_updated < job.updated_at
def setUp(self):
    """Create a group with two experiments for the detail endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.object = self.factory_class(project=project)
    self.url = '/{}/{}/{}/groups/{}/'.format(
        API_V1, project.user.username, project.name, self.object.id)
    self.queryset = self.model_class.objects.all()
    # Attach a couple of experiments to the group.
    for _ in range(2):
        ExperimentFactory(experiment_group=self.object)
    self.object_query = queries.groups_details.get(id=self.object.id)
def setUp(self):
    """Seed one experiment with a series of metrics for the list endpoint."""
    super().setUp()
    # Block status side effects and the start task while seeding.
    with patch.object(Experiment, 'set_status') as _:
        with patch('experiments.tasks.start_experiment.delay') as _:
            project = ProjectFactory(user=self.auth_client.user)
            self.experiment = ExperimentFactory(project=project)
            self.url = '/{}/{}/{}/experiments/{}/metrics/'.format(
                API_V1, project.user.username, project.name,
                self.experiment.sequence)
            self.objects = [
                self.factory_class(experiment=self.experiment,
                                   values={'accuracy': i / 10})
                for i in range(self.num_objects)
            ]
            self.queryset = self.model_class.objects.all()
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The view redirects to the notebook service URL with its token."""
    project = ProjectFactory(user=self.auth_client.user, has_notebook=True)
    deployment_name = DEPLOYMENT_NAME.format(
        project_uuid=project.uuid.hex, name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    # The spawner is mocked; make it hand back our fake service URL.
    spawner_mock.return_value.get_notebook_url.return_value = service_url
    response = self.auth_client.get(self._get_url(project))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}?token={}'.format(
        service_url, 'tree', scheduler.get_notebook_token(project))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def setUp(self):
    """Seed one experiment plus a status per lifecycle choice."""
    super().setUp()
    # Block status side effects and the start task while seeding.
    with patch.object(Experiment, 'set_status') as _:
        with patch('experiments.tasks.start_experiment.delay') as _:
            project = ProjectFactory(user=self.auth_client.user)
            self.experiment = ExperimentFactory(project=project)
            self.url = '/{}/{}/{}/experiments/{}/statuses/'.format(
                API_V1, project.user.username, project.name,
                self.experiment.sequence)
            self.objects = [
                self.factory_class(experiment=self.experiment,
                                   status=ExperimentLifeCycle.CHOICES[i][0])
                for i in range(self.num_objects)
            ]
            self.queryset = self.model_class.objects.all()
def setUp(self):
    """Seed a group plus one status per lifecycle choice, ordered by creation."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    # Keep group status side effects out of the seeded data.
    with patch.object(ExperimentGroup, 'set_status') as _:  # noqa
        self.experiment_group = ExperimentGroupFactory(project=project)
        self.url = '/{}/{}/{}/groups/{}/statuses/'.format(
            API_V1, project.user.username, project.name,
            self.experiment_group.id)
        self.objects = [
            self.factory_class(experiment_group=self.experiment_group,
                               status=ExperimentGroupLifeCycle.CHOICES[i][0])
            for i in range(self.num_objects)
        ]
        self.queryset = self.model_class.objects.filter(
            experiment_group=self.experiment_group).order_by('created_at')
def setUp(self):
    """Create a group with extra experiments for the detail endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    with patch('runner.hp_search.grid.hp_grid_search_start.apply_async') as mock_fct:
        self.object = self.factory_class(project=project)
        assert mock_fct.call_count == 1
        self.url = '/{}/{}/{}/groups/{}/'.format(
            API_V1, project.user.username, project.name,
            self.object.sequence)
        self.queryset = self.model_class.objects.all()
        # Add 2 more experiments
        for _ in range(2):
            ExperimentFactory(experiment_group=self.object)
def setUp(self):
    """Create a job and populate its outputs path for the files endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/outputs/files'.format(
        API_V1, project.user.username, project.name, job.id)
    outputs_path = get_job_outputs_path(
        persistence_outputs=job.persistence_outputs,
        job_name=job.unique_name)
    # Materialize the directory before seeding files under it.
    create_job_outputs_path(persistence_outputs=job.persistence_outputs,
                            job_name=job.unique_name)
    self.create_paths(path=outputs_path, url=self.url)
def test_copy_repo_path_to_tmp_dir(self):
    """Copying a repo path to a tmp dir carries the .git folder along."""
    project = ProjectFactory()
    repo_path = '{}/{}/{}/{}'.format(settings.REPOS_ROOT,
                                     project.user.username,
                                     project.name,
                                     project.name)
    # The path only materializes once the repo object is created.
    self.assertFalse(os.path.exists(repo_path))
    repo = RepoFactory(project=project)
    assert repo.path == repo_path
    self.assertTrue(os.path.exists(repo_path))
    git_file_path = '{}/.git'.format(repo_path)
    self.assertTrue(os.path.exists(git_file_path))
    copy_to_tmp_dir(repo_path, 'new')
    git_file_path = '{}/.git'.format(get_tmp_path('new'))
    self.assertTrue(os.path.exists(git_file_path))
def setUp(self):
    """Write fake log lines to an experiment's log file for the logs endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    experiment = ExperimentFactory(project=project)
    self.url = '/{}/{}/{}/experiments/{}/logs'.format(
        API_V1, project.user.username, project.name, experiment.sequence)
    # NOTE(review): unlike the job-logs setUp, no create-path helper is
    # called here — presumably the path already exists; verify if flaky.
    log_path = get_experiment_logs_path(experiment.unique_name)
    fake = Faker()
    self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
    with open(log_path, 'w') as log_file:
        for line in self.logs:
            log_file.write(line)
            log_file.write('\n')
def test_project_ci_code_ref(self):
    """A project CI persists whichever code reference is assigned to it."""
    project = ProjectFactory()
    repo = ExternalRepoFactory(
        project=project,
        git_url='https://github.com/polyaxon/empty.git')
    ci = CIFactory(project=project)
    assert ci.code_reference is None
    # Assign two successive references; each must round-trip through the DB.
    for _ in range(2):
        code_ref = CodeReferenceFactory(external_repo=repo)
        ci.code_reference = code_ref
        ci.save()
        ci.refresh_from_db()
        assert ci.code_reference == code_ref
def setUp(self):
    """Record one activity log entry each for a project, experiment and job."""
    super().setUp()
    self.user = UserFactory()
    activitylogs.validate()
    activitylogs.setup()
    self.project = ProjectFactory()
    activitylogs.record(event_type=PROJECT_DELETED_TRIGGERED,
                        instance=self.project,
                        actor_id=self.user.id)
    self.experiment = ExperimentFactory()
    activitylogs.record(event_type=EXPERIMENT_DELETED_TRIGGERED,
                        instance=self.experiment,
                        actor_id=self.user.id)
    self.job = JobFactory()
    activitylogs.record(event_type=JOB_VIEWED,
                        instance=self.job,
                        actor_id=self.user.id)
def setUp(self):
    """Seed metrics for a study group and a selection group plus noise."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.group = ExperimentGroupFactory(project=project)
    self.selection = ExperimentGroupFactory(project=project, content=None)
    self.experiment1 = ExperimentFactory(project=project,
                                         experiment_group=self.group)
    self.experiment2 = ExperimentFactory(project=project,
                                         experiment_group=self.group)
    self.experiment3 = ExperimentFactory(project=project)
    self.selection.selection_experiments.set([self.experiment3])
    self.group_url = '/{}/{}/{}/groups/{}/metrics/'.format(
        API_V1, project.user.username, project.name, self.group.id)
    self.url = self.group_url
    self.selection_url = '/{}/{}/{}/groups/{}/metrics/'.format(
        API_V1, project.user.username, project.name, self.selection.id)
    self.objects1 = [
        self.factory_class(experiment=self.experiment1,
                           values={'accuracy': i / 10})
        for i in range(self.num_objects)
    ]
    self.objects2 = [
        self.factory_class(experiment=self.experiment2,
                           values={'accuracy': i / 10})
        for i in range(self.num_objects)
    ]
    self.objects3 = [
        self.factory_class(experiment=self.experiment3,
                           values={'accuracy': i / 10})
        for i in range(self.num_objects)
    ]
    # Add a random experiment and metric that belongs to neither group.
    self.experiment4 = ExperimentFactory(project=project)
    self.factory_class(experiment=self.experiment4,
                       values={'accuracy': 0.9})
    self.group_queryset = self.model_class.objects.filter(
        experiment__experiment_group=self.group).order_by('created_at')
    self.selection_queryset = self.model_class.objects.filter(
        experiment__selections=self.selection).order_by('created_at')
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The view redirects to the running notebook URL with its token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = JOB_NAME.format(job_uuid=notebook.uuid.hex,
                                      name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    # The spawner is mocked; make it hand back our fake service URL.
    spawner_mock.return_value.get_notebook_url.return_value = service_url
    response = self.auth_client.get(self._get_url(project))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}?token={}'.format(
        service_url, 'tree', notebook_scheduler.get_notebook_token(notebook))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def setUp(self):
    """Write fake log lines to a job's log file for the logs endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/logs'.format(API_V1,
                                               project.user.username,
                                               project.name,
                                               job.id)
    log_path = get_job_logs_path(job.unique_name)
    # Materialize the directory before writing into it.
    create_job_logs_path(job_name=job.unique_name)
    fake = Faker()
    self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
    with open(log_path, 'w') as log_file:
        for line in self.logs:
            log_file.write(line)
            log_file.write('\n')
def setUp(self):
    """Seed an experiment job plus a status per lifecycle choice."""
    super().setUp()
    # Block the build task and status side effects while seeding.
    with patch('scheduler.tasks.experiments.experiments_build.apply_async'
               ) as _:  # noqa
        with patch.object(ExperimentJob, 'set_status') as _:  # noqa
            project = ProjectFactory(user=self.auth_client.user)
            experiment = ExperimentFactory(project=project)
            self.experiment_job = ExperimentJobFactory(experiment=experiment)
            self.url = '/{}/{}/{}/experiments/{}/jobs/{}/statuses/'.format(
                API_V1, project.user.username, project.name,
                experiment.sequence, self.experiment_job.sequence)
            self.objects = [
                self.factory_class(job=self.experiment_job,
                                   status=JobLifeCycle.CHOICES[i][0])
                for i in range(self.num_objects)
            ]
            self.queryset = self.model_class.objects.filter(
                job=self.experiment_job)
def setUp(self):
    """Create a group with a mocked successful build and a running experiment."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    with patch('hpsearch.tasks.grid.hp_grid_search_start.apply_async') as mock_fct:
        with patch('scheduler.dockerizer_scheduler.create_build_job') as mock_start:
            # Hand the dockerizer a pre-succeeded build job.
            build = BuildJobFactory()
            BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED,
                                          job=build)
            mock_start.return_value = build, True, True
            self.object = self.factory_class(project=project)
            assert mock_fct.call_count == 1
            # Attach a running experiment so the stop endpoint has work to do.
            experiment = ExperimentFactory(experiment_group=self.object)
            ExperimentStatusFactory(experiment=experiment,
                                    status=ExperimentLifeCycle.RUNNING)
            self.url = '/{}/{}/{}/groups/{}/stop'.format(
                API_V1, project.user.username, project.name, self.object.id)
def test_get_with_environment(self):
    """A job created from a spec with an environment serializes back intact."""
    # NOTE(review): the YAML below carries a duplicated `gpu` mapping as in
    # the original source (last occurrence wins at parse time) — confirm
    # whether the duplicate was intentional.
    spec_content = """---
    version: 1

    kind: job

    environment:
      node_selector:
        foo: bar
      tolerations:
        - key: "key"
          operator: "Equal"
          value: "value"
          effect: "NoSchedule"
      affinity:
        foo: bar
      resources:
        gpu:
          requests: 1
          limits: 1
        gpu:
          requests: 1
          limits: 1

    build:
      image: my_image
      run:
        cmd: do_something
    """
    spec_parsed_content = JobSpecification.read(spec_content)
    project = ProjectFactory(user=self.auth_client.user)
    exp = self.factory_class(project=project,
                             config=spec_parsed_content.parsed_data)
    url = '/{}/{}/{}/jobs/{}/'.format(API_V1,
                                      project.user.username,
                                      project.name,
                                      exp.id)
    resp = self.auth_client.get(url)
    assert resp.status_code == status.HTTP_200_OK
    exp.refresh_from_db()
    assert resp.data == self.serializer_class(exp).data
def test_experiment_group_with_params(self):
    """A group created from raw params exposes a parsed params config."""
    project = ProjectFactory()
    params = {
        'concurrency': 2,
        'random_search': {'n_experiments': 10},
        'matrix': {'lr': {'values': [1, 2, 3]}},
    }
    experiment_group = ExperimentGroup.objects.create(
        user=project.user, project=project, params=params)
    # No spec is involved; everything derives from the params dict.
    assert experiment_group.specification is None
    assert experiment_group.params == params
    assert isinstance(experiment_group.params_config, SettingsConfig)
    assert experiment_group.concurrency == 2
    assert experiment_group.search_algorithm == SearchAlgorithms.RANDOM
    assert experiment_group.params_config.random_search.n_experiments == 10
    assert isinstance(experiment_group.params_config.matrix['lr'],
                      MatrixConfig)
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The group tensorboard view redirects to the service URL via nginx."""
    project = ProjectFactory(user=self.auth_client.user)
    # Keep the async group-creation task from running while seeding.
    with patch('scheduler.tasks.experiment_groups.'
               'experiments_group_create.apply_async') as _:  # noqa
        group = ExperimentGroupFactory(project=project)
        tensorboard = TensorboardJobFactory(project=project,
                                            experiment_group=group)
        tensorboard.set_status(status=JobLifeCycle.RUNNING)
        deployment_name = JOB_NAME.format(job_uuid=tensorboard.uuid.hex,
                                          name=self.plugin_app)
        service_url = self._get_service_url(deployment_name=deployment_name)
        # The spawner is mocked; make it hand back our fake service URL.
        spawner_mock.return_value.get_tensorboard_url.return_value = service_url
        response = self.auth_client.get(self._get_url(project, group))
        assert response.status_code == 200
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        expected = '{}/'.format(service_url)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER],
                         expected)
def setUp(self):
    """Create a build job and the three URL flavors of its logs endpoint."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.logs = []
    self.job = BuildJobFactory(project=project)
    self.url = '/{}/{}/{}/builds/{}/logs'.format(
        API_V1, project.user.username, project.name, self.job.id)
    self.stream_url = '/{}/{}/{}/builds/{}/logs/stream'.format(
        API_V1, project.user.username, project.name, self.job.id)
    # Same path shape as the REST URL, but over the websocket prefix.
    self.ws_url = '/{}/{}/{}/builds/{}/logs'.format(
        WS_V1, project.user.username, project.name, self.job.id)
def set_objects(self):
    """Record named-actor activity logs for a project, experiment and job."""
    self.user = self.auth_client.user
    self.project = ProjectFactory()
    activitylogs.record(event_type=PROJECT_DELETED_TRIGGERED,
                        instance=self.project,
                        actor_id=self.user.id,
                        actor_name=self.user.username)
    self.experiment = ExperimentFactory()
    activitylogs.record(event_type=EXPERIMENT_DELETED_TRIGGERED,
                        instance=self.experiment,
                        actor_id=self.user.id,
                        actor_name=self.user.username)
    # The job gets two events: its creation and a later view.
    self.job = JobFactory()
    activitylogs.record(event_type=JOB_CREATED,
                        instance=self.job,
                        actor_id=self.user.id,
                        actor_name=self.user.username)
    activitylogs.record(event_type=JOB_VIEWED,
                        instance=self.job,
                        actor_id=self.user.id,
                        actor_name=self.user.username)