def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
    """The notebook proxy redirect preserves extra paths and appends the auth token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = DEPLOYMENT_NAME.format(project_uuid=project.uuid.hex,
                                             name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    # Requesting `tree?` redirects to the service's tree path with the token.
    response = self.auth_client.get(self._get_url(project, 'tree?'))
    assert response.status_code == 200
    self.assertIn(ProtectedView.NGINX_REDIRECT_HEADER, response)
    expected = '{}/{}?token={}'.format(
        service_url,
        'tree',
        notebook_scheduler.get_notebook_token(project))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)

    # Static asset requests keep their query string; the token is appended with `&`.
    response = self.auth_client.get(
        self._get_url(project, 'static/components/something?v=4.7.0'))
    assert response.status_code == 200
    self.assertIn(ProtectedView.NGINX_REDIRECT_HEADER, response)
    expected = '{}/{}&token={}'.format(
        service_url,
        'static/components/something?v=4.7.0',
        notebook_scheduler.get_notebook_token(project))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def test_archive_triggers_stopping_of_plugin_jobs(self):
    """Archiving a project soft-deletes and schedules a stop for its plugin jobs."""
    assert self.queryset.count() == 1
    notebook_job = NotebookJobFactory(project=self.object)
    notebook_job.set_status(JobLifeCycle.SCHEDULED)
    tensorboard_job = TensorboardJobFactory(project=self.object)
    tensorboard_job.set_status(JobLifeCycle.SCHEDULED)
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1

    with patch('scheduler.tasks.notebooks.'
               'projects_notebook_stop.apply_async') as mock_stop_notebook, \
            patch('scheduler.tasks.tensorboards.'
                  'tensorboards_stop.apply_async') as mock_stop_tensorboard:
        resp = self.auth_client.post(self.url + 'archive/')

    # One stop task per plugin job type.
    assert mock_stop_notebook.call_count == 1
    assert mock_stop_tensorboard.call_count == 1
    assert resp.status_code == status.HTTP_200_OK

    # Default managers hide archived rows; `all` still sees them.
    assert self.queryset.count() == 0
    assert TensorboardJob.objects.count() == 0
    assert NotebookJob.objects.count() == 0
    assert TensorboardJob.all.count() == 1
    assert NotebookJob.all.count() == 1
def test_delete_triggers_stopping_of_plugin_jobs(self):
    """Deleting a project stops its plugin jobs and cascades to its entities."""
    assert self.queryset.count() == 1
    notebook_job = NotebookJobFactory(project=self.object)
    notebook_job.set_status(JobLifeCycle.SCHEDULED)
    tensorboard_job = TensorboardJobFactory(project=self.object)
    tensorboard_job.set_status(JobLifeCycle.SCHEDULED)
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1

    with patch('scheduler.tasks.notebooks.'
               'projects_notebook_stop.apply_async') as mock_stop_notebook, \
            patch('scheduler.tasks.tensorboards.'
                  'tensorboards_stop.apply_async') as mock_stop_tensorboard:
        resp = self.auth_client.delete(self.url)

    assert mock_stop_notebook.call_count == 1
    assert mock_stop_tensorboard.call_count == 1
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    assert self.queryset.count() == 0
    assert ExperimentGroup.objects.count() == 0
    assert Experiment.objects.count() == 0

    # Deleting another user's public or private project is rejected.
    resp = self.auth_client.delete(self.url_other)
    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                status.HTTP_403_FORBIDDEN)
    resp = self.auth_client.delete(self.url_private)
    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                status.HTTP_403_FORBIDDEN)
def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
    """The notebook proxy redirect preserves extra paths and appends the auth token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = JOB_NAME.format(job_uuid=notebook.uuid.hex,
                                      name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    # Requesting `tree?` redirects to the service's tree path with the token.
    response = self.auth_client.get(self._get_url(project, 'tree?'))
    assert response.status_code == 200
    self.assertIn(ProtectedView.NGINX_REDIRECT_HEADER, response)
    expected = '{}/{}?token={}'.format(
        service_url,
        'tree',
        notebook_scheduler.get_notebook_token(notebook))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)

    # Static asset requests keep their query string; the token is appended with `&`.
    response = self.auth_client.get(
        self._get_url(project, 'static/components/something?v=4.7.0'))
    assert response.status_code == 200
    self.assertIn(ProtectedView.NGINX_REDIRECT_HEADER, response)
    expected = '{}/{}&token={}'.format(
        service_url,
        'static/components/something?v=4.7.0',
        notebook_scheduler.get_notebook_token(notebook))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def test_cannot_upload_if_project_has_a_running_notebook_with_code_mount(self):
    """Repo upload is forbidden while a notebook with a code mount is running."""
    user = self.auth_client.user
    repo_name = self.project.name

    # Give the project a running notebook.
    notebook = NotebookJobFactory(project=self.project)
    notebook.set_status(status=JobLifeCycle.RUNNING)

    assert self.model_class.objects.count() == 0
    uploaded_file = self.get_upload_file()
    with patch('api.repos.views.handle_new_files') as mock_task:
        response = self.auth_client.put(self.url,
                                        data={'repo': uploaded_file},
                                        content_type=MULTIPART_CONTENT)
    assert response.status_code == status.HTTP_403_FORBIDDEN

    # Nothing was written to the upload mount and no task was scheduled.
    file_path = '{}/{}/{}.tar.gz'.format(
        conf.get('UPLOAD_MOUNT_PATH'), user.username, repo_name)
    self.assertFalse(os.path.exists(file_path))
    assert mock_task.call_count == 0

    # No repo record or repo directory was created.
    assert self.model_class.objects.count() == 0
    repo_path = '{}/{}/{}/{}'.format(
        conf.get('REPOS_MOUNT_PATH'), user.username, repo_name, repo_name)
    self.assertFalse(os.path.exists(repo_path))
def setUp(self):
    """Create a project whose notebook, tensorboard, job and experiment all
    depend on the same build job."""
    super().setUp()
    self.project = ProjectFactory()
    self.build_job = BuildJobFactory(project=self.project)
    self.notebook = NotebookJobFactory(project=self.project,
                                       build_job=self.build_job)
    self.tensorboard = TensorboardJobFactory(project=self.project,
                                             build_job=self.build_job)
    self.job = JobFactory(project=self.project, build_job=self.build_job)
    self.experiment = ExperimentFactory(project=self.project,
                                        build_job=self.build_job)
def setUp(self):
    """Create a project with a running notebook and a repo, plus the stop URL.

    Fix: the local was previously named ``tensorboard`` although it holds a
    ``NotebookJobFactory`` result — renamed to ``notebook`` to match reality.
    """
    super().setUp()
    self.object = self.factory_class(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=self.object)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    RepoFactory(project=self.object)
    self.url = '/{}/{}/{}/notebook/stop'.format(API_V1,
                                                self.object.user.username,
                                                self.object.name)
    self.queryset = self.model_class.objects.all()
def test_project_requests_notebook_url(self):
    """Fetching the notebook URL resolves both the service URL and the token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    with patch('scheduler.notebook_scheduler.get_notebook_url') as mock_url_fct, \
            patch('scheduler.notebook_scheduler.get_notebook_token') as mock_token_fct:
        response = self.auth_client.get(self._get_url(project))
    assert mock_url_fct.call_count == 1
    assert mock_token_fct.call_count == 1
    assert response.status_code == 200
def test_project_requests_notebook_url(self):
    """Fetching the notebook URL resolves both the service URL and the token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    with patch('scheduler.notebook_scheduler.get_notebook_url') as mock_url, \
            patch('scheduler.notebook_scheduler.get_notebook_token') as mock_token:
        response = self.auth_client.get(self._get_url(project))
    assert mock_url.call_count == 1
    assert mock_token.call_count == 1
    assert response.status_code == 200
def setUp(self):
    """Create a project with a running notebook and a repo, plus the stop URL.

    Fix: the local was previously named ``tensorboard`` although it holds a
    ``NotebookJobFactory`` result — renamed to ``notebook`` to match reality.
    """
    super().setUp()
    self.object = self.factory_class(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=self.object)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    RepoFactory(project=self.object)
    self.url = '/{}/{}/{}/notebook/stop'.format(
        API_V1, self.object.user.username, self.object.name)
    self.queryset = self.model_class.objects.all()
def test_delete_notebook_jobs(self):
    """Cleanup removes only notebook jobs whose own project is archived."""
    # Notebook archived indirectly via its project.
    project1 = ProjectFactory()
    NotebookJobFactory(project=project1)
    project1.archive()

    # Notebook archived directly.
    project2 = ProjectFactory()
    job2 = NotebookJobFactory(project=project2)
    job2.archive()

    assert NotebookJob.all.count() == 2
    conf.set(CLEANING_INTERVALS_ARCHIVES, -10)
    delete_archived_notebook_jobs()
    # The job under the archived project survives the cleanup pass;
    # only the directly-archived one is deleted.
    assert NotebookJob.all.count() == 1
def test_delete_notebook_jobs(self):
    """Cleanup removes only notebook jobs whose own project is archived."""
    # Notebook archived indirectly via its project.
    project1 = ProjectFactory()
    NotebookJobFactory(project=project1)
    project1.archive()

    # Notebook archived directly.
    project2 = ProjectFactory()
    job2 = NotebookJobFactory(project=project2)
    job2.archive()

    assert NotebookJob.all.count() == 2
    CleaningIntervals.ARCHIVED = -10
    delete_archived_notebook_jobs()
    # The job under the archived project survives the cleanup pass;
    # only the directly-archived one is deleted.
    assert NotebookJob.all.count() == 1
def test_notebook_creation_triggers_status_creation(self):
    """Creating a notebook job immediately records a CREATED status."""
    assert NotebookJobStatus.objects.count() == 0
    project = ProjectFactory()
    NotebookJobFactory(project=project)
    assert NotebookJobStatus.objects.count() == 1
    assert project.notebook.last_status == JobLifeCycle.CREATED
def test_create_build_from_notebook(self):
    """A build job created from a notebook spec starts with a CREATED status."""
    assert BuildJobStatus.objects.count() == 0
    notebook = NotebookJobFactory(project=self.project)
    build_job = BuildJob.create(
        user=notebook.user,
        project=notebook.project,
        config=notebook.specification.build,
        code_reference=self.code_reference)
    assert build_job.last_status == JobLifeCycle.CREATED
    assert BuildJobStatus.objects.count() == 1
def setUp(self):
    """Create a notebook job and wire up the auditor/tracker/activitylogs backends.

    Fix: replaced the Python-2 style ``super(AuditorJobTest, self).setUp()``
    with the zero-argument ``super()`` form used everywhere else in the file.
    """
    self.job = NotebookJobFactory(project=ProjectFactory())
    auditor.validate()
    auditor.setup()
    tracker.validate()
    tracker.setup()
    activitylogs.validate()
    activitylogs.setup()
    super().setUp()
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The bare notebook URL redirects to the service's tree path with the token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = JOB_NAME.format(job_uuid=notebook.uuid.hex,
                                      name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    response = self.auth_client.get(self._get_url(project))
    assert response.status_code == 200
    self.assertIn(ProtectedView.NGINX_REDIRECT_HEADER, response)
    expected = '{}/{}?token={}'.format(
        service_url,
        'tree',
        notebook_scheduler.get_notebook_token(notebook))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def test_project_deletion_cascade_to_notebook_job(self):
    """Deleting a project also deletes its notebook job."""
    assert NotebookJob.objects.count() == 0
    project = ProjectFactory()
    NotebookJobFactory(project=project)
    assert NotebookJob.objects.count() == 1

    # Stub out the schedulers so deletion does not touch the cluster.
    with patch('scheduler.tensorboard_scheduler.stop_tensorboard') as _:  # noqa
        with patch('scheduler.notebook_scheduler.stop_notebook') as _:  # noqa
            project.delete()

    assert NotebookJob.objects.count() == 0
def test_status_update_results_in_new_updated_at_datetime_notebook(self):
    """Each new status bumps the notebook job's ``updated_at`` timestamp."""
    project = ProjectFactory()
    job = NotebookJobFactory(project=project)
    last_seen = job.updated_at

    # Status created directly through the manager.
    NotebookJobStatus.objects.create(job=job, status=JobLifeCycle.BUILDING)
    job.refresh_from_db()
    assert last_seen < job.updated_at
    last_seen = job.updated_at

    # Status created through the model helper.
    job.set_status(JobLifeCycle.RUNNING)
    job.refresh_from_db()
    assert last_seen < job.updated_at
def test_archive(self):
    """Archiving hides plugin jobs from default managers; restore brings them back.

    Fix: the initial count checks were asymmetric (``NotebookJob.objects`` vs
    ``TensorboardJob.all``) — both now use the default ``objects`` manager,
    mirroring the post-archive assertions.
    """
    project = ProjectFactory()
    notebook_job = NotebookJobFactory(project=project)
    tensorboard_job = TensorboardJobFactory(project=project)
    assert notebook_job.deleted is False
    assert tensorboard_job.deleted is False
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1

    # Archive: rows are soft-deleted, visible only through `all`.
    notebook_job.archive()
    tensorboard_job.archive()
    assert notebook_job.deleted is True
    assert tensorboard_job.deleted is True
    assert NotebookJob.objects.count() == 0
    assert TensorboardJob.objects.count() == 0
    assert NotebookJob.all.count() == 1
    assert TensorboardJob.all.count() == 1

    # Restore: rows are visible again through both managers.
    notebook_job.restore()
    tensorboard_job.restore()
    assert notebook_job.deleted is False
    assert tensorboard_job.deleted is False
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1
    assert NotebookJob.all.count() == 1
    assert TensorboardJob.all.count() == 1
def test_archive_schedules_deletion(self,
                                    xp_group_scheduler_mock,
                                    xp_scheduler_mock,
                                    job_scheduler_mock,
                                    build_scheduler_mock,
                                    notebook_scheduler_mock,
                                    tensorboard_scheduler_mock):
    """Archiving a project soft-deletes every entity and stops the running ones."""
    for _ in range(2):
        JobFactory(project=self.object)
        BuildJobFactory(project=self.object)
        TensorboardJobFactory(project=self.object)
        NotebookJobFactory(project=self.object)

    # Mark one entity of each kind as running so a stop gets scheduled.
    self.object.experiment_groups.first().set_status(ExperimentGroupLifeCycle.RUNNING)
    self.object.experiments.first().set_status(ExperimentLifeCycle.RUNNING)
    self.object.jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.build_jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.notebook_jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.tensorboard_jobs.first().set_status(JobLifeCycle.RUNNING)

    assert self.queryset.count() == 1
    assert ExperimentGroup.objects.count() == 2
    assert Experiment.objects.count() == 2
    assert Job.objects.count() == 2
    assert BuildJob.objects.count() == 2
    assert NotebookJob.objects.count() == 2
    assert TensorboardJob.objects.count() == 2

    resp = self.auth_client.post(self.url + 'archive/')

    # Each scheduler was asked to stop the running entity it owns.
    assert xp_group_scheduler_mock.call_count == 2
    assert xp_scheduler_mock.call_count == 1
    assert job_scheduler_mock.call_count == 1
    assert build_scheduler_mock.call_count == 1
    assert notebook_scheduler_mock.call_count == 1
    assert tensorboard_scheduler_mock.call_count == 1
    assert resp.status_code == status.HTTP_200_OK

    # Everything is hidden from default managers but retained in `all`.
    assert self.queryset.count() == 0
    assert Project.all.filter(user=self.object.user).count() == 1
    assert ExperimentGroup.objects.count() == 0
    assert ExperimentGroup.all.count() == 2
    assert Experiment.objects.count() == 0
    assert Experiment.all.count() == 2
    assert Job.objects.count() == 0
    assert Job.all.count() == 2
    assert BuildJob.objects.count() == 0
    assert BuildJob.all.count() == 2
    assert TensorboardJob.objects.count() == 0
    assert TensorboardJob.all.count() == 2
    assert NotebookJob.objects.count() == 0
    assert NotebookJob.all.count() == 2
def test_cannot_upload_if_project_has_a_running_notebook(self):
    """Repo upload is forbidden while the project has a running notebook."""
    user = self.auth_client.user
    repo_name = self.project.name

    # Give the project a running notebook.
    notebook = NotebookJobFactory(project=self.project)
    notebook.set_status(status=JobLifeCycle.RUNNING)

    assert self.model_class.objects.count() == 0
    uploaded_file = self.get_upload_file()
    with patch('api.repos.tasks.handle_new_files.apply_async') as mock_task:
        response = self.auth_client.put(self.url,
                                        data={'repo': uploaded_file},
                                        content_type=MULTIPART_CONTENT)
    assert response.status_code == status.HTTP_403_FORBIDDEN

    # Nothing was written to the upload root and no task was scheduled.
    file_path = '{}/{}/{}.tar.gz'.format(settings.UPLOAD_ROOT,
                                         user.username,
                                         repo_name)
    self.assertFalse(os.path.exists(file_path))
    assert mock_task.call_count == 0

    # No repo record or repo directory was created.
    assert self.model_class.objects.count() == 0
    repo_path = '{}/{}/{}/{}'.format(settings.REPOS_ROOT,
                                     user.username,
                                     repo_name,
                                     repo_name)
    self.assertFalse(os.path.exists(repo_path))
def test_archive(self):
    """Archiving a project hides its entities; unarchiving restores visibility."""
    project = ProjectFactory()
    ExperimentGroupFactory(project=project)
    ExperimentFactory(project=project)
    JobFactory(project=project)
    BuildJobFactory(project=project)
    NotebookJobFactory(project=project)
    TensorboardJobFactory(project=project)

    # Before archiving everything is visible through the default relations.
    assert project.deleted is False
    assert project.experiments.count() == 1
    assert project.experiment_groups.count() == 1
    assert project.jobs.count() == 1
    assert project.build_jobs.count() == 1
    assert project.notebook_jobs.count() == 1
    assert project.tensorboard_jobs.count() == 1
    assert project.all_experiments.count() == 1
    assert project.all_experiment_groups.count() == 1
    assert project.all_notebook_jobs.count() == 1
    assert project.all_tensorboard_jobs.count() == 1

    # Archiving hides the entities from the default relations only.
    project.archive()
    assert project.deleted is True
    assert project.experiments.count() == 0
    assert project.experiment_groups.count() == 0
    assert project.jobs.count() == 0
    assert project.build_jobs.count() == 0
    assert project.notebook_jobs.count() == 0
    assert project.tensorboard_jobs.count() == 0
    assert project.all_experiments.count() == 1
    assert project.all_experiment_groups.count() == 1
    assert project.all_notebook_jobs.count() == 1
    assert project.all_tensorboard_jobs.count() == 1

    # Unarchiving restores full visibility.
    project.unarchive()
    assert project.deleted is False
    assert project.experiments.count() == 1
    assert project.experiment_groups.count() == 1
    assert project.jobs.count() == 1
    assert project.build_jobs.count() == 1
    assert project.notebook_jobs.count() == 1
    assert project.tensorboard_jobs.count() == 1
    assert project.all_experiments.count() == 1
    assert project.all_experiment_groups.count() == 1
    assert project.all_notebook_jobs.count() == 1
    assert project.all_tensorboard_jobs.count() == 1
def setUp(self):
    """Create a notebook job and declare the auditor events under test."""
    super().setUp()
    self.notebook = NotebookJobFactory(project=ProjectFactory())
    self.tested_events = {
        notebook_events.NOTEBOOK_STARTED,
        notebook_events.NOTEBOOK_STARTED_TRIGGERED,
        notebook_events.NOTEBOOK_STOPPED,
        notebook_events.NOTEBOOK_STOPPED_TRIGGERED,
        notebook_events.NOTEBOOK_CLEANED_TRIGGERED,
        notebook_events.NOTEBOOK_VIEWED,
        notebook_events.NOTEBOOK_NEW_STATUS,
        notebook_events.NOTEBOOK_FAILED,
        notebook_events.NOTEBOOK_SUCCEEDED,
        notebook_events.NOTEBOOK_STATUSES_VIEWED,
        notebook_events.NOTEBOOK_UPDATED,
        notebook_events.NOTEBOOK_DELETED,
        notebook_events.NOTEBOOK_DELETED_TRIGGERED,
        notebook_events.NOTEBOOK_ARCHIVED,
        notebook_events.NOTEBOOK_RESTORED,
        notebook_events.NOTEBOOK_UNBOOKMARKED,
        notebook_events.NOTEBOOK_BOOKMARKED,
    }
def test_creation_with_bad_config(self):
    """Invalid content is rejected for both notebook and tensorboard jobs."""
    with self.assertRaises(ValidationError):
        NotebookJobFactory(content='foo')
    with self.assertRaises(ValidationError):
        TensorboardJobFactory(content='foo')
def get_job_object(self, job_state):
    """Build the notebook referenced by the job state's project-uuid label."""
    project_uuid = job_state.details.labels.project_uuid.hex
    project = ProjectFactory(uuid=project_uuid, notebook=NotebookJobFactory())
    return project.notebook
class TestBuildJobStatuses(BaseTest):
    """Build-job status changes must propagate to every dependent entity."""

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.build_job = BuildJobFactory(project=self.project)
        self.notebook = NotebookJobFactory(project=self.project,
                                           build_job=self.build_job)
        self.tensorboard = TensorboardJobFactory(project=self.project,
                                                 build_job=self.build_job)
        self.job = JobFactory(project=self.project, build_job=self.build_job)
        self.experiment = ExperimentFactory(project=self.project,
                                            build_job=self.build_job)

    def test_build_job_failed_sets_dependency_to_failed(self):
        """A FAILED build fails every job and experiment waiting on it."""
        assert self.build_job.last_status != JobLifeCycle.FAILED
        assert self.notebook.last_status != JobLifeCycle.FAILED
        assert self.tensorboard.last_status != JobLifeCycle.FAILED
        assert self.job.last_status != JobLifeCycle.FAILED
        assert self.experiment.last_status != ExperimentLifeCycle.FAILED

        self.build_job.set_status(JobLifeCycle.FAILED)
        assert self.build_job.last_status == JobLifeCycle.FAILED
        for dependent in (self.notebook, self.tensorboard, self.job):
            dependent.refresh_from_db()
            assert dependent.last_status == JobLifeCycle.FAILED
        self.experiment.refresh_from_db()
        assert self.experiment.last_status == ExperimentLifeCycle.FAILED

    def test_build_job_stopped_sets_dependency_to_stopped(self):
        """A STOPPED build stops every job and experiment waiting on it."""
        assert self.build_job.last_status != JobLifeCycle.STOPPED
        assert self.notebook.last_status != JobLifeCycle.STOPPED
        assert self.tensorboard.last_status != JobLifeCycle.STOPPED
        assert self.job.last_status != JobLifeCycle.STOPPED
        assert self.experiment.last_status != ExperimentLifeCycle.STOPPED

        self.build_job.set_status(JobLifeCycle.STOPPED)
        assert self.build_job.last_status == JobLifeCycle.STOPPED
        for dependent in (self.notebook, self.tensorboard, self.job):
            dependent.refresh_from_db()
            assert dependent.last_status == JobLifeCycle.STOPPED
        self.experiment.refresh_from_db()
        assert self.experiment.last_status == ExperimentLifeCycle.STOPPED

    def test_build_job_succeeded_starts_dependency(self):
        """A SUCCEEDED build schedules a start for each dependent entity."""
        assert self.build_job.last_status != JobLifeCycle.SUCCEEDED
        assert self.notebook.last_status != JobLifeCycle.SUCCEEDED
        assert self.tensorboard.last_status != JobLifeCycle.SUCCEEDED
        assert self.job.last_status != JobLifeCycle.SUCCEEDED
        assert self.experiment.last_status != ExperimentLifeCycle.SUCCEEDED

        with patch('scheduler.notebook_scheduler.start_notebook') as mock_notebook, \
                patch('scheduler.tensorboard_scheduler.start_tensorboard') as mock_tensorboard, \
                patch('scheduler.experiment_scheduler.start_experiment') as mock_experiment, \
                patch('scheduler.job_scheduler.start_job') as mock_job:
            self.build_job.set_status(JobLifeCycle.SUCCEEDED)

        assert self.build_job.last_status == JobLifeCycle.SUCCEEDED
        assert mock_notebook.call_count == 1
        assert mock_tensorboard.call_count == 1
        assert mock_experiment.call_count == 1
        assert mock_job.call_count == 1
def setUp(self):
    """Create the notebook fixture before the base-class setup runs."""
    self.notebook = NotebookJobFactory(project=ProjectFactory())
    super().setUp()
def get_job_object(self, job_state):
    """Build a notebook job under the project named by the state's uuid label."""
    project_uuid = job_state.details.labels.project_uuid.hex
    return NotebookJobFactory(project=ProjectFactory(uuid=project_uuid))