def test_cannot_upload_if_project_has_a_running_notebook_with_code_mount(self):
    """Uploading repo code is rejected while a code-mounted notebook is running."""
    owner = self.auth_client.user
    project_name = self.project.name
    # Give the project a running notebook so the upload guard triggers.
    notebook = NotebookJobFactory(project=self.project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    assert self.model_class.objects.count() == 0

    upload = self.get_upload_file()
    with patch('api.repos.views.handle_new_files') as mock_task:
        response = self.auth_client.put(self.url,
                                        data={'repo': upload},
                                        content_type=MULTIPART_CONTENT)
    assert response.status_code == status.HTTP_403_FORBIDDEN

    # The tarball must not have been written to the upload mount.
    file_path = '{}/{}/{}.tar.gz'.format(
        conf.get('UPLOAD_MOUNT_PATH'), owner.username, project_name)
    self.assertFalse(os.path.exists(file_path))
    assert mock_task.call_count == 0

    # No repo record was created and no repo directory appeared on disk.
    assert self.model_class.objects.count() == 0
    repo_path = '{}/{}/{}/{}'.format(
        conf.get('REPOS_MOUNT_PATH'), owner.username, project_name, project_name)
    self.assertFalse(os.path.exists(repo_path))
def test_archive_triggers_stopping_of_plugin_jobs(self):
    """Archiving a project schedules stop tasks for its notebook and tensorboard."""
    assert self.queryset.count() == 1
    # Attach one scheduled notebook and one scheduled tensorboard to the project.
    notebook_job = NotebookJobFactory(project=self.object)
    notebook_job.set_status(JobLifeCycle.SCHEDULED)
    tensorboard_job = TensorboardJobFactory(project=self.object)
    tensorboard_job.set_status(JobLifeCycle.SCHEDULED)
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1

    with patch('scheduler.tasks.notebooks.'
               'projects_notebook_stop.apply_async') as notebook_stop, \
            patch('scheduler.tasks.tensorboards.'
                  'tensorboards_stop.apply_async') as tensorboard_stop:
        resp = self.auth_client.post(self.url + 'archive/')
    assert notebook_stop.call_count == 1
    assert tensorboard_stop.call_count == 1
    assert resp.status_code == status.HTTP_200_OK

    # Default managers hide archived rows; the `all` manager still sees them.
    assert self.queryset.count() == 0
    assert TensorboardJob.objects.count() == 0
    assert NotebookJob.objects.count() == 0
    assert TensorboardJob.all.count() == 1
    assert NotebookJob.all.count() == 1
def test_delete_triggers_stopping_of_plugin_jobs(self):
    """Deleting a project schedules stop tasks and cascades to its contents."""
    assert self.queryset.count() == 1
    # Attach one scheduled notebook and one scheduled tensorboard to the project.
    notebook_job = NotebookJobFactory(project=self.object)
    notebook_job.set_status(JobLifeCycle.SCHEDULED)
    tensorboard_job = TensorboardJobFactory(project=self.object)
    tensorboard_job.set_status(JobLifeCycle.SCHEDULED)
    assert NotebookJob.objects.count() == 1
    assert TensorboardJob.objects.count() == 1

    with patch('scheduler.tasks.notebooks.'
               'projects_notebook_stop.apply_async') as notebook_stop, \
            patch('scheduler.tasks.tensorboards.'
                  'tensorboards_stop.apply_async') as tensorboard_stop:
        resp = self.auth_client.delete(self.url)
    assert notebook_stop.call_count == 1
    assert tensorboard_stop.call_count == 1
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    assert self.queryset.count() == 0
    assert ExperimentGroup.objects.count() == 0
    assert Experiment.objects.count() == 0

    # Deleting another user's project — public or private — is rejected.
    for forbidden_url in (self.url_other, self.url_private):
        resp = self.auth_client.delete(forbidden_url)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                    status.HTTP_403_FORBIDDEN)
def test_redirects_to_proxy_protected_url_with_extra_path(self, spawner_mock):
    """The view sets the nginx redirect header for both app paths and assets."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = JOB_NAME.format(job_uuid=notebook.uuid.hex,
                                      name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    # Path without a query string: token becomes the first query parameter.
    response = self.auth_client.get(self._get_url(project, 'tree?'))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}?token={}'.format(
        service_url,
        'tree',
        notebook_scheduler.get_notebook_token(notebook)
    )
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)

    # Path that already carries a query string: token is appended with `&`.
    response = self.auth_client.get(
        self._get_url(project, 'static/components/something?v=4.7.0'))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}&token={}'.format(
        service_url,
        'static/components/something?v=4.7.0',
        notebook_scheduler.get_notebook_token(notebook)
    )
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def test_redirects_to_proxy_protected_url_with_extra_path(
        self, spawner_mock):
    """The view sets the nginx redirect header for both app paths and assets."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    # NOTE(review): this variant keys the deployment on the project uuid and
    # passes the project (not the notebook) to get_notebook_token — preserved.
    deployment_name = DEPLOYMENT_NAME.format(project_uuid=project.uuid.hex,
                                             name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    # Path without a query string: token becomes the first query parameter.
    response = self.auth_client.get(self._get_url(project, 'tree?'))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}?token={}'.format(
        service_url, 'tree', notebook_scheduler.get_notebook_token(project))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)

    # Path that already carries a query string: token is appended with `&`.
    response = self.auth_client.get(
        self._get_url(project, 'static/components/something?v=4.7.0'))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    expected = '{}/{}&token={}'.format(
        service_url,
        'static/components/something?v=4.7.0',
        notebook_scheduler.get_notebook_token(project))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def setUp(self):
    """Create a project owning a running notebook and point at its stop URL."""
    super().setUp()
    self.object = self.factory_class(user=self.auth_client.user)
    # The stop endpoint needs a notebook in a running state to act on.
    notebook = NotebookJobFactory(project=self.object)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    RepoFactory(project=self.object)
    self.url = '/{}/{}/{}/notebook/stop'.format(API_V1,
                                                self.object.user.username,
                                                self.object.name)
    self.queryset = self.model_class.objects.all()
def test_project_requests_notebook_url(self):
    """Fetching the notebook view resolves both the service URL and the token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    with patch('scheduler.notebook_scheduler.get_notebook_url') as mock_url_fct, \
            patch('scheduler.notebook_scheduler.get_notebook_token') as mock_token_fct:
        response = self.auth_client.get(self._get_url(project))
    assert mock_url_fct.call_count == 1
    assert mock_token_fct.call_count == 1
    assert response.status_code == 200
def setUp(self):
    """Create a project owning a running notebook and point at its stop URL."""
    super().setUp()
    self.object = self.factory_class(user=self.auth_client.user)
    # The stop endpoint needs a notebook in a running state to act on.
    notebook = NotebookJobFactory(project=self.object)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    RepoFactory(project=self.object)
    self.url = '/{}/{}/{}/notebook/stop'.format(
        API_V1, self.object.user.username, self.object.name)
    self.queryset = self.model_class.objects.all()
def test_project_requests_notebook_url(self):
    """Fetching the notebook view resolves both the service URL and the token."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    with patch('scheduler.notebook_scheduler.get_notebook_url') as mock_url_fct, \
            patch('scheduler.notebook_scheduler.get_notebook_token') as mock_token_fct:
        response = self.auth_client.get(self._get_url(project))
    assert mock_url_fct.call_count == 1
    assert mock_token_fct.call_count == 1
    assert response.status_code == 200
def test_status_update_results_in_new_updated_at_datetime_notebook(self):
    """Both direct status creation and set_status bump the job's updated_at."""
    project = ProjectFactory()
    job = NotebookJobFactory(project=project)
    last_seen = job.updated_at

    # Creating a status row directly must refresh the timestamp.
    NotebookJobStatus.objects.create(job=job, status=JobLifeCycle.BUILDING)
    job.refresh_from_db()
    assert last_seen < job.updated_at
    last_seen = job.updated_at

    # Going through set_status must refresh it as well.
    job.set_status(JobLifeCycle.RUNNING)
    job.refresh_from_db()
    assert last_seen < job.updated_at
def test_redirects_to_proxy_protected_url(self, spawner_mock):
    """The view sets the nginx redirect header pointing at the notebook service."""
    project = ProjectFactory(user=self.auth_client.user)
    notebook = NotebookJobFactory(project=project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    deployment_name = JOB_NAME.format(job_uuid=notebook.uuid.hex,
                                      name=self.plugin_app)
    service_url = self._get_service_url(deployment_name=deployment_name)
    spawner_mock.return_value.get_notebook_url.return_value = service_url

    response = self.auth_client.get(self._get_url(project))
    assert response.status_code == 200
    self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
    # The redirect target is the service's `tree` page plus the auth token.
    expected = '{}/{}?token={}'.format(
        service_url, 'tree', notebook_scheduler.get_notebook_token(notebook))
    self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER], expected)
def test_cannot_upload_if_project_has_a_running_notebook(self):
    """Uploading repo code is rejected while the project runs a notebook."""
    owner = self.auth_client.user
    project_name = self.project.name
    # Give the project a running notebook so the upload guard triggers.
    notebook = NotebookJobFactory(project=self.project)
    notebook.set_status(status=JobLifeCycle.RUNNING)
    assert self.model_class.objects.count() == 0

    upload = self.get_upload_file()
    with patch('api.repos.tasks.handle_new_files.apply_async') as mock_task:
        response = self.auth_client.put(self.url,
                                        data={'repo': upload},
                                        content_type=MULTIPART_CONTENT)
    assert response.status_code == status.HTTP_403_FORBIDDEN

    # The tarball must not have been written to the upload root.
    file_path = '{}/{}/{}.tar.gz'.format(settings.UPLOAD_ROOT,
                                         owner.username,
                                         project_name)
    self.assertFalse(os.path.exists(file_path))
    assert mock_task.call_count == 0

    # No repo record was created and no repo directory appeared on disk.
    assert self.model_class.objects.count() == 0
    repo_path = '{}/{}/{}/{}'.format(settings.REPOS_ROOT,
                                     owner.username,
                                     project_name,
                                     project_name)
    self.assertFalse(os.path.exists(repo_path))