def test_delete_job_triggers_job_stop_mock(self, delete_path):
    """Deleting a scheduled job stops it and removes its outputs/logs paths."""
    scheduled_job = JobFactory()
    scheduled_job.set_status(JobLifeCycle.SCHEDULED)
    assert delete_path.call_count == 2  # outputs + logs
    with patch('scheduler.job_scheduler.stop_job') as stop_mock:
        scheduled_job.delete()
    assert delete_path.call_count == 2 + 2  # outputs + logs
    assert stop_mock.call_count == 1
def setUp(self):
    """Create a job with one status per lifecycle choice and the statuses URL."""
    super().setUp()
    # Patch status handling and the build task so factories stay side-effect free.
    with patch.object(Job, 'set_status'), \
            patch('scheduler.tasks.jobs.jobs_build.apply_async'):  # noqa
        project = ProjectFactory(user=self.auth_client.user)
        self.job = JobFactory(project=project)
        self.url = '/{}/{}/{}/jobs/{}/statuses/'.format(
            API_V1, project.user.username, project.name, self.job.id)
        self.objects = [
            self.factory_class(job=self.job,
                               status=JobLifeCycle.CHOICES[i][0])
            for i in range(self.num_objects)]
        self.queryset = self.model_class.objects.all()
def test_archive_schedules_deletion(self,
                                    xp_group_scheduler_mock,
                                    xp_scheduler_mock,
                                    job_scheduler_mock,
                                    build_scheduler_mock,
                                    notebook_scheduler_mock,
                                    tensorboard_scheduler_mock):
    """Archiving a project stops running children and soft-deletes them all."""
    # Two of every child entity type.
    for _ in range(2):
        JobFactory(project=self.object)
        BuildJobFactory(project=self.object)
        TensorboardJobFactory(project=self.object)
        NotebookJobFactory(project=self.object)
    # Mark one of each kind as running so archiving must schedule a stop.
    self.object.experiment_groups.first().set_status(
        ExperimentGroupLifeCycle.RUNNING)
    self.object.experiments.first().set_status(ExperimentLifeCycle.RUNNING)
    self.object.jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.build_jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.notebook_jobs.first().set_status(JobLifeCycle.RUNNING)
    self.object.tensorboard_jobs.first().set_status(JobLifeCycle.RUNNING)

    assert self.queryset.count() == 1
    assert ExperimentGroup.objects.count() == 2
    assert Experiment.objects.count() == 2
    assert Job.objects.count() == 2
    assert BuildJob.objects.count() == 2
    assert NotebookJob.objects.count() == 2
    assert TensorboardJob.objects.count() == 2

    resp = self.auth_client.post(self.url + 'archive/')

    assert xp_group_scheduler_mock.call_count == 2
    assert xp_scheduler_mock.call_count == 1
    assert job_scheduler_mock.call_count == 1
    assert build_scheduler_mock.call_count == 1
    assert notebook_scheduler_mock.call_count == 1
    assert tensorboard_scheduler_mock.call_count == 1
    assert resp.status_code == status.HTTP_200_OK

    # Default managers hide archived rows; the `all` managers still see them.
    assert self.queryset.count() == 0
    assert Project.all.filter(user=self.object.user).count() == 1
    for model in (ExperimentGroup, Experiment, Job, BuildJob,
                  TensorboardJob, NotebookJob):
        assert model.objects.count() == 0
        assert model.all.count() == 2
def test_delete_triggers_stopping_of_jobs(self):
    """Deleting a project stops its scheduled jobs and cascades deletion."""
    assert self.queryset.count() == 1
    for _ in range(2):
        scheduled = JobFactory(project=self.object)
        scheduled.set_status(JobLifeCycle.SCHEDULED)
    assert Job.objects.count() == 2
    with patch(
            'scheduler.tasks.jobs.jobs_stop.apply_async') as stop_mock:
        resp = self.auth_client.delete(self.url)
    assert stop_mock.call_count == 2
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    assert self.queryset.count() == 0
    assert ExperimentGroup.objects.count() == 0
    assert Experiment.objects.count() == 0
    # Delete does not work for other projects, public or private.
    for forbidden_url in (self.url_other, self.url_private):
        resp = self.auth_client.delete(forbidden_url)
        assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                    status.HTTP_403_FORBIDDEN)
def set_objects(self):
    """Create a project/experiment/job and record delete/view activities."""
    self.user = self.auth_client.user
    self.project = ProjectFactory(user=self.user)
    activitylogs.record(instance=self.project,
                        event_type=PROJECT_DELETED_TRIGGERED,
                        actor_id=self.user.id)
    self.experiment = ExperimentFactory(project=self.project)
    activitylogs.record(instance=self.experiment,
                        event_type=EXPERIMENT_DELETED_TRIGGERED,
                        actor_id=self.user.id)
    self.job = JobFactory(project=self.project)
    activitylogs.record(instance=self.job,
                        event_type=JOB_VIEWED,
                        actor_id=self.user.id)
def test_create_job_with_resources_spec(self, spawner_mock):
    """A job built from a resources spec gets scheduled once its build succeeds."""
    config = JobSpecification.read(job_spec_resources_content)
    spawner = spawner_mock.return_value
    spawner.start_job.return_value = {'pod': 'pod_content'}
    spawner.spec = config
    with patch('scheduler.dockerizer_scheduler.create_build_job'
               ) as build_mock:
        build = BuildJobFactory()
        BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED,
                                      job=build)
        build_mock.return_value = build, True, True
        job = JobFactory(config=config.parsed_data)
        assert build_mock.call_count == 1
        assert JobStatus.objects.filter(job=job).count() == 2
        recorded = list(
            JobStatus.objects.filter(job=job).values_list('status',
                                                          flat=True))
        assert recorded == [JobLifeCycle.CREATED, JobLifeCycle.SCHEDULED]
        job.refresh_from_db()
        assert job.last_status == JobLifeCycle.SCHEDULED
def setUp(self):
    """Create a job, its outputs path on disk, and the outputs-files URL."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    outputs_job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/outputs/files'.format(
        API_V1, project.user.username, project.name, outputs_job.id)
    outputs_path = get_job_outputs_path(
        persistence_outputs=outputs_job.persistence_outputs,
        job_name=outputs_job.unique_name)
    create_job_outputs_path(
        persistence_outputs=outputs_job.persistence_outputs,
        job_name=outputs_job.unique_name)
    self.create_paths(path=outputs_path, url=self.url)
def setUp(self):
    """Initialize activity logging and record events on fresh fixtures."""
    super().setUp()
    self.user = UserFactory()
    activitylogs.validate()
    activitylogs.setup()
    self.project = ProjectFactory()
    activitylogs.record(instance=self.project,
                        event_type=PROJECT_DELETED_TRIGGERED,
                        actor_id=self.user.id)
    self.experiment = ExperimentFactory()
    activitylogs.record(instance=self.experiment,
                        event_type=EXPERIMENT_DELETED_TRIGGERED,
                        actor_id=self.user.id)
    self.job = JobFactory()
    activitylogs.record(instance=self.job,
                        event_type=JOB_VIEWED,
                        actor_id=self.user.id)
def test_archive(self):
    """Archiving hides children from default managers; unarchive restores them."""
    project = ProjectFactory()
    ExperimentGroupFactory(project=project)
    ExperimentFactory(project=project)
    JobFactory(project=project)
    BuildJobFactory(project=project)
    NotebookJobFactory(project=project)
    TensorboardJobFactory(project=project)

    def check_counts(visible):
        # Default related managers hide archived children.
        assert project.experiments.count() == visible
        assert project.experiment_groups.count() == visible
        assert project.jobs.count() == visible
        assert project.build_jobs.count() == visible
        assert project.notebook_jobs.count() == visible
        assert project.tensorboard_jobs.count() == visible
        # The `all_*` managers always see every child.
        assert project.all_experiments.count() == 1
        assert project.all_experiment_groups.count() == 1
        assert project.all_notebook_jobs.count() == 1
        assert project.all_tensorboard_jobs.count() == 1

    assert project.deleted is False
    check_counts(visible=1)

    project.archive()
    assert project.deleted is True
    check_counts(visible=0)

    project.unarchive()
    assert project.deleted is False
    check_counts(visible=1)
def setUp(self):
    """Create a job, write fake log lines to its log file, set the logs URL."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    logged_job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/logs'.format(API_V1,
                                               project.user.username,
                                               project.name,
                                               logged_job.id)
    log_path = get_job_logs_path(logged_job.unique_name)
    create_job_logs_path(job_name=logged_job.unique_name)
    fake = Faker()
    self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
    with open(log_path, 'w') as log_file:
        # One sentence per line, trailing newline included.
        log_file.write('\n'.join(self.logs))
        log_file.write('\n')
def test_archive(self):
    """Archiving hides a job from the default manager; restore re-exposes it."""
    archived_job = JobFactory()
    assert archived_job.deleted is False
    assert Job.objects.count() == 1
    assert Job.all.count() == 1

    archived_job.archive()
    assert archived_job.deleted is True
    assert Job.objects.count() == 0  # hidden from the default manager
    assert Job.all.count() == 1      # still visible to the `all` manager

    archived_job.restore()
    assert archived_job.deleted is False
    assert Job.objects.count() == 1
    assert Job.all.count() == 1
def set_objects(self):
    """Record viewed/created activities for a project, an experiment and a job."""
    self.user = self.auth_client.user
    actor = dict(actor_id=self.user.id, actor_name=self.user.username)
    self.project = ProjectFactory()
    activitylogs.record(event_type=PROJECT_VIEWED,
                        instance=self.project, **actor)
    self.experiment = ExperimentFactory()
    activitylogs.record(event_type=EXPERIMENT_VIEWED,
                        instance=self.experiment, **actor)
    self.job = JobFactory()
    activitylogs.record(event_type=JOB_CREATED, instance=self.job, **actor)
    activitylogs.record(event_type=JOB_VIEWED, instance=self.job, **actor)
def test_patch(self):
    """PATCH updates the description and can mark the job as a clone."""
    new_description = 'updated_xp_name'
    data = {'description': new_description}
    assert self.object.description != data['description']
    resp = self.auth_client.patch(self.url, data=data)
    assert resp.status_code == status.HTTP_200_OK
    patched = self.model_class.objects.get(id=self.object.id)
    assert patched.user == self.object.user
    assert patched.description != self.object.description
    assert patched.description == new_description

    # Update original job
    assert patched.is_clone is False
    original = JobFactory()
    resp = self.auth_client.patch(self.url,
                                  data={'original_job': original.id})
    assert resp.status_code == status.HTTP_200_OK
    patched = self.model_class.objects.get(id=self.object.id)
    assert patched.user == self.object.user
    assert patched.description == new_description
    assert patched.is_clone is True
    assert patched.original_job == original
class TestJobLogsViewV1(BaseViewTest):
    """Logs endpoint: final logs for done jobs, temp logs while running."""
    num_log_lines = 10
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.job = JobFactory(project=project)
        self.logs = []
        path_args = (project.user.username, project.name, self.job.id)
        self.url = '/{}/{}/{}/jobs/{}/logs'.format(API_V1, *path_args)
        self.stream_url = '/{}/{}/{}/jobs/{}/logs/stream'.format(
            API_V1, *path_args)
        self.ws_url = '/{}/{}/{}/jobs/{}/logs'.format(WS_V1, *path_args)

    def create_logs(self, temp):
        """Write `num_log_lines` fake sentences to the (temp) log file."""
        log_path = stores.get_job_logs_path(job_name=self.job.unique_name,
                                            temp=temp)
        stores.create_job_logs_path(job_name=self.job.unique_name, temp=temp)
        fake = Faker()
        self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
        with open(log_path, 'w') as log_file:
            log_file.write('\n'.join(self.logs))
            log_file.write('\n')

    def _streamed_lines(self, resp):
        """Collect the non-empty decoded lines from a streaming response."""
        chunks = [c for c in resp._iterator]  # pylint:disable=protected-access
        return [line for line in chunks[0].decode('utf-8').split('\n')
                if line]

    def test_get_done_job(self):
        self.job.set_status(JobLifeCycle.SUCCEEDED)
        self.assertTrue(self.job.is_done)
        # No logs yet
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The temp file must not be served for a done job
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The final file is served
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = self._streamed_lines(resp)
        assert len(data) == len(self.logs)
        assert data == self.logs

    @patch('api.jobs.views.process_logs')
    def test_get_non_done_job(self, _):
        self.assertFalse(self.job.is_done)
        # No logs yet
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The final file must not be served for a running job
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The temp file is served
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        data = self._streamed_lines(resp)
        assert len(data) == len(self.logs)
        assert data == self.logs

    def test_stream_redirects_to_internal_service(self):
        response = self.auth_client.get(self.stream_url)
        self.assertEqual(response.status_code, 200)
        self.assertTrue(ProtectedView.NGINX_REDIRECT_HEADER in response)
        self.assertEqual(response[ProtectedView.NGINX_REDIRECT_HEADER],
                         self.ws_url)
def setUp(self):
    """Attach a fresh job on a fresh project to the test case."""
    super().setUp()
    project = ProjectFactory()
    self.job = JobFactory(project=project)
def test_job_creation_triggers_status_creation(self):
    """A freshly created job carries a single CREATED status."""
    new_job = JobFactory()
    assert JobStatus.objects.filter(job=new_job).count() == 1
    assert new_job.last_status == JobLifeCycle.CREATED
def test_creation_with_bad_config(self):
    """An invalid job config is rejected at creation time."""
    with self.assertRaises(ValidationError):
        JobFactory(content='foo')
def get_instance():
    """Build a job fixture without triggering the build task."""
    with patch('scheduler.tasks.jobs.jobs_build.apply_async'):  # noqa
        return JobFactory()
def get_job_object(self, job_state):
    """Create a job whose uuid matches the one carried by `job_state`."""
    uuid_hex = job_state.details.labels.job_uuid.hex
    with patch('scheduler.tasks.jobs.jobs_build.apply_async'):  # noqa
        return JobFactory(uuid=uuid_hex)
def test_create_job(self):
    """Jobs expose their parsed specification object."""
    new_job = JobFactory()
    assert isinstance(new_job.specification, JobSpecification)
def test_job_creation_triggers_status_creation(self):
    """Creating a job (build task mocked) records one CREATED status."""
    with patch('scheduler.tasks.jobs.jobs_build.apply_async'):  # noqa
        new_job = JobFactory()
    assert JobStatus.objects.filter(job=new_job).count() == 1
    assert new_job.last_status == JobLifeCycle.CREATED
def create_job(self, config):
    """Parse `config` and create a job for the test project from it."""
    specification = JobSpecification.read(config)
    return JobFactory(config=specification.parsed_data, project=self.project)
def setUp(self):
    """Create a job and build the reconcile endpoint URL for it."""
    super().setUp()
    project = ProjectFactory(user=self.auth_client.user)
    self.job = JobFactory(project=project)
    self.url = '/{}/{}/{}/jobs/{}/_reconcile'.format(API_V1,
                                                     project.user.username,
                                                     project.name,
                                                     self.job.id)
def test_job_creation_triggers_status_creation_mock(self):
    """Job creation calls Job.set_status exactly once."""
    with patch.object(Job, 'set_status') as set_status_mock:
        JobFactory()
    assert set_status_mock.call_count == 1
class TestBuildJobStatuses(BaseTest):
    """Status propagation from a build job to the entities depending on it."""

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.build_job = BuildJobFactory(project=self.project)
        self.notebook = NotebookJobFactory(project=self.project,
                                           build_job=self.build_job)
        self.tensorboard = TensorboardJobFactory(project=self.project,
                                                 build_job=self.build_job)
        self.job = JobFactory(project=self.project,
                              build_job=self.build_job)
        self.experiment = ExperimentFactory(project=self.project,
                                            build_job=self.build_job)

    def _assert_no_entity_has(self, job_status, experiment_status):
        """Neither the build job nor any dependent carries the status yet."""
        assert self.build_job.last_status != job_status
        assert self.notebook.last_status != job_status
        assert self.tensorboard.last_status != job_status
        assert self.job.last_status != job_status
        assert self.experiment.last_status != experiment_status

    def _assert_dependents_moved_to(self, job_status, experiment_status):
        """Every dependent was transitioned to the propagated status."""
        for dependent in (self.notebook, self.tensorboard, self.job):
            dependent.refresh_from_db()
            assert dependent.last_status == job_status
        self.experiment.refresh_from_db()
        assert self.experiment.last_status == experiment_status

    def test_build_job_failed_sets_dependency_to_failed(self):
        self._assert_no_entity_has(JobLifeCycle.FAILED,
                                   ExperimentLifeCycle.FAILED)
        self.build_job.set_status(JobLifeCycle.FAILED)
        assert self.build_job.last_status == JobLifeCycle.FAILED
        self._assert_dependents_moved_to(JobLifeCycle.FAILED,
                                         ExperimentLifeCycle.FAILED)

    def test_build_job_stopped_sets_dependency_to_stopped(self):
        self._assert_no_entity_has(JobLifeCycle.STOPPED,
                                   ExperimentLifeCycle.STOPPED)
        self.build_job.set_status(JobLifeCycle.STOPPED)
        assert self.build_job.last_status == JobLifeCycle.STOPPED
        self._assert_dependents_moved_to(JobLifeCycle.STOPPED,
                                         ExperimentLifeCycle.STOPPED)

    def test_build_job_succeeded_starts_dependency(self):
        self._assert_no_entity_has(JobLifeCycle.SUCCEEDED,
                                   ExperimentLifeCycle.SUCCEEDED)
        # A successful build must start every dependent exactly once.
        with patch('scheduler.notebook_scheduler.start_notebook'
                   ) as mock_notebook, \
                patch('scheduler.tensorboard_scheduler.start_tensorboard'
                      ) as mock_tensorboard, \
                patch('scheduler.experiment_scheduler.start_experiment'
                      ) as mock_experiment, \
                patch('scheduler.job_scheduler.start_job') as mock_job:
            self.build_job.set_status(JobLifeCycle.SUCCEEDED)
        assert self.build_job.last_status == JobLifeCycle.SUCCEEDED
        assert mock_notebook.call_count == 1
        assert mock_tensorboard.call_count == 1
        assert mock_experiment.call_count == 1
        assert mock_job.call_count == 1
def test_job_created_status_triggers_scheduling(self):
    """The CREATED status schedules the build task exactly once."""
    with patch('scheduler.tasks.jobs.jobs_build.apply_async') as build_mock:
        new_job = JobFactory()
    assert build_mock.call_count == 1
    assert new_job.last_status == JobLifeCycle.CREATED
def get_job_object(self, job_state):
    """Create a job carrying the uuid from `job_state`'s labels."""
    uuid_hex = job_state.details.labels.job_uuid.hex
    return JobFactory(uuid=uuid_hex)