def test_scheduler_create_build_job_of_already_done_job(self):
    """Check the case when the job is already done and we need to create a new job."""
    config = {'image': 'busybox:tag'}
    spec = BuildSpecification.create_specification(config)
    done_job = BuildJobFactory(
        project=self.project,
        user=self.project.user,
        code_reference=self.code_reference,
        config=spec)
    # A STOPPED job is terminal, so the scheduler must create a fresh one.
    done_job.set_status(JobLifeCycle.STOPPED)
    assert BuildJob.objects.count() == 1

    with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start, \
            patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
        mock_start.return_value = True
        mock_check.return_value = False
        build_job, image_exists, build_status = dockerizer_scheduler.create_build_job(
            user=self.project.user,
            project=self.project,
            config=config,
            code_reference=self.code_reference)

    # The image was checked, not found, and a new dockerizer was started.
    assert mock_start.call_count == 1
    assert mock_check.call_count == 1
    assert image_exists is False
    assert build_status is True
    assert BuildJob.objects.count() == 2
def test_scheduler_create_build_job_of_already_done_job(self):
    """Check the case when the job is already done and we need to create a new job."""
    config = {'image': 'busybox:tag'}
    previous_job = BuildJobFactory(project=self.project,
                                   user=self.project.user,
                                   code_reference=self.code_reference,
                                   config=BuildSpecification.create_specification(config))
    previous_job.set_status(JobLifeCycle.STOPPED)
    assert BuildJob.objects.count() == 1

    with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
        with patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
            mock_start.return_value = True
            mock_check.return_value = False
            result = dockerizer_scheduler.create_build_job(
                user=self.project.user,
                project=self.project,
                config=config,
                code_reference=self.code_reference)
            build_job, image_exists, build_status = result

    # A stopped build cannot be reused: the image is re-checked (missing)
    # and a new dockerizer run is scheduled.
    assert mock_start.call_count == 1
    assert mock_check.call_count == 1
    assert image_exists is False
    assert build_status is True
    assert BuildJob.objects.count() == 2
def test_status_update_results_in_new_updated_at_datetime(self):
    """Any status change — direct row insert or set_status — bumps updated_at."""
    job = BuildJobFactory()
    previous = job.updated_at

    # Creating a status row directly must refresh the job's updated_at.
    BuildJobStatus.objects.create(job=job, status=JobLifeCycle.BUILDING)
    job.refresh_from_db()
    assert previous < job.updated_at
    previous = job.updated_at

    # Going through set_status must refresh it as well.
    job.set_status(JobLifeCycle.RUNNING)
    job.refresh_from_db()
    assert previous < job.updated_at
def test_archive_triggers_stopping_of_build_jobs(self):
    """Archiving the project stops its scheduled builds and soft-deletes them."""
    assert self.queryset.count() == 1
    for _ in range(2):
        scheduled_job = BuildJobFactory(project=self.object)
        scheduled_job.set_status(JobLifeCycle.SCHEDULED)
    assert BuildJob.objects.count() == 2

    with patch('scheduler.tasks.build_jobs.build_jobs_stop.apply_async') as job_mock_stop:
        resp = self.auth_client.post(self.url + 'archive/')

    # One stop task per scheduled build job.
    assert job_mock_stop.call_count == 2
    assert resp.status_code == status.HTTP_200_OK
    # Soft delete: the default managers hide the rows, `all` still sees them.
    assert self.queryset.count() == 0
    assert BuildJob.objects.count() == 0
    assert BuildJob.all.count() == 2
def test_scheduler_create_build_job_of_already_running_job(self):
    """Check the case when the job is already running and we just set the
    requesting service to running."""
    config = {'image': 'busybox:tag'}
    spec = BuildSpecification.create_specification(config)
    running_job = BuildJobFactory(
        project=self.project,
        user=self.project.user,
        code_reference=self.code_reference,
        config=spec)
    running_job.set_status(JobLifeCycle.RUNNING)
    assert BuildJob.objects.count() == 1

    with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start:
        build_job, image_exists, build_status = dockerizer_scheduler.create_build_job(
            user=self.project.user,
            project=self.project,
            config=config,
            code_reference=self.code_reference)

    # The running build is reused: no new dockerizer, no new BuildJob row.
    assert mock_start.call_count == 0
    assert image_exists is False
    assert build_status is True
    assert BuildJob.objects.count() == 1
def test_delete_triggers_stopping_of_build_jobs(self):
    """Deleting the project stops its scheduled builds; other projects stay protected."""
    assert self.queryset.count() == 1
    for _ in range(2):
        scheduled_job = BuildJobFactory(project=self.object)
        scheduled_job.set_status(JobLifeCycle.SCHEDULED)
    assert BuildJob.objects.count() == 2

    with patch('scheduler.tasks.build_jobs.build_jobs_stop.apply_async') as job_mock_stop:
        resp = self.auth_client.delete(self.url)

    # One stop task per scheduled build job.
    assert job_mock_stop.call_count == 2
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    assert self.queryset.count() == 0
    # NOTE(review): these check Experiment/ExperimentGroup counts rather than
    # BuildJob counts (compare the archive test) — confirm this is intentional.
    assert ExperimentGroup.objects.count() == 0
    assert Experiment.objects.count() == 0

    # Delete does not work for other project public and private
    resp = self.auth_client.delete(self.url_other)
    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                status.HTTP_403_FORBIDDEN)
    resp = self.auth_client.delete(self.url_private)
    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED,
                                status.HTTP_403_FORBIDDEN)
class TestBuildLogsViewV1(BaseViewTest):
    """Tests for the build-job logs endpoint.

    Finished jobs serve the permanent log file; running jobs serve the temp
    log file; the wrong file kind (or no file) yields 404.
    """
    num_log_lines = 10
    HAS_AUTH = True

    def setUp(self):
        super().setUp()
        project = ProjectFactory(user=self.auth_client.user)
        self.logs = []
        self.job = BuildJobFactory(project=project)
        self.url = '/{}/{}/{}/builds/{}/logs'.format(
            API_V1, project.user.username, project.name, self.job.id)

    def create_logs(self, temp):
        """Write `num_log_lines` fake sentences to the job's (temp or permanent) log file."""
        log_path = stores.get_job_logs_path(job_name=self.job.unique_name, temp=temp)
        stores.create_job_logs_path(job_name=self.job.unique_name, temp=temp)
        fake = Faker()
        self.logs = [fake.sentence() for _ in range(self.num_log_lines)]
        with open(log_path, 'w') as log_file:
            for line in self.logs:
                log_file.write(line)
                log_file.write('\n')

    def test_get_done_job(self):
        self.job.set_status(JobLifeCycle.SUCCEEDED)
        self.assertTrue(self.job.is_done)
        # No logs yet -> 404
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # A temp log file must not be served for a finished job
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The permanent log file is served
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        chunks = [i for i in resp._iterator]  # pylint:disable=protected-access
        data = [d for d in chunks[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs

    @patch('api.build_jobs.views.process_logs')
    def test_get_non_done_job(self, _):
        self.assertFalse(self.job.is_done)
        # No logs yet -> 404
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # A permanent log file must not be served for a running job
        self.create_logs(temp=False)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        # The temp log file is served
        self.create_logs(temp=True)
        resp = self.auth_client.get(self.url)
        assert resp.status_code == status.HTTP_200_OK
        chunks = [i for i in resp._iterator]  # pylint:disable=protected-access
        data = [d for d in chunks[0].decode('utf-8').split('\n') if d]
        assert len(data) == len(self.logs)
        assert data == self.logs
class TestBuildJobStatuses(BaseTest):
    """Build-job status transitions must propagate to the runs that depend on it."""

    def setUp(self):
        super().setUp()
        self.project = ProjectFactory()
        self.build_job = BuildJobFactory(project=self.project)
        self.notebook = NotebookJobFactory(project=self.project, build_job=self.build_job)
        self.tensorboard = TensorboardJobFactory(project=self.project, build_job=self.build_job)
        self.job = JobFactory(project=self.project, build_job=self.build_job)
        self.experiment = ExperimentFactory(project=self.project, build_job=self.build_job)

    def _assert_nothing_in_status(self, job_status, experiment_status):
        # Sanity check: no entity is already in the target status.
        assert self.build_job.last_status != job_status
        assert self.notebook.last_status != job_status
        assert self.tensorboard.last_status != job_status
        assert self.job.last_status != job_status
        assert self.experiment.last_status != experiment_status

    def _assert_dependents_in_status(self, job_status, experiment_status):
        # After the transition, every dependent run carries the target status.
        for dependent in (self.notebook, self.tensorboard, self.job):
            dependent.refresh_from_db()
            assert dependent.last_status == job_status
        self.experiment.refresh_from_db()
        assert self.experiment.last_status == experiment_status

    def test_build_job_failed_sets_dependency_to_failed(self):
        self._assert_nothing_in_status(JobLifeCycle.FAILED, ExperimentLifeCycle.FAILED)
        self.build_job.set_status(JobLifeCycle.FAILED)
        assert self.build_job.last_status == JobLifeCycle.FAILED
        self._assert_dependents_in_status(JobLifeCycle.FAILED, ExperimentLifeCycle.FAILED)

    def test_build_job_stopped_sets_dependency_to_stopped(self):
        self._assert_nothing_in_status(JobLifeCycle.STOPPED, ExperimentLifeCycle.STOPPED)
        self.build_job.set_status(JobLifeCycle.STOPPED)
        assert self.build_job.last_status == JobLifeCycle.STOPPED
        self._assert_dependents_in_status(JobLifeCycle.STOPPED, ExperimentLifeCycle.STOPPED)

    def test_build_job_succeeded_starts_dependency(self):
        self._assert_nothing_in_status(JobLifeCycle.SUCCEEDED, ExperimentLifeCycle.SUCCEEDED)
        with patch('scheduler.notebook_scheduler.start_notebook') as mock_notebook, \
                patch('scheduler.tensorboard_scheduler.start_tensorboard') as mock_tensorboard, \
                patch('scheduler.experiment_scheduler.start_experiment') as mock_experiment, \
                patch('scheduler.job_scheduler.start_job') as mock_job:
            self.build_job.set_status(JobLifeCycle.SUCCEEDED)
        # A successful build triggers exactly one start per dependent service.
        assert self.build_job.last_status == JobLifeCycle.SUCCEEDED
        assert mock_notebook.call_count == 1
        assert mock_tensorboard.call_count == 1
        assert mock_experiment.call_count == 1
        assert mock_job.call_count == 1