def test_gets_one_job(db):
    """A lone succeeded job comes back with its action and state intact."""
    job_factory(workspace="the-workspace", action="the-action", state=State.SUCCEEDED)

    found = only(calculate_workspace_state("the-workspace"))

    assert found.action == "the-action"
    assert found.state == State.SUCCEEDED
def test_gets_a_job_for_each_action(db):
    """Each distinct action in the workspace contributes one job to the state."""
    job_factory(workspace="the-workspace", action="action1")
    job_factory(workspace="the-workspace", action="action2")

    jobs = calculate_workspace_state("the-workspace")

    assert len(jobs) == 2
    # Hoist the action list out of the loop; assertions are unchanged.
    seen_actions = [job.action for job in jobs]
    for expected in ["action1", "action2"]:
        assert expected in seen_actions
def test_ignores_cancelled_jobs_when_calculating_dependencies(db):
    """Outputs from a cancelled run must never feed a downstream job, even
    when the cancelled run is more recent than the completed one."""
    # Older, completed run of the dependency action.
    job_factory(
        id="1",
        action="other-action",
        state=State.SUCCEEDED,
        created_at=1000,
        outputs={"output-from-completed-run": "highly_sensitive_output"},
    )
    # Newer run of the same action, but cancelled — its outputs are ignored.
    job_factory(
        id="2",
        action="other-action",
        state=State.SUCCEEDED,
        created_at=2000,
        cancelled=True,
        outputs={"output-from-cancelled-run": "highly_sensitive_output"},
    )

    api = RecordingExecutor(
        JobStatus(ExecutorState.UNKNOWN), JobStatus(ExecutorState.PREPARING)
    )
    dependent = job_factory(
        id="3", requires_outputs_from=["other-action"], state=State.PENDING
    )
    run.handle_job(dependent, api)

    # Only the completed run's output reaches the dependent job.
    assert api.job.inputs == ["output-from-completed-run"]
def test_handle_job_finalized_success_with_delete(db):
    """On successful finalization, outputs left over from a previous run are
    deleted from both privacy areas and the executor is cleaned up."""
    api = StubExecutorAPI()

    # A previous successful run left an output file behind.
    job_factory(
        state=State.SUCCEEDED,
        outputs={"output/old.csv": "medium"},
    )

    job = api.add_test_job(ExecutorState.FINALIZED, State.RUNNING)
    api.set_job_result(job, outputs={"output/file.csv": "medium"})

    run.handle_job(job, api)

    # Executor side: the job was cleaned up and is now unknown.
    assert job.id in api.tracker["cleanup"]
    assert api.get_status(job).state == ExecutorState.UNKNOWN

    # Scheduler side: success recorded along with the fresh outputs.
    assert job.state == State.SUCCEEDED
    assert job.status_message == "Completed successfully"
    assert job.outputs == {"output/file.csv": "medium"}

    # The obsolete file was removed from both the medium and high privacy areas.
    assert api.deleted["workspace"][Privacy.MEDIUM] == ["output/old.csv"]
    assert api.deleted["workspace"][Privacy.HIGH] == ["output/old.csv"]
def test_returns_correct_data(self, client):
    """The jobs endpoint serialises a job's id, name and prefix."""
    job_factory()

    response = client.get(url_for('jobs.jobs'))

    expected = [{
        'id': 1,
        'name': 'Test job',
        'prefix': '/test-job'
    }]
    assert response.json == expected
def test_gets_the_latest_job_for_an_action(db):
    """When an action ran more than once, only the most recent run counts."""
    # Earlier failed run.
    job_factory(
        workspace="the-workspace",
        action="the-action",
        created_at=1000,
        state=State.FAILED,
    )
    # Later successful run — this is the one the state should reflect.
    job_factory(
        workspace="the-workspace",
        action="the-action",
        created_at=2000,
        state=State.SUCCEEDED,
    )

    latest = only(calculate_workspace_state("the-workspace"))
    assert latest.state == State.SUCCEEDED
def test_ignores_cancelled_jobs(db):
    """A cancelled run is skipped even when it is newer than a completed one."""
    # Earlier failed run — still the effective state of the action.
    job_factory(
        workspace="the-workspace",
        action="the-action",
        created_at=1000,
        state=State.FAILED,
    )
    # Newer run that was cancelled; it must not shadow the failed run.
    job_factory(
        workspace="the-workspace",
        action="the-action",
        created_at=2000,
        state=State.SUCCEEDED,
        cancelled=True,
    )

    effective = only(calculate_workspace_state("the-workspace"))
    assert effective.state == State.FAILED
def files(self):
    """Create one job with four files attached and return the files."""
    job = job_factory()
    return [file_factory(job_id=job.id) for _ in range(4)]
def test_get(self, client):
    """Listing a job's files returns the full serialised representation."""
    job = job_factory()
    file = file_factory(job_id=job.id)

    response = client.get(url_for('files.files_get', job_id=job.id))

    assert response.status_code == 200
    expected = [{
        'annotations': [],
        'bucket_path': '/test-job/my-foto.png',
        'created_at': '2019-03-01T00:00:00+02:00',
        'id': file.id,
        'job_id': job.id,
    }]
    assert response.json == expected
def test_file_get(self, client, mock_bucket):
    """Fetching a single file includes a signed download URL from the bucket."""
    job = job_factory()
    file = file_factory(job_id=job.id)

    response = client.get(url_for('files.file_get', file_id=file.id))

    assert response.status_code == 200
    expected = {
        'annotations': [],
        'bucket_path': '/test-job/my-foto.png',
        'created_at': '2019-03-01T00:00:00+02:00',
        'id': 1,
        'signed_url': 'mock-signed-url',
        'job_id': 1
    }
    assert response.json == expected
def test_get_obsolete_files_nothing_to_delete(db):
    """Identical old and new outputs yield no obsolete files."""
    outputs = {
        "high.txt": "high_privacy",
        "medium.txt": "medium_privacy",
    }
    job = job_factory(state=State.SUCCEEDED, outputs=outputs)
    definition = run.job_to_job_definition(job)

    assert run.get_obsolete_files(definition, outputs) == []
def test_get_obsolete_files_case_change(db):
    """A filename differing only in case is not treated as obsolete."""
    previous = {
        "high.txt": "high_privacy",
    }
    current = {
        "HIGH.txt": "high_privacy",
    }
    job = job_factory(state=State.SUCCEEDED, outputs=previous)
    definition = run.job_to_job_definition(job)

    assert run.get_obsolete_files(definition, current) == []
def test_get_obsolete_files_things_to_delete(db):
    """Old outputs absent from the new set are reported as obsolete;
    files present in both sets are kept."""
    previous = {
        "old_high.txt": "high_privacy",
        "old_medium.txt": "medium_privacy",
        "current.txt": "high_privacy",
    }
    current = {
        "new_high.txt": "high_privacy",
        "new_medium.txt": "medium_privacy",
        "current.txt": "high_privacy",
    }
    job = job_factory(state=State.SUCCEEDED, outputs=previous)
    definition = run.job_to_job_definition(job)

    obsolete = run.get_obsolete_files(definition, current)
    assert obsolete == ["old_high.txt", "old_medium.txt"]
def test_returns_200(self, client):
    """The jobs listing endpoint responds with HTTP 200."""
    job_factory()
    assert client.get(url_for('jobs.jobs')).status_code == 200
def job(self):
    """Provide a freshly created job."""
    return job_factory()
def test_does_not_return_categories_from_other_job(self, client, categories, job):
    """A category attached to a different job is excluded from the listing."""
    # Category belonging to an unrelated job — must not appear in the response.
    category_factory(job_id=job_factory().id)

    response = client.get(url_for('categories.categories', job_id=job.id))

    # Only the two categories from the `categories` fixture are returned.
    assert len(response.json) == 2
def test_doesnt_include_dummy_error_jobs(db):
    """Jobs with the dummy "__error__" action are excluded from workspace state."""
    job_factory(workspace="the-workspace", action="__error__")
    assert not calculate_workspace_state("the-workspace")
def test_job_definition_limits(db):
    """A job definition carries the configured CPU and memory limits."""
    definition = run.job_to_job_definition(job_factory())

    assert definition.cpu_count == 2
    assert definition.memory_limit == "4G"
def file_annotation():
    """Build an annotation attached to a file of a fresh job."""
    parent_job = job_factory()
    parent_file = file_factory(job_id=parent_job.id)
    return file_annotation_factory(file_id=parent_file.id)