def test_multiple_job_runs(self, db_session: Session, job_status: JobStatus) -> None:
    """Try to run the same job multiple times.

    Triggering processing repeatedly must be idempotent: every call
    returns 202/success, the preparation dag file exists, and still
    only a single job-run folder is created.
    """
    job_service = get_configured_job_service(db_session, airflow=False)
    user = get_random_user()
    job_id = add_job(job_service, user=user)
    dag_handler = DagHandler()
    job_service.airflow.check_dag_status.return_value = (job_status, datetime.now())

    results = [job_service.process(user=user, job_id=job_id) for _ in range(5)]

    # all() consumes a generator directly - no need to materialize an
    # intermediate list (ruff C419).
    assert all(res == {'code': 202, 'status': 'success'} for res in results)
    assert isfile(
        dag_handler.get_dag_path_from_id(
            dag_handler.get_preparation_dag_id(job_id=job_id)))
    # there should always only be one job run
    assert len([
        d for d in listdir(settings.JOB_FOLDER)
        if isdir(join(settings.JOB_FOLDER, d))
    ]) == 1
def test_update_status_parallel(
        self, db_session: Session,
        dag_status_prep: Tuple[Optional[JobStatus], Optional[datetime]],
        dag_status_parallel: Tuple[Optional[JobStatus], Optional[datetime]],
        ref_job_status: JobStatus) -> None:
    """Test the update_status method.

    Different combinations for the two dags are provided and tested.
    """
    def fake_check_dag_status(
            dag_id: str) -> Tuple[Optional[JobStatus], Optional[datetime]]:
        """Return the dag_status depending on the dag_id."""
        if dag_id.endswith("prep"):
            return dag_status_prep
        if dag_id.endswith("parallel"):
            return dag_status_parallel
        return None, None

    job_service = get_configured_job_service(db_session, airflow=False)
    job_service.airflow.check_dag_status.side_effect = fake_check_dag_status
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    def current_status() -> JobStatus:
        """Fetch the job's status straight from the database."""
        return db_session.query(Job).filter_by(id=job_id).first().status

    assert current_status() == JobStatus.created
    job_service._update_job_status(job_id=job_id)
    assert current_status() == ref_job_status
def test_multiple_job_runs_exception(self, db_session: Session, job_status: JobStatus) -> None:
    """Test exception if running/queued job is triggered again."""
    job_service = get_configured_job_service(db_session, airflow=False)
    user = get_random_user()
    job_id = add_job(job_service, user=user)
    dag_handler = DagHandler()

    # First trigger: the job is freshly created, processing may start.
    job_service.airflow.check_dag_status.return_value = (JobStatus.created,
                                                         datetime.now())
    result1 = job_service.process(user=user, job_id=job_id)
    # Second trigger: the job is now in the parametrized (active) state.
    job_service.airflow.check_dag_status.return_value = (job_status,
                                                         datetime.now())
    result2 = job_service.process(user=user, job_id=job_id)

    assert result1 == {'code': 202, 'status': 'success'}
    assert result2 == get_cannot_start_processing(user["id"], job_id,
                                                  str(job_status))
    prep_dag_id = dag_handler.get_preparation_dag_id(job_id=job_id)
    assert isfile(dag_handler.get_dag_path_from_id(prep_dag_id))
    # one job run should be present
    job_run_dirs = [
        d for d in listdir(settings.JOB_FOLDER)
        if isdir(join(settings.JOB_FOLDER, d))
    ]
    assert len(job_run_dirs) == 1
def test_basic(self, db_session: Session) -> None:
    """Check creating a basic job works as expected."""
    job_service = get_configured_job_service(db_session, files=False)
    user = get_random_user()
    job_data = load_json('pg')

    # here: Job is added to database but dag file is not created
    # (mocked away, as it happens in a separate pkg)
    result = job_service.create(user=user, **job_data)

    assert result['status'] == 'success'
    assert result['code'] == 201
    location = result['headers']['Location']
    results_job_id = result['headers']['OpenEO-Identifier']
    assert location.startswith('jobs/jb-')
    assert results_job_id.startswith('jb-')
    # The Location header is 'jobs/<id>' - the identifier must match.
    assert results_job_id == location[5:]
    assert db_session.query(Job).filter(Job.user_id == user["id"]).filter(
        Job.id == results_job_id).count() == 1
    dag_handler = DagHandler()
    assert not isfile(
        dag_handler.get_dag_path_from_id(
            dag_handler.get_preparation_dag_id(job_id=results_job_id)))
    job_service.processes_service.put_user_defined.assert_called_once_with(
        user=user, process_graph_id="pg_id", **job_data["process"])
def test_get_all_jobs(self, db_session: Session) -> None:
    """Check getting all jobs works as expected."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)
    job_id_update = add_job(job_service, user=user, json_name='job_update_pg')

    # Get jobs
    result = job_service.get_all(user=user)

    assert result['status'] == 'success'
    # Datetime changes for each test -> cannot be compared to fixed value
    assert datetime.strptime(result['data']['jobs'][0].pop('created'),
                             '%Y-%m-%dT%H:%M:%SZ')
    assert datetime.strptime(result['data']['jobs'][1].pop('created'),
                             '%Y-%m-%dT%H:%M:%SZ')
    expected = {
        'code': 200,
        'data': {
            'jobs': [{
                'id': job_id,
                'status': 'created',
                'budget': 1624.78,
                'description': 'some description',
                'plan': 'plan',
                'title': 'evi_job_old',
            }, {
                'id': job_id_update,
                'status': 'created'
            }],
            'links': [],
        },
        'status': 'success',
    }
    assert result == expected
def test_job_active_error(self, db_session: Session, job_status: JobStatus) -> None:
    """Test the correct error is thrown if the user tries to modify an active job."""
    job_service = get_configured_job_service(db_session, airflow=False)
    job_service.airflow.check_dag_status.return_value = (job_status,
                                                         datetime.now())
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    result = job_service.modify(user=user, job_id=job_id, title='New title')

    expected = get_job_locked_exception(user_id=user["id"], job_id=job_id,
                                        job_status=str(job_status))
    assert result == expected
def test_default_estimates(self, db_session: Session) -> None:
    """Check the default estimation works as expected."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()

    result = job_service.estimate(user=user, job_id='test_job')

    expected = {
        "status": "success",
        "code": 200,
        "data": {
            "costs": 0,
        },
    }
    assert result == expected
def test_result_exceptions(self, db_session: Session, job_status: JobStatus,
                           exception_func: Callable) -> None:
    """Check the correct exceptions are returned depending on the current job status."""
    job_service = get_configured_job_service(db_session, airflow=False)
    user = get_random_user()
    job_id = add_job(job_service, user=user)
    job_service.airflow.check_dag_status.return_value = (job_status,
                                                         datetime.now())

    result = job_service.get_results(user=user, job_id=job_id,
                                     api_spec=self.api_spec)

    assert result == exception_func(user_id=user["id"], job_id=job_id)
def test_get_all_empty(self, db_session: Session) -> None:
    """Check getting all jobs when no jobs exist."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()

    result = job_service.get_all(user=user)

    expected = {
        "status": "success",
        "code": 200,
        "data": {
            "jobs": [],
            "links": [],
        }
    }
    assert result == expected
def test_get_results(self, db_session: Session) -> None:
    """Check getting results for a basic job works as expected."""
    job_service = get_configured_job_service(db_session, airflow=False)
    job_service.airflow.check_dag_status.return_value = (JobStatus.finished,
                                                         datetime.now())
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    result = job_service.get_results(user=user, job_id=job_id,
                                     api_spec=self.api_spec)

    assert result["status"] == "success"
    # The asset href contains a generated folder -> only check the suffix.
    assets = result["data"].pop("assets")
    assert assets["sample-output.tif"]["href"].endswith(
        "result/sample-output.tif")
    expected = {
        "status": "success",
        "code": 200,
        "headers": {
            "Expires": "not given",
            "OpenEO-Costs": 0
        },
        "data": {
            'bbox': [1, 2, 3, 4],
            'description': 'some description',
            'geometry': {
                'coordinates': [[[12, 34], [24, 89]]],
                'type': 'Polygon'
            },
            'id': job_id,
            'properties': {
                'datetime': '2020-02-20T16:05:21Z'
            },
            'stac_version': '0.9.0',
            'status': 'finished',
            'title': 'evi_job_old',
            'type': 'Feature',
            'links': [{
                'rel': 'self',
                'href': 'https://openeo.eodc.eu/v1.0/collections'
            }]
        }
    }
    assert result == expected
def test_delete_basic(self, db_session: Session) -> None:
    """Check deleting a basic job works as expected."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    def job_count() -> int:
        """Count this user's rows for the job in the database."""
        return db_session.query(Job).filter_by(
            user_id=user["id"]).filter_by(id=job_id).count()

    assert job_count() == 1
    result = job_service.delete(user=user, job_id=job_id)
    assert result == {"status": "success", "code": 204}

    # Check everything is deleted which should be deleted
    job_service.files_service.delete_complete_job.assert_called_once_with(
        user_id=user["id"], job_id=job_id)
    for dag_id in self.dag_handler.get_all_dag_ids(job_id=job_id):
        job_service.airflow.delete_dag.assert_any_call(dag_id=dag_id)
        assert not isfile(
            self.dag_handler.get_dag_path_from_id(dag_id=dag_id))
    assert job_count() == 0
def test_start_processing_job(self, db_session: Session) -> None:
    """Test start processing a created job."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)
    dag_handler = DagHandler()

    result = job_service.process(user=user, job_id=job_id)

    assert result == {'code': 202, 'status': 'success'}
    prep_dag_id = dag_handler.get_preparation_dag_id(job_id=job_id)
    assert isfile(dag_handler.get_dag_path_from_id(prep_dag_id))
    # one job run should be present
    job_run_dirs = [
        d for d in listdir(settings.JOB_FOLDER)
        if isdir(join(settings.JOB_FOLDER, d))
    ]
    assert len(job_run_dirs) == 1
    job_service.processes_service.get_all_predefined.assert_called_once()
    job_service.airflow.trigger_dag.assert_called_once_with(
        dag_id=prep_dag_id)
def test_start_processing_sync_job(self, db_session: Session) -> None:
    """Check process_sync works as expected."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_data = load_json('pg')
    # Sync processing takes a bare process graph - drop job metadata.
    del job_data["title"]
    del job_data["description"]

    result = job_service.process_sync(user=user, **job_data)

    assert result['status'] == 'success'
    # The file path contains a generated folder -> only check the suffix.
    assert 'result/sample-output.tif' in result['file']
    del result['file']
    assert result == {
        'code': 200,
        'status': 'success',
        'headers': {
            'Content-Type': 'image/tiff',
            'OpenEO-Costs': 0
        }
    }
def test_modify_job_pg(self, db_session: Session) -> None:
    """Test modification of a job's process graph."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    job_args: dict = {'process': {'process_graph': {}}}
    result = job_service.modify(user=user, job_id=job_id, **job_args)
    assert result == {'code': 204, 'status': 'success'}

    result = job_service.get(user=user, job_id=job_id)
    assert result['status'] == 'success'
    assert datetime.strptime(result['data'].pop('created'),
                             '%Y-%m-%dT%H:%M:%SZ')
    assert result['data'].pop('id').startswith('jb-')
    # is returned from MockedProcessesService > always returns PG_OLD_REF
    result['data'].pop('process')
    expected = {
        'code': 200,
        'data': {
            'budget': 1624.78,
            'description': 'some description',
            'plan': 'plan',
            'status': 'created',
            'title': 'evi_job_old'
        },
        'status': 'success'
    }
    assert result == expected
def test_modify_job(self, db_session: Session) -> None:
    """Test modification of simple job attributes."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    job_args = {
        'title': 'New title',
        'description': 'New description',
        'plan': 'new plan',
        'budget': 1.28,
    }
    result = job_service.modify(user=user, job_id=job_id, **job_args)
    assert result == {'code': 204, 'status': 'success'}

    # The returned job must carry the modified attributes plus status.
    job_args.update({'status': 'created'})
    result = job_service.get(user=user, job_id=job_id)
    assert result['status'] == 'success'
    assert datetime.strptime(result['data'].pop('created'),
                             '%Y-%m-%dT%H:%M:%SZ')
    assert result['data'].pop('id').startswith('jb-')
    assert result['data'].pop('process') == PG_OLD_REF['data']
    assert result == {'code': 200, 'data': job_args, 'status': 'success'}
def test_add_and_get(self, db_session: Session) -> None:
    """Check getting details about a job works as expected."""
    job_service = get_configured_job_service(db_session)
    user = get_random_user()
    job_id = add_job(job_service, user=user)

    result = job_service.get(user=user, job_id=job_id)

    assert result['status'] == 'success'
    # Datetime changes for each test -> cannot be compared to fixed value
    assert datetime.strptime(result['data'].pop('created'),
                             '%Y-%m-%dT%H:%M:%SZ')
    assert result['data'].pop('process') == PG_OLD_REF['data']
    expected = {
        'code': 200,
        'data': {
            'id': job_id,
            'budget': 1624.78,
            'description': 'some description',
            'plan': 'plan',
            'status': 'created',
            'title': 'evi_job_old'
        },
        'status': 'success'
    }
    assert result == expected