def test_does_not_return_an_expired_job(self):
    """
    Verifies that fetching a job by id after its expiration time raises
    JobNotFoundError and that the expired job row is removed from the database.
    """
    with self.flask_app.app_context():
        run_params = {
            'search_type': 'SIMILARITY',
            'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
            'threshold': '70'
        }
        file_hashes = {
            'input1': 'hash1-hash1-hash1-hash1-hash1-hash1-hash1',
            'input2': 'hash2-hash2-hash2-hash2-hash2-hash2-hash2',
        }
        job = delayed_job_models.get_or_create('SIMILARITY', run_params, 'some_url', file_hashes)
        # Force the job to look expired: push its expiry one day into the past.
        job.expires_at = datetime.datetime.utcnow() - datetime.timedelta(days=1)
        job_id = job.id
        with self.assertRaises(delayed_job_models.JobNotFoundError,
                               msg='The job should have not been returned!'):
            delayed_job_models.get_job_by_id(job_id)
        # The lookup above should have deleted the expired row as a side effect.
        remaining_job = delayed_job_models.DelayedJob.query.filter_by(id=job_id).first()
        self.assertIsNone(remaining_job, msg='The job must have been deleted!')
def check_if_job_exists(job_id):
    """
    Checks that a job with the given id exists; does nothing when it does.

    :param job_id: id of the job to check
    :raises JobNotFoundError: when no job with the given id exists
    """
    try:
        delayed_job_models.get_job_by_id(job_id, force_refresh=True)
    except delayed_job_models.JobNotFoundError as error:
        # Chain the model-layer exception so the original traceback is preserved.
        raise JobNotFoundError(f'Job with id {job_id} not found') from error
def test_parses_the_output_of_bjobs_when_no_jobs_were_found(self):
    """
    Generates mock jobs, then feeds the daemon a captured LSF output that
    reports no jobs, and checks that no job status was modified.
    """
    self.create_test_jobs_0()
    sample_output = self.load_sample_file('app/job_status_daemon/test/data/sample_lsf_output_0.txt')
    with self.flask_app.app_context():
        daemon.parse_bjobs_output(sample_output)
        # The config lookup is loop-invariant, so read it once up front.
        lsf_config = RUN_CONFIG.get('lsf_submission')
        lsf_host = lsf_config['lsf_host']
        # No status should have changed for any combination of status and host.
        for status_must_be in [delayed_job_models.JobStatuses.CREATED,
                               delayed_job_models.JobStatuses.QUEUED,
                               delayed_job_models.JobStatuses.RUNNING,
                               delayed_job_models.JobStatuses.FINISHED,
                               delayed_job_models.JobStatuses.ERROR]:
            for assigned_host in [lsf_host, 'another_host']:
                id_to_check = f'Job-{assigned_host}-{status_must_be}'
                job = delayed_job_models.get_job_by_id(id_to_check)
                status_got = job.status
                self.assertEqual(status_got, status_must_be,
                                 msg='The status was modified! This should have not modified the status')
def test_update_job_status(self):
    """
    Tests that a job can update its status via the PATCH /status/<job_id> endpoint.
    """
    job_type = 'SIMILARITY'
    params = {
        'search_type': 'SIMILARITY',
        'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
        'threshold': '70'
    }
    docker_image_url = 'some url'
    with self.flask_app.app_context():
        job_must_be = delayed_job_models.get_or_create(job_type, params, docker_image_url)
        job_id = job_must_be.id
        new_data = {
            'progress': 50,
            'status_log': 'Loading database',
            'status_description': '{"msg":"Smiles file loaded"}'
        }
        # The endpoint is authenticated with a per-job token.
        token = token_generator.generate_job_token(job_id)
        headers = {'X-Job-Key': token}
        client = self.client
        response = client.patch(f'/status/{job_id}', data=new_data, headers=headers)
        self.assertEqual(response.status_code, 200,
                         msg='The request should have not failed')
        job_got = delayed_job_models.get_job_by_id(job_id)
        # Be sure to have a fresh version of the object from the database.
        DB.session.rollback()
        DB.session.expire(job_got)
        DB.session.refresh(job_got)
        progress_got = job_got.progress
        self.assertEqual(progress_got, new_data['progress'],
                         msg='The progress was not updated correctly!')
        status_log_got = job_got.status_log
        self.assertIsNotNone(status_log_got,
                             msg='The status log was not set correctly!')
        # The stored log is accumulative, so it must differ from the raw
        # message that was sent.
        self.assertNotEqual(
            status_log_got, new_data['status_log'],
            msg='It seems that the status log was not saved '
                'correctly. It should be accumulative')
        status_description_got = job_got.status_description
        self.assertEqual(
            status_description_got, new_data['status_description'],
            msg='The status description was not updated correctly!')
def get_job_status(job_id, server_base_url='http://0.0.0.0:5000'):
    """
    Returns a dict representation of the job with the id given as parameter
    :param job_id: the id of the job for which the status is required
    :param server_base_url: url to use as base for building the output files urls
    :return: a dict with the public properties of a job.
    :raises JobNotFoundError: when no job with the given id exists
    """
    try:
        job = delayed_job_models.get_job_by_id(job_id, force_refresh=True)
        return job.public_dict(server_base_url)
    except delayed_job_models.JobNotFoundError as error:
        # Re-raise as the service-layer error, chaining the original so the
        # traceback is preserved for debugging.
        raise JobNotFoundError() from error
def update_job_progress(job_id, progress, status_log, status_description):
    """
    Updates the status of the job with the id given as parameter.
    :param job_id: job_id of the job to modify
    :param progress: the progress percentage of the job
    :param status_log: a message to append to the public job status log
    :param status_description: a description of the current status
    :return: a dict with the public properties of a job.
    :raises JobNotFoundError: when no job with the given id exists
    """
    try:
        delayed_job_models.update_job_progress(job_id, progress, status_log, status_description)
        job = delayed_job_models.get_job_by_id(job_id)
        return job.public_dict()
    except delayed_job_models.JobNotFoundError as error:
        # Re-raise as the service-layer error, chaining the original so the
        # traceback is preserved for debugging.
        raise JobNotFoundError() from error
def delete_all_outputs_of_job(job_id):
    """
    Deletes all the outputs of the job given as parameter. The job must be
    in FINISHED state.
    :param job_id: id of the job for which the outputs will be deleted.
    :return: a confirmation message
    """
    # Keep the try body narrow: only the lookup can raise JobNotFoundError.
    try:
        job = delayed_job_models.get_job_by_id(job_id)
    except delayed_job_models.JobNotFoundError:
        # abort() raises an HTTPException itself; no explicit raise is needed.
        abort(404)
    output_dir_path = job.output_dir_path
    for item in os.listdir(output_dir_path):
        item_path = str(Path(output_dir_path).joinpath(item))
        # EAFP: try to delete as a directory tree, fall back to a plain file.
        try:
            shutil.rmtree(item_path)
        except NotADirectoryError:
            os.remove(item_path)
    return f'All outputs of the job {job_id} were deleted!'
def test_job_with_custom_config_can_be_submitted(self):
    """
    Test that a job with a custom config can be submitted and that the custom
    config values end up in the generated run params file.
    """
    with self.flask_app.app_context():
        # Remember that if generate_default_config: True, it generates the
        # default config in the delayed job models.
        job_type = 'DOWNLOAD'
        docker_image_url = 'some_url'
        input_files_desc, input_files_hashes, params = self.prepare_mock_job_args()
        submission_result = job_submission_service.submit_job(
            job_type, input_files_desc, input_files_hashes, docker_image_url, params)
        job_id = submission_result.get('job_id')
        job_run_dir_must_be = os.path.join(job_submission_service.JOBS_RUN_DIR, job_id)
        params_file_must_be = os.path.join(job_run_dir_must_be,
                                           job_submission_service.RUN_PARAMS_FILENAME)
        # Use a context manager so the file handle is closed even on failure
        # (the original left it open).
        with open(params_file_must_be, 'r') as params_file:
            params_got = yaml.load(params_file, Loader=yaml.FullLoader)
        job_got = delayed_job_models.get_job_by_id(job_id)
        configs_must_be = delayed_job_models.get_custom_config_values(job_got.type)
        custom_config_got = params_got.get('custom_job_config')
        for config in configs_must_be:
            key = config.key
            value_must_be = config.value
            value_got = custom_config_got.get(key)
            self.assertEqual(value_got, value_must_be,
                             msg=f'{key} was not set up correctly!')
def test_job_can_be_submitted(self):
    """
    Test that a job can be submitted and that all the run artifacts (run dir,
    run params file, input files, output dir, submission script) are created
    and filled in correctly.
    """
    with self.flask_app.app_context():
        job_type = 'TEST'
        docker_image_url = 'some_url'
        input_files_desc, input_files_hashes, params = self.prepare_mock_job_args()
        submission_result = job_submission_service.submit_job(
            job_type, input_files_desc, input_files_hashes, docker_image_url, params)
        job_id = submission_result.get('job_id')
        job_data = delayed_job_models.get_job_by_id(job_id).public_dict()
        # -----------------------------------------------
        # Test Run Dir
        # -----------------------------------------------
        job_run_dir_must_be = os.path.join(job_submission_service.JOBS_RUN_DIR, job_id)
        self.assertTrue(
            os.path.isdir(job_run_dir_must_be),
            msg=f'The run dir for the job ({job_run_dir_must_be}) has not been created!')
        input_files_dir_must_be = os.path.join(
            job_run_dir_must_be, job_submission_service.INPUT_FILES_DIR_NAME)
        self.assertTrue(
            os.path.isdir(input_files_dir_must_be),
            msg=f'The input files dir for the job ({input_files_dir_must_be}) has not been created!')
        # -----------------------------------------------
        # Test Run Params
        # -----------------------------------------------
        params_file_must_be = os.path.join(
            job_run_dir_must_be, job_submission_service.RUN_PARAMS_FILENAME)
        self.assertTrue(
            os.path.isfile(params_file_must_be),
            msg=f'The run params file for the job ({params_file_must_be}) has not been created!')
        # Context manager guarantees the handle is closed even if an assertion fails.
        with open(params_file_must_be, 'r') as params_file:
            params_got = yaml.load(params_file, Loader=yaml.FullLoader)
        token_must_be = token_generator.generate_job_token(job_id)
        token_got = params_got.get('job_token')
        self.assertEqual(token_must_be, token_got,
                         msg='The token was not generated correctly')
        job_id_must_be = job_id
        job_id_got = params_got.get('job_id')
        self.assertEqual(job_id_must_be, job_id_got,
                         msg='The job id was not generated correctly')
        status_update_url_must_be = f'http://0.0.0.0:5000/status/{job_id}'
        status_update_url_got = params_got.get('status_update_endpoint').get('url')
        self.assertEqual(status_update_url_must_be, status_update_url_got,
                         msg='The status update url was not set correctly!')
        status_update_method_must_be = 'PATCH'
        status_update_method_got = params_got.get('status_update_endpoint').get('method')
        self.assertEqual(status_update_method_must_be, status_update_method_got,
                         msg='The status update method was not set correctly!')
        # The raw params are stored as canonical (sorted-keys) JSON, so compare
        # against the same serialization.
        job_params_got = params_got.get('job_params')
        raw_job_params_must_be = job_data.get('raw_params')
        self.assertEqual(json.dumps(job_params_got, sort_keys=True),
                         raw_job_params_must_be,
                         msg='The job params were not set correctly')
        custom_statistics_url_must_be = f'http://0.0.0.0:5000' \
            f'/custom_statistics/submit_statistics/{job_type.lower()}_job/{job_id}'
        custom_statistics_url_got = params_got.get('custom_statistics_endpoint', {}).get('url')
        self.assertEqual(custom_statistics_url_must_be, custom_statistics_url_got,
                         msg='The custom statistics url was not set correctly!')
        custom_statistics_method_must_be = 'POST'
        custom_statistics_method_got = params_got.get('custom_statistics_endpoint', {}).get('method')
        self.assertEqual(custom_statistics_method_must_be, custom_statistics_method_got,
                         msg='The custom statistics method was not set correctly!')
        # -----------------------------------------------
        # Test Input Files
        # -----------------------------------------------
        job_input_files_desc_got = params_got.get('inputs')
        # Only the keys of input_files_desc are needed here.
        for key in input_files_desc:
            run_path_must_be = job_input_files_desc_got[key]
            self.assertTrue(
                os.path.isfile(run_path_must_be),
                msg=f'The input file for the job ({run_path_must_be}) has not been created!')
        job_got = delayed_job_models.get_job_by_id(job_id)
        input_files_got = job_got.input_files
        num_inputs_files_must_be = len(os.listdir(input_files_dir_must_be))
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(num_inputs_files_must_be, len(input_files_got),
                         msg='The input files were not registered correctly!')
        for input_file in input_files_got:
            internal_path_got = input_file.internal_path
            self.assertTrue(
                os.path.isfile(internal_path_got),
                msg=f'The internal path of an input file {internal_path_got} '
                    f'seems that does not exist!')
        # -----------------------------------------------
        # Test Output Directory
        # -----------------------------------------------
        output_dir_must_be = Path(job_submission_service.JOBS_OUTPUT_DIR).joinpath(job_id)
        output_dir_got = params_got.get('output_dir')
        self.assertEqual(str(output_dir_got), str(output_dir_must_be),
                         msg='The job output dir was not set correctly')
        self.assertTrue(
            os.path.isdir(output_dir_must_be),
            msg=f'The output dir for the job ({output_dir_must_be}) has not been created!')
        # -----------------------------------------------
        # Submission script file
        # -----------------------------------------------
        submission_script_file_must_be = os.path.join(
            job_run_dir_must_be, job_submission_service.SUBMISSION_FILE_NAME)
        self.assertTrue(
            os.path.isfile(submission_script_file_must_be),
            msg=f'The script file for submitting the job ({submission_script_file_must_be}) '
                f'has not been created!')
        self.assertTrue(
            os.access(submission_script_file_must_be, os.X_OK),
            msg=f'The script file for the job ({submission_script_file_must_be}) is not executable!')