def create_test_jobs_1(self):
    """
    Create four test jobs in the database:
    - 2 jobs in ERROR state, each assigned to a different LSF cluster
    - 2 jobs in FINISHED state, each assigned to a different LSF cluster
    Each job also gets an output directory created on disk.
    """
    run_environment = RUN_CONFIG.get('run_env')
    lsf_config = RUN_CONFIG.get('lsf_submission')
    lsf_host = lsf_config['lsf_host']

    with self.flask_app.app_context():
        # Counter doubles as a fake (unique) LSF job id.
        lsf_job_counter = 0
        job_statuses = (
            delayed_job_models.JobStatuses.FINISHED,
            delayed_job_models.JobStatuses.ERROR,
        )
        cluster_hosts = (lsf_host, 'another_host')

        for job_status in job_statuses:
            for cluster_host in cluster_hosts:
                new_job = delayed_job_models.DelayedJob(
                    id=f'Job-{cluster_host}-{job_status}',
                    type='TEST',
                    lsf_job_id=lsf_job_counter,
                    status=job_status,
                    lsf_host=cluster_host,
                    run_environment=run_environment,
                    # Staggered timestamps: created < started < finished.
                    created_at=datetime.utcnow(),
                    started_at=datetime.utcnow() + timedelta(seconds=1),
                    finished_at=datetime.utcnow() + timedelta(seconds=2),
                )
                new_job.output_dir_path = job_submission_service.get_job_output_dir_path(new_job)
                os.makedirs(new_job.output_dir_path, exist_ok=True)
                delayed_job_models.save_job(new_job)
                lsf_job_counter += 1
def test_calculates_correctly_the_number_of_input_files(self):
    """
    Test that the number of input files of a job is calculated correctly.
    Creates two real input files on disk, registers them with a job, and
    checks the statistics generator counts them.
    """
    job = delayed_job_models.DelayedJob(
        id='Job-Finished',  # plain string: original used an f-string with no placeholders
        type='TEST'
    )
    # NOTE(review): a random int is collision-prone; tempfile.mkdtemp would be
    # safer, but keep the project's existing tmp-dir convention.
    tmp_dir = Path('tmp') / str(random.randint(1, 1000000))
    os.makedirs(tmp_dir, exist_ok=True)
    try:
        input_files_desc = {
            'input1': str(tmp_dir / 'input1.txt'),
            'input2': str(tmp_dir / 'input2.txt'),
        }
        for key, path in input_files_desc.items():
            with open(path, 'w') as input_file:
                input_file.write(f'This is input file {key}')
            job_input_file = delayed_job_models.InputFile(
                internal_path=str(path),
            )
            job.input_files.append(job_input_file)

        num_input_files_must_be = len(input_files_desc)
        num_input_files_got = statistics_generator.get_num_input_files_of_job(job)
        self.assertEqual(num_input_files_got, num_input_files_must_be,
                         msg='The number of input files was not calculated correctly')
    finally:
        # Clean up even when the assertion fails (original leaked the dir).
        shutil.rmtree(tmp_dir)
def test_calculates_correctly_the_number_of_output_files(self):
    """
    Test that the number of output files of a job is calculated correctly.
    Creates real output files (one at the top level, one in a subdirectory),
    registers them with a job, and checks the statistics generator counts them.
    """
    job = delayed_job_models.DelayedJob(
        id='Job-Finished',  # plain string: original used an f-string with no placeholders
        type='TEST'
    )
    # NOTE(review): a random int is collision-prone; tempfile.mkdtemp would be
    # safer, but keep the project's existing tmp-dir convention.
    tmp_dir = Path('tmp') / str(random.randint(1, 1000000))
    os.makedirs(tmp_dir, exist_ok=True)
    try:
        num_output_files_created = 0
        # '' -> file directly under tmp_dir; 'subdir/' -> nested file.
        for subdir in ['', 'subdir/']:
            out_file_name = f'output_{num_output_files_created}.txt'
            out_file_path = f'{tmp_dir}/{subdir}{out_file_name}'
            os.makedirs(Path(out_file_path).parent, exist_ok=True)
            with open(out_file_path, 'wt') as out_file:
                out_file.write(f'This is output file {num_output_files_created}')
            job_output_file = delayed_job_models.OutputFile(
                internal_path=out_file_path
            )
            job.output_files.append(job_output_file)
            num_output_files_created += 1

        num_output_files_must_be = num_output_files_created
        num_output_files_got = statistics_generator.get_num_output_files_of_job(job)
        self.assertEqual(num_output_files_must_be, num_output_files_got,
                         msg='The number of output files was not calculated correctly')
    finally:
        # Clean up even when the assertion fails (original leaked the dir).
        shutil.rmtree(tmp_dir)
def test_calculates_time_from_running_to_finished(self):
    """
    Test that the seconds elapsed between a job starting to run and
    finishing are calculated correctly.
    """
    started_at = datetime.now()
    seconds_must_be = 1.5
    finished_at = started_at + timedelta(seconds=seconds_must_be)
    job = delayed_job_models.DelayedJob(
        id='Job-Finished',  # plain string: original used an f-string with no placeholders
        type='TEST',
        started_at=started_at,
        finished_at=finished_at
    )
    seconds_got = statistics_generator.get_seconds_from_running_to_finished(job)
    # Fixed copy-pasted message: this test measures running -> finished,
    # not created -> queued.
    self.assertEqual(seconds_got, seconds_must_be,
                     msg='The seconds from running to finished were not calculated correctly!')