Exemplo n.º 1
0
    def test_a_job_is_created_only_once(self):
        """
        Tests that there can only exist one job instance with given a set of parameters.
        """

        with self.flask_app.app_context():
            job_type = 'SIMILARITY'
            params = {
                'search_type': 'SIMILARITY',
                'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
                'threshold': '70'
            }
            docker_image_url = 'some_url'

            # First request creates the job; mark it as finished.
            original_job = delayed_job_models.get_or_create(job_type, params, docker_image_url)
            expected_status = delayed_job_models.JobStatuses.FINISHED
            original_job.status = expected_status

            # Requesting a job with identical params must hand back the same instance.
            duplicate_job = delayed_job_models.get_or_create(job_type, params, docker_image_url)

            self.assertEqual(original_job.id, duplicate_job.id, msg='A job with the same params was created twice!')
            self.assertEqual(duplicate_job.status, expected_status, msg='A job with the same params was created twice!')
    def simulate_finished_job_of_a_type(self, job_type):
        """
        Creates in the database a job that is already finished, of the type given as parameter.
        A random structure parameter is used so each call produces a distinct job id.
        :param job_type: type that you want the job to be
        :return: the saved job object
        """
        # Random structure => random job id on every call.
        random_structure = ''.join(
            random.choice(string.ascii_lowercase) for _ in range(10))
        job = delayed_job_models.get_or_create(job_type,
                                               {'structure': random_structure},
                                               'some_url')

        # Give the job real run and output directories on disk.
        run_dir = os.path.join(self.ABS_RUN_DIR_PATH, job.id)
        job.run_dir_path = run_dir
        os.makedirs(run_dir, exist_ok=True)

        out_dir = os.path.join(self.ABS_OUT_DIR_PATH, job.id)
        job.output_dir_path = out_dir
        os.makedirs(out_dir, exist_ok=True)

        # Add some outputs, then mark the job finished and persist it.
        utils.simulate_outputs_of_job(job, out_dir)
        job.status = delayed_job_models.JobStatuses.FINISHED
        delayed_job_models.save_job(job)

        return job
Exemplo n.º 3
0
    def test_get_existing_job_status(self):
        """
        Tests that the status of an existing job is returned correctly.
        """

        job_type = 'SIMILARITY'
        params = {
            'search_type': 'SIMILARITY',
            'structure':
            '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
            'threshold': '70'
        }
        docker_image_url = 'some url'

        # Serialised properties that the status endpoint must echo back.
        checked_props = [
            'type', 'status', 'status_log', 'progress', 'created_at',
            'started_at', 'finished_at', 'raw_params', 'expires_at',
            'api_initial_url', 'docker_image_url', 'timezone',
            'num_failures', 'status_description'
        ]

        with self.flask_app.app_context():
            job = delayed_job_models.get_or_create(job_type, params,
                                                   docker_image_url)

            response = self.client.get(f'/status/{job.id}')
            response_json = json.loads(response.data.decode('utf-8'))

            # Every property in the response must match the stored job.
            for prop in checked_props:
                value_must_be = str(getattr(job, prop))
                value_got = response_json[prop]
                self.assertEqual(
                    value_must_be,
                    value_got,
                    msg=f'The returned job {prop} is not correct.')
Exemplo n.º 4
0
    def test_update_job_status(self):
        """
        Tests that a job can update its status via PATCH /status/<job_id>.

        Verifies that progress and status_description are overwritten with the
        new values, while status_log is accumulative: the new entry is appended
        to the existing log rather than replacing it.
        """

        job_type = 'SIMILARITY'
        params = {
            'search_type': 'SIMILARITY',
            'structure':
            '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
            'threshold': '70'
        }
        docker_image_url = 'some url'

        with self.flask_app.app_context():
            job_must_be = delayed_job_models.get_or_create(
                job_type, params, docker_image_url)
            job_id = job_must_be.id
            new_data = {
                'progress': 50,
                'status_log': 'Loading database',
                'status_description': '{"msg":"Smiles file loaded"}'
            }

            # The job authenticates with its own token.
            token = token_generator.generate_job_token(job_id)
            headers = {'X-Job-Key': token}

            client = self.client
            response = client.patch(f'/status/{job_id}',
                                    data=new_data,
                                    headers=headers)
            self.assertEqual(response.status_code,
                             200,
                             msg='The request should have not failed')

            job_got = delayed_job_models.get_job_by_id(job_id)
            # be sure to have a fresh version of the object
            DB.session.rollback()
            DB.session.expire(job_got)
            DB.session.refresh(job_got)

            progress_got = job_got.progress
            self.assertEqual(progress_got,
                             new_data['progress'],
                             msg='The progress was not updated correctly!')

            status_log_got = job_got.status_log
            self.assertIsNotNone(status_log_got,
                                 msg='The status log was not set correctly!')
            # The log is accumulative, so it must differ from the bare new entry.
            self.assertNotEqual(
                status_log_got,
                new_data['status_log'],
                msg='It seems that the status log was not saved '
                'correctly. It should be accumulative')

            status_description_got = job_got.status_description
            self.assertEqual(
                status_description_got,
                new_data['status_description'],
                msg='The status description was not updated correctly!')
Exemplo n.º 5
0
    def test_a_job_is_created(self):
        """
        Tests that a job is created correctly
        """

        with self.flask_app.app_context():
            job_type = 'SIMILARITY'
            params = {
                'search_type': 'SIMILARITY',
                'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
                'threshold': '70'
            }
            docker_image_url_must_be = 'some_url'

            created_job = delayed_job_models.get_or_create(job_type, params, docker_image_url_must_be)

            # Load the row back directly from the database and check every field.
            saved_job = delayed_job_models.DelayedJob.query.filter_by(id=created_job.id).first()

            self.assertEqual(job_type, saved_job.type, msg='The job type was not saved correctly.')
            self.assertEqual(delayed_job_models.JobStatuses.CREATED, saved_job.status,
                             msg='The job status was not saved correctly.')
            self.assertEqual(0, saved_job.progress, msg='The job progress was not saved correctly.')

            # The creation timestamp should be "now", to within roughly a tenth of a second.
            created_at_got = saved_job.created_at.timestamp()
            created_at_must_be = datetime.datetime.utcnow().timestamp()
            self.assertAlmostEqual(created_at_got, created_at_must_be, places=1,
                                   msg='The created time was not calculated correctly')

            self.assertEqual(saved_job.raw_params, json.dumps(params),
                             msg='The parameters where not saved correctly')

            self.assertEqual(saved_job.docker_image_url, docker_image_url_must_be,
                             msg='The docker image url was not saved correctly')
Exemplo n.º 6
0
    def test_does_not_return_an_expired_job(self):
        """
        Tests that when getting a job by id, it is not returned and it is deleted if has is expired
        """
        with self.flask_app.app_context():
            search_params = {
                'search_type': 'SIMILARITY',
                'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
                'threshold': '70'
            }
            input_files_hashes = {
                'input1': 'hash1-hash1-hash1-hash1-hash1-hash1-hash1',
                'input2': 'hash2-hash2-hash2-hash2-hash2-hash2-hash2',
            }

            job = delayed_job_models.get_or_create('SIMILARITY', search_params,
                                                   'some_url', input_files_hashes)

            # Push the expiration date one day into the past.
            job.expires_at = datetime.datetime.utcnow() - datetime.timedelta(days=1)
            expired_job_id = job.id

            # Fetching the expired job by id must raise...
            with self.assertRaises(delayed_job_models.JobNotFoundError, msg='The job should have not been returned!'):
                delayed_job_models.get_job_by_id(expired_job_id)

            # ...and delete the row as a side effect.
            remaining_row = delayed_job_models.DelayedJob.query.filter_by(id=expired_job_id).first()
            self.assertIsNone(remaining_row, msg='The job must have been deleted!')
    def test_gets_lsf_job_resources_params_correctly_when_script_returns_some_params(
            self):
        """
        Tests that if can get the parameters string for the bsub command correctly when script returns default
        """
        with self.flask_app.app_context():
            input_files_desc, input_files_hashes, params = self.prepare_mock_job_args(
            )
            job = delayed_job_models.get_or_create('TEST', params, 'some_url',
                                                   input_files_hashes)
            job_submission_service.create_job_run_dir(job)

            test_job_config = delayed_job_models.get_job_config('TEST')

            print('job.run_dir_path: ', job.run_dir_path)

            # Write a throwaway requirements script that echoes a fixed
            # resources-parameters string.
            resources_params_must_be = '-n 2 -M 8192 -R "rusage[mem=8192]"'
            source_requirements_script_path = 'requirements.py'
            script_lines = [
                '#!/usr/bin/env python3\n',
                f'print(\'{resources_params_must_be}\')\n',
            ]
            with open(source_requirements_script_path,
                      'wt') as requirements_script:
                requirements_script.writelines(script_lines)

            test_job_config.requirements_script_path = source_requirements_script_path

            resources_params_got = job_submission_service.get_job_resources_params(
                job)

            self.assertEqual(
                resources_params_got,
                resources_params_must_be,
                msg='The resources params were not calculated correctly!')

            # The script must have been copied into the job's run dir.
            job_requirements_script_path = Path(
                job.run_dir_path).joinpath('requirements_calculation.py')
            print('job_requirements_script_path: ',
                  job_requirements_script_path)

            self.assertTrue(os.path.isfile(job_requirements_script_path),
                            msg='The requirements script was not created!')

            os.remove(source_requirements_script_path)
Exemplo n.º 8
0
def simulate_finished_job(run_dir_path, expires_at=None):
    """
    Creates in the database a job that is finished. It will expire at the date passed as parameter.
    :param run_dir_path: path of the run dir of the job
    :param expires_at: Expiration date that you want for the job. None if it is not necessary for your test
    :return: the saved job object
    """

    # create a job; the random structure makes every call produce a distinct id
    job_type = 'SIMILARITY'
    params = {
        'search_type':
        'SIMILARITY',
        'structure':
        ''.join(random.choice(string.ascii_lowercase) for i in range(10)),
        'threshold':
        '70'
    }
    docker_image_url = 'some_url'

    job = delayed_job_models.get_or_create(job_type, params, docker_image_url)
    # simulate it finished

    job_run_dir = os.path.join(run_dir_path, job.id)
    job.run_dir_path = job_run_dir
    os.makedirs(job_run_dir, exist_ok=True)

    # BUG FIX: the original called str.join on the joined path
    # (`os.path.join(...).join('outputs')`), which interleaves the path
    # between the characters of 'outputs' instead of appending a path
    # component. os.path.join with a third component is what was meant.
    output_dir = os.path.join(run_dir_path, job.id, 'outputs')
    job.output_dir_path = output_dir
    os.makedirs(output_dir, exist_ok=True)

    # Add some inputs
    simulate_inputs_to_job(job, job_run_dir)

    # Add some outputs
    simulate_outputs_of_job(job, output_dir)

    job.status = delayed_job_models.JobStatuses.FINISHED
    job.expires_at = expires_at
    delayed_job_models.save_job(job)

    return job
Exemplo n.º 9
0
def create_and_submit_job(job_type, input_files_desc, input_files_hashes,
                          docker_image_url, job_params):
    """
    Creates a job and submits if to LSF
    :param job_type: type of job to submit
    :param input_files_desc: dict with the paths of the input files
    :param input_files_hashes: dict with the hashes of the input files
    :param docker_image_url: image of the container to use
    :param job_params: parameters of the job
    :return: the job object created
    """
    job = delayed_job_models.get_or_create(job_type, job_params,
                                           docker_image_url,
                                           input_files_hashes)

    # Reset the run bookkeeping before (re-)submitting the job.
    for attribute, value in (('progress', 0), ('started_at', None),
                             ('finished_at', None)):
        setattr(job, attribute, value)
    delayed_job_models.save_job(job)

    app_logging.debug(f'Submitting Job: {job.id}')
    prepare_job_and_submit(job, input_files_desc)
    return job
 def test_job_token_is_generated(self):
     """
     Test that the token for a job is generated
     """
     with self.flask_app.app_context():
         job_params = {
             'search_type': 'SIMILARITY',
             'structure':
             '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
             'threshold': '70'
         }
         job = delayed_job_models.get_or_create('SIMILARITY', job_params,
                                                'some_url')

         # Decode the token with the server secret and check its payload.
         token = token_generator.generate_job_token(job.id)
         secret = RUN_CONFIG.get('server_secret_key')
         payload = jwt.decode(token, secret, algorithms=['HS256'])
         self.assertEqual(payload.get('job_id'),
                          job.id,
                          msg='The token was not generated correctly!')
    def test_gets_lsf_job_resources_params_correctly_when_no_script(self):
        """
        Tests that if can get the parameters string for the bsub command correctly when there is no script defined
        """
        with self.flask_app.app_context():
            input_files_desc, input_files_hashes, params = self.prepare_mock_job_args(
            )
            job = delayed_job_models.get_or_create('TEST', params, 'some_url',
                                                   input_files_hashes)

            # With no requirements script configured, the bsub resources
            # parameters string must be empty.
            resources_params_got = job_submission_service.get_job_resources_params(
                job)
            self.assertEqual(
                resources_params_got,
                '',
                msg='The resources params were not calculated correctly!')
    def simulate_finished_job(self, expires_at):
        """
        Creates in the database a job that is finished, expiring at the date passed as parameter.
        :param expires_at: Expiration date that you want for the job
        :return: the saved job object
        """
        # A random structure makes every call produce a distinct job id.
        random_structure = ''.join(
            random.choice(string.ascii_lowercase) for _ in range(10))
        job = delayed_job_models.get_or_create(
            'SIMILARITY', {
                'search_type': 'SIMILARITY',
                'structure': random_structure,
                'threshold': '70'
            }, 'some_url')

        # Give the job real run and output directories on disk.
        run_dir = os.path.join(self.ABS_RUN_DIR_PATH, job.id)
        job.run_dir_path = run_dir
        os.makedirs(run_dir, exist_ok=True)

        out_dir = os.path.join(self.ABS_OUT_DIR_PATH, job.id)
        job.output_dir_path = out_dir
        os.makedirs(out_dir, exist_ok=True)

        # Add some outputs, then mark the job finished and persist it.
        utils.simulate_outputs_of_job(job, out_dir)
        job.status = delayed_job_models.JobStatuses.FINISHED
        job.expires_at = expires_at
        delayed_job_models.save_job(job)

        return job
Exemplo n.º 13
0
    def test_a_job_cannot_update_another_jobs_statistics(self):
        """
        Tests that a job cannot update the statistics of another job
        """
        params = {
            'instruction': 'RUN_NORMALLY',
            'seconds': 1,
            'api_url':
            'https://www.ebi.ac.uk/chembl/api/data/similarity/CN1C(=O)C=C(c2cccc(Cl)c2)c3cc(ccc13)C@@(c4ccc(Cl)cc4)c5cncn5C/80.json'
        }

        with self.flask_app.app_context():
            target_job = delayed_job_models.get_or_create('TEST', params,
                                                          'some url')
            statistics = {
                'duration': 1,
            }

            # Sign the request with a token that belongs to a different job.
            foreign_token = token_generator.generate_job_token('another_id')
            response = self.client.post(
                f'/custom_statistics/submit_statistics/test_job/{target_job.id}',
                data=statistics,
                headers={'X-JOB-KEY': foreign_token})

            self.assertEqual(
                response.status_code,
                401,
                msg=
                'I should not be authorised to upload statistics of another job'
            )
Exemplo n.º 14
0
    def test_a_job_cannot_update_another_job_progress(self):
        """
        Tests that a job can not use its token to update another job's status
        """

        params = {
            'search_type': 'SIMILARITY',
            'structure':
            '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
            'threshold': '70'
        }

        with self.flask_app.app_context():
            target_job = delayed_job_models.get_or_create('SIMILARITY', params,
                                                          'some url')
            update_payload = {
                'progress': 50,
                'status_log': 'Loading database',
            }

            # Sign the request with a token that belongs to a different job.
            foreign_token = token_generator.generate_job_token('another_id')
            response = self.client.patch(f'/status/{target_job.id}',
                                         data=update_payload,
                                         headers={'X-JOB-KEY': foreign_token})

            self.assertEqual(
                response.status_code,
                401,
                msg=
                'I should not be authorised to modify the status of another job'
            )
Exemplo n.º 15
0
    def test_2_jobs_with_the_same_params_are_actually_the_same_job(self):
        """
        test that when getting a job with some params, they point the exactly to the same job
        """
        with self.flask_app.app_context():
            job_type = 'SIMILARITY'
            params = {
                'search_type': 'SIMILARITY',
                'structure': '[H]C1(CCCN1C(=N)N)CC1=NC(=NO1)C1C=CC(=CC=1)NC1=NC(=CS1)C1C=CC(Br)=CC=1',
                'threshold': '70'
            }
            input_files_hashes = {
                'input1': 'hash1-hash1-hash1-hash1-hash1-hash1-hash1',
                'input2': 'hash2-hash2-hash2-hash2-hash2-hash2-hash2',
            }
            docker_image_url = 'some_url'

            # Create the job, then look it up again purely by its parameters.
            created_job = delayed_job_models.get_or_create(job_type, params, docker_image_url, input_files_hashes)
            found_job = delayed_job_models.get_job_by_params(job_type, params, docker_image_url, input_files_hashes)

            self.assertEqual(created_job.id, found_job.id, msg='The job was not found searching from its params!')