Example #1
0
File: tests.py — Project: blawney/cnap_v2
 def _create_submission(self):
     '''
     Helper used by the job-status tests: persists and returns a
     SubmittedJob record for a pretend Cromwell job so the status
     checking loop has a database object to operate on.
     '''
     # pretend this UUID was handed back by the job runner
     fake_job_uuid = 'e442e52a-9de1-47f0-8b4f-e6e565008cf1'
     submission = SubmittedJob(
         project=self.analysis_project,
         job_id=fake_job_uuid,
         job_status='Submitted',
     )
     submission.save()
     return submission
Example #2
0
File: tests.py — Project: blawney/cnap_v2
    def test_handle_error_with_resource_interbucket_copy(self, mock_time):
        '''
        Simulates a Google-side failure during an inter-bucket copy and
        verifies that the helper retries and, once retries are exhausted,
        fails gracefully by raising JobOutputCopyException.
        '''
        # The storage client exposes an objects() factory; calling
        # .copy(...) on it yields a request whose .execute() performs the
        # transfer.  Make every execute() blow up to fake a backend error.
        failing_execute = mock.MagicMock(
            side_effect=Exception('Some backend ex!'))
        copy_request = mock.MagicMock()
        copy_request.execute = failing_execute

        objects_resource = mock.MagicMock()
        objects_resource.copy.return_value = copy_request
        objects_factory = mock.MagicMock(return_value=objects_resource)

        class FakeStorageClient(object):
            def __init__(self, objects_callable):
                self.objects = objects_callable

        storage_client = FakeStorageClient(objects_factory)

        # stub out sleep so the retry loop runs instantly
        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        with self.assertRaises(JobOutputCopyException):
            move_resource_to_user_bucket(storage_client, job,
                                         'gs://some-bucket/some-path.txt')
Example #3
0
    def test_interbucket_copy_success(self, mock_storage, mock_time):
        '''
        This test covers the case where an inter-bucket copy works the first
        time.  We assert the returned destination path is correct and that
        the retry sleep function is never called.
        '''
        # in the method, we instantiate storage.Client and storage.Blob instances
        mock_client = mock.MagicMock()
        mock_blob = mock.MagicMock()
        mock_bucket = mock.MagicMock()

        # returning None from the copy_blob method indicates success
        mock_bucket.copy_blob.return_value = None

        # add the mocks to the callers:
        mock_client.get_bucket.return_value = mock_bucket
        mock_storage.Client.return_value = mock_client
        mock_storage.Blob.return_value = mock_blob

        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        r = move_resource_to_user_bucket(job, 'gs://some-bucket/some-path.txt')
        # strip the 'gs://'-style prefix from the configured bucket prefix
        # (no backslash continuations needed inside brackets)
        destination_bucket = settings.CONFIG_PARAMS[
            'storage_bucket_prefix'
            ][len(settings.CONFIG_PARAMS['google_storage_gs_prefix']):]
        expected_r = 'gs://%s-%s/%s/%s/some-path.txt' % (
            destination_bucket,
            job.project.owner.user_uuid,
            job.project.analysis_uuid,
            job.job_id
        )
        # assertEqual prints a useful diff on failure, unlike assertTrue(a == b)
        self.assertEqual(r, expected_r)
        mock_time.sleep.assert_not_called()
Example #4
0
    def test_handle_error_with_resource_interbucket_copy(
            self, mock_storage, mock_time):
        '''
        Simulates a persistent Google-side failure of an inter-bucket copy
        and checks that the helper retries multiple times before failing
        gracefully with JobOutputCopyException.
        '''
        # The helper instantiates storage.Client and storage.Blob.  It calls
        # get_bucket twice (destination bucket first, then source bucket) and
        # finally invokes copy_blob on the source bucket -- so wire up mocks
        # where every copy_blob call raises, faking a problem with the copy.
        fake_client = mock.MagicMock()
        fake_blob = mock.MagicMock()
        fake_bucket = mock.MagicMock()
        fake_bucket.copy_blob.side_effect = Exception('Some copy problem!')

        fake_client.get_bucket.return_value = fake_bucket
        mock_storage.Client.return_value = fake_client
        mock_storage.Blob.return_value = fake_blob

        # stub sleep so the retry loop does not actually wait
        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        with self.assertRaises(JobOutputCopyException):
            move_resource_to_user_bucket(job, 'gs://some-bucket/some-path.txt')
Example #5
0
File: tests.py — Project: blawney/cnap_v2
    def test_unknown_response_generates_notification_case2(
            self, mock_requests, mock_handle_ex):
        '''
        Covers the case where the Cromwell server answers a workflow status
        query with HTTP 200 but the JSON payload carries a status string we
        do not recognize; an admin notification should be triggered.
        '''
        # seed the database with a job for the status checker to poll
        fake_uuid = 'e442e52a-9de1-47f0-8b4f-e6e565008cf1'
        job = SubmittedJob(
            project=self.analysis_project,
            job_id=fake_uuid,
            job_status='Submitted',
        )
        job.save()

        # a 200 response whose body reports an unrecognized status
        fake_response = mock.MagicMock()
        fake_response.status_code = 200
        fake_response.text = json.dumps({'status': 'Something unexpected...'})
        mock_requests.get.return_value = fake_response

        check_job()
        self.assertTrue(mock_handle_ex.called)  # notification was sent
Example #6
0
    def test_interbucket_copy_recovers_from_initial_failure(
            self, mock_storage, mock_time):
        '''
        This test covers the case where an inter-bucket copy fails due to some
        reason on google's end.  Eventually it works, however, and the
        expected destination path is returned.
        '''
        # in the method, we instantiate storage.Client and storage.Blob instances
        mock_client = mock.MagicMock()
        mock_blob = mock.MagicMock()
        mock_bucket = mock.MagicMock()

        # we call the get_bucket method twice.  The first time is to retrieve
        # a destination bucket, the second time is to get the source
        # bucket.  We ultimately call the 'copy_blob' method on the
        # source bucket, so we mock the return to contain a copy_blob
        # method which raises some exception (faking a problem with the copy)
        # a couple of times, but ultimately succeeds (by returning None)
        mock_bucket.copy_blob = mock.MagicMock(side_effect=[
            Exception('Some copy problem!'),
            Exception('Some copy problem!'), None
        ])

        # add the mocks to the callers:
        mock_client.get_bucket.return_value = mock_bucket
        mock_storage.Client.return_value = mock_client
        mock_storage.Blob.return_value = mock_blob

        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        r = move_resource_to_user_bucket(job, 'gs://some-bucket/some-path.txt')
        # strip the 'gs://'-style prefix from the configured bucket prefix
        # (no backslash continuations needed inside brackets)
        destination_bucket = settings.CONFIG_PARAMS[
            'storage_bucket_prefix'
            ][len(settings.CONFIG_PARAMS['google_storage_gs_prefix']):]
        expected_r = 'gs://%s-%s/%s/%s/some-path.txt' % (
            destination_bucket,
            job.project.owner.user_uuid,
            job.project.analysis_uuid,
            job.job_id
        )
        # assertEqual prints a useful diff on failure, unlike assertTrue(a == b)
        self.assertEqual(r, expected_r)
Example #7
0
File: tests.py — Project: blawney/cnap_v2
    def test_interbucket_copy_recovers_from_initial_failure(self, mock_time):
        '''
        This test covers the case where an inter-bucket copy fails due to some
        reason on google's end.  Eventually it works, however, and the
        expected destination path is returned.
        '''
        # the client's objects() factory yields a resource whose copy(...)
        # returns a request; execute() fails twice, then succeeds (None)
        mock_objects_func = mock.MagicMock()
        mock_objects = mock.MagicMock()
        mock_objects_func.return_value = mock_objects

        mock_copy = mock.MagicMock()
        mock_copy.execute = mock.MagicMock(side_effect=[
            Exception('Some backend ex!'),
            Exception('Some backend ex!'), None
        ])
        mock_objects.copy.return_value = mock_copy

        class MockStorageClient(object):
            def __init__(self, object_attr_mock):
                self.objects = object_attr_mock

        mock_storage_client = MockStorageClient(mock_objects_func)

        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        r = move_resource_to_user_bucket(mock_storage_client, job,
                                         'gs://some-bucket/some-path.txt')
        # strip the 'gs://'-style prefix from the configured bucket prefix
        # (no backslash continuations needed inside brackets)
        destination_bucket = settings.CONFIG_PARAMS[
            'storage_bucket_prefix'
            ][len(settings.CONFIG_PARAMS['google_storage_gs_prefix']):]
        expected_r = 'gs://%s/%s/%s/%s/some-path.txt' % (
            destination_bucket,
            job.project.owner.user_uuid,
            job.project.analysis_uuid,
            job.job_id
        )
        # assertEqual prints a useful diff on failure, unlike assertTrue(a == b)
        self.assertEqual(r, expected_r)
Example #8
0
File: tests.py — Project: blawney/cnap_v2
    def test_interbucket_copy_success(self, mock_time):
        '''
        This test covers the case where an inter-bucket copy works the first
        time.  We assert the returned destination path is correct and that
        the retry sleep function is never called.
        '''
        # the client's objects() factory yields a resource whose copy(...)
        # returns a request; execute() succeeds immediately
        mock_objects_func = mock.MagicMock()
        mock_objects = mock.MagicMock()
        mock_objects_func.return_value = mock_objects

        mock_copy = mock.MagicMock()
        mock_copy.execute = mock.MagicMock()
        mock_objects.copy.return_value = mock_copy

        class MockStorageClient(object):
            def __init__(self, object_attr_mock):
                self.objects = object_attr_mock

        mock_storage_client = MockStorageClient(mock_objects_func)

        mock_time.sleep = mock.MagicMock()

        job = SubmittedJob(project=self.analysis_project, job_id='some_job_id')

        r = move_resource_to_user_bucket(mock_storage_client, job,
                                         'gs://some-bucket/some-path.txt')
        # strip the 'gs://'-style prefix from the configured bucket prefix
        # (no backslash continuations needed inside brackets)
        destination_bucket = settings.CONFIG_PARAMS[
            'storage_bucket_prefix'
            ][len(settings.CONFIG_PARAMS['google_storage_gs_prefix']):]
        expected_r = 'gs://%s/%s/%s/%s/some-path.txt' % (
            destination_bucket,
            job.project.owner.user_uuid,
            job.project.analysis_uuid,
            job.job_id
        )
        # assertEqual prints a useful diff on failure, unlike assertTrue(a == b)
        self.assertEqual(r, expected_r)
        mock_time.sleep.assert_not_called()
Example #9
0
File: tasks.py — Project: qbrc-cnap/cnap
def execute_wdl(analysis_project, staging_dir, run_precheck=False):
    '''
    Submit a WDL workflow to the Cromwell server for the given project.

    Reads the job-submission config, assembles the multipart POST request
    (workflow source, inputs, options, and an optional dependency zip),
    sends it to Cromwell, and records a SubmittedJob on success.  On any
    failure the project is flagged with an error status and the admins are
    notified via handle_exception.

    Args:
        analysis_project: project model instance being executed.
        staging_dir: directory holding the staged WDL/json/zip files.
        run_precheck: when True, submit the precheck WDL instead of the
            main workflow.

    Raises:
        Exception: re-raised if the POST to Cromwell itself fails.
    '''
    # read config to get the names/locations/parameters for job submission
    config_path = os.path.join(THIS_DIR, 'wdl_job_config.cfg')
    config_dict = utils.load_config(config_path)

    # the path of the input json file:
    wdl_input_path = os.path.join(staging_dir, WDL_INPUTS)

    # pull together the components of the POST request to the Cromwell server
    submission_endpoint = config_dict['submit_endpoint']
    submission_url = settings.CROMWELL_SERVER_URL + submission_endpoint
    # (build the payload once -- the previous dead `payload = {}` is gone)
    payload = {
        'workflowType': config_dict['workflow_type'],
        'workflowTypeVersion': config_dict['workflow_type_version'],
    }

    # load the options file so we can fill-in the zones:
    options_json = {}
    current_zone = get_zone_as_string()
    if current_zone:
        options_json['default_runtime_attributes'] = {'zones': current_zone}

    options_io = io.BytesIO(json.dumps(options_json).encode('utf-8'))

    # precheck and main submissions differ only in which WDL file is sent
    wdl_filename = settings.PRECHECK_WDL if run_precheck else settings.MAIN_WDL
    files = {
        'workflowOptions': options_io,
        'workflowInputs': open(wdl_input_path, 'rb'),
        'workflowSource': open(os.path.join(staging_dir, wdl_filename), 'rb'),
    }

    zip_archive = os.path.join(staging_dir, ZIPNAME)
    if os.path.exists(zip_archive):
        files['workflowDependencies'] = open(zip_archive, 'rb')

    # start the job:
    try:
        response = requests.post(submission_url, data=payload, files=files)
    except Exception as ex:
        print('An exception was raised when requesting cromwell server:')
        print(ex)
        message = 'An exception occurred when trying to submit a job to Cromwell. \n'
        message += 'Project ID was: %s' % str(analysis_project.analysis_uuid)
        message += str(ex)

        analysis_project.status = '''
            Error on job submission.  An administrator has been automatically notified of this error.
            Thank you for your patience.
            '''
        analysis_project.error = True
        analysis_project.save()
        handle_exception(ex, message=message)
        # bare `raise` preserves the original traceback (raise ex resets it)
        raise
    finally:
        # the opened file handles were previously leaked; close them whether
        # or not the POST succeeded (BytesIO closes harmlessly too)
        for file_handle in files.values():
            file_handle.close()

    response_json = json.loads(response.text)
    if response.status_code == 201:
        if response_json['status'] == 'Submitted':
            job_id = response_json['id']

            if run_precheck:
                job_status = 'Checking input data...'
            else:
                job_status = 'Job submitted...'

            job = SubmittedJob(project=analysis_project,
                               job_id=job_id,
                               job_status=job_status,
                               job_staging_dir=staging_dir,
                               is_precheck=run_precheck)
            job.save()

            # update the project also:
            analysis_project.started = True  # should already be set
            analysis_project.start_time = datetime.datetime.now()
            analysis_project.status = job_status
            analysis_project.save()
        else:
            # In case we get other types of responses, inform the admins:
            message = 'Job was submitted, but received an unexpected response from Cromwell:\n'
            message += response.text
            handle_exception(None, message=message)
    else:
        message = 'Did not submit job-- status code was %d, and response text was: %s' % (
            response.status_code, response.text)
        analysis_project.status = '''
            Error on job submission.  An administrator has been automatically notified of this error.
            Thank you for your patience.
            '''
        analysis_project.error = True
        analysis_project.save()
        handle_exception(None, message=message)