def test_query_jobs_by_label(self):
    labels = {
        'label_key': 'the_label_value',
        'matching_key': 'some_value',
        'overlap_key': 'overlap_value'
    }
    other_labels = {
        'diff_label_key': 'other_label_value',
        'matching_key': 'non_matching_value',
        'overlap_key': 'overlap_value'
    }

    label_job = self.start_job('echo LABEL',
                               labels=labels,
                               name='labeljob')
    label_job_id = self.api_job_id(label_job)
    other_label_job = self.start_job('echo OTHER',
                                     labels=other_labels,
                                     name='otherlabeljob')
    other_label_job_id = self.api_job_id(other_label_job)
    no_label_job = self.start_job('echo NO_LABEL', name='nolabeljob')
    no_label_job_id = self.api_job_id(no_label_job)

    self.assert_query_matches(QueryJobsRequest(labels=labels),
                              [label_job])
    self.assert_query_matches(
        QueryJobsRequest(labels={'overlap_key': 'overlap_value'}),
        [label_job, other_label_job])

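# assert_query_matches is a shared test helper that is not shown in this
# excerpt. A hypothetical minimal version, assuming must_query_jobs returns
# a QueryJobsResponse whose results carry an `id` field:
def assert_query_matches(self, query_request, expected_jobs):
    # Run the query and check that exactly the expected jobs come back,
    # ignoring order; return the response so pagination tests can chain
    # next_page_token values.
    response = self.must_query_jobs(query_request)
    self.assertCountEqual([job.id for job in response.results],
                          [self.api_job_id(job) for job in expected_jobs])
    return response
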
def test_query_jobs_by_name(self):
    name_job = self.start_job('echo NAME', name='named-job')
    other_name_job = self.start_job('echo OTHER', name='other-job')
    no_name_job = self.start_job('echo UNSPECIFIED')
    self.assert_query_matches(QueryJobsRequest(name='named-job'),
                              [name_job])
    self.assert_query_matches(QueryJobsRequest(name='job'), [])

def test_query_jobs_submission_pagination(self):
    job1 = self.start_job('echo FIRST_JOB', name='job_z')
    time.sleep(1)
    min_time = datetime.datetime.now()
    job2 = self.start_job('echo SECOND_JOB', name='job_y')
    job3 = self.start_job('echo THIRD_JOB', name='job_x')
    job4 = self.start_job('echo FOURTH_JOB', name='job_w')
    job5 = self.start_job('echo FIFTH_JOB', name='job_v')
    job6 = self.start_job('echo SIXTH_JOB', name='job_u')

    response = self.assert_query_matches(
        QueryJobsRequest(
            page_size=2,
            extensions=ExtendedQueryFields(submission=min_time)),
        [job5, job6])
    response = self.assert_query_matches(
        QueryJobsRequest(
            page_size=2,
            page_token=response.next_page_token,
            extensions=ExtendedQueryFields(submission=min_time)),
        [job3, job4])
    response = self.assert_query_matches(
        QueryJobsRequest(
            page_size=2,
            page_token=response.next_page_token,
            extensions=ExtendedQueryFields(submission=min_time)),
        [job2])
def test_query_jobs_by_submitted_status(self):
    job1 = self.start_job('echo job1 && sleep 30', name='job1')
    self.assert_query_matches(
        QueryJobsRequest(statuses=[ApiStatus.SUBMITTED]), [job1])
    self.wait_status(self.api_job_id(job1), ApiStatus.RUNNING)
    job2 = self.start_job('echo job2 && sleep 30', name='job2')
    self.assert_query_matches(
        QueryJobsRequest(statuses=[ApiStatus.SUBMITTED]), [job2])
    self.assert_query_matches(
        QueryJobsRequest(statuses=[ApiStatus.RUNNING]), [job1])

def test_query_jobs_by_label_task_id(self):
    started = self.start_job('echo BY_TASK_ID',
                             name='by_task_id',
                             task_count=2)
    jobs = self.must_query_jobs(
        QueryJobsRequest(labels={'job-id': started['job-id']}))
    for task_id in started['task-id']:
        task = started.copy()
        task['task-id'] = task_id
        self.assert_query_matches(
            QueryJobsRequest(labels={'task-id': task_id}), [task])

def test_query_jobs_by_submission_end(self):
    first_time = datetime.datetime.now()
    first_job = self.start_job('echo ONE', name='job1', wait=True)
    second_time = datetime.datetime.now()
    second_job = self.start_job('echo TWO', name='job2', wait=True)
    third_time = datetime.datetime.now()
    third_job = self.start_job('echo THREE', name='job3', wait=True)
    fourth_time = datetime.datetime.now()

    self.assert_query_matches(
        QueryJobsRequest(extensions=ExtendedQueryFields(
            submission=first_time)),
        [first_job, second_job, third_job])
    self.assert_query_matches(
        QueryJobsRequest(extensions=ExtendedQueryFields(
            submission=second_time)), [second_job, third_job])
    self.assert_query_matches(
        QueryJobsRequest(extensions=ExtendedQueryFields(
            submission=third_time)), [third_job])
    self.assert_query_matches(QueryJobsRequest(end=second_time),
                              [first_job])
    self.assert_query_matches(QueryJobsRequest(end=third_time),
                              [first_job, second_job])
    self.assert_query_matches(QueryJobsRequest(end=fourth_time),
                              [first_job, second_job, third_job])
    self.assert_query_matches(
        QueryJobsRequest(
            end=fourth_time,
            extensions=ExtendedQueryFields(submission=second_time)),
        [second_job, third_job])

def test_query_jobs_by_status(self):
    succeeded = self.start_job('echo SUCCEEDED', name='succeeded')
    self.wait_status(self.api_job_id(succeeded), ApiStatus.SUCCEEDED)
    running = self.start_job('echo RUNNING && sleep 30',
                             name='running')
    self.wait_status(self.api_job_id(running), ApiStatus.RUNNING)
    self.assert_query_matches(
        QueryJobsRequest(statuses=[ApiStatus.SUCCEEDED]), [succeeded])
    self.assert_query_matches(
        QueryJobsRequest(statuses=[ApiStatus.RUNNING]), [running])
    self.assert_query_matches(
        QueryJobsRequest(
            statuses=[ApiStatus.RUNNING, ApiStatus.SUCCEEDED]),
        [succeeded, running])
    self.assert_query_matches(
        QueryJobsRequest(
            statuses=[ApiStatus.SUCCEEDED, ApiStatus.RUNNING]),
        [succeeded, running])
def test_query_jobs_invalid_project(self):
    params = QueryJobsRequest(extensions=ExtendedQueryFields(
        project_id='some-bogus-project-id'))
    resp = self.client.open('/jobs/query',
                            method='POST',
                            data=flask.json.dumps(params),
                            content_type='application/json')
    self.assert_status(resp, 404)
    self.assertEqual(resp.json['detail'],
                     'Project "some-bogus-project-id" not found')

def test_query_jobs_pagination(self):
    # Jobs are sorted first by create-time and then by job-id. We cannot
    # guarantee the jobs are created in distinct seconds, and some will
    # likely share one, so let's make the job names sort in the same
    # order as create-time to keep the expected order deterministic.
    job1 = self.start_job('echo FIRST_JOB', name='job_z')
    job2 = self.start_job('echo SECOND_JOB', name='job_y')
    job3 = self.start_job('echo THIRD_JOB', name='job_x')
    job4 = self.start_job('echo FOURTH_JOB', name='job_w')
    job5 = self.start_job('echo FIFTH_JOB', name='job_v')

    response = self.assert_query_matches(QueryJobsRequest(page_size=2),
                                         [job4, job5])
    response = self.assert_query_matches(
        QueryJobsRequest(page_size=2,
                         page_token=response.next_page_token),
        [job2, job3])
    response = self.assert_query_matches(
        QueryJobsRequest(page_size=2,
                         page_token=response.next_page_token), [job1])

def test_empty_cromwell_query_params(self):
    query = QueryJobsRequest()
    # assertCountEqual compares the lists irrespective of order; sorting
    # lists of dicts is not supported on Python 3.
    self.assertCountEqual(
        jobs_controller.cromwell_query_params(query, 1, 64),
        [{'page': '1'},
         {'pageSize': '64'},
         {'additionalQueryResultFields': 'parentWorkflowId'},
         {'additionalQueryResultFields': 'labels'}])

def query_jobs(body, **kwargs):
    """
    Query jobs by various filter criteria. Additional jobs are requested
    if the number of results is less than the requested page size. The
    returned jobs are ordered from newest to oldest submission time.

    :param body:
    :type body: dict | bytes

    :rtype: QueryJobsResponse
    """
    auth = kwargs.get('auth')
    headers = kwargs.get('auth_headers')
    query = QueryJobsRequest.from_dict(body)
    query_page_size = query.page_size or _DEFAULT_PAGE_SIZE
    # Request more than query.page_size from cromwell since subworkflows will get filtered out
    page_size = query_page_size * 2
    total_results = get_total_results(query, auth, headers)

    results = []
    offset = page_tokens.decode_offset(query.page_token) or 0
    page = page_from_offset(offset, page_size)
    last_page = get_last_page(total_results, page_size)

    while len(results) < query_page_size and page <= last_page:
        page_from_end = last_page - page + 1

        response = requests.post(_get_base_url() + '/query',
                                 json=cromwell_query_params(
                                     query, page_from_end, page_size),
                                 auth=auth,
                                 headers=headers)

        if response.status_code == BadRequest.code:
            raise BadRequest(response.json().get('message'))
        elif response.status_code == InternalServerError.code:
            raise InternalServerError(response.json().get('message'))
        response.raise_for_status()

        # Only list parent jobs
        now = datetime.utcnow()
        jobs_list = [
            format_job(job, now) for job in response.json()['results']
            if _is_parent_workflow(job)
        ]
        jobs_list.reverse()
        results.extend(jobs_list)
        offset = offset + page_size
        page = page_from_offset(offset, page_size)

    next_page_token = page_tokens.encode_offset(offset)
    return QueryJobsResponse(results=results, next_page_token=next_page_token)
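
# page_from_offset, get_last_page and _is_parent_workflow are not shown in
# this excerpt. A minimal sketch of the pagination arithmetic and the
# subworkflow filter they are assumed to implement:
import math

def page_from_offset(offset, page_size):
    # Cromwell pages are 1-based, so offset 0 maps to page 1.
    return offset // page_size + 1

def get_last_page(total_results, page_size):
    # Number of Cromwell pages needed to cover all results.
    return math.ceil(total_results / page_size)

def _is_parent_workflow(job):
    # The query requests 'parentWorkflowId' as an additional result field;
    # jobs that carry one are subworkflows and are filtered out.
    return not job.get('parentWorkflowId')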
def query_jobs(body, **kwargs):
    """
    Query jobs by various filter criteria. Additional jobs are requested
    if the number of results is less than the requested page size. The
    returned jobs are ordered from newest to oldest submission time.

    :param body:
    :type body: dict | bytes

    :rtype: QueryJobsResponse
    """
    auth = kwargs.get('auth')
    headers = kwargs.get('auth_headers')
    query = QueryJobsRequest.from_dict(body)
    query.labels = _format_query_labels(query.labels)
    query_page_size = query.page_size or _DEFAULT_PAGE_SIZE
    offset = 0
    if query.page_token is not None:
        offset = page_tokens.decode_offset(query.page_token)
    page = page_from_offset(offset, query_page_size)

    has_auth = headers is not None

    response = requests.post(_get_base_url() + '/query',
                             json=cromwell_query_params(
                                 query, page, query_page_size, has_auth),
                             auth=auth,
                             headers=headers)

    if response.status_code != 200:
        handle_error(response)

    total_results = int(response.json()['totalResultsCount'])
    last_page = get_last_page(total_results, query_page_size)

    jobs_list = [
        format_job(job, datetime.utcnow())
        for job in response.json()['results']
    ]
    if page >= last_page:
        return QueryJobsResponse(results=jobs_list, total_size=total_results)
    next_page_token = page_tokens.encode_offset(offset + query_page_size)
    return QueryJobsResponse(results=jobs_list,
                             total_size=total_results,
                             next_page_token=next_page_token)
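
# page_tokens.encode_offset / decode_offset are assumed to round-trip an
# integer offset through an opaque, URL-safe token. A minimal sketch
# (the JSON key 'of' is an assumption):
import base64
import json

def encode_offset(offset):
    if offset is None:
        return None
    return base64.urlsafe_b64encode(
        json.dumps({'of': offset}).encode()).decode()

def decode_offset(token):
    # Missing tokens decode to None; callers fall back to offset 0.
    if not token:
        return None
    return json.loads(base64.urlsafe_b64decode(token)).get('of')
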
def test_query_jobs_returns_200(self, mock_request):
    """
    Test case for query_jobs

    Query jobs by various filter criteria. Returned jobs are ordered
    from newest to oldest submission time.
    """
    def _request_callback(request, context):
        context.status_code = 200
        return {'results': [], 'totalResultsCount': 0}

    query_url = self.base_url + '/query'
    mock_request.post(query_url, json=_request_callback)

    query = QueryJobsRequest()
    response = self.client.open('/jobs/query',
                                method='POST',
                                data=json.dumps(query),
                                content_type='application/json')
    self.assertStatus(response, 200)

def test_cromwell_query_params(self):
    datetime_format = '%Y-%m-%dT%H:%M:%S.%fZ'
    query = QueryJobsRequest(
        name='test',
        start=datetime.strptime('2017-10-30T18:04:47.271Z',
                                datetime_format),
        end=datetime.strptime('2017-10-31T18:04:47.271Z', datetime_format),
        status=['Submitted', 'Running', 'Succeeded'],
        labels={
            'label-key-1': 'label-val-1',
            'label-key-2': 'label-val-2'
        },
        page_size=100)
    query_params = [
        {'name': query.name},
        {'start': datetime.strftime(query.start, datetime_format)},
        {'end': datetime.strftime(query.end, datetime_format)},
        {'pageSize': '100'},
        {'page': '23'},
        {'label': 'label-key-1:label-val-1'},
        {'label': 'label-key-2:label-val-2'},
        {'additionalQueryResultFields': 'parentWorkflowId'},
        {'additionalQueryResultFields': 'labels'},
        {'includeSubworkflows': 'false'},
    ]
    query_params.extend([{'status': s} for s in query.status])
    # assertCountEqual already compares element-wise regardless of order,
    # so no pre-sorting is needed (sorting dicts also fails on Python 3).
    self.assertCountEqual(
        query_params,
        jobs_controller.cromwell_query_params(query, 23, 100, False))
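
# cromwell_query_params is assumed to flatten a QueryJobsRequest into the
# list-of-single-key-dicts form that Cromwell's /query endpoint accepts.
# A sketch consistent with the expectations asserted above; note that
# test_empty_cromwell_query_params exercises an older three-argument
# revision without includeSubworkflows, and the effect of has_auth is not
# visible in these tests, so it is ignored here:
def cromwell_query_params(query, page, page_size, has_auth=False):
    datetime_format = '%Y-%m-%dT%H:%M:%S.%fZ'
    query_params = []
    if query.name:
        query_params.append({'name': query.name})
    if query.start:
        query_params.append({'start': query.start.strftime(datetime_format)})
    if query.end:
        query_params.append({'end': query.end.strftime(datetime_format)})
    for status in query.status or []:
        query_params.append({'status': status})
    for label, value in (query.labels or {}).items():
        query_params.append({'label': '{}:{}'.format(label, value)})
    query_params.append({'pageSize': str(page_size)})
    query_params.append({'page': str(page)})
    query_params.append({'additionalQueryResultFields': 'parentWorkflowId'})
    query_params.append({'additionalQueryResultFields': 'labels'})
    query_params.append({'includeSubworkflows': 'false'})
    return query_params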
def test_query_jobs_by_start(self):
    date = datetime.datetime.now()
    job = self.start_job('sleep 30', name='job_by_start')
    self.assert_query_matches(QueryJobsRequest(start=date), [])
    self.wait_status(self.api_job_id(job), ApiStatus.RUNNING)
    self.assert_query_matches(QueryJobsRequest(start=date), [job])
def query_jobs(body):
    """
    Query jobs by various filter criteria.

    Args:
        body (dict): The JSON request body.

    Returns:
        QueryJobsResponse: Response containing results from query
    """
    query = QueryJobsRequest.from_dict(body)
    proj_id = query.extensions.project_id if query.extensions else None
    provider = providers.get_provider(_provider_type(), proj_id, _auth_token())
    create_time_max, offset_id = page_tokens.decode_create_time_max(
        query.page_token) or (None, None)
    query.page_size = min(query.page_size or _DEFAULT_PAGE_SIZE,
                          _MAX_PAGE_SIZE)

    query.start = query.start.replace(tzinfo=tzlocal()).replace(
        microsecond=0) if query.start else None
    query.end = query.end.replace(tzinfo=tzlocal()).replace(
        microsecond=0) if query.end else None
    if query.submission:
        query.submission = query.submission.replace(tzinfo=tzlocal()).replace(
            microsecond=0)

    if query.page_size < 0:
        raise BadRequest("The pageSize query parameter must be non-negative.")
    if query.start and query.end and query.start >= query.end:
        raise BadRequest("Invalid query: start date must precede end date.")
    if query.start and create_time_max and query.start > create_time_max:
        raise BadRequest(
            "Invalid query: start date is invalid with pagination token.")
    if query.submission:
        if query.start and query.submission > query.start:
            raise BadRequest(
                "Invalid query: submission date must be <= start date.")
        if query.end and query.submission >= query.end:
            raise BadRequest(
                "Invalid query: submission date must precede end date.")

    generator = jobs_generator.generate_jobs(provider, query, create_time_max,
                                             offset_id)
    jobs = []
    try:
        for job in generator:
            jobs.append(job)
            if len(jobs) == query.page_size:
                break
    except apiclient.errors.HttpError as error:
        _handle_http_error(error, proj_id)

    try:
        next_job = next(generator)
        next_ct = next_job.submission
        last_ct = jobs[-1].submission
        offset_id = next_job.id if next_ct == last_ct else None
        token = page_tokens.encode_create_time_max(next_ct, offset_id)
        return QueryJobsResponse(results=jobs, next_page_token=token)
    except StopIteration:
        return QueryJobsResponse(results=jobs)
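
# page_tokens.encode_create_time_max / decode_create_time_max are assumed
# to pack a (create-time bound, offset id) pair into an opaque token,
# mirroring the offset tokens sketched earlier. The field names and the
# timestamp format are assumptions, and timezone info is dropped here:
import base64
import json
from datetime import datetime

_TOKEN_TS_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'

def encode_create_time_max(create_time_max, offset_id):
    token = {'ctm': create_time_max.strftime(_TOKEN_TS_FORMAT),
             'oi': offset_id}
    return base64.urlsafe_b64encode(json.dumps(token).encode()).decode()

def decode_create_time_max(token):
    # Missing tokens decode to None; the caller falls back to (None, None).
    if not token:
        return None
    data = json.loads(base64.urlsafe_b64decode(token))
    return (datetime.strptime(data['ctm'], _TOKEN_TS_FORMAT),
            data.get('oi'))
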
def test_query_jobs_by_label_user_id(self):
    job = self.start_job('echo BY_USER_ID', name='by_user_id')
    self.assert_query_matches(
        QueryJobsRequest(extensions=ExtendedQueryFields(
            user_id=job['user-id'])), [job])

def test_query_jobs_by_label_job_id(self):
    job = self.start_job('echo BY_JOB_ID', name='by_job_id')
    self.assert_query_matches(
        QueryJobsRequest(labels={'job-id': job['job-id']}), [job])