Exemplo n.º 1
0
def query_jobs(body, **kwargs):
    """
    Query jobs by various filter criteria. Additional jobs are requested if the number of results is less than the
    requested page size. The returned jobs are ordered from newest to oldest submission time.

    :param body: JSON request body holding the query filters
    :type body: dict | bytes

    :rtype: QueryJobsResponse
    """
    auth = kwargs.get('auth')
    headers = kwargs.get('auth_headers')
    query = QueryJobsRequest.from_dict(body)
    query.labels = _format_query_labels(query.labels)
    query_page_size = query.page_size or _DEFAULT_PAGE_SIZE
    # Resume from the offset encoded in the page token; first page starts at 0.
    offset = 0
    if query.page_token is not None:
        offset = page_tokens.decode_offset(query.page_token)
    page = page_from_offset(offset, query_page_size)

    has_auth = headers is not None

    response = requests.post(_get_base_url() + '/query',
                             json=cromwell_query_params(
                                 query, page, query_page_size, has_auth),
                             auth=auth,
                             headers=headers)

    if response.status_code != 200:
        handle_error(response)

    # Decode the response body once; it was previously parsed separately for
    # the result count and for the result list.
    response_json = response.json()
    total_results = int(response_json['totalResultsCount'])
    last_page = get_last_page(total_results, query_page_size)

    # Hoist the timestamp so every job on this page is formatted against the
    # same "now" (the original evaluated utcnow() per job).
    now = datetime.utcnow()
    jobs_list = [format_job(job, now) for job in response_json['results']]
    if page >= last_page:
        # Last page: no continuation token.
        return QueryJobsResponse(results=jobs_list, total_size=total_results)
    next_page_token = page_tokens.encode_offset(offset + query_page_size)
    return QueryJobsResponse(results=jobs_list,
                             total_size=total_results,
                             next_page_token=next_page_token)
Exemplo n.º 2
0
 def must_query_jobs(self, parameters):
     """POST the given query to /jobs/query and require a 200 response.

     When a testing project is configured it is forced onto the query's
     extensions (creating them if absent) so the request targets the test
     project. Returns the parsed QueryJobsResponse.
     """
     if self.testing_project:
         extensions = parameters.extensions
         if not extensions:
             parameters.extensions = ExtendedQueryFields(
                 project_id=self.testing_project)
         else:
             extensions.project_id = self.testing_project
     response = self.client.open('/jobs/query',
                                 method='POST',
                                 content_type='application/json',
                                 data=flask.json.dumps(parameters))
     self.assert_status(response, 200)
     return QueryJobsResponse.from_dict(response.json)
Exemplo n.º 3
0
def query_jobs(body, **kwargs):
    """
    Query jobs by various filter criteria. Additional jobs are requested if the number of results is less than the
    requested page size. The returned jobs are ordered from newest to oldest submission time.

    :param body:
    :type body: dict | bytes

    :rtype: QueryJobsResponse
    """
    auth = kwargs.get('auth')
    headers = kwargs.get('auth_headers')
    query = QueryJobsRequest.from_dict(body)
    query_page_size = query.page_size or _DEFAULT_PAGE_SIZE
    # Request more than query.page_size from cromwell since subworkflows will get filtered out
    page_size = query_page_size * 2
    total_results = get_total_results(query, auth, headers)

    results = []
    # Resume from the offset encoded in the page token (0 on the first page).
    # NOTE(review): if decode_offset legitimately returns 0 this is fine, but
    # the `or 0` also masks a None page_token — presumably decode_offset
    # tolerates None; confirm against its implementation.
    offset = page_tokens.decode_offset(query.page_token) or 0
    page = page_from_offset(offset, page_size)
    last_page = get_last_page(total_results, page_size)

    # Keep pulling pages until we have at least a full client page of parent
    # jobs or Cromwell runs out of pages. Because subworkflows are filtered
    # out below, a single Cromwell page may contribute fewer than page_size
    # results; the loop may also overshoot query_page_size slightly.
    while len(results) < query_page_size and page <= last_page:
        # Pages are requested from the end of Cromwell's ordering so that the
        # newest submissions come back first (page 1-from-the-end == last page).
        page_from_end = last_page - page + 1

        response = requests.post(_get_base_url() + '/query',
                                 json=cromwell_query_params(
                                     query, page_from_end, page_size),
                                 auth=auth,
                                 headers=headers)

        # Map Cromwell's 400/500 payloads onto the matching werkzeug
        # exceptions; anything else unexpected raises via raise_for_status.
        if response.status_code == BadRequest.code:
            raise BadRequest(response.json().get('message'))
        elif response.status_code == InternalServerError.code:
            raise InternalServerError(response.json().get('message'))
        response.raise_for_status()

        # Only list parent jobs
        now = datetime.utcnow()
        jobs_list = [
            format_job(job, now) for job in response.json()['results']
            if _is_parent_workflow(job)
        ]
        # Reverse within the page: Cromwell's from-the-end page is oldest
        # first, but the API contract is newest-to-oldest.
        jobs_list.reverse()
        results.extend(jobs_list)
        # Advance the absolute offset by a full Cromwell page regardless of
        # how many parent jobs it contributed, so the token stays consistent.
        offset = offset + page_size
        page = page_from_offset(offset, page_size)

    # Encode where the next request should resume. NOTE(review): a token is
    # emitted even when the loop exhausted the last page — presumably the
    # follow-up query then returns an empty page; confirm against callers.
    next_page_token = page_tokens.encode_offset(offset)
    return QueryJobsResponse(results=results, next_page_token=next_page_token)
Exemplo n.º 4
0
def query_jobs(body):
    """
    Query jobs by various filter criteria.

    Args:
        body (dict): The JSON request body.

    Returns:
        QueryJobsResponse: Response containing results from query

    Raises:
        BadRequest: If the page size is negative or the date filters are
            mutually inconsistent.
    """
    query = QueryJobsRequest.from_dict(body)
    proj_id = query.extensions.project_id if query.extensions else None
    provider = providers.get_provider(_provider_type(), proj_id, _auth_token())
    # The page token encodes the newest create-time still to be returned plus
    # an optional job-id offset to break ties within that timestamp.
    create_time_max, offset_id = page_tokens.decode_create_time_max(
        query.page_token) or (None, None)
    query.page_size = min(query.page_size or _DEFAULT_PAGE_SIZE,
                          _MAX_PAGE_SIZE)

    # Normalize all date filters to second precision in the local timezone so
    # comparisons against provider timestamps are consistent.
    query.start = query.start.replace(tzinfo=tzlocal()).replace(
        microsecond=0) if query.start else None
    query.end = query.end.replace(tzinfo=tzlocal()).replace(
        microsecond=0) if query.end else None
    if query.submission:
        query.submission = query.submission.replace(tzinfo=tzlocal()).replace(
            microsecond=0)

    if query.page_size < 0:
        raise BadRequest("The pageSize query parameter must be non-negative.")
    if query.start and query.end and query.start >= query.end:
        raise BadRequest("Invalid query: start date must precede end date.")
    if query.start and create_time_max and query.start > create_time_max:
        raise BadRequest(
            "Invalid query: start date is invalid with pagination token.")
    if query.submission:
        if query.start and query.submission > query.start:
            raise BadRequest(
                "Invalid query: submission date must be <= start date.")
        if query.end and query.submission >= query.end:
            raise BadRequest(
                "Invalid query: submission date must precede end date.")

    generator = jobs_generator.generate_jobs(provider, query, create_time_max,
                                             offset_id)
    jobs = []
    try:
        for job in generator:
            jobs.append(job)
            if len(jobs) == query.page_size:
                break
    except apiclient.errors.HttpError as error:
        _handle_http_error(error, proj_id)

    try:
        # Peek one job past the page boundary to build the next-page token.
        # Use the builtin next(); generator.next() was removed in Python 3.
        next_job = next(generator)
        next_ct = next_job.submission
        last_ct = jobs[-1].submission
        # Only carry an offset id when the boundary jobs share a submission
        # time, so the next page can skip already-returned ties.
        offset_id = next_job.id if next_ct == last_ct else None
        token = page_tokens.encode_create_time_max(next_ct, offset_id)
        return QueryJobsResponse(results=jobs, next_page_token=token)
    except StopIteration:
        # Generator exhausted: this was the final page, no token.
        return QueryJobsResponse(results=jobs)