def _JobSummariesForProject(self, project_id, args, filter_predicate):
    """Get the list of job summaries that match the predicate.

    Args:
      project_id: The project ID to retrieve
      args: parsed command line arguments
      filter_predicate: The filter predicate to apply

    Returns:
      An iterator over all the matching jobs.
    """
    # Both branches below assign request/service unconditionally, so no
    # placeholder None initialization is needed.
    status_filter = self._StatusArgToFilter(args.status, args.region)
    if args.region:
        # A region was given: list jobs from that regional endpoint only.
        request = apis.Jobs.LIST_REQUEST(projectId=project_id,
                                         location=args.region,
                                         filter=status_filter)
        service = apis.Jobs.GetService()
    else:
        # No region given: aggregate jobs across all regions of the project.
        request = apis.Jobs.AGGREGATED_LIST_REQUEST(projectId=project_id,
                                                    filter=status_filter)
        service = apis.GetClientInstance().projects_jobs

    return dataflow_util.YieldFromList(project_id=project_id,
                                       region_id=args.region,
                                       service=service,
                                       request=request,
                                       limit=args.limit,
                                       batch_size=args.page_size,
                                       field='jobs',
                                       batch_size_attribute='pageSize',
                                       predicate=filter_predicate)
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: all the arguments that were provided to this command
            invocation.

        Returns:
          None on success, or a string containing the error message.
        """
        job_ref = job_utils.ExtractJobRef(args)

        # Map the --importance flag values onto the API's enum members.
        severity_enum = (
            apis.Messages.LIST_REQUEST.MinimumImportanceValueValuesEnum)
        severity_by_flag = {
            'debug': severity_enum.JOB_MESSAGE_DEBUG,
            'detailed': severity_enum.JOB_MESSAGE_DETAILED,
            'error': severity_enum.JOB_MESSAGE_ERROR,
            'warning': severity_enum.JOB_MESSAGE_WARNING,
        }

        # Note: if both bounds are present, startTime > endTime, because
        # messages with actual time in [endTime, startTime) are returned.
        list_request = apis.Messages.LIST_REQUEST(
            projectId=job_ref.projectId,
            jobId=job_ref.jobId,
            location=job_ref.location,
            minimumImportance=(args.importance
                               and severity_by_flag[args.importance]),
            startTime=args.after and time_util.Strftime(args.after),
            endTime=args.before and time_util.Strftime(args.before))

        return dataflow_util.YieldFromList(job_id=job_ref.jobId,
                                           project_id=job_ref.projectId,
                                           region_id=job_ref.location,
                                           service=apis.Messages.GetService(),
                                           request=list_request,
                                           batch_size=args.limit,
                                           batch_size_attribute='pageSize',
                                           field='jobMessages')
Example #3
0
    def _JobSummariesForProject(self, project_id, args, filter_predicate):
        """Get the list of job summaries that match the predicate.

        Args:
          project_id: The project ID to retrieve
          args: parsed command line arguments
          filter_predicate: The filter predicate to apply

        Returns:
          An iterator over all the matching jobs.
        """
        # Both branches below assign request/service unconditionally, so no
        # placeholder None initialization is needed.
        status_filter = self._StatusArgToFilter(args.status, args.region)
        if args.region:
            # A region was given: list jobs from that regional endpoint only.
            request = apis.Jobs.LIST_REQUEST(projectId=project_id,
                                             location=args.region,
                                             filter=status_filter)
            service = apis.Jobs.GetService()
        else:
            # Warn the user: the aggregated view is best-effort and can miss
            # jobs from a region that is having an outage.
            log.status.Print(
                '`--region` not set; getting jobs from all available regions. '
                'Some jobs may be missing in the event of an outage. '
                'https://cloud.google.com/dataflow/docs/concepts/regional-endpoints'
            )
            request = apis.Jobs.AGGREGATED_LIST_REQUEST(projectId=project_id,
                                                        filter=status_filter)
            service = apis.GetClientInstance().projects_jobs

        return dataflow_util.YieldFromList(project_id=project_id,
                                           region_id=args.region,
                                           service=service,
                                           request=request,
                                           limit=args.limit,
                                           batch_size=args.page_size,
                                           field='jobs',
                                           batch_size_attribute='pageSize',
                                           predicate=filter_predicate)
Example #4
0
    def _JobSummariesForProject(self, project_id, args, filter_predicate):
        """Get the list of job summaries that match the predicate.

        Args:
          project_id: The project ID to retrieve
          args: parsed command line arguments
          filter_predicate: The filter predicate to apply

        Returns:
          An iterator over all the matching jobs.
        """
        # Build the list request filtered by the requested job status.
        status_filter = self._StatusArgToFilter(args.status)
        list_request = apis.Jobs.LIST_REQUEST(projectId=project_id,
                                              filter=status_filter)

        return dataflow_util.YieldFromList(project_id=project_id,
                                           service=apis.Jobs.GetService(),
                                           request=list_request,
                                           limit=args.limit,
                                           batch_size=args.page_size,
                                           field='jobs',
                                           batch_size_attribute='pageSize',
                                           predicate=filter_predicate)