Example #1
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())

        # Resolve the project and Dataproc region the command operates on.
        project = properties.VALUES.core.project.GetOrFail()
        region = util.ResolveRegion()

        # Build the clusters list request for this project and region.
        request = self.GetRequest(dataproc.messages, project, region, args)

        # Lazily page through the clusters, honoring --limit and --page-size.
        return list_pager.YieldFromList(
            dataproc.client.projects_regions_clusters,
            request,
            limit=args.limit,
            field='clusters',
            batch_size=args.page_size,
            batch_size_attribute='pageSize')
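
These Run methods are excerpted from gcloud's Dataproc surface commands, so the module-level imports are not shown. They most likely rely on something close to the following paths (an assumption based on how the gcloud SDK lays these modules out, not on the excerpts themselves):

import json

from apitools.base.py import list_pager
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.core import properties

list_pager.YieldFromList is a generator from apitools that repeatedly calls the service's List method and yields individual items. A minimal sketch of the behavior the call above relies on follows; paginate_clusters is an illustrative name, not part of apitools or the SDK, and the real implementation handles more options:

def paginate_clusters(service, request, limit=None, page_size=None):
    """Rough, hypothetical equivalent of the YieldFromList call above."""
    yielded = 0
    if page_size is not None:
        # Mirrors batch_size with batch_size_attribute='pageSize'.
        request.pageSize = page_size
    while True:
        response = service.List(request)
        for cluster in response.clusters:  # mirrors field='clusters'
            if limit is not None and yielded >= limit:
                return
            yielded += 1
            yield cluster
        if not response.nextPageToken:
            return
        # Follow the page token until the backend reports no more pages.
        request.pageToken = response.nextPageToken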
Example #2
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())

    region_callback = lambda: util.ResolveRegion(self.ReleaseTrack())
    # Parse Operations endpoint.
    project_callback = properties.VALUES.core.project.GetOrFail
    operation_list_ref = dataproc.resources.Parse(
        None,
        params={'regionId': region_callback, 'projectId': project_callback},
        collection='dataproc.projects.regions.operations_list')

    # Collect filter criteria from the dedicated --state-filter and --cluster
    # flags; they are serialized to JSON below when no --filter is given.
    filter_dict = dict()
    if args.state_filter:
      filter_dict[STATE_MATCHER_FILTER] = STATE_MATCHER_MAP[args.state_filter]
    if args.cluster:
      filter_dict[CLUSTER_NAME_FILTER] = args.cluster

    op_filter = None
    if args.filter:
      # Prefer the newer --filter argument when it is provided.
      op_filter = args.filter
      # Explicitly clear args.filter: by default it would also be applied as a
      # client-side post-filter to the results the backend returns.
      args.filter = None
    else:
      op_filter = json.dumps(filter_dict)

    request = dataproc.messages.DataprocProjectsRegionsOperationsListRequest(
        name=operation_list_ref.RelativeName(), filter=op_filter)

    return list_pager.YieldFromList(
        dataproc.client.projects_regions_operations,
        request,
        limit=args.limit, field='operations',
        batch_size=args.page_size,
        batch_size_attribute='pageSize')
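
STATE_MATCHER_FILTER, CLUSTER_NAME_FILTER and STATE_MATCHER_MAP are module-level constants defined elsewhere in the command and are not part of this excerpt. As a rough sketch of the idea (the exact keys and values below are assumptions), they map the dedicated flags onto the JSON filter the Dataproc backend understands:

# Assumed shapes of the constants referenced above; the real definitions live
# at module level in the gcloud command and may use different keys or values.
STATE_MATCHER_FILTER = 'operation_state_matcher'
CLUSTER_NAME_FILTER = 'cluster_name'
STATE_MATCHER_MAP = {'active': 'ACTIVE', 'inactive': 'NON_ACTIVE'}

# With --state-filter=active and --cluster=my-cluster (and no --filter), the
# else branch would then send a backend filter string such as:
#   '{"operation_state_matcher": "ACTIVE", "cluster_name": "my-cluster"}'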
Example #3
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())

        # Resolve the project and Dataproc region the command operates on.
        project = properties.VALUES.core.project.GetOrFail()
        region = util.ResolveRegion()

        request = self.GetRequest(dataproc.messages, project, region, args)

        # Restrict the listing to a single cluster if --cluster was given.
        if args.cluster:
            request.clusterName = args.cluster

        # Translate --state-filter into the API's JobStateMatcher enum.
        if args.state_filter:
            state = STATE_MATCHER_ENUM_MAP.get(args.state_filter)
            request.jobStateMatcher = (
                dataproc.messages.DataprocProjectsRegionsJobsListRequest.
                JobStateMatcherValueValuesEnum.lookup_by_name(state))

        # Lazily page through the jobs and wrap each one in TypedJob.
        jobs = list_pager.YieldFromList(dataproc.client.projects_regions_jobs,
                                        request,
                                        limit=args.limit,
                                        field='jobs',
                                        batch_size=args.page_size,
                                        batch_size_attribute='pageSize')
        return (TypedJob(job) for job in jobs)
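
STATE_MATCHER_ENUM_MAP and TypedJob likewise come from the surrounding gcloud modules rather than this excerpt. A short sketch with assumed names and values, showing the shape of the map and the fact that the result is a lazy generator:

# Assumed shape of the module-level map used above; the real values are
# defined in the gcloud command module and may differ (the API enum also
# accepts ALL).
STATE_MATCHER_ENUM_MAP = {'active': 'ACTIVE', 'inactive': 'NON_ACTIVE'}

# Run returns a generator of TypedJob wrappers, so no pages are fetched until
# a caller iterates; e.g. (hypothetical command instance and parsed args):
#
#   for typed_job in List().Run(args):
#       ...  # each item wraps one Dataproc job message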