Example #1
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: all the arguments that were provided to this command invocation.
    """
        drained = []
        failed = []
        output_stream = log.status.GetConsoleWriterStream()
        for job_ref in job_utils.ExtractJobRefs(self.context, args):
            output_stream.flush()
            try:
                output_stream.write(
                    'Starting drain for job \'{0}\' ... '.format(
                        job_ref.jobId))
                self._DrainJob(job_ref)
                output_stream.write('Success\n')
                drained.append(job_ref.jobId)
            except exceptions.HttpError as error:
                reason = dataflow_util.GetErrorMessage(error)
                output_stream.write('Failure: {0}\n'.format(reason))
                failed.append(job_ref.jobId)
        if drained:
            log.status.Print('Started draining jobs: [{0}]'.format(
                ','.join(drained)))
        if failed:
            log.status.Print('Failed to start draining jobs: [{0}]'.format(
                ','.join(failed)))
Example #2
def GetJob(context, job_ref, view=JOB_VIEW_SUMMARY, required=True):
    """Retrieve a specific view of a job.

    Args:
      context: Command context.
      job_ref: The resource reference of the job to retrieve.
      view: The job view to retrieve. Should be JOB_VIEW_SUMMARY or JOB_VIEW_ALL.
      required: If true and the job doesn't exist, raises an exception.

    Returns:
      The requested Job message, or None if the job does not exist and
      required is false.
    """
    apitools_client = context[commands.DATAFLOW_APITOOLS_CLIENT_KEY]

    request = job_ref.Request()
    request.view = view.JobsGetRequest(context)

    try:
        return apitools_client.projects_jobs.Get(request)
    except exceptions.HttpError as error:
        if error.status_code == 404:
            msg = 'No job with ID [{0}] in project [{1}]'.format(
                job_ref.jobId, job_ref.projectId)

            if required:
                raise calliope_exceptions.ToolException(msg)
            else:
                # Turn 'Not Found' exceptions into None.
                log.status.Print(msg)
                return None
        raise calliope_exceptions.HttpException(
            'Failed to get job with ID [{0}] in project [{1}]: {2}'.format(
                job_ref.jobId, job_ref.projectId,
                dataflow_util.GetErrorMessage(error)))
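
A usage sketch for GetJob (not taken from the original source): the command wiring mirrors the Run methods in the other examples, and the currentState field read from the returned Job message is an assumption about the generated client. The point is how the view and required arguments change GetJob's behavior.

    def Run(self, args):
        """Illustrative only: fetch a single job in two different ways."""
        job_ref = job_utils.ExtractJobRef(self.context, args)

        # Default summary view; raises a ToolException if the job is missing.
        job = GetJob(self.context, job_ref)

        # Full view with required=False: a missing job prints a message and
        # returns None instead of raising.
        detailed = GetJob(self.context, job_ref, view=JOB_VIEW_ALL,
                          required=False)
        if detailed is not None:
            log.status.Print('Current state: {0}'.format(detailed.currentState))
        return job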
Example #3
def YieldFromList(service,
                  request,
                  limit=None,
                  batch_size=100,
                  method='List',
                  field='items',
                  predicate=None,
                  current_token_attribute='pageToken',
                  next_token_attribute='nextPageToken'):
    """Make a series of List requests, keeping track of page tokens.

  Args:
    service: base_api.BaseApiService, A service with a .List() method.
    request: protorpc.messages.Message, The request message corresponding to the
        service's .List() method, with all the attributes populated except
        the .maxResults and .pageToken attributes.
    limit: int, The maximum number of records to yield. None if all available
        records should be yielded.
    batch_size: int, The number of items to retrieve per request.
    method: str, The name of the method used to fetch resources.
    field: str, The field in the response that will be a list of items.
    predicate: lambda, A function that returns true for items to be yielded.
    current_token_attribute: str, The name of the attribute in a request message
        holding the page token for the page being requested.
    next_token_attribute: str, The name of the attribute in a response message
        holding the page token for the next page.

  Yields:
    protorpc.message.Message, The resources listed by the service.

  """
    request = copy.deepcopy(request)
    request.pageSize = batch_size
    request.pageToken = None
    while limit is None or limit:
        try:
            response = getattr(service, method)(request)
        except exceptions.HttpError as error:
            raise calliope_exceptions.HttpException('RPC Failed: {0}'.format(
                dataflow_util.GetErrorMessage(error)))
        items = getattr(response, field)
        if predicate:
            items = filter(predicate, items)
        for item in items:
            yield item
            if limit is None:
                continue
            limit -= 1
            if not limit:
                return
        token = getattr(response, next_token_attribute)
        if not token:
            return
        setattr(request, current_token_attribute, token)
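
A usage sketch for YieldFromList, assuming a command context built the same way as in the Run methods above. DataflowProjectsJobsListRequest, the 'jobs' response field, the CurrentStateValueValuesEnum enum, and the properties-based project lookup are assumed names, not confirmed by the original snippet; the point is how limit, batch_size, field, and predicate work together.

    def Run(self, args):
        """Illustrative only: page through jobs, yielding at most 50 of them."""
        apitools_client = self.context[commands.DATAFLOW_APITOOLS_CLIENT_KEY]
        dataflow_messages = self.context[commands.DATAFLOW_MESSAGES_MODULE_KEY]

        request = dataflow_messages.DataflowProjectsJobsListRequest(
            projectId=properties.VALUES.core.project.Get(required=True))

        # Pages of 25 jobs; skip cancelled jobs; stop after 50 yielded items.
        return YieldFromList(
            apitools_client.projects_jobs,
            request,
            limit=50,
            batch_size=25,
            field='jobs',
            predicate=lambda job: job.currentState != (
                dataflow_messages.Job.CurrentStateValueValuesEnum
                .JOB_STATE_CANCELLED))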
Example #4
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: all the arguments that were provided to this command invocation.
    """
        for job_ref in job_utils.ExtractJobRefs(self.context, args.jobs):
            try:
                self._CancelJob(job_ref)
                log.status.Print('Cancelled job [{0}]'.format(job_ref.jobId))
            except exceptions.HttpError as error:
                log.status.Print('Failed to cancel job [{0}]: {1}'.format(
                    job_ref.jobId, dataflow_util.GetErrorMessage(error)))
Example #5
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: all the arguments that were provided to this command invocation.
    """
        output_stream = log.status.GetConsoleWriterStream()
        for job_ref in job_utils.ExtractJobRefs(self.context, args.jobs):
            output_stream.flush()
            try:
                self._DrainJob(job_ref)
                log.status.Print('Started draining job [{0}]'.format(
                    job_ref.jobId))
            except exceptions.HttpError as error:
                log.status.Print('Failed to drain job [{0}]: {1}'.format(
                    job_ref.jobId, dataflow_util.GetErrorMessage(error)))
Example #6
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: all the arguments that were provided to this command invocation.

    Returns:
      None on success, or a string containing the error message.
    """
        apitools_client = self.context[commands.DATAFLOW_APITOOLS_CLIENT_KEY]
        dataflow_messages = self.context[commands.DATAFLOW_MESSAGES_MODULE_KEY]
        job_ref = job_utils.ExtractJobRef(self.context, args)

        start_time = args.changed_after and time_util.Strftime(
            args.changed_after)
        request = dataflow_messages.DataflowProjectsJobsGetMetricsRequest(
            projectId=job_ref.projectId,
            jobId=job_ref.jobId,
            startTime=start_time)

        preds = []
        if not args.tentative and args.hide_committed:
            raise calliope_exceptions.ToolException(
                'Cannot exclude both tentative and committed metrics.')
        elif not args.tentative and not args.hide_committed:
            preds.append(
                lambda m: self._GetContextValue(m, 'tentative') != 'true')
        elif args.tentative and args.hide_committed:
            preds.append(
                lambda m: self._GetContextValue(m, 'tentative') == 'true')

        if args.changed_after:
            parsed_time = time_util.ParseTimeArg(args.changed_after)
            preds.append(
                lambda m: time_util.ParseTimeArg(m.updateTime) > parsed_time)

        try:
            response = apitools_client.projects_jobs.GetMetrics(request)
        except apitools_base.HttpError as error:
            raise calliope_exceptions.HttpException(
                'Failed to get metrics for job with ID [{0}] in project [{1}]: {2}'
                .format(job_ref.jobId, job_ref.projectId,
                        dataflow_util.GetErrorMessage(error)))

        return [
            m for m in response.metrics if all([pred(m) for pred in preds])
        ]
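
The return statement above keeps only the metrics that pass every accumulated predicate. A minimal standalone sketch of that composition pattern, using a hypothetical Metric namedtuple in place of the real metric update messages:

import collections

# Hypothetical stand-in for the metric update messages in response.metrics.
Metric = collections.namedtuple('Metric', ['name', 'tentative', 'updateTime'])

metrics = [
    Metric('elements_read', tentative='true', updateTime=300),
    Metric('elements_read', tentative='false', updateTime=100),
    Metric('bytes_written', tentative='false', updateTime=200),
]

preds = []
preds.append(lambda m: m.tentative != 'true')   # analogous to hiding tentative values
preds.append(lambda m: m.updateTime > 150)      # analogous to a --changed-after cutoff

# Same shape as the return statement above: keep metrics passing every predicate.
selected = [m for m in metrics if all(pred(m) for pred in preds)]
print([m.name for m in selected])               # prints ['bytes_written']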