Example #1
0
def Build(messages,
          async_,
          build_config,
          hide_logs=False,
          build_region=cloudbuild_util.DEFAULT_REGION):
    """Submits a regional Cloud Build and, unless async, streams its logs.

    Args:
      messages: Cloud Build messages module.
      async_: bool, if True return immediately after creating the build.
      build_config: Build message to submit.
      hide_logs: bool, if True suppress the created-resource/log-URL output.
      build_region: str, region to run the build in.

    Returns:
      A (build, operation) pair for the submitted build.

    Raises:
      FailedBuildException: if the finished build status is not SUCCESS.
    """
    log.debug('submitting build: ' + repr(build_config))
    cb_client = cloudbuild_util.GetClientInstance()

    parent = resources.REGISTRY.Create(
        collection='cloudbuild.projects.locations',
        projectsId=properties.VALUES.core.project.GetOrFail(),
        locationsId=build_region)

    operation = cb_client.projects_locations_builds.Create(
        messages.CloudbuildProjectsLocationsBuildsCreateRequest(
            parent=parent.RelativeName(), build=build_config))

    # The long-running operation's metadata embeds the created Build.
    metadata_json = encoding.MessageToJson(operation.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                   metadata_json).build

    # Need to set the default version to 'v1'
    build_ref = resources.REGISTRY.Parse(
        None,
        collection='cloudbuild.projects.locations.builds',
        api_version='v1',
        params={
            'projectsId': build.projectId,
            'locationsId': build_region,
            'buildsId': build.id,
        })

    if not hide_logs:
        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.Print('Logs are available at [{log_url}].'.format(
                log_url=build.logUrl))
        else:
            log.status.Print('Logs are available in the Cloud Console.')

    # Under --async there is nothing to stream; hand back the references.
    if async_:
        return build, operation

    cancel_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(cb_client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(cancel_handler):
        build = cb_logs.CloudBuildClient(cb_client, messages).Stream(build_ref)

    status_enum = messages.Build.StatusValueValuesEnum
    if build.status == status_enum.TIMEOUT:
        log.status.Print(
            'Your build timed out. Use the [--timeout=DURATION] flag to change '
            'the timeout threshold.')

    if build.status != status_enum.SUCCESS:
        raise FailedBuildException(build)

    return build, operation
Example #2
0
def WaitFor(poller, operation_ref, message):
    """Waits with retries for operation to be done given poller.

  Args:
    poller: OperationPoller, poller to use during retrials.
    operation_ref: object, passed to operation poller poll method.
    message: str, string to display for progress_tracker.

  Returns:
    poller.GetResult(operation).

  Raises:
    AbortWaitError: if ctrl-c was pressed.
    TimeoutError: if retryer has finished without being done.
  """
    def _CtrlCHandler(unused_signal, unused_frame):
        # Translate SIGINT into AbortWaitError so the outer handler can report
        # the abort after the progress tracker has been torn down.
        raise AbortWaitError('Ctrl-C aborted wait.')

    try:
        with execution_utils.CtrlCSection(_CtrlCHandler):
            try:
                with progress_tracker.ProgressTracker(message) as tracker:

                    # Optional delay before the first poll attempt.
                    if poller.PRE_START_SLEEP_MS:
                        _SleepMs(poller.PRE_START_SLEEP_MS)

                    def _StatusUpdate(unused_result, unused_status):
                        # Advance the tracker spinner on each poll.
                        tracker.Tick()

                    # Retry parameters all come from the poller's class-level
                    # constants.
                    retryer = retry.Retryer(
                        max_retrials=poller.MAX_RETRIALS,
                        max_wait_ms=poller.MAX_WAIT_MS,
                        exponential_sleep_multiplier=poller.
                        EXPONENTIAL_SLEEP_MULTIPLIER,
                        jitter_ms=poller.JITTER_MS,
                        status_update_func=_StatusUpdate)

                    def _IsNotDone(operation, unused_state):
                        # Keep retrying while the poller reports the operation
                        # as still in progress.
                        return not poller.IsDone(operation)

                    operation = retryer.RetryOnResult(
                        func=poller.Poll,
                        args=(operation_ref, ),
                        should_retry_if=_IsNotDone,
                        sleep_ms=poller.SLEEP_MS)
            except retry.RetryException:
                raise TimeoutError(
                    'Operation {0} has not finished in {1} seconds'.format(
                        operation_ref, int(poller.MAX_WAIT_MS / 1000)))
    except AbortWaitError:
        # Write this out now that progress tracker is done.
        sys.stderr.write(
            'Aborting wait for operation {0}.\n'.format(operation_ref))
        raise

    return poller.GetResult(operation)
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.
    """
        # TODO(b/33234717): remove this after deprecation period
        flags.ProcessPackages(args)

        region = properties.VALUES.compute.region.Get(required=True)
        uris = jobs_prep.RunSetupAndUpload(args.packages, args.staging_bucket,
                                           args.package_path, args.job)
        log.debug('Using {0} as trainer uris'.format(uris))
        job = jobs.BuildTrainingJob(path=args.config,
                                    module_name=args.module_name,
                                    job_name=args.job,
                                    trainer_uri=uris,
                                    region=region,
                                    user_args=args.user_args)

        job = jobs.Create(job)
        # Bug fix: `async` is a reserved keyword in Python 3 (and the original
        # `args. async` had a stray space), so the argparse attribute must be
        # read via getattr rather than plain attribute access.
        if getattr(args, 'async', False):
            log.status.Print('Job [{}] submitted successfully.'.format(
                job.jobId))
            log.status.Print(_FOLLOW_UP_MESSAGE.format(job_id=job.jobId))
            return job

        log_fetcher = jobs.LogFetcher(job_id=job.jobId,
                                      polling_interval=_POLLING_INTERVAL,
                                      allow_multiline_logs=False)

        printer = resource_printer.Printer(jobs.LogFetcher.LOG_FORMAT,
                                           out=log.err)

        def _CtrlCHandler(signal, frame):
            del signal, frame  # Unused
            raise KeyboardInterrupt

        with execution_utils.CtrlCSection(_CtrlCHandler):
            try:
                printer.Print(log_fetcher.YieldLogs())
            except KeyboardInterrupt:
                log.status.Print('Received keyboard interrupt.')
                log.status.Print(_FOLLOW_UP_MESSAGE.format(job_id=job.jobId))

        job = jobs.Get(job.jobId)
        # If the job itself failed, we will return a failure status.
        # Compare enum values with != rather than `is not`: equality is the
        # documented contract; identity only happens to hold for cached
        # message-enum instances.
        if job.state != job.StateValueValuesEnum.SUCCEEDED:
            self.exit_code = 1

        return job
Example #4
0
def _RunBuild(name, arch, function_type, source):
    """Runs a Cloud Build that produces the Edge Function image.

    Args:
      name: str, name of the Edge Function.
      arch: str, target architecture; one of 'x86-64', 'armhf', 'aarch64'.
      function_type: str, one of 'on-demand', 'stream-processing'.
      source: str, GCS URI of a source archive object or local path to a
        source directory.

    Returns:
      The finished cloudbuild Build message; build.results.images holds the
      built image's name and digest.

    Raises:
      FailedBuildException: if the completed build's status is not 'SUCCESS'.
      FunctionBuilderError: for invalid arguments.
    """
    cb_client = cloudbuild_util.GetClientInstance()
    cb_messages = cloudbuild_util.GetMessagesModule()

    build_config = _EdgeFunctionBuildMessage(name, arch, function_type, source)
    log.debug('submitting build: ' + repr(build_config))

    # Kick off the build; the returned operation's metadata embeds the Build.
    operation = cb_client.projects_builds.Create(
        cb_messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config,
            projectId=properties.VALUES.core.project.Get()))
    op_json = encoding.MessageToJson(operation.metadata)
    build = encoding.JsonToMessage(cb_messages.BuildOperationMetadata,
                                   op_json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)

    cancel_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(cb_client, cb_messages, build_ref))

    # Stream logs from GCS until the build completes.
    with execution_utils.CtrlCSection(cancel_handler):
        build = cb_logs.CloudBuildClient(cb_client,
                                         cb_messages).Stream(build_ref)

    if build.status != cb_messages.Build.StatusValueValuesEnum.SUCCESS:
        raise FailedBuildException(build)

    return build
Example #5
0
def Build(messages, async_, build_config):
    """Submits the build and, unless async, streams its logs to completion.

    Args:
      messages: Cloud Build messages module.
      async_: bool, if True return right after the build is created.
      build_config: Build message to submit.

    Returns:
      The Build message (in progress when async, final otherwise).

    Raises:
      FailedBuildException: if the finished build status is not SUCCESS.
    """
    log.debug('submitting build: ' + repr(build_config))
    cb_client = cloudbuild_util.GetClientInstance()
    operation = cb_client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config,
            projectId=properties.VALUES.core.project.Get()))
    # The operation metadata embeds the created Build resource.
    op_json = encoding.MessageToJson(operation.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                   op_json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
        log.status.Print(
            'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
    else:
        log.status.Print('Logs are available in the Cloud Console.')

    # With --async there is nothing to stream; return the reference.
    if async_:
        return build

    cancel_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(cb_client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(cancel_handler):
        build = cb_logs.CloudBuildClient(cb_client, messages).Stream(build_ref)

    status_enum = messages.Build.StatusValueValuesEnum
    if build.status == status_enum.TIMEOUT:
        log.status.Print(
            'Your build timed out. Use the [--timeout=DURATION] flag to change '
            'the timeout threshold.')

    if build.status != status_enum.SUCCESS:
        raise FailedBuildException(build)

    return build
Example #6
0
def _RunCloudBuild(args,
                   builder,
                   build_args,
                   build_tags=None,
                   output_filter=None,
                   log_location=None,
                   backoff=lambda elapsed: 1):
    """Run a build with a specific builder on Google Cloud Builder.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
    builder: path to builder image
    build_args: args to be sent to builder
    build_tags: tags to be attached to the build
    output_filter: A list of strings indicating what lines from the log should
      be output. Only lines that start with one of the strings in output_filter
      will be displayed.
    log_location: GCS path to directory where logs will be stored.
    backoff: A function that takes the current elapsed time and returns
      the next sleep length. Both are in seconds.

  Returns:
    A build object that either streams the output or is displayed as a
    link to the build.

  Raises:
    FailedBuildException: If the build is completed and not 'SUCCESS'.
  """
    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    # Create the build request.
    build_config = messages.Build(
        steps=[
            messages.BuildStep(
                name=builder,
                args=build_args,
            ),
        ],
        tags=build_tags,
        timeout='{0}s'.format(args.timeout),
    )
    if log_location:
        # Bug fix: parse the log_location parameter that was just checked,
        # not args.log_location — the two are not necessarily the same value.
        gcs_log_dir = resources.REGISTRY.Parse(log_location,
                                               collection='storage.objects')

        build_config.logsBucket = ('gs://{0}/{1}'.format(
            gcs_log_dir.bucket, gcs_log_dir.object))

    # Start the build.
    build, build_ref = _CreateCloudBuild(build_config, client, messages)

    # If the command is run --async, we just print out a reference to the build.
    if args.async_:
        return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS, filtered through output_filter.
    with execution_utils.CtrlCSection(mash_handler):
        build = CloudBuildClientWithFiltering(
            client, messages).StreamWithFilter(build_ref,
                                               backoff,
                                               output_filter=output_filter)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
        log.status.Print(
            'Your build timed out. Use the [--timeout=DURATION] flag to change '
            'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
        raise FailedBuildException(build)

    return build
Example #7
0
    def _SubmitBuild(self, client, messages, build_config,
                     gcs_config_staging_path, suggest_configs, async_):
        """Submits the build.

    Args:
      client: Client used to make calls to Cloud Build API.
      messages: Cloud Build messages module. This is the value returned from
        cloudbuild_util.GetMessagesModule().
      build_config: Build to submit.
      gcs_config_staging_path: A path to a GCS subdirectory where deployed
        configs will be saved to. This value will be printed to the user.
      suggest_configs: If True, suggest YAML configs for the user to add to
        their repo.
      async_: If true, exit immediately after submitting Build, rather than
        waiting for it to complete or fail.

    Returns:
      None.

    Raises:
      FailedDeployException: If the build is completed and not 'SUCCESS'.
    """
        project = properties.VALUES.core.project.Get(required=True)
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(build=build_config,
                                                           projectId=project))
        log.debug('submitting build: ' + six.text_type(build_config))

        # The long-running operation's metadata embeds the created Build.
        json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       json).build

        build_ref = resources.REGISTRY.Create(
            collection='cloudbuild.projects.builds',
            projectId=build.projectId,
            id=build.id)

        log.status.Print(
            'Starting Cloud Build to build and deploy to the target '
            'Google Kubernetes Engine cluster...\n')

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.Print('Logs are available at [{log_url}].'.format(
                log_url=build.logUrl))
        else:
            log.status.Print('Logs are available in the Cloud Console.')

        # GCS locations where the build will drop the expanded and (optionally)
        # suggested Kubernetes config files.
        suggested_configs_path = build_util.SuggestedConfigsPath(
            gcs_config_staging_path, build.id)
        expanded_configs_path = build_util.ExpandedConfigsPath(
            gcs_config_staging_path, build.id)

        if async_:
            # With --async, just tell the user where the outputs will land.
            log.status.Print(
                '\nIf successful, you can find the configuration files of the deployed '
                'Kubernetes objects stored at gs://{expanded} or by visiting '
                'https://console.cloud.google.com/storage/browser/{expanded}/.'
                .format(expanded=expanded_configs_path))
            if suggest_configs:
                log.status.Print(
                    '\nYou will also be able to find the suggested base Kubernetes '
                    'configuration files at gs://{suggested} or by visiting '
                    'https://console.cloud.google.com/storage/browser/{suggested}/.'
                    .format(suggested=suggested_configs_path))

            # Return here, otherwise, logs are streamed from GCS.
            return

        # Allow Ctrl-C to cancel the in-flight build rather than just exiting.
        mash_handler = execution.MashHandler(
            execution.GetCancelBuildHandler(client, messages, build_ref))

        with execution_utils.CtrlCSection(mash_handler):
            build = cb_logs.CloudBuildClient(client,
                                             messages).Stream(build_ref)

        if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
            log.status.Print(
                'Your build and deploy timed out. Use the [--timeout=DURATION] flag '
                'to change the timeout threshold.')

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            # Even on failure, the save-configs step may have succeeded, in
            # which case the expanded configs are still available to the user.
            if build_util.SaveConfigsBuildStepIsSuccessful(messages, build):
                log.status.Print(
                    'You can find the configuration files for this attempt at gs://{}.'
                    .format(expanded_configs_path))
            raise FailedDeployException(build)

        log.status.Print(
            'Successfully deployed to your Google Kubernetes Engine cluster.\n\n'
            'You can find the configuration files of the deployed Kubernetes '
            'objects stored at gs://{expanded} or by visiting '
            'https://console.cloud.google.com/storage/browser/{expanded}/.'.
            format(expanded=expanded_configs_path))
        if suggest_configs:
            log.status.Print(
                '\nYou can also find suggested base Kubernetes configuration files at '
                'gs://{suggested} or by visiting '
                'https://console.cloud.google.com/storage/browser/{suggested}/.'
                .format(suggested=suggested_configs_path))
Example #8
0
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    project = properties.VALUES.core.project.Get(required=True)
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.builds.timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag is not None:
      if (properties.VALUES.builds.check_tag.GetBool() and
          'gcr.io/' not in args.tag):
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      if properties.VALUES.builds.use_kaniko.GetBool():
        if args.no_cache:
          ttl = '0h'
        else:
          ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
        build_config = messages.Build(
            steps=[
                messages.BuildStep(
                    name=properties.VALUES.builds.kaniko_image.Get(),
                    args=[
                        '--destination', args.tag, '--cache', 'true',
                        '--cache-ttl', ttl
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
      else:
        if args.no_cache:
          raise c_exceptions.InvalidArgumentException(
              'no-cache',
              'Cannot specify --no-cache if builds/use_kaniko property is '
              'False')
        build_config = messages.Build(
            images=[args.tag],
            steps=[
                messages.BuildStep(
                    name='gcr.io/cloud-builders/docker',
                    args=[
                        'build', '--network', 'cloudbuild', '--no-cache', '-t',
                        args.tag, '.'
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
    elif args.config is not None:
      if args.no_cache:
        raise c_exceptions.ConflictingArgumentsException(
            '--config', '--no-cache')
      if not args.config:
        raise c_exceptions.InvalidArgumentException(
            '--config', 'Config file path must not be empty.')
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)
    else:
      raise c_exceptions.OneOfArgumentsRequiredException(
          ['--tag', '--config'],
          'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    # --no-source overrides the default --source.
    if not args.IsSpecified('source') and args.no_source:
      args.source = None

    gcs_source_staging = None
    if args.source:
      suffix = '.tgz'
      if args.source.startswith('gs://') or os.path.isfile(args.source):
        _, suffix = os.path.splitext(args.source)

      # Next, stage the source to Cloud Storage.
      staged_object = '{stamp}-{uuid}{suffix}'.format(
          stamp=times.GetTimeStampFromDateTime(times.Now()),
          uuid=uuid.uuid4().hex,
          suffix=suffix,
      )
      gcs_source_staging_dir = resources.REGISTRY.Parse(
          args.gcs_source_staging_dir, collection='storage.objects')

      # We create the bucket (if it does not exist) first. If we do an existence
      # check and then create the bucket ourselves, it would be possible for an
      # attacker to get lucky and beat us to creating the bucket. Block on this
      # creation to avoid this race condition.
      gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

      # If no bucket is specified (for the source `default_gcs_source`), check
      # that the default bucket is also owned by the project (b/33046325).
      if default_gcs_source:
        # This request returns only the buckets owned by the project.
        bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
            project=project, prefix=default_bucket_name)
        bucket_list = gcs_client.client.buckets.List(bucket_list_req)
        found_bucket = False
        for bucket in bucket_list.items:
          if bucket.id == default_bucket_name:
            found_bucket = True
            break
        # default_gcs_source is necessarily True inside this branch, so the
        # original nested re-check of it was redundant and has been removed.
        if not found_bucket:
          raise c_exceptions.RequiredArgumentException(
              'gcs_source_staging_dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs_source_staging_dir.'.format(default_bucket_name))

      if gcs_source_staging_dir.object:
        staged_object = gcs_source_staging_dir.object + '/' + staged_object
      gcs_source_staging = resources.REGISTRY.Create(
          collection='storage.objects',
          bucket=gcs_source_staging_dir.bucket,
          object=staged_object)

      if args.source.startswith('gs://'):
        gcs_source = resources.REGISTRY.Parse(
            args.source, collection='storage.objects')
        staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      else:
        if not os.path.exists(args.source):
          raise c_exceptions.BadFileException(
              'could not find source [{src}]'.format(src=args.source))
        if os.path.isdir(args.source):
          source_snapshot = snapshot.Snapshot(args.source,
                                              ignore_file=args.ignore_file)
          size_str = resource_transform.TransformSize(
              source_snapshot.uncompressed_size)
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
          staged_source_obj = source_snapshot.CopyTarballToGCS(
              gcs_client, gcs_source_staging, ignore_file=args.ignore_file)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
        elif os.path.isfile(args.source):
          unused_root, ext = os.path.splitext(args.source)
          if ext not in _ALLOWED_SOURCE_EXT:
            # Bug fix: the original concatenated the extension list after an
            # unfilled '{src}' placeholder and never called .format, so the
            # message was emitted with a literal '{src}' in it.
            raise c_exceptions.BadFileException(
                'Local file [{src}] is none of {extensions}'.format(
                    src=args.source,
                    extensions=', '.join(_ALLOWED_SOURCE_EXT)))
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=args.source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
          staged_source_obj = gcs_client.CopyFileToGCS(args.source,
                                                       gcs_source_staging)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
    else:
      # No source
      if not args.no_source:
        raise c_exceptions.InvalidArgumentException(
            '--no-source', 'To omit source, use the --no-source flag.')

    if args.gcs_log_dir:
      gcs_log_dir = resources.REGISTRY.Parse(
          args.gcs_log_dir, collection='storage.objects')

      build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                 gcs_log_dir.object)

    # Machine type.
    if args.machine_type is not None:
      machine_type = Submit._machine_type_flag_map.GetEnumForChoice(
          args.machine_type)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.machineType = machine_type

    # Disk size.
    if args.disk_size is not None:
      disk_size = compute_utils.BytesToGb(args.disk_size)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.diskSizeGb = int(disk_size)

    log.debug('submitting build: ' + repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config, projectId=properties.VALUES.core.project.Get()))
    json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print(
          'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the build.
    # Bug fix: `async` is a reserved keyword in Python 3, so `args.async` is a
    # SyntaxError; read the attribute via getattr instead (same attribute name,
    # so behavior is unchanged on Python 2 argparse namespaces too).
    if getattr(args, 'async', False):
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
def WaitFor(poller, operation_ref, message,
            pre_start_sleep_ms=1000,
            max_retrials=None,
            max_wait_ms=300000,
            exponential_sleep_multiplier=1.4,
            jitter_ms=1000,
            wait_ceiling_ms=180000,
            sleep_ms=2000):
  """Waits with retries for operation to be done given poller.

  Args:
    poller: OperationPoller, poller to use during retrials.
    operation_ref: object, passed to operation poller poll method.
    message: str, string to display for progress_tracker.
    pre_start_sleep_ms: int, Time to wait before making first poll request.
    max_retrials: int, max number of retrials before raising RetryException.
    max_wait_ms: int, number of ms to wait before raising WaitException.
    exponential_sleep_multiplier: float, factor to use on subsequent retries.
    jitter_ms: int, random (up to the value) additional sleep between retries.
    wait_ceiling_ms: int, Maximum wait between retries.
    sleep_ms: int or iterable: for how long to wait between trials.

  Returns:
    poller.GetResult(operation).

  Raises:
    AbortWaitError: if ctrl-c was pressed.
    TimeoutError: if retryer has finished without being done.
  """

  def _CtrlCHandler(unused_signal, unused_frame):
    # Translate SIGINT into AbortWaitError so the abort can be reported after
    # the progress tracker has been torn down.
    raise AbortWaitError('Ctrl-C aborted wait.')

  try:
    with execution_utils.CtrlCSection(_CtrlCHandler):
      try:
        with progress_tracker.ProgressTracker(message) as tracker:

          # Optional delay before the first poll attempt.
          if pre_start_sleep_ms:
            _SleepMs(pre_start_sleep_ms)

          def _StatusUpdate(unused_result, unused_status):
            # Advance the tracker spinner on each poll.
            tracker.Tick()

          retryer = retry.Retryer(
              max_retrials=max_retrials,
              max_wait_ms=max_wait_ms,
              exponential_sleep_multiplier=exponential_sleep_multiplier,
              jitter_ms=jitter_ms,
              wait_ceiling_ms=wait_ceiling_ms,
              status_update_func=_StatusUpdate)

          def _IsNotDone(operation, unused_state):
            # Keep retrying while the poller reports the operation as still
            # in progress.
            return not poller.IsDone(operation)

          operation = retryer.RetryOnResult(
              func=poller.Poll,
              args=(operation_ref,),
              should_retry_if=_IsNotDone,
              sleep_ms=sleep_ms)
      # Distinguish the two retry-exhaustion modes in the error message:
      # overall deadline exceeded vs. retrial count exhausted.
      except retry.WaitException:
        raise TimeoutError(
            'Operation {0} has not finished in {1} seconds'
            .format(operation_ref, int(max_wait_ms / 1000)))
      except retry.MaxRetrialsException as e:
        raise TimeoutError(
            'Operation {0} has not finished in {1} seconds '
            'after max {2} retrials'
            .format(operation_ref,
                    int(e.state.time_passed_ms / 1000), e.state.retrial))

  except AbortWaitError:
    # Write this out now that progress tracker is done.
    sys.stderr.write('Aborting wait for operation {0}.\n'.format(operation_ref))
    raise

  return poller.GetResult(operation)
def RunDaisyBuild(args,
                  workflow,
                  variables,
                  daisy_bucket=None,
                  tags=None,
                  user_zone=None,
                  output_filter=None):
    """Run a build with Daisy on Google Cloud Builder.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
    workflow: The path to the Daisy workflow to run.
    variables: A string of key-value pairs to pass to Daisy.
    daisy_bucket: A string containing the name of the GCS bucket that daisy
      should use.
    tags: A list of strings for adding tags to the Argo build.
    user_zone: The GCP zone to tell Daisy to do work in. If unspecified,
      defaults to wherever the Argo runner happens to be.
    output_filter: A list of strings indicating what lines from the log should
      be output. Only lines that start with one of the strings in output_filter
      will be displayed.

  Returns:
    A build object that either streams the output or is displayed as a
    link to the build.

  Raises:
    FailedBuildException: If the build is completed and not 'SUCCESS'.
  """
    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()
    project_id = projects_util.ParseProject(
        properties.VALUES.core.project.GetOrFail())

    CheckIamPermissions(project_id)

    # Make Daisy time out before gcloud by shaving off 2% from the timeout time,
    # up to a max of 5m (300s), so Daisy can clean up before gcloud gives up.
    two_percent = int(args.timeout * 0.02)
    daisy_timeout = args.timeout - min(two_percent, 300)

    daisy_bucket = daisy_bucket or GetAndCreateDaisyBucket()

    daisy_args = [
        '-gcs_path=gs://{0}/'.format(daisy_bucket),
        '-default_timeout={0}s'.format(daisy_timeout),
        '-variables={0}'.format(variables),
        workflow,
    ]
    if user_zone is not None:
        daisy_args = ['-zone={0}'.format(user_zone)] + daisy_args

    build_tags = ['gce-daisy']
    if tags:
        build_tags.extend(tags)

    # First, create the build request.
    build_config = messages.Build(
        steps=[
            messages.BuildStep(
                name=_BUILDER,
                args=daisy_args,
            ),
        ],
        tags=build_tags,
        timeout='{0}s'.format(args.timeout),
    )
    if args.log_location:
        gcs_log_dir = resources.REGISTRY.Parse(args.log_location,
                                               collection='storage.objects')

        build_config.logsBucket = ('gs://{0}/{1}'.format(
            gcs_log_dir.bucket, gcs_log_dir.object))

    # Start the build.
    build, build_ref = _CreateCloudBuild(build_config, client, messages)

    # If the command is run --async, we just print out a reference to the build.
    # `async` became a reserved keyword in Python 3.7, so the flag attribute
    # must be read via getattr instead of `args.async` (which is a SyntaxError).
    if getattr(args, 'async'):
        return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS; Ctrl-C cancels the build.
    with execution_utils.CtrlCSection(mash_handler):
        build = DaisyCloudBuildClient(client, messages).StreamWithFilter(
            build_ref, output_filter=output_filter)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
        log.status.Print(
            'Your build timed out. Use the [--timeout=DURATION] flag to change '
            'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
        raise FailedBuildException(build)

    return build
Exemple #11
0
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    project = properties.VALUES.core.project.Get()
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    # Track whether the default bucket is used for source and/or logs so that
    # bucket ownership can be verified below (b/33046325).
    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    default_gcs_log_dir = False
    if args.gcs_log_dir is None:
      default_gcs_log_dir = True
      args.gcs_log_dir = 'gs://{}/logs'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.container.build_timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = str(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag:
      if 'gcr.io/' not in args.tag:
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      build_config = messages.Build(
          images=[args.tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=['build', '--no-cache', '-t', args.tag, '.'],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(args.substitutions,
                                                            messages)
      )
    elif args.config:
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)
    # NOTE(review): if neither --tag nor --config is given, build_config is
    # unbound and the next use raises NameError — presumably argparse enforces
    # that exactly one of the two flags is present; confirm against the
    # command's flag definitions.

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    suffix = '.tgz'
    if args.source.startswith('gs://') or os.path.isfile(args.source):
      _, suffix = os.path.splitext(args.source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        args.gcs_source_staging_dir, collection='storage.objects')

    # We first try to create the bucket, before doing all the checks, in order
    # to avoid a race condition. If we do the check first, an attacker could
    # be lucky enough to create the bucket after the check and before this
    # bucket creation.
    gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

    # If no bucket is specified (for the source `default_gcs_source` or for the
    # logs `default_gcs_log_dir`), check that the default bucket is also owned
    # by the project (b/33046325).
    if default_gcs_source or default_gcs_log_dir:
      # This request returns only the buckets owned by the project.
      bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
          project=project,
          prefix=default_bucket_name)
      bucket_list = gcs_client.client.buckets.List(bucket_list_req)
      found_bucket = False
      for bucket in bucket_list.items:
        if bucket.id == default_bucket_name:
          found_bucket = True
          break
      if not found_bucket:
        if default_gcs_source:
          raise c_exceptions.RequiredArgumentException(
              'gcs_source_staging_dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs_source_staging_dir.'.format(default_bucket_name))
        elif default_gcs_log_dir:
          raise c_exceptions.RequiredArgumentException(
              'gcs-log-dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket to hold build logs '
              'using --gcs-log-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object

    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if args.source.startswith('gs://'):
      gcs_source = resources.REGISTRY.Parse(
          args.source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(args.source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=args.source))
      if os.path.isdir(args.source):
        source_snapshot = snapshot.Snapshot(args.source)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files),
                size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(args.source):
        unused_root, ext = os.path.splitext(args.source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # Fill in the placeholders: the previous message concatenated the
          # literal '{src}' placeholder without ever formatting it.
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {extensions}'.format(
                  src=args.source,
                  extensions=', '.join(_ALLOWED_SOURCE_EXT)))
        log.status.Print(
            'Uploading local file [{src}] to '
            '[gs://{bucket}/{object}].'.format(
                src=args.source,
                bucket=gcs_source_staging.bucket,
                object=gcs_source_staging.object,
            ))
        staged_source_obj = gcs_client.CopyFileToGCS(
            storage_util.BucketReference.FromBucketUrl(
                gcs_source_staging.bucket),
            args.source, gcs_source_staging.object)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))

    gcs_log_dir = resources.REGISTRY.Parse(
        args.gcs_log_dir, collection='storage.objects')

    if gcs_log_dir.bucket != gcs_source_staging.bucket:
      # Create the logs bucket if it does not yet exist.
      gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
    build_config.logsBucket = 'gs://'+gcs_log_dir.bucket+'/'+gcs_log_dir.object

    log.debug('submitting build: '+repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config,
            projectId=properties.VALUES.core.project.Get()))
    # Renamed from `json` to avoid shadowing the stdlib module name.
    op_metadata_json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                   op_metadata_json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print('Logs are available at [{log_url}].'.format(
          log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the
    # build. `async` became a reserved keyword in Python 3.7, so the flag
    # attribute must be read via getattr (`args.async` is a SyntaxError).
    if getattr(args, 'async'):
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS; Ctrl-C cancels the build.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
Exemple #12
0
def RunDaisyBuild(args, workflow, variables):
    """Run a build with Daisy on Google Cloud Builder.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
    workflow: The path to the Daisy workflow to run.
    variables: A string of key-value pairs to pass to Daisy.

  Returns:
    A build object that either streams the output or is displayed as a
    link to the build.

  Raises:
    FailedBuildException: If the build is completed and not 'SUCCESS'.
  """
    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()
    project_id = projects_util.ParseProject(
        properties.VALUES.core.project.GetOrFail())

    CheckIamPermissions(project_id)

    timeout_str = '{0}s'.format(args.timeout)

    # First, create the build request.
    build_config = messages.Build(
        steps=[
            messages.BuildStep(
                name=_BUILDER,
                args=[
                    '-variables={0}'.format(variables),
                    workflow,
                ],
            ),
        ],
        timeout=timeout_str,
    )
    if args.log_location:
        gcs_log_dir = resources.REGISTRY.Parse(args.log_location,
                                               collection='storage.objects')

        build_config.logsBucket = ('gs://{0}/{1}'.format(
            gcs_log_dir.bucket, gcs_log_dir.object))

    # Start the build.
    build, build_ref = _CreateCloudBuild(build_config, client, messages)

    # If the command is run --async, we just print out a reference to the build.
    # `async` became a reserved keyword in Python 3.7, so the flag attribute
    # must be read via getattr instead of `args.async` (a SyntaxError).
    if getattr(args, 'async'):
        return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS; Ctrl-C cancels the build.
    with execution_utils.CtrlCSection(mash_handler):
        build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
        log.status.Print(
            'Your build timed out. Use the [--timeout=DURATION] flag to change '
            'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
        raise FailedBuildException(build)

    return build
Exemple #13
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      flags.ArgumentError: If local packages are given without a staging
        location.
    """
        region = properties.VALUES.compute.region.Get(required=True)
        staging_location = jobs_prep.GetStagingLocation(
            staging_bucket=args.staging_bucket,
            job_id=args.job,
            job_dir=args.job_dir)
        try:
            uris = jobs_prep.UploadPythonPackages(
                packages=args.packages,
                package_path=args.package_path,
                staging_location=staging_location)
        except jobs_prep.NoStagingLocationError:
            raise flags.ArgumentError(
                'If local packages are provided, the `--staging-bucket` or '
                '`--job-dir` flag must be given.')
        log.debug('Using {0} as trainer uris'.format(uris))

        scale_tier_enum = (jobs.GetMessagesModule(
        ).GoogleCloudMlV1beta1TrainingInput.ScaleTierValueValuesEnum)
        scale_tier = scale_tier_enum(
            args.scale_tier) if args.scale_tier else None
        job = jobs.BuildTrainingJob(
            path=args.config,
            module_name=args.module_name,
            job_name=args.job,
            trainer_uri=uris,
            region=region,
            job_dir=args.job_dir.ToUrl() if args.job_dir else None,
            scale_tier=scale_tier,
            user_args=args.user_args,
            runtime_version=args.runtime_version)

        jobs_client = jobs.JobsClient()
        project_ref = resources.REGISTRY.Parse(
            properties.VALUES.core.project.Get(required=True),
            collection='ml.projects')
        job = jobs_client.Create(project_ref, job)
        log.status.Print('Job [{}] submitted successfully.'.format(job.jobId))
        # `async` became a reserved keyword in Python 3.7, so the flag
        # attribute must be read via getattr (`args.async` is a SyntaxError).
        if getattr(args, 'async'):
            log.status.Print(_FOLLOW_UP_MESSAGE.format(job_id=job.jobId))
            return job

        log_fetcher = stream.LogFetcher(
            filters=log_utils.LogFilters(job.jobId),
            polling_interval=_POLLING_INTERVAL,
            continue_func=log_utils.MakeContinueFunction(job.jobId))

        printer = resource_printer.Printer(log_utils.LOG_FORMAT, out=log.err)

        def _CtrlCHandler(signal, frame):
            del signal, frame  # Unused
            raise KeyboardInterrupt

        # Stream logs until the job finishes; Ctrl-C stops streaming (not the
        # job) and prints follow-up instructions.
        with execution_utils.CtrlCSection(_CtrlCHandler):
            try:
                printer.Print(log_utils.SplitMultiline(
                    log_fetcher.YieldLogs()))
            except KeyboardInterrupt:
                log.status.Print('Received keyboard interrupt.')
                log.status.Print(_FOLLOW_UP_MESSAGE.format(job_id=job.jobId))

        job_ref = resources.REGISTRY.Parse(job.jobId,
                                           collection='ml.projects.jobs')
        job = jobs_client.Get(job_ref)
        # If the job itself failed, we will return a failure status.
        if job.state is not job.StateValueValuesEnum.SUCCEEDED:
            self.exit_code = 1

        return job
Exemple #14
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedDeployException: If the build is completed and not 'SUCCESS'.
    """

        build_client = cloudbuild_util.GetClientInstance()
        build_messages = cloudbuild_util.GetMessagesModule()

        build_config = self._CreateBuildFromArgs(args, build_messages)

        # Kick off the build.
        project = properties.VALUES.core.project.Get(required=True)
        create_request = (
            build_messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config, projectId=project))
        operation = build_client.projects_builds.Create(create_request)
        log.debug('submitting build: ' + repr(build_config))

        op_metadata = encoding.MessageToJson(operation.metadata)
        build = encoding.JsonToMessage(
            build_messages.BuildOperationMetadata, op_metadata).build

        build_ref = resources.REGISTRY.Create(
            collection='cloudbuild.projects.builds',
            projectId=build.projectId,
            id=build.id)

        log.status.Print(
            'Starting Cloud Build to build and deploy to the target '
            'Google Kubernetes Engine cluster...\n')

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.Print('Logs are available at [{log_url}].'.format(
                log_url=build.logUrl))
        else:
            log.status.Print('Logs are available in the Cloud Console.')

        # In async mode nothing more to do; the reference was printed above.
        if args.async_:
            return

        cancel_handler = execution.MashHandler(
            execution.GetCancelBuildHandler(
                build_client, build_messages, build_ref))

        # Otherwise stream logs from GCS until the build completes; Ctrl-C
        # cancels the build via the handler above.
        with execution_utils.CtrlCSection(cancel_handler):
            build = cb_logs.CloudBuildClient(
                build_client, build_messages).Stream(build_ref)

        status_enum = build_messages.Build.StatusValueValuesEnum
        if build.status == status_enum.TIMEOUT:
            log.status.Print(
                'Your build and deploy timed out. Use the [--timeout=DURATION] flag '
                'to change the timeout threshold.')

        if build.status != status_enum.SUCCESS:
            raise FailedDeployException(build)

        # The final (gsutil) build step's last argument is where the deployer
        # wrote the suggested base configs.
        suggested_configs = build.steps[-1].args[-1]

        log.status.Print(
            'Successfully deployed to your Google Kubernetes Engine cluster.\n\n'
            'You can find the configuration files of the deployed Kubernetes '
            'objects stored at {expanded} These configurations are expanded by the '
            'deployer with additional labels like app name and version.\n\n'
            'You can also find suggested base Kubernetes configuration files '
            'created by the deployer at {suggested}'.format(
                expanded=build.artifacts.objects.location,
                suggested=suggested_configs))
        return
Exemple #15
0
def Build(messages,
          async_,
          build_config,
          hide_logs=False,
          build_region=cloudbuild_util.DEFAULT_REGION,
          support_gcl=False,
          suppress_logs=False):
  """Starts the build."""
  log.debug('submitting build: ' + repr(build_config))
  cb_client = cloudbuild_util.GetClientInstance()

  parent = resources.REGISTRY.Create(
      collection='cloudbuild.projects.locations',
      projectsId=properties.VALUES.core.project.GetOrFail(),
      locationsId=build_region)

  operation = cb_client.projects_locations_builds.Create(
      messages.CloudbuildProjectsLocationsBuildsCreateRequest(
          parent=parent.RelativeName(), build=build_config))

  metadata_json = encoding.MessageToJson(operation.metadata)
  build = encoding.JsonToMessage(
      messages.BuildOperationMetadata, metadata_json).build

  # Parse with an explicit 'v1' api_version so the build ref is consistent.
  build_ref = resources.REGISTRY.Parse(
      None,
      collection='cloudbuild.projects.locations.builds',
      api_version='v1',
      params={
          'projectsId': build.projectId,
          'locationsId': build_region,
          'buildsId': build.id,
      })

  if not hide_logs:
    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print(
          'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

  # In async mode, return immediately with a reference to the build.
  if async_:
    return build, operation

  gcl_only = (
      messages.BuildOptions.LoggingValueValuesEnum.STACKDRIVER_ONLY,
      messages.BuildOptions.LoggingValueValuesEnum.CLOUD_LOGGING_ONLY,
  )
  if (not support_gcl and build.options
      and build.options.logging in gcl_only):
    log.status.Print('\ngcloud builds submit only displays logs from Cloud'
                     ' Storage. To view logs from Cloud Logging, run:\ngcloud'
                     ' beta builds submit\n')

  cancel_handler = execution.MashHandler(
      execution.GetCancelBuildHandler(cb_client, messages, build_ref))

  out = None if suppress_logs else log.out
  # Stream logs from the chosen logging service (defaulted to GCS) until the
  # build finishes; Ctrl-C cancels the build via the handler above.
  with execution_utils.CtrlCSection(cancel_handler):
    build = cb_logs.CloudBuildClient(cb_client, messages,
                                     support_gcl).Stream(build_ref, out)

  status_enum = messages.Build.StatusValueValuesEnum
  if build.status == status_enum.TIMEOUT:
    log.status.Print(
        'Your build timed out. Use the [--timeout=DURATION] flag to change '
        'the timeout threshold.')

  if build.warnings:
    for warning in build.warnings:
      log.status.Print('\n{priority}: {text}'.format(
          text=warning.text, priority=warning.priority))

    log.status.Print(
        '\n{count} message(s) issued.'.format(count=len(build.warnings)))

  if build.failureInfo:
    log.status.Print(
        '\nBUILD FAILURE: {detail}'.format(detail=build.failureInfo.detail))

  if build.status != status_enum.SUCCESS:
    raise FailedBuildException(build)

  return build, operation