def _SubmitBuild(build, image, project, parallel_build):
  """Submits a container build, either blocking or in parallel with deploy.

  Args:
    build: A fixed up Build object.
    image: docker_image.Image, A docker image.
    project: str, The project being deployed to.
    parallel_build: bool, if True, enable parallel build and deploy.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress
    build.
  """
  timeout = properties.VALUES.app.cloud_build_timeout.Get()
  # A configured timeout beyond the parallel cap forces a serial deployment.
  if timeout and timeout > MAX_PARALLEL_BUILD_TIME:
    parallel_build = False
    log.info(
        'Property cloud_build_timeout configured to [{0}], which exceeds '
        'the maximum build time for parallelized beta deployments of [{1}] '
        'seconds. Performing serial deployment.'.format(
            timeout, MAX_PARALLEL_BUILD_TIME))

  client = cloudbuild_build.CloudBuildClient()
  if not parallel_build:
    # Serial path: block until the build finishes, then hand back the image.
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    client.ExecuteCloudBuild(build, project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
    return app_build.BuildArtifact.MakeImageArtifact(image.tagged_repo)

  # Parallel path: start the build and return the in-progress operation so
  # deployment can proceed while the build runs.
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_ASYNC_START)
  operation = client.ExecuteCloudBuildAsync(build, project=project)
  return app_build.BuildArtifact.MakeBuildIdArtifactFromOp(operation)
    def DeployService(self,
                      service_name,
                      version_id,
                      service_config,
                      manifest,
                      build,
                      endpoints_info=None,
                      extra_config_settings=None):
        """Creates and deploys a new version of a service from its config.

        If the build operation has not yet completed, streams the Google
        Cloud Builder logs as well before waiting on the deployment.

        Args:
          service_name: str, The service to deploy.
          version_id: str, The version of the service to deploy.
          service_config: AppInfoExternal, Service info parsed from a service
            yaml file.
          manifest: Dictionary mapping source files to Google Cloud Storage
            locations.
          build: BuildArtifact, a wrapper which contains either the build ID
            for an in-progress parallel build, or the name of the container
            image for a serial build.
          endpoints_info: EndpointsServiceInfo, Endpoints service info to be
            added to the AppInfoExternal configuration. Only provided when
            Endpoints API Management feature is enabled.
          extra_config_settings: dict, client config settings to pass to the
            server as beta settings.

        Returns:
          A Version resource representing the deployed version.
        """
        version = self._CreateVersionResource(service_config, manifest,
                                              version_id, build,
                                              endpoints_info,
                                              extra_config_settings)
        request = self.messages.AppengineAppsServicesVersionsCreateRequest(
            parent=self._GetServiceRelativeName(service_name=service_name),
            version=version)

        operation = self.client.apps_services_versions.Create(request)

        # A service deployment can never finish before the image it depends
        # on is built, so when a parallel build is still in flight, stream
        # its logs to completion before polling the deployment operation.
        if build and build.IsBuildId() and build.build_op:
            cloud_build.CloudBuildClient().WaitAndStreamLogs(build.build_op)
            metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_ASYNC)

        log.debug('Received operation: [{operation}]'.format(
            operation=operation.name))

        return operations_util.WaitForOperation(
            self.client.apps_operations,
            operation,
            message='Updating service [{service}]'.format(
                service=service_name))
 def SetUp(self):
     """Mocks the Cloud Build API client, log printing, and terminal size."""
     # Mock transport for the cloudbuild v1 API.
     self.mock_client = api_mock.Client(
         core_apis.GetClientClass('cloudbuild', 'v1'))
     self.mock_client.Mock()
     self.addCleanup(self.mock_client.Unmock)
     self.messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
     self.client = build.CloudBuildClient(self.mock_client, self.messages)
     self.build = self.messages.Build(
         images=['gcr.io/my-project/output-tag'])
     # Intercept logging output.
     self.mock_log_content = ''
     print_patch = self.StartPatch(
         'googlecloudsdk.api_lib.cloudbuild.logs.LogTailer._PrintLogLine')
     print_patch.side_effect = self._MockPrintLogLine
     self.StartPatch(
         'googlecloudsdk.core.console.console_attr_os.GetTermSize',
         return_value=(40, 100))
     # Avoid real delays from any polling loops under test.
     self.StartObjectPatch(time, 'sleep')
def ExecuteCloudBuild(project, bucket_ref, object_name, output_image):
    """Execute a CloudBuild to build an app and wait for it to finish.

    Args:
      project: the cloud project ID.
      bucket_ref: Reference to GCS bucket containing source to build. The
        same bucket will be used for streaming logs.
      object_name: GCS object name containing source to build.
      output_image: GCR location for the output docker image;
        eg, gcr.io/test-gae/hardcoded-output-tag.

    Raises:
      BuildFailedError: when the build fails.
    """
    builder = properties.VALUES.app.container_builder_image.Get()
    log.debug('Using builder image: [{0}]'.format(builder))
    build_timeout = properties.VALUES.app.cloud_build_timeout.Get()

    client = cloudbuild_build.CloudBuildClient()
    messages = client.messages
    source = messages.Source(
        storageSource=messages.StorageSource(
            bucket=bucket_ref.bucket,
            object=object_name,
        ),
    )
    # A single docker-build step producing the requested output image.
    step = messages.BuildStep(
        name=builder, args=['build', '-t', output_image, '.'])
    build = messages.Build(
        timeout=GetServiceTimeoutString(build_timeout),
        source=source,
        steps=[step],
        images=[output_image],
        # Stream build logs to the same bucket that holds the source.
        logsBucket=bucket_ref.bucket)
    client.ExecuteCloudBuild(build, project=project)
def BuildAndPushDockerImage(project,
                            service,
                            source_dir,
                            version_id,
                            code_bucket_ref,
                            use_runtime_builders=False):
    """Builds a service's container image via Cloud Build and pushes it.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    use_runtime_builders: bool, whether to use the new CloudBuild-based
      runtime builders (alternative is old externalized runtimes).

  Returns:
    str, The name of the pushed container image, or None if the service does
    not require an image.
  """
    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None

    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    generated_files = dict(_GetSourceContextsForUpload(source_dir))
    if not use_runtime_builders:
        # Runtime builders generate their own Dockerfile in the cloud, so a
        # local one is only needed for the externalized-runtime path.
        generated_files.update(_GetDockerfiles(service, source_dir))

    img = docker_image.Image(
        dockerfile_dir=source_dir,
        repo=_GetImageName(project, service.module, version_id),
        nocache=False,
        tag=config.DOCKER_IMAGE_TAG)

    source_object = storage_util.ObjectReference(code_bucket_ref,
                                                 img.tagged_repo)
    try:
        cloud_build.UploadSource(img.dockerfile_dir,
                                 source_object,
                                 gen_files=generated_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        # Long paths are a common upload failure mode on Windows; surface a
        # dedicated error so users get an actionable message.
        if (platforms.OperatingSystem.IsWindows() and err.filename and
                len(err.filename) > _WINDOWS_MAX_PATH):
            raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build_def = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': img.tagged_repo})
    else:
        build_def = cloud_build.GetDefaultBuild(img.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build_def, source_object), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return img.tagged_repo
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image, or None if the service does
    not require an image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime, needs_dockerfile)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders will generate a Dockerfile in the Cloud, so we
        # only need to generate one locally when NOT using runtime builders.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)

    # The service yaml must be present in the uploaded source. If it lives
    # inside source_dir it is uploaded naturally; otherwise inject a copy
    # under a checksum-derived name so distinct configs never collide.
    if files.IsDirAncestorOf(source_dir, service.file):
        relative_yaml_path = os.path.relpath(service.file, source_dir)
    else:
        yaml_contents = files.GetFileContents(service.file)
        checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
        relative_yaml_path = checksum + '.yaml'
        gen_files[relative_yaml_path] = yaml_contents

    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        # Overlong paths are a common upload failure on Windows; raise a
        # dedicated error with the offending filename.
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_reference = runtime_builders.FromServiceInfo(
            service, source_dir)
        log.info('Using runtime builder [%s]',
                 builder_reference.build_file_uri)
        builder_reference.WarnIfDeprecated()
        # Convert the OS-specific relative path to POSIX form, since the
        # build executes in a Linux environment.
        yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
        build = builder_reference.LoadCloudBuild({
            '_OUTPUT_IMAGE':
            image.tagged_repo,
            '_GAE_APPLICATION_YAML_PATH':
            yaml_path
        })
        # TODO(b/37542869) Remove this hack once the API can take the gs:// path
        # as a runtime name.
        service.runtime = builder_reference.runtime
        service.parsed.SetEffectiveRuntime(builder_reference.runtime)
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds a service's container image via Cloud Build and pushes it.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image, or None if the service does
    not require an image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None

    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    generated_files = dict(_GetSourceContextsForUpload(source_dir))
    if _NeedsDockerfile(service, source_dir) and not use_runtime_builders:
        # Runtime builders generate their Dockerfile in the cloud, so a local
        # one is only needed when NOT using runtime builders.
        generated_files.update(_GetDockerfiles(service, source_dir))

    img = docker_image.Image(
        dockerfile_dir=source_dir,
        repo=_GetImageName(project, service.module, version_id, gcr_domain),
        nocache=False,
        tag=config.DOCKER_IMAGE_TAG)

    source_object = storage_util.ObjectReference(code_bucket_ref,
                                                 img.tagged_repo)
    try:
        cloud_build.UploadSource(img.dockerfile_dir,
                                 source_object,
                                 gen_files=generated_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        # Overlong paths are a common upload failure on Windows; surface a
        # dedicated error carrying the offending filename.
        if (platforms.OperatingSystem.IsWindows() and err.filename and
                len(err.filename) > _WINDOWS_MAX_PATH):
            raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build_def = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': img.tagged_repo})
    else:
        build_def = cloud_build.GetDefaultBuild(img.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build_def, source_object), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return img.tagged_repo