  def testFixUpBuild_InvalidBuild(self):
    basic_build = cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE)
    basic_build.source = self.messages.Source()
    with self.assertRaisesRegex(
        cloud_build.InvalidBuildError,
        re.escape(
            'Field [source] was provided, but should not have been. '
            'You may be using an improper Cloud Build pipeline.')):
      cloud_build.FixUpBuild(basic_build, self.object_ref)

    basic_build = cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE)
    basic_build.timeout = '100s'
    with self.assertRaisesRegex(
        cloud_build.InvalidBuildError,
        re.escape(
            'Field [timeout] was provided, but should not have been. '
            'You may be using an improper Cloud Build pipeline.')):
      cloud_build.FixUpBuild(basic_build, self.object_ref)

    basic_build = cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE)
    basic_build.logsBucket = 'bucket'
    with self.assertRaisesRegex(
        cloud_build.InvalidBuildError,
        re.escape(
            'Field [logsBucket] was provided, but should not have been. '
            'You may be using an improper Cloud Build pipeline.')):
      cloud_build.FixUpBuild(basic_build, self.object_ref)

  def testGetDefaultBuild(self):
    self.assertEqual(
        cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE),
        self.messages.Build(
            steps=[
                self.messages.BuildStep(
                    name='gcr.io/cloud-builders/docker',
                    args=['build', '-t', self._OUTPUT_IMAGE, '.'])
            ],
            images=[self._OUTPUT_IMAGE]))

  def testGetDefaultBuild_DifferentBuilderImage(self):
    properties.VALUES.app.container_builder_image.Set('gcr.io/other_image')
    self.assertEqual(
        cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE),
        self.messages.Build(
            steps=[
                self.messages.BuildStep(
                    name='gcr.io/other_image',
                    args=['build', '-t', self._OUTPUT_IMAGE, '.'])
            ],
            images=[self._OUTPUT_IMAGE]))

  def testFixUpBuild(self):
    basic_build = cloud_build.GetDefaultBuild(self._OUTPUT_IMAGE)
    self.assertEqual(
        cloud_build.FixUpBuild(basic_build, self.object_ref),
        self.messages.Build(
            steps=[
                self.messages.BuildStep(
                    name='gcr.io/cloud-builders/docker',
                    args=['build', '-t', self._OUTPUT_IMAGE, '.'])
            ],
            images=[self._OUTPUT_IMAGE],
            logsBucket='bucket',
            source=self.messages.Source(
                storageSource=self.messages.StorageSource(
                    bucket='bucket', object='path/object.tgz'))))
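
# Illustrative sketch only, not the real implementation: the tests above pin
# down the contract of GetDefaultBuild and FixUpBuild. Roughly, the pairing
# works like this (names taken from the test fixture):
#
#   build = cloud_build.GetDefaultBuild(output_image)
#   # A single `docker build -t <image> .` step plus the image to push.
#   build = cloud_build.FixUpBuild(build, object_ref)
#   # FixUpBuild attaches build.source (a StorageSource pointing at the
#   # uploaded tarball) and build.logsBucket derived from object_ref, and
#   # raises InvalidBuildError if source, timeout, or logsBucket were
#   # already set.

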
def BuildAndPushDockerImage(
    project,
    service,
    source_dir,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER,
    parallel_build=False):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, if True, enable parallel build and deploy.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress build.

  Raises:
    DockerfileError: Raised if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
    ValueError: Raised if an unrecognized runtime_builder_strategy is given.
  """
  needs_dockerfile = _NeedsDockerfile(service, source_dir)
  use_runtime_builders = ShouldUseRuntimeBuilders(service,
                                                  runtime_builder_strategy,
                                                  needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'
      .format(service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(source_dir))
  if needs_dockerfile and not use_runtime_builders:
    # The runtime builders will generate a Dockerfile in the Cloud, so we only
    # need to generate one locally when runtime builders are not in use.
    gen_files.update(_GetDockerfiles(service, source_dir))

  image = docker_image.Image(
      dockerfile_dir=source_dir,
      repo=_GetImageName(project, service.module, version_id, gcr_domain),
      nocache=False,
      tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference(code_bucket_ref, image.tagged_repo)
  relative_yaml_path = _GetYamlPath(source_dir, service.file,
                                    service.parsed.skip_files, gen_files)

  try:
    cloud_build.UploadSource(image.dockerfile_dir, object_ref,
                             gen_files=gen_files,
                             skip_files=service.parsed.skip_files.regex)
  except (OSError, IOError) as err:
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(service, source_dir)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()
    yaml_path = util.ConvertToPosixPath(relative_yaml_path)
    build = builder_reference.LoadCloudBuild(
        {'_OUTPUT_IMAGE': image.tagged_repo,
         '_GAE_APPLICATION_YAML_PATH': yaml_path})
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)

  build = cloud_build.FixUpBuild(build, object_ref)
  return _SubmitBuild(build, image, project, parallel_build)
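
# A minimal usage sketch, assuming a parsed service config and a code bucket
# reference are already in hand; the concrete values below are hypothetical:
#
#   build_artifact = BuildAndPushDockerImage(
#       project='my-project',
#       service=service_info,
#       source_dir='/path/to/app',
#       version_id='20180101t000000',
#       code_bucket_ref=code_bucket_ref,
#       gcr_domain='us.gcr.io',
#       runtime_builder_strategy=(
#           runtime_builders.RuntimeBuilderStrategy.NEVER),
#       parallel_build=False)
#
# The result is None for deployments that need no image, otherwise a
# BuildArtifact describing the pushed image or the in-progress build.

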
def BuildAndPushDockerImage(project,
                            service,
                            source_dir,
                            version_id,
                            code_bucket_ref,
                            use_runtime_builders=False):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    use_runtime_builders: bool, whether to use the new CloudBuild-based runtime
      builders (alternative is old externalized runtimes).

  Returns:
    str, The name of the pushed container image.
  """
    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if not use_runtime_builders:
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)
    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': image.tagged_repo})
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
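
# A minimal usage sketch for this variant, again with hypothetical values; it
# executes the Cloud Build and returns the pushed image name (a str), or None
# when the service needs no image:
#
#   image_name = BuildAndPushDockerImage(
#       project='my-project',
#       service=service_info,
#       source_dir='/path/to/app',
#       version_id='20180101t000000',
#       code_bucket_ref=code_bucket_ref,
#       use_runtime_builders=True)

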
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: Raised if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime, needs_dockerfile)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders will generate a Dockerfile in the Cloud, so we
        # only need to generate one locally when runtime builders are not in
        # use.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)

    if files.IsDirAncestorOf(source_dir, service.file):
        relative_yaml_path = os.path.relpath(service.file, source_dir)
    else:
        yaml_contents = files.GetFileContents(service.file)
        checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
        relative_yaml_path = checksum + '.yaml'
        gen_files[relative_yaml_path] = yaml_contents

    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_reference = runtime_builders.FromServiceInfo(
            service, source_dir)
        log.info('Using runtime builder [%s]',
                 builder_reference.build_file_uri)
        builder_reference.WarnIfDeprecated()
        yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
        build = builder_reference.LoadCloudBuild({
            '_OUTPUT_IMAGE': image.tagged_repo,
            '_GAE_APPLICATION_YAML_PATH': yaml_path,
        })
        # TODO(b/37542869) Remove this hack once the API can take the gs:// path
        # as a runtime name.
        service.runtime = builder_reference.runtime
        service.parsed.SetEffectiveRuntime(builder_reference.runtime)
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
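
# The yaml-path handling above (a relative path when the service yaml sits
# under source_dir, otherwise a checksum-named copy staged in gen_files) is
# the kind of logic a helper such as _GetYamlPath could encapsulate. A rough
# sketch under that assumption; the _GetYamlPath called in the first variant
# also receives skip_files, which this sketch ignores:
def _GetYamlPathSketch(source_dir, service_path, gen_files):
    """Returns the service yaml path relative to the upload root (sketch)."""
    if files.IsDirAncestorOf(source_dir, service_path):
        # The yaml is inside the source tree; reference it in place.
        return os.path.relpath(service_path, source_dir)
    # The yaml lives outside source_dir: stage a checksum-named copy so it is
    # still included in the uploaded source.
    yaml_contents = files.GetFileContents(service_path)
    checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
    relative_path = checksum + '.yaml'
    gen_files[relative_path] = yaml_contents
    return relative_path

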
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: Raised if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders will generate a Dockerfile in the Cloud, so we
        # only need to generate one locally when runtime builders are not in
        # use.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)
    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': image.tagged_repo})
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
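

# WindowMaxPathError is raised in each variant above but not defined in this
# excerpt. A minimal sketch of what it might look like; the base class and the
# message text here are assumptions, not the real definition:
class WindowMaxPathError(Exception):
    """Raised when a file path exceeds the Windows MAX_PATH limit."""

    def __init__(self, filename):
        super(WindowMaxPathError, self).__init__(
            'The path [{0}] exceeds the Windows maximum path length of 260 '
            'characters. Move the source directory closer to the filesystem '
            'root and retry the deployment.'.format(filename))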