Example #1
def _BuildImagesWithCloudBuild(project, services, version_id, code_bucket_ref,
                               cloudbuild_client, storage_client):
    """Build multiple services with Cloud Build."""
    images = {}
    for service, info, ensure_dockerfile, ensure_context in services:
        log.status.Print(
            'Building and pushing image for service [{service}]'.format(
                service=service))
        cleanup_dockerfile = ensure_dockerfile()
        cleanup_context = ensure_context()
        try:
            image = docker_image.Image(
                dockerfile_dir=os.path.dirname(info.file),
                tag=_GetImageName(project, service, version_id),
                nocache=False)
            cloud_build.UploadSource(image.dockerfile_dir, code_bucket_ref,
                                     image.tag, storage_client)
            metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)
            cloud_build.ExecuteCloudBuild(project, code_bucket_ref, image.tag,
                                          image.tag, cloudbuild_client)
            metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
            images[service] = image.tag
        finally:
            cleanup_dockerfile()
            cleanup_context()
    return images
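For context, a hedged sketch of how a caller might assemble the (service, info, ensure_dockerfile, ensure_context) tuples this helper iterates over. _StageServices and service_infos are hypothetical names; _GetDockerfileCreator and context_util.GetSourceContextFilesCreator appear in the later examples.

# Hypothetical caller sketch (not taken verbatim from the SDK): each entry
# pairs a service name and its parsed config with creator callables that,
# when invoked, write the Dockerfile / source-context files and return a
# cleanup function, matching what the loop above expects.
def _StageServices(service_infos):
    staged = []
    for info in service_infos:
        ensure_dockerfile = _GetDockerfileCreator(info)
        ensure_context = context_util.GetSourceContextFilesCreator(
            os.path.dirname(info.file), None)
        staged.append((info.module, info, ensure_dockerfile, ensure_context))
    return staged

# images = _BuildImagesWithCloudBuild(project, _StageServices(service_infos),
#                                     version_id, code_bucket_ref,
#                                     cloudbuild_client, storage_client)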
Example #2
def _BuildImagesWithCloudBuild(project, modules, version_id, code_bucket,
                               cloudbuild_client):
    """Build multiple modules with Cloud Build."""
    images = {}
    for module, info, ensure_dockerfile, ensure_context in modules:
        log.status.Print(
            'Building and pushing image for module [{module}]'.format(
                module=module))
        cleanup_dockerfile = ensure_dockerfile()
        cleanup_context = ensure_context()
        try:
            image = docker_image.Image(
                dockerfile_dir=os.path.dirname(info.file),
                tag=_GetImageName(project, module, version_id),
                nocache=False)
            source_gcs_uri = '/'.join([code_bucket.rstrip('/'), image.tag])
            cloud_build.UploadSource(image.dockerfile_dir, source_gcs_uri)
            metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)
            cloud_build.ExecuteCloudBuild(project, source_gcs_uri,
                                          image.repo_tag, cloudbuild_client)
            metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
            images[module] = image.repo_tag
        finally:
            cleanup_dockerfile()
            cleanup_context()
    return images
def testUpload(self):
    """Test basic upload with single file."""
    tmp = self.CreateTempDir()
    self.WriteFile(os.path.join(tmp, 'Dockerfile'), 'empty')
    self._ExpectUpload()
    cloud_build.UploadSource(tmp, util.FileIterator(tmp, self.re),
                             self.object_ref)

def testFailure(self):
    """Test HttpError raises to user."""
    tmp = self.CreateTempDir()
    self.WriteFile(os.path.join(tmp, 'Dockerfile'), 'empty')
    self._ExpectUpload(exception=http_error.MakeHttpError())
    with self.assertRaises(storage_api.UploadError):
        cloud_build.UploadSource(tmp, util.FileIterator(tmp, self.re),
                                 self.object_ref)

def testUploadWithGenFiles(self):
    """Test that generated files passed to UploadSource don't raise error."""
    tmp = self.CreateTempDir()
    self.WriteFile(os.path.join(tmp, 'main.py'), 'empty')
    gen_files = {'Dockerfile': 'empty'}
    self._ExpectUpload()
    cloud_build.UploadSource(tmp,
                             util.FileIterator(tmp, self.re),
                             self.object_ref,
                             gen_files=gen_files)
Example #6
def BuildAndPushDockerImage(project, service, version_id, code_bucket_ref):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.

  Returns:
    str, The name of the pushed container image.
  """
    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None

    dockerfile_creator = _GetDockerfileCreator(service)
    context_creator = context_util.GetSourceContextFilesCreator(
        os.path.dirname(service.file), None)

    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    cleanup_dockerfile = dockerfile_creator()
    cleanup_context = context_creator()
    try:
        image = docker_image.Image(
            dockerfile_dir=os.path.dirname(service.file),
            repo=_GetImageName(project, service.module, version_id),
            nocache=False,
            tag=config.DOCKER_IMAGE_TAG)
        try:
            cloud_build.UploadSource(image.dockerfile_dir, code_bucket_ref,
                                     image.tagged_repo)
        except (OSError, IOError) as err:
            if platforms.OperatingSystem.IsWindows():
                if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                    raise WindowMaxPathError(err.filename)
            raise
        metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)
        cloud_build.ExecuteCloudBuild(project, code_bucket_ref,
                                      image.tagged_repo, image.tagged_repo)
        metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
        return image.tagged_repo
    finally:
        cleanup_dockerfile()
        cleanup_context()
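The Windows guard above relies on a module-level constant and error type that are not shown in these excerpts. A minimal sketch of what they plausibly look like; 260 is the classic Windows MAX_PATH limit, and the exact message in the SDK may differ.

_WINDOWS_MAX_PATH = 260  # Classic Windows MAX_PATH limit (assumed value).


class WindowMaxPathError(Exception):
    """Raised when a source file path exceeds the Windows MAX_PATH limit."""

    def __init__(self, filename):
        super(WindowMaxPathError, self).__init__(
            'Could not read [{0}]: the path exceeds the Windows {1}-character '
            'limit. Move the source to a shorter path.'.format(
                filename, _WINDOWS_MAX_PATH))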
Example #7
def BuildAndPushDockerImage(project, service, source_dir, version_id,
                            code_bucket_ref):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.

  Returns:
    str, The name of the pushed container image.
  """
    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None

    gen_files = {}
    gen_files.update(_GetDockerfiles(service, source_dir))
    gen_files.update(_GetSourceContextsForUpload(source_dir))

    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)
    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 code_bucket_ref,
                                 image.tagged_repo,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)
    cloud_build.ExecuteCloudBuild(project, code_bucket_ref, image.tagged_repo,
                                  image.tagged_repo)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
    return image.tagged_repo
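Example #7 above starts threading service.parsed.skip_files.regex through UploadSource. A minimal sketch of how such a compiled regex is typically applied to relative paths before tarring; the patterns shown are illustrative default-style entries, and the real exclusion logic lives inside cloud_build.UploadSource.

import re

# A few default-style skip_files patterns (illustrative, not the full set).
_SKIP_FILES_RE = re.compile(
    r'^(.*/)?#.*#$|^(.*/)?.*~$|^(.*/)?.*\.py[co]$')


def _FilterSkippedFiles(relative_paths, skip_files_regex=_SKIP_FILES_RE):
    """Drop paths matching the skip_files regex; keep the rest (sketch only)."""
    return [p for p in relative_paths if not skip_files_regex.match(p)]


# _FilterSkippedFiles(['main.py', 'main.pyc', 'notes~']) == ['main.py']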
def testUploadWithGeneratedDockerignore(self):
    """Test that UploadSource correctly interprets generated .dockerignore."""
    tmp = self.CreateTempDir()
    create_tar_mock = self.StartObjectPatch(cloud_build, '_CreateTar')
    create_tar_mock.return_value = 1
    self.WriteFile(os.path.join(tmp, 'Dockerfile'), 'empty')
    self.WriteFile(os.path.join(tmp, 'main.py'), 'empty')
    self.WriteFile(os.path.join(tmp, 'fake.zip'), 'Dummy')
    os.mkdir(os.path.join(tmp, 'tmpsubdir'))
    self.WriteFile(os.path.join(tmp, 'tmpsubdir', 'fake2.zip'), 'Dummy')
    gen_files = {'.dockerignore': 'main.py'}
    self._ExpectUpload()
    cloud_build.UploadSource(tmp, util.FileIterator(tmp, self.re),
                             self.object_ref, gen_files)
    # Test that _CreateTar was called with the correct directory, files, and
    # exclusions
    create_tar_mock.assert_called_once_with(
        tmp, gen_files,
        {'Dockerfile', 'fake.zip',
         os.path.join('tmpsubdir', 'fake2.zip')}, mock.ANY)
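The test above asserts that a generated .dockerignore of 'main.py' leaves only the Dockerfile and the two zip files in the tarball. A rough, self-contained sketch of that filtering behaviour; it uses simple fnmatch globs only, whereas docker's real ignore grammar, and the SDK's implementation inside _CreateTar/UploadSource, is more involved.

import fnmatch
import os


def _FilterWithDockerignore(relative_paths, dockerignore_contents):
    """Keep paths that do not match any .dockerignore pattern (sketch only)."""
    patterns = [line.strip() for line in dockerignore_contents.splitlines()
                if line.strip() and not line.startswith('#')]
    kept = set()
    for path in relative_paths:
        posix_path = path.replace(os.sep, '/')
        if not any(fnmatch.fnmatch(posix_path, pat) for pat in patterns):
            kept.add(path)
    return kept


# Mirrors the expectation in testUploadWithGeneratedDockerignore:
# _FilterWithDockerignore(
#     {'Dockerfile', 'main.py', 'fake.zip',
#      os.path.join('tmpsubdir', 'fake2.zip')}, 'main.py')
# == {'Dockerfile', 'fake.zip', os.path.join('tmpsubdir', 'fake2.zip')}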
def BuildAndPushDockerImage(
    project,
    service,
    source_dir,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER,
    parallel_build=False):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, if True, enable parallel build and deploy.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress build.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
    ValueError: if an unrecognized runtime_builder_strategy is given
  """
  needs_dockerfile = _NeedsDockerfile(service, source_dir)
  use_runtime_builders = ShouldUseRuntimeBuilders(service,
                                                  runtime_builder_strategy,
                                                  needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'
      .format(service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(source_dir))
  if needs_dockerfile and not use_runtime_builders:
    # The runtime builders will generate a Dockerfile in the Cloud, so we only
    # need to generate one locally when runtime builders are not in use.
    gen_files.update(_GetDockerfiles(service, source_dir))

  image = docker_image.Image(
      dockerfile_dir=source_dir,
      repo=_GetImageName(project, service.module, version_id, gcr_domain),
      nocache=False,
      tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference(code_bucket_ref, image.tagged_repo)
  relative_yaml_path = _GetYamlPath(source_dir, service.file,
                                    service.parsed.skip_files, gen_files)

  try:
    cloud_build.UploadSource(image.dockerfile_dir, object_ref,
                             gen_files=gen_files,
                             skip_files=service.parsed.skip_files.regex)
  except (OSError, IOError) as err:
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(service, source_dir)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()
    yaml_path = util.ConvertToPosixPath(relative_yaml_path)
    build = builder_reference.LoadCloudBuild(
        {'_OUTPUT_IMAGE': image.tagged_repo,
         '_GAE_APPLICATION_YAML_PATH': yaml_path})
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)

  build = cloud_build.FixUpBuild(build, object_ref)
  return _SubmitBuild(build, image, project, parallel_build)
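The _GetYamlPath helper used above is not included in these excerpts. Below is a sketch consistent with the inline logic of the later variant further down (relpath when the yaml lives under the source directory, otherwise a checksum-named copy added to gen_files); the skip_files parameter is accepted but unused here, and the real helper presumably also consults it.

def _GetYamlPath(source_dir, service_path, skip_files, gen_files):
  """Sketch: compute the app.yaml path relative to the upload root.

  If the yaml file is outside source_dir, a copy is registered in gen_files
  under a checksum-derived name, mirroring the inline logic in the later
  example below.
  """
  del skip_files  # Presumably consulted by the real helper; unused in sketch.
  if files.IsDirAncestorOf(source_dir, service_path):
    return os.path.relpath(service_path, source_dir)
  yaml_contents = files.GetFileContents(service_path)
  checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
  relative_path = checksum + '.yaml'
  gen_files[relative_path] = yaml_contents
  return relative_path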
Example #10
def BuildAndPushDockerImage(project,
                            service,
                            source_dir,
                            version_id,
                            code_bucket_ref,
                            use_runtime_builders=False):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    use_runtime_builders: bool, whether to use the new CloudBuild-based runtime
      builders (alternative is old externalized runtimes).

  Returns:
    str, The name of the pushed container image.
  """
    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if not use_runtime_builders:
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)
    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': image.tagged_repo})
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime, needs_dockerfile)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders will generate a Dockerfile in the Cloud, so we only
        # need to generate one locally when runtime builders are not in use.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)

    if files.IsDirAncestorOf(source_dir, service.file):
        relative_yaml_path = os.path.relpath(service.file, source_dir)
    else:
        yaml_contents = files.GetFileContents(service.file)
        checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
        relative_yaml_path = checksum + '.yaml'
        gen_files[relative_yaml_path] = yaml_contents

    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_reference = runtime_builders.FromServiceInfo(
            service, source_dir)
        log.info('Using runtime builder [%s]',
                 builder_reference.build_file_uri)
        builder_reference.WarnIfDeprecated()
        yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
        build = builder_reference.LoadCloudBuild({
            '_OUTPUT_IMAGE':
            image.tagged_repo,
            '_GAE_APPLICATION_YAML_PATH':
            yaml_path
        })
        # TODO(b/37542869) Remove this hack once the API can take the gs:// path
        # as a runtime name.
        service.runtime = builder_reference.runtime
        service.parsed.SetEffectiveRuntime(builder_reference.runtime)
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
def BuildAndPushDockerImage(
        project,
        service,
        source_dir,
        version_id,
        code_bucket_ref,
        gcr_domain,
        runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER
):
    """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
    use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
        service.runtime)

    # Nothing to do if this is not an image-based deployment.
    if not service.RequiresImage():
        return None
    log.status.Print(
        'Building and pushing image for service [{service}]'.format(
            service=service.module))

    gen_files = dict(_GetSourceContextsForUpload(source_dir))
    needs_dockerfile = _NeedsDockerfile(service, source_dir)
    if needs_dockerfile and not use_runtime_builders:
        # The runtime builders will generate a Dockerfile in the Cloud, so we only
        # need to generate one locally when runtime builders are not in use.
        gen_files.update(_GetDockerfiles(service, source_dir))

    image = docker_image.Image(dockerfile_dir=source_dir,
                               repo=_GetImageName(project, service.module,
                                                  version_id, gcr_domain),
                               nocache=False,
                               tag=config.DOCKER_IMAGE_TAG)

    object_ref = storage_util.ObjectReference(code_bucket_ref,
                                              image.tagged_repo)
    try:
        cloud_build.UploadSource(image.dockerfile_dir,
                                 object_ref,
                                 gen_files=gen_files,
                                 skip_files=service.parsed.skip_files.regex)
    except (OSError, IOError) as err:
        if platforms.OperatingSystem.IsWindows():
            if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
                raise WindowMaxPathError(err.filename)
        raise
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

    if use_runtime_builders:
        builder_version = runtime_builders.RuntimeBuilderVersion.FromServiceInfo(
            service)
        build = builder_version.LoadCloudBuild(
            {'_OUTPUT_IMAGE': image.tagged_repo})
    else:
        build = cloud_build.GetDefaultBuild(image.tagged_repo)

    cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
        cloud_build.FixUpBuild(build, object_ref), project=project)
    metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

    return image.tagged_repo
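For orientation, a hypothetical call site for the final variant above. The argument values are placeholders: service_info stands for a parsed ServiceYamlInfo and bucket_ref for a resolved GCS bucket reference; neither is defined in these excerpts.

# Hypothetical usage (placeholder values, not SDK defaults):
pushed_image = BuildAndPushDockerImage(
    project='my-project',
    service=service_info,
    source_dir='/path/to/app',
    version_id='20240101t000000',
    code_bucket_ref=bucket_ref,
    gcr_domain='us.gcr.io',
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER)
log.status.Print('Pushed image: {0}'.format(pushed_image))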