def BuildAndPushDockerImage(
    project,
    service,
    source_dir,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image, or None if the service does
      not require an image-based deployment.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
  needs_dockerfile = _NeedsDockerfile(service, source_dir)
  use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
      service.runtime, needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'.format(
          service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(source_dir))
  if needs_dockerfile and not use_runtime_builders:
    # The runtime builders will generate a Dockerfile in the Cloud, so we only
    # need to generate one locally when use_runtime_builders is False.
    gen_files.update(_GetDockerfiles(service, source_dir))

  image = docker_image.Image(dockerfile_dir=source_dir,
                             repo=_GetImageName(project, service.module,
                                                version_id, gcr_domain),
                             nocache=False,
                             tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference(code_bucket_ref, image.tagged_repo)

  if files.IsDirAncestorOf(source_dir, service.file):
    relative_yaml_path = os.path.relpath(service.file, source_dir)
  else:
    # The service yaml lives outside the source tree; upload a copy of its
    # contents under a checksum-derived name so the build can still read it.
    yaml_contents = files.GetFileContents(service.file)
    checksum = files.Checksum().AddContents(yaml_contents).HexDigest()
    relative_yaml_path = checksum + '.yaml'
    gen_files[relative_yaml_path] = yaml_contents

  try:
    cloud_build.UploadSource(image.dockerfile_dir, object_ref,
                             gen_files=gen_files,
                             skip_files=service.parsed.skip_files.regex)
  except (OSError, IOError) as err:
    # On Windows, surface over-long paths as a dedicated error rather than a
    # generic OS error.
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(
        service, source_dir)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()
    # The uploaded source uses POSIX separators regardless of local OS.
    yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
    build = builder_reference.LoadCloudBuild({
        '_OUTPUT_IMAGE': image.tagged_repo,
        '_GAE_APPLICATION_YAML_PATH': yaml_path
    })
    # TODO(b/37542869) Remove this hack once the API can take the gs:// path
    # as a runtime name.
    service.runtime = builder_reference.runtime
    service.parsed.SetEffectiveRuntime(builder_reference.runtime)
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
  cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
      cloud_build.FixUpBuild(build, object_ref), project=project)
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)
  return image.tagged_repo
def BuildAndPushDockerImage(
    project,
    service,
    source_dir,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER,
    parallel_build=False):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).
    parallel_build: bool, if True, enable parallel build and deploy.

  Returns:
    BuildArtifact, Representing the pushed container image or in-progress
      build; None if the service does not require an image-based deployment.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
    ValueError: if an unrecognized runtime_builder_strategy is given
  """
  needs_dockerfile = _NeedsDockerfile(service, source_dir)
  use_runtime_builders = ShouldUseRuntimeBuilders(service,
                                                  runtime_builder_strategy,
                                                  needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'
      .format(service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(source_dir))
  if needs_dockerfile and not use_runtime_builders:
    # The runtime builders will generate a Dockerfile in the Cloud, so we only
    # need to generate one locally when use_runtime_builders is False.
    gen_files.update(_GetDockerfiles(service, source_dir))

  image = docker_image.Image(
      dockerfile_dir=source_dir,
      repo=_GetImageName(project, service.module, version_id, gcr_domain),
      nocache=False,
      tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference(code_bucket_ref, image.tagged_repo)
  relative_yaml_path = _GetYamlPath(source_dir, service.file,
                                    service.parsed.skip_files, gen_files)

  try:
    cloud_build.UploadSource(image.dockerfile_dir, object_ref,
                             gen_files=gen_files,
                             skip_files=service.parsed.skip_files.regex)
  except (OSError, IOError) as err:
    # On Windows, surface over-long paths as a dedicated error rather than a
    # generic OS error.
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(service, source_dir)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()
    # The uploaded source uses POSIX separators regardless of local OS.
    yaml_path = util.ConvertToPosixPath(relative_yaml_path)
    build = builder_reference.LoadCloudBuild(
        {'_OUTPUT_IMAGE': image.tagged_repo,
         '_GAE_APPLICATION_YAML_PATH': yaml_path})
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)

  build = cloud_build.FixUpBuild(build, object_ref)
  return _SubmitBuild(build, image, project, parallel_build)
def _GetReferenceFromYaml(self, contents):
  """Materializes *contents* as an app.yaml file and resolves its builder.

  Writes the given YAML text to a temporary `app.yaml`, parses it into a
  service config, and returns the runtime-builder reference derived from it.

  Args:
    contents: str, the raw app.yaml text to parse.

  Returns:
    The builder reference produced by runtime_builders.FromServiceInfo.
  """
  yaml_file = self.Touch(self.temp_path, 'app.yaml', contents)
  service_info = yaml_parsing.ServiceYamlInfo.FromFile(yaml_file)
  return runtime_builders.FromServiceInfo(service_info, self.temp_path)