def _add_filesystem_to_dockerfile(self, file_name, build_dir: BuildDir):
    """
    Put an ADD instruction into the Dockerfile (to include the filesystem
    into the container image to be built).

    :param file_name: str, name of the filesystem archive to ADD at /
    :param build_dir: BuildDir, build directory whose Dockerfile is edited
        in place
    """
    content = 'ADD {0} /\n'.format(file_name)

    lines = build_dir.dockerfile.lines
    # as we insert elements we have to keep track of the increment for inserting:
    # each previous insertion shifts every later 'endline' index by one
    offset = 1
    for item in build_dir.dockerfile.structure:
        # only stages based on a custom base image get the filesystem ADDed
        if item['instruction'] == 'FROM' and base_image_is_custom(item['value'].split()[0]):
            lines.insert(item['endline'] + offset, content)
            offset += 1

    build_dir.dockerfile.lines = lines

    # rewrite custom parents to 'scratch' since their content now comes
    # from the ADDed filesystem archive
    new_parents = []
    for image in build_dir.dockerfile.parent_images:
        if base_image_is_custom(image):
            new_parents.append('scratch')
        else:
            new_parents.append(image)
    build_dir.dockerfile.parent_images = new_parents
    self.log.info('added "%s" as image filesystem', file_name)
def run(self):
    """Find koji builds for the parent images and verify their manifest digests.

    Detects the base image N-V-R (skipped for scratch/custom bases), then for
    every non-custom parent image waits for the corresponding koji build and
    compares the manifest digest recorded in koji with the registry's value.

    :return: result of self.make_result()
    :raises RuntimeError: when a parent image has no koji build, or when any
        manifest digest mismatches were collected
    """
    if not (self.workflow.builder.base_from_scratch or
            self.workflow.builder.custom_base_image):
        self._base_image_nvr = self.detect_parent_image_nvr(
            self.workflow.builder.base_image,
            inspect_data=self.workflow.builder.base_image_inspect,
        )
    manifest_mismatches = []
    for img, local_tag in self.workflow.builder.parent_images.items():
        # custom base images are produced by OSBS itself; no koji build to check
        if base_image_is_custom(img.to_str()):
            continue
        nvr = self.detect_parent_image_nvr(local_tag) if local_tag else None
        self._parent_builds[img] = self.wait_for_parent_image_build(nvr) if nvr else None
        if nvr == self._base_image_nvr:
            self._base_image_build = self._parent_builds[img]
        if self._parent_builds[img]:
            # we need the possible floating tag
            check_img = copy(local_tag)
            check_img.tag = img.tag
            try:
                self.check_manifest_digest(check_img, self._parent_builds[img])
            except ValueError as exc:
                # collect mismatches; reported together after the loop
                manifest_mismatches.append(exc)
        else:
            err_msg = ('Could not get koji build info for parent image {}. '
                       'Was this image built in OSBS?'.format(img.to_str()))
            self.log.error(err_msg)
            raise RuntimeError(err_msg)
    if manifest_mismatches:
        raise RuntimeError('Error while comparing parent images manifest digests in koji with '
                           'related values from registries: {}'.format(manifest_mismatches))

    return self.make_result()
def set_base_image(self, base_image, parents_pulled=True, insecure=False, dockercfg_path=None):
    """Record *base_image* as the build's base image.

    Updates the scratch/custom flags, parses the reference into an
    ImageName, remembers the first base image ever set as the original
    one, refreshes the parent-image mapping, and (unless building FROM
    scratch) maps the original base image to the newly set one.
    """
    self.base_from_scratch = base_image_is_scratch(base_image)
    if not self.custom_base_image:
        self.custom_base_image = base_image_is_custom(base_image)

    parsed = ImageName.parse(base_image)
    self.base_image = parsed
    if not self.original_base_image:
        # only the very first base image ever set becomes the original
        self.original_base_image = parsed
    self.recreate_parent_images()

    if self.base_from_scratch:
        return
    self.parent_images[self.original_base_image] = parsed
def run(self):
    """Find koji builds for the parent images and verify their manifest digests.

    Skips scratch builds entirely.  Detects the base image N-V-R (unless the
    base is scratch/custom), then for each non-custom parent image waits for
    its koji build and compares koji's manifest digest with the registry's.
    Failure handling is configurable via skip_koji_check_for_base_image and
    fail_on_digest_mismatch.

    :return: result of self.make_result(), or None for scratch builds
    :raises RuntimeError: missing koji build (unless skipping is configured)
        or digest mismatches (when fail_on_digest_mismatch is set)
    """
    if is_scratch_build(self.workflow):
        self.log.info('scratch build, skipping plugin')
        return

    df_images = self.workflow.data.dockerfile_images
    if not (df_images.base_from_scratch or df_images.custom_base_image):
        self._base_image_nvr = self.detect_parent_image_nvr(
            df_images.base_image,
            # Inspect any platform: the N-V-R labels should be equal for all platforms
            inspect_data=self.workflow.imageutil.base_image_inspect(),
        )
    manifest_mismatches = []
    for img, local_tag in df_images.items():
        img_str = img.to_str()
        if base_image_is_custom(img_str):
            continue
        nvr = self.detect_parent_image_nvr(local_tag) if local_tag else None
        parent_build_info = self.wait_for_parent_image_build(nvr) if nvr else None
        self._parent_builds[img_str] = parent_build_info
        if nvr == self._base_image_nvr:
            self._base_image_build = parent_build_info
        if parent_build_info:
            # we need the possible floating tag
            check_img = copy(local_tag)
            check_img.tag = img.tag
            try:
                self.check_manifest_digest(check_img, parent_build_info)
            except ValueError as exc:
                manifest_mismatches.append(exc)
        else:
            err_msg = (f'Could not get koji build info for parent image {img_str}. '
                       f'Was this image built in OSBS?')
            if self.workflow.conf.skip_koji_check_for_base_image:
                self.log.warning(err_msg)
            else:
                self.log.error(err_msg)
                raise RuntimeError(err_msg)
    if manifest_mismatches:
        mismatch_msg = ('Error while comparing parent images manifest digests in koji with '
                        'related values from registries: %s')
        if self.workflow.conf.fail_on_digest_mismatch:
            self.log.error(mismatch_msg, manifest_mismatches)
            raise RuntimeError(mismatch_msg % manifest_mismatches)
        # non-fatal mode: report and continue
        self.log.warning(mismatch_msg, manifest_mismatches)

    return self.make_result()
def run(self):
    """Find koji builds for the parent images and verify their manifest digests.

    Skips scratch builds.  Detects the base image N-V-R (unless the base is
    scratch/custom), optionally ignores isolated autorebuilds, then for each
    non-custom parent image waits for its koji build and compares koji's
    manifest digest with the registry's value.

    :return: result of self.make_result(), or None for scratch builds
    :raises RuntimeError: missing koji build (unless skipping is configured)
        or digest mismatches (when fail-on-mismatch is configured)
    """
    if is_scratch_build(self.workflow):
        self.log.info('scratch build, skipping plugin')
        return

    if not (self.workflow.builder.dockerfile_images.base_from_scratch or
            self.workflow.builder.dockerfile_images.custom_base_image):
        self._base_image_nvr = self.detect_parent_image_nvr(
            self.workflow.builder.dockerfile_images.base_image,
            inspect_data=self.workflow.builder.base_image_inspect,
        )

    if is_rebuild(self.workflow):
        self.ignore_isolated_autorebuilds()

    manifest_mismatches = []
    for img, local_tag in self.workflow.builder.dockerfile_images.items():
        if base_image_is_custom(img.to_str()):
            continue
        nvr = self.detect_parent_image_nvr(local_tag) if local_tag else None
        self._parent_builds[img] = self.wait_for_parent_image_build(nvr) if nvr else None
        if nvr == self._base_image_nvr:
            self._base_image_build = self._parent_builds[img]
        if self._parent_builds[img]:
            # we need the possible floating tag
            check_img = copy(local_tag)
            check_img.tag = img.tag
            try:
                self.check_manifest_digest(check_img, self._parent_builds[img])
            except ValueError as exc:
                manifest_mismatches.append(exc)
        else:
            err_msg = ('Could not get koji build info for parent image {}. '
                       'Was this image built in OSBS?'.format(img.to_str()))
            if get_skip_koji_check_for_base_image(self.workflow, fallback=False):
                self.log.warning(err_msg)
            else:
                self.log.error(err_msg)
                raise RuntimeError(err_msg)
    if manifest_mismatches:
        mismatch_msg = ('Error while comparing parent images manifest digests in koji with '
                        'related values from registries: %s')
        if get_fail_on_digest_mismatch(self.workflow, fallback=True):
            self.log.error(mismatch_msg, manifest_mismatches)
            raise RuntimeError(mismatch_msg % manifest_mismatches)
        # non-fatal mode: report and continue
        self.log.warning(mismatch_msg, manifest_mismatches)

    return self.make_result()
def set_base_image(self, base_image, parents_pulled=True, insecure=False):
    """Record *base_image* as the build's base image.

    Updates the scratch/custom flags, parses the reference into an
    ImageName, remembers the first base image ever set as the original
    one, refreshes the parent-image mapping, and (unless building FROM
    scratch) maps the original base image to the newly set one.
    """
    self.base_from_scratch = base_image_is_scratch(base_image)
    if not self.custom_base_image:
        self.custom_base_image = base_image_is_custom(base_image)

    parsed = ImageName.parse(base_image)
    self.base_image = parsed
    if not self.original_base_image:
        # only the very first base image ever set becomes the original
        self.original_base_image = parsed
    self.recreate_parent_images()

    if self.base_from_scratch:
        return
    self.parent_images[self.original_base_image] = parsed
def set_df_path(self, path):
    """Set the Dockerfile path and (re)derive image state from its content.

    Parses the Dockerfile, records the base image and all parent images in
    order, flags custom parents, and validates COPY --from usage.

    :param path: str, path to the Dockerfile
    :raises RuntimeError: no base image, or COPY --from references something
        that is not a known build stage
    :raises NotImplementedError: multiple different custom base images
    """
    self._df_path = path
    dfp = df_parser(path)
    base = dfp.baseimage
    if base is None:
        raise RuntimeError("no base image specified in Dockerfile")

    self.set_base_image(base)
    logger.debug("base image specified in dockerfile = '%s'", self.base_image)

    self.parent_images.clear()
    custom_base_images = set()
    for image in dfp.parent_images:
        image_name = ImageName.parse(image)
        if base_image_is_scratch(image_name.get_repo()):
            # 'scratch' has no tag; record it only in the ordered list
            image_name.tag = None
            self.parents_ordered.append(image_name.to_str())
            continue
        image_str = image_name.to_str()
        if base_image_is_custom(image_str):
            custom_base_images.add(image_str)
            self.custom_parent_image = True
        self.parents_ordered.append(image_str)
        # value filled in later when the parent is resolved/pulled
        self.parent_images[image_name] = None

    if len(custom_base_images) > 1:
        raise NotImplementedError("multiple different custom base images"
                                  " aren't allowed in Dockerfile")

    # validate user has not specified COPY --from=image
    builders = []
    for stmt in dfp.structure:
        if stmt['instruction'] == 'FROM':
            # extract "bar" from "foo as bar" and record as build stage
            match = re.search(r'\S+ \s+ as \s+ (\S+)', stmt['value'], re.I | re.X)
            builders.append(match.group(1) if match else None)
        elif stmt['instruction'] == 'COPY':
            match = re.search(r'--from=(\S+)', stmt['value'], re.I)
            if not match:
                continue
            stage = match.group(1)
            # error unless the --from is the index or name of a stage we've seen
            if any(stage in [str(idx), builder] for idx, builder in enumerate(builders)):
                continue
            raise RuntimeError(
                dedent("""\
                    OSBS does not support COPY --from unless it matches a build stage.
                    Dockerfile instruction was:
                    {}
                    To use an image with COPY --from, specify it in a stage with FROM, e.g.
                    FROM {} AS source
                    FROM ...
                    COPY --from=source <src> <dest>
                    """).format(stmt['content'], stage))
def run(self):
    """
    Pull parent images and retag them uniquely for this build.

    For each non-custom parent: resolve the base image if applicable, make
    sure the image has a registry, validate platforms and store manifest
    digests (orchestrator only), prefer a digest-pinned reference, then pull
    and retag unless running in inspect-only mode.  Digest-fetching errors
    are collected and raised together at the end.

    :raises RuntimeError: when any manifest digest could not be extracted
    """
    self.manifest_list_cache.clear()
    build_json = get_build_json()
    digest_fetching_exceptions = []
    for nonce, parent in enumerate(self.workflow.builder.dockerfile_images.keys()):
        if base_image_is_custom(parent.to_str()):
            continue

        image = parent
        use_original_tag = False
        # base_image_key is an ImageName, so compare parent as an ImageName also
        if image == self.workflow.builder.dockerfile_images.base_image_key:
            use_original_tag = True
            image = self._resolve_base_image(build_json)

        self._ensure_image_registry(image)

        if self.check_platforms:
            # run only at orchestrator
            self._validate_platforms_in_image(image)
            try:
                self._store_manifest_digest(image, use_original_tag=use_original_tag)
            except RuntimeError as exc:
                digest_fetching_exceptions.append(exc)

        image_with_digest = self._get_image_with_digest(image)
        if image_with_digest is None:
            self.log.warning("Cannot resolve manifest digest for image '%s'", image)
        else:
            self.log.info("Replacing image '%s' with '%s'", image, image_with_digest)
            image = image_with_digest

        if not self.inspect_only:
            image = self._pull_and_tag_image(image, build_json, str(nonce))
        self.workflow.builder.dockerfile_images[parent] = image

    if digest_fetching_exceptions:
        raise RuntimeError('Error when extracting parent images manifest digests: {}'
                           .format(digest_fetching_exceptions))

    self.workflow.builder.parents_pulled = not self.inspect_only

    # generate configuration in builder for inspecting images
    self.workflow.builder.pull_registries = \
        {reg: {'insecure': reg_cli.insecure, 'dockercfg_path': reg_cli.dockercfg_path}
         for reg, reg_cli in self.registry_clients.items()}
def get_image_build_conf(self):
    """Return the image-build configuration file name.

    The name is taken from the tag of the first custom parent image found
    in the Dockerfile; a missing or 'latest' tag falls back to the default
    'image-build.conf'.
    """
    conf_name = next(
        (parent.tag for parent in self.workflow.data.dockerfile_images
         if base_image_is_custom(parent.to_str())),
        None,
    )
    if not conf_name or conf_name == 'latest':
        conf_name = 'image-build.conf'
    return conf_name
def set_df_path(self, path):
    """Set the Dockerfile path and (re)derive image state from its content.

    Parses the Dockerfile, records the base image and all parent images in
    order, flags custom parents, and validates COPY --from usage.

    :param path: str, path to the Dockerfile
    :raises RuntimeError: no base image, or COPY --from references something
        that is not a known build stage
    :raises NotImplementedError: multiple different custom base images
    """
    self._df_path = path
    dfp = df_parser(path)
    base = dfp.baseimage
    if base is None:
        raise RuntimeError("no base image specified in Dockerfile")

    self.set_base_image(base)
    logger.debug("base image specified in dockerfile = '%s'", self.base_image)

    self.parent_images.clear()
    custom_base_images = set()
    for image in dfp.parent_images:
        image_name = ImageName.parse(image)
        if base_image_is_scratch(image_name.get_repo()):
            # 'scratch' has no tag; record it only in the ordered list
            image_name.tag = None
            self.parents_ordered.append(image_name.to_str())
            continue
        image_str = image_name.to_str()
        if base_image_is_custom(image_str):
            custom_base_images.add(image_str)
            self.custom_parent_image = True
        self.parents_ordered.append(image_str)
        # value filled in later when the parent is resolved/pulled
        self.parent_images[image_name] = None

    if len(custom_base_images) > 1:
        raise NotImplementedError("multiple different custom base images"
                                  " aren't allowed in Dockerfile")

    # validate user has not specified COPY --from=image
    builders = []
    for stmt in dfp.structure:
        if stmt['instruction'] == 'FROM':
            # extract "bar" from "foo as bar" and record as build stage
            match = re.search(r'\S+ \s+ as \s+ (\S+)', stmt['value'], re.I | re.X)
            builders.append(match.group(1) if match else None)
        elif stmt['instruction'] == 'COPY':
            match = re.search(r'--from=(\S+)', stmt['value'], re.I)
            if not match:
                continue
            stage = match.group(1)
            # error unless the --from is the index or name of a stage we've seen
            if any(stage in [str(idx), builder] for idx, builder in enumerate(builders)):
                continue
            raise RuntimeError(dedent("""\
                OSBS does not support COPY --from unless it matches a build stage.
                Dockerfile instruction was:
                {}
                To use an image with COPY --from, specify it in a stage with FROM, e.g.
                FROM {} AS source
                FROM ...
                COPY --from=source <src> <dest>
                """).format(stmt['content'], stage))
def set_base_image(self, base_image, parents_pulled=True, insecure=False):
    """Record *base_image* as the build's base image.

    Updates the scratch/custom flags, parses the reference into an
    ImageName, remembers the first base image ever set as the original
    one, refreshes the parent-image mapping, stores the pull flags, and
    logs the outcome.
    """
    self.base_from_scratch = base_image_is_scratch(base_image)
    if not self.custom_base_image:
        self.custom_base_image = base_image_is_custom(base_image)

    parsed = ImageName.parse(base_image)
    self.base_image = parsed
    if not self.original_base_image:
        # only the very first base image ever set becomes the original
        self.original_base_image = parsed
    self.recreate_parent_images()

    if not self.base_from_scratch:
        self.parent_images[self.original_base_image] = parsed

    self.parents_pulled = parents_pulled
    self.base_image_insecure = insecure
    logger.info("set base image to '%s' with original base '%s'",
                self.base_image, self.original_base_image)
def stream_filesystem(self, task_id, filesystem_regex):
    """Download the task's filesystem archive and import it as a base image.

    The first custom parent in dockerfile_images is replaced with the
    freshly imported image; the imported image is scheduled for removal
    via defer_removal.

    :return: the imported parent image reference
    """
    archive = self.download_filesystem(task_id, filesystem_regex)
    imported_ref = self.import_base_image(archive)
    imported_name = ImageName.parse(imported_ref)

    for parent in self.workflow.builder.dockerfile_images:
        if not base_image_is_custom(parent.to_str()):
            continue
        self.workflow.builder.dockerfile_images[parent] = imported_name
        break

    defer_removal(self.workflow, imported_ref)
    return imported_ref
def set_base_image(self, base_image, parents_pulled=True, insecure=False, dockercfg_path=None):
    """Record *base_image* as the build's base image.

    Updates the scratch/custom flags, parses the reference into an
    ImageName, remembers the first base image ever set as the original
    one, refreshes the parent-image mapping, stores the pull/registry
    settings, and logs the outcome.
    """
    self.base_from_scratch = base_image_is_scratch(base_image)
    if not self.custom_base_image:
        self.custom_base_image = base_image_is_custom(base_image)

    parsed = ImageName.parse(base_image)
    self.base_image = parsed
    if not self.original_base_image:
        # only the very first base image ever set becomes the original
        self.original_base_image = parsed
    self.recreate_parent_images()

    if not self.base_from_scratch:
        self.parent_images[self.original_base_image] = parsed

    self.parents_pulled = parents_pulled
    self.base_image_insecure = insecure
    self.base_image_dockercfg_path = dockercfg_path
    logger.info("set base image to '%s' with original base '%s'",
                self.base_image, self.original_base_image)
def run(self):
    """Run the plugin: require all parent images resolved, then rewrite FROMs."""
    dockerfile_images = self.workflow.data.dockerfile_images
    self.log.info("parent_images '%s'", dockerfile_images.keys())

    unresolved = []
    for key, val in dockerfile_images.items():
        if base_image_is_custom(str(key)):
            continue
        if not val:
            unresolved.append(key)

    if unresolved:
        # this would generally mean check_base_image didn't run and/or
        # custom plugins modified parent_images; treat it as an error.
        raise ParentImageUnresolved(
            "Parent image(s) unresolved: {}".format(unresolved))

    self.workflow.build_dir.for_each_platform(self.change_from_in_df)
def stream_filesystem(self, task_id, filesystem_regex):
    """Download the task's filesystem archive and import it as a base image.

    When the build uses a custom base image, the builder's base image is
    pointed at the imported one; the first custom entry in parent_images
    is replaced as well.  The imported image is scheduled for removal via
    defer_removal.

    :return: the imported parent image reference
    """
    archive = self.download_filesystem(task_id, filesystem_regex)
    imported_ref = self.import_base_image(archive)
    imported_name = ImageName.parse(imported_ref)

    if self.workflow.builder.custom_base_image:
        self.workflow.builder.set_base_image(imported_name)

    for parent in self.workflow.builder.parent_images.keys():
        if not base_image_is_custom(parent.to_str()):
            continue
        self.workflow.builder.parent_images[parent] = imported_name
        break

    defer_removal(self.workflow, imported_ref)
    return imported_ref
def get_image_build_conf(self):
    """Return the image-build configuration file name.

    Taken from the tag of the custom base image; when the base image is
    not custom, the tag of the most recently listed custom parent is used
    instead.  Missing or 'latest' tags fall back to 'image-build.conf'.
    """
    builder = self.workflow.builder
    if builder.custom_base_image:
        conf_name = builder.base_image.tag
    else:
        # when custom base image isn't last parent, get tag (configuration file)
        # from latest specified custom base image
        conf_name = next(
            (ImageName.parse(parent).tag
             for parent in reversed(builder.parents_ordered)
             if base_image_is_custom(parent)),
            None,
        )
    if not conf_name or conf_name == 'latest':
        conf_name = 'image-build.conf'
    return conf_name
def get_image_build_conf(self):
    """Return the image-build configuration file name.

    Taken from the tag of the custom base image; when the base image is
    not custom, the tag of the most recently listed custom parent is used
    instead.  Missing or 'latest' tags fall back to 'image-build.conf'.
    """
    tag = None
    if self.workflow.builder.custom_base_image:
        tag = self.workflow.builder.base_image.tag
    else:
        # base image is not custom: take the configuration-file tag from
        # the latest specified custom parent image
        for candidate in reversed(self.workflow.builder.parents_ordered):
            if not base_image_is_custom(candidate):
                continue
            tag = ImageName.parse(candidate).tag
            break

    if not tag or tag == 'latest':
        return 'image-build.conf'
    return tag
def run(self):
    """
    Check parent images to ensure they only come from allowed registries.

    For every non-custom, non-scratch parent: resolve the base image if
    applicable, verify its registry and platforms, fetch its manifest
    digest, and pin the Dockerfile entry to that digest.  Digest-fetching
    errors are collected and raised together at the end.

    :raises RuntimeError: when any manifest digest could not be extracted
    """
    self.manifest_list_cache.clear()
    digest_fetching_exceptions = []
    for parent in self.workflow.data.dockerfile_images.keys():
        if base_image_is_custom(parent.to_str()) or base_image_is_scratch(parent.to_str()):
            continue

        image = parent
        # base_image_key is an ImageName, so compare parent as an ImageName also
        if image == self.workflow.data.dockerfile_images.base_image_key:
            image = self._resolve_base_image()

        self._ensure_image_registry(image)
        self._validate_platforms_in_image(image)

        try:
            digest = self._fetch_manifest_digest(image)
        except RuntimeError as exc:
            # collect the failure and keep checking the remaining parents
            digest_fetching_exceptions.append(exc)
            continue

        image_with_digest = self._pin_to_digest(image, digest)
        self.log.info("Replacing image '%s' with '%s'", image, image_with_digest)
        self.workflow.data.dockerfile_images[parent] = image_with_digest
        self.workflow.data.parent_images_digests[str(image_with_digest)] = digest

    if digest_fetching_exceptions:
        raise RuntimeError('Error when extracting parent images manifest digests: {}'
                           .format(digest_fetching_exceptions))
def run(self):
    """Find koji builds for the parent images.

    Detects the base image N-V-R (skipped for scratch/custom bases) and
    waits for its koji build; then for each non-custom parent image records
    the matching koji build, reusing the base-image lookup when the N-V-Rs
    coincide.

    :return: result of self.make_result()
    """
    if not (self.workflow.builder.base_from_scratch or
            self.workflow.builder.custom_base_image):
        nvr = self._base_image_nvr = self.detect_parent_image_nvr(
            self.workflow.builder.base_image,
            inspect_data=self.workflow.builder.base_image_inspect,
        )
        if nvr:
            self._base_image_build = self.wait_for_parent_image_build(nvr)

    for img, local_tag in self.workflow.builder.parent_images.items():
        if base_image_is_custom(img.to_str()):
            continue
        nvr = self.detect_parent_image_nvr(local_tag) if local_tag else None
        if nvr == self._base_image_nvr:
            # don't look up base image a second time
            self._parent_builds[img] = self._base_image_build
            continue
        self._parent_builds[img] = self.wait_for_parent_image_build(nvr) if nvr else None

    return self.make_result()
def run(self):
    """Replace parent image references in the Dockerfile with local image ids.

    Sanity-checks the builder state, verifies all parents are resolved and
    consistent with the Dockerfile, inspects each pulled parent for its
    docker Id, then rewrites every FROM to use that Id and updates the
    builder's own parent/base-image bookkeeping.

    :raises ParentImageUnresolved: some parent image has no resolved value
    :raises ParentImageMissing: parent_images lost entries the Dockerfile has
    :raises NoIdInspection: docker inspection returned no 'Id' for an image
    """
    builder = self.workflow.builder
    dfp = df_parser(builder.df_path)
    organization = get_registries_organization(self.workflow)
    df_base = ImageName.parse(dfp.baseimage)
    if organization and not base_image_is_custom(dfp.baseimage):
        df_base.enclose(organization)
    build_base = builder.base_image

    if not self.workflow.builder.base_from_scratch:
        # do some sanity checks to defend against bugs and rogue plugins
        self._sanity_check(df_base, build_base, builder)

    self.log.info("parent_images '%s'", builder.parent_images)
    unresolved = [key for key, val in builder.parent_images.items() if not val]
    if unresolved:
        # this would generally mean pull_base_image didn't run and/or
        # custom plugins modified parent_images; treat it as an error.
        raise ParentImageUnresolved("Parent image(s) unresolved: {}".format(unresolved))

    # enclose images from dfp
    enclosed_parent_images = []
    for df_img in dfp.parent_images:
        if base_image_is_scratch(df_img):
            enclosed_parent_images.append(df_img)
            continue
        parent = ImageName.parse(df_img)
        if organization and not base_image_is_custom(df_img):
            parent.enclose(organization)
        enclosed_parent_images.append(parent)

    missing = [df_img for df_img in enclosed_parent_images
               if df_img not in builder.parent_images]
    missing_set = set(missing)
    if SCRATCH_FROM in missing_set:
        # scratch is not tracked in parent_images; it is not really missing
        missing_set.remove(SCRATCH_FROM)
    if missing_set:
        # this would indicate another plugin modified parent_images out of sync
        # with the Dockerfile or some other code bug
        raise ParentImageMissing("Lost parent image(s) from Dockerfile: {}".format(missing_set))

    # docker inspect all parent images so we can address them by Id
    parent_image_ids = {}
    for img, new_img in builder.parent_images.items():
        inspection = builder.parent_image_inspect(new_img)
        try:
            parent_image_ids[img] = inspection['Id']
        except KeyError:
            # unexpected code bugs or maybe docker weirdness
            self.log.error(
                "Id for image %s is missing in inspection: '%s'",
                new_img, inspection)
            raise NoIdInspection("Could not inspect Id for image " + str(new_img))

    # update the parents in Dockerfile
    new_parents = []
    for parent in enclosed_parent_images:
        if base_image_is_scratch(parent):
            new_parents.append(parent)
            continue
        pid = parent_image_ids[parent]
        self.log.info("changed FROM: '%s' -> '%s'", parent, pid)
        new_parents.append(pid)
    dfp.parent_images = new_parents

    # update builder's representation of what will be built
    builder.parent_images = parent_image_ids
    if self.workflow.builder.base_from_scratch:
        return
    builder.set_base_image(parent_image_ids[df_base])
    self.log.debug(
        "for base image '%s' using local image '%s', id '%s'",
        df_base, build_base, parent_image_ids[df_base]
    )
def run(self):
    """
    Pull parent images and retag them uniquely for this build.

    For each non-custom parent: resolve the base image if applicable,
    ensure a registry and organization enclosure, validate platforms and
    collect digests (orchestrator only), prefer a platform-specific
    digest-pinned reference, optionally swap to an arch-specific image,
    then pull and retag unless running in inspect-only mode.
    """
    build_json = get_build_json()
    # NOTE(review): platform.processor() may be empty on some systems,
    # hence the 'x86_64' fallback
    current_platform = platform.processor() or 'x86_64'
    self.manifest_list_cache = {}
    organization = get_registries_organization(self.workflow)
    for nonce, parent in enumerate(sorted(self.workflow.builder.parent_images.keys(), key=str)):
        if base_image_is_custom(parent.to_str()):
            continue

        image = parent
        is_base_image = False
        # original_base_image is an ImageName, so compare parent as an ImageName also
        if image == self.workflow.builder.original_base_image:
            is_base_image = True
            image = self._resolve_base_image(build_json)
        image = self._ensure_image_registry(image)

        if organization:
            image.enclose(organization)
            parent.enclose(organization)

        if self.check_platforms:
            # run only at orchestrator
            self._validate_platforms_in_image(image)
            self._collect_image_digests(image)

        # try to stay with digests
        image_with_digest = self._get_image_with_digest(image, current_platform)
        if image_with_digest is None:
            self.log.warning(
                "Cannot resolve platform '%s' specific digest for image '%s'",
                current_platform, image)
        else:
            self.log.info("Replacing image '%s' with '%s'", image, image_with_digest)
            image = image_with_digest

        if self.check_platforms:
            new_arch_image = self._get_image_for_different_arch(image, current_platform)
            if new_arch_image:
                image = new_arch_image

        if self.inspect_only:
            new_image = image
        else:
            new_image = self._pull_and_tag_image(image, build_json, str(nonce))
        self.workflow.builder.recreate_parent_images()
        self.workflow.builder.parent_images[parent] = new_image

        if is_base_image:
            if organization:
                # we want to be sure we have original_base_image enclosed as well
                self.workflow.builder.original_base_image.enclose(organization)
            self.workflow.builder.set_base_image(
                str(new_image), insecure=self.parent_registry_insecure)

    self.workflow.builder.parents_pulled = not self.inspect_only
    self.workflow.builder.base_image_insecure = self.parent_registry_insecure
def run(self):
    """Replace parent image references in the Dockerfile with local image ids.

    Sanity-checks the builder state, verifies all parents are resolved and
    consistent with the Dockerfile, inspects each pulled parent for its
    docker Id, then rewrites every FROM to use that Id and updates the
    builder's own parent/base-image bookkeeping.

    :raises ParentImageUnresolved: some parent image has no resolved value
    :raises ParentImageMissing: parent_images lost entries the Dockerfile has
    :raises NoIdInspection: docker inspection returned no 'Id' for an image
    """
    builder = self.workflow.builder
    dfp = df_parser(builder.df_path)
    organization = get_registries_organization(self.workflow)
    df_base = ImageName.parse(dfp.baseimage)
    if organization and not base_image_is_custom(dfp.baseimage):
        df_base.enclose(organization)
    build_base = builder.base_image

    if not self.workflow.builder.base_from_scratch:
        # do some sanity checks to defend against bugs and rogue plugins
        self._sanity_check(df_base, build_base, builder)

    self.log.info("parent_images '%s'", builder.parent_images)
    unresolved = [
        key for key, val in builder.parent_images.items() if not val
    ]
    if unresolved:
        # this would generally mean pull_base_image didn't run and/or
        # custom plugins modified parent_images; treat it as an error.
        raise ParentImageUnresolved(
            "Parent image(s) unresolved: {}".format(unresolved))

    # enclose images from dfp
    enclosed_parent_images = []
    for df_img in dfp.parent_images:
        if base_image_is_scratch(df_img):
            enclosed_parent_images.append(df_img)
            continue
        parent = ImageName.parse(df_img)
        if organization and not base_image_is_custom(df_img):
            parent.enclose(organization)
        enclosed_parent_images.append(parent)

    missing = [
        df_img for df_img in enclosed_parent_images
        if df_img not in builder.parent_images
    ]
    missing_set = set(missing)
    if SCRATCH_FROM in missing_set:
        # scratch is not tracked in parent_images; it is not really missing
        missing_set.remove(SCRATCH_FROM)
    if missing_set:
        # this would indicate another plugin modified parent_images out of sync
        # with the Dockerfile or some other code bug
        raise ParentImageMissing(
            "Lost parent image(s) from Dockerfile: {}".format(missing_set))

    # docker inspect all parent images so we can address them by Id
    parent_image_ids = {}
    for img, new_img in builder.parent_images.items():
        inspection = builder.parent_image_inspect(new_img)
        try:
            parent_image_ids[img] = inspection['Id']
        except KeyError:
            # unexpected code bugs or maybe docker weirdness
            self.log.error(
                "Id for image %s is missing in inspection: '%s'",
                new_img, inspection)
            raise NoIdInspection("Could not inspect Id for image " + str(new_img))

    # update the parents in Dockerfile
    new_parents = []
    for parent in enclosed_parent_images:
        if base_image_is_scratch(parent):
            new_parents.append(parent)
            continue
        pid = parent_image_ids[parent]
        self.log.info("changed FROM: '%s' -> '%s'", parent, pid)
        new_parents.append(pid)
    dfp.parent_images = new_parents

    # update builder's representation of what will be built
    builder.parent_images = parent_image_ids
    if self.workflow.builder.base_from_scratch:
        return
    builder.set_base_image(parent_image_ids[df_base])
    self.log.debug("for base image '%s' using local image '%s', id '%s'",
                   df_base, build_base, parent_image_ids[df_base])
def image_is_inspectable(image: Union[str, ImageName]) -> bool:
    """Check if we should expect the image to be inspectable.

    Scratch and custom base images are never considered inspectable.
    """
    name = str(image)
    if util.base_image_is_scratch(name):
        return False
    if util.base_image_is_custom(name):
        return False
    return True
def run(self):
    """
    Pull parent images and retag them uniquely for this build.

    For each non-custom parent: resolve the base image if applicable,
    ensure a registry and organization enclosure, validate platforms and
    store manifest digests (orchestrator only), prefer a digest-pinned
    reference, then pull and retag unless running in inspect-only mode.
    Digest-fetching errors are collected and raised together at the end.

    :raises RuntimeError: when any manifest digest could not be extracted
    """
    self.manifest_list_cache.clear()
    build_json = get_build_json()
    organization = get_registries_organization(self.workflow)
    digest_fetching_exceptions = []
    for nonce, parent in enumerate(sorted(self.workflow.builder.parent_images.keys(), key=str)):
        if base_image_is_custom(parent.to_str()):
            continue

        image = parent
        is_base_image = False
        use_original_tag = False
        # original_base_image is an ImageName, so compare parent as an ImageName also
        if image == self.workflow.builder.original_base_image:
            is_base_image = True
            use_original_tag = True
            image = self._resolve_base_image(build_json)
        image = self._ensure_image_registry(image)

        if organization:
            image.enclose(organization)
            parent.enclose(organization)

        if self.check_platforms:
            # run only at orchestrator
            self._validate_platforms_in_image(image)
            try:
                self._store_manifest_digest(image, use_original_tag=use_original_tag)
            except RuntimeError as exc:
                digest_fetching_exceptions.append(exc)

        image_with_digest = self._get_image_with_digest(image)
        if image_with_digest is None:
            self.log.warning("Cannot resolve manifest digest for image '%s'", image)
        else:
            self.log.info("Replacing image '%s' with '%s'", image, image_with_digest)
            image = image_with_digest

        if not self.inspect_only:
            image = self._pull_and_tag_image(image, build_json, str(nonce))
        self.workflow.builder.recreate_parent_images()
        self.workflow.builder.parent_images[parent] = image

        if is_base_image:
            if organization:
                # we want to be sure we have original_base_image enclosed as well
                self.workflow.builder.original_base_image.enclose(organization)
            self.workflow.builder.set_base_image(
                str(image),
                insecure=self.parent_registry_insecure,
                dockercfg_path=self.parent_registry_dockercfg_path)

    if digest_fetching_exceptions:
        raise RuntimeError(
            'Error when extracting parent images manifest digests: {}'.format(
                digest_fetching_exceptions))
    self.workflow.builder.parents_pulled = not self.inspect_only
    self.workflow.builder.base_image_insecure = self.parent_registry_insecure