def run(self):
    """Collect build metadata and store it on the OpenShift build as annotations.

    Reads the build object from the $BUILD environment variable; returns the
    annotations dict, or None when the build json is missing or malformed.
    """
    try:
        build_json = json.loads(os.environ["BUILD"])
    except KeyError:
        self.log.error("No $BUILD env variable. Probably not running in build container.")
        return

    kwargs = {}
    metadata = build_json.get("metadata", {})
    if 'namespace' in metadata:
        kwargs['namespace'] = metadata['namespace']

    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept
    # everything from specific IP and will set specific X-Remote-User for
    # such requests
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    def expand(images):
        # Qualify each image reference with every push registry, preserving
        # the registry-outer / image-inner ordering.
        result = []
        for reg in self.workflow.push_conf.all_registries:
            for img in images:
                tagged = img.copy()
                tagged.registry = reg.uri
                result.append(tagged.to_str())
        return result

    repositories = {
        # usually repositories formed from NVR labels;
        # these should be used for pulling and layering
        "primary": expand(self.workflow.tag_conf.primary_images),
        # unique unpredictable repositories
        "unique": expand(self.workflow.tag_conf.unique_images),
    }

    try:
        commit_id = self.workflow.source.lg.commit_id
    except AttributeError:
        commit_id = ""

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(repositories),
        "commit_id": commit_id,
    }

    if self.workflow.exported_image_sequence:
        last_export = self.workflow.exported_image_sequence[-1]
        tar_info = {key: last_export.get(key)
                    for key in ("path", "size", "md5sum", "sha256sum")}
        # looks like openshift can't handle value being None (null in json),
        # so only record tarball metadata when every field is present
        if all(value is not None for value in tar_info.values()):
            labels["tar_metadata"] = json.dumps({
                "size": tar_info["size"],
                "md5sum": tar_info["md5sum"],
                "sha256sum": tar_info["sha256sum"],
                "filename": os.path.basename(tar_info["path"]),
            })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def run(self):
    """Store build metadata as annotations on the OpenShift build object.

    Returns the annotations dict on success, or None when the build json
    carries no usable build name.
    """
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    # BUG FIX: subscripting the inspect dict raises KeyError, never
    # docker.errors.NotFound, so the old `except docker.errors.NotFound`
    # let a missing 'Id' key crash the plugin. Catch both: NotFound can
    # still come from the base_image_inspect attribute access itself.
    try:
        base_image_id = self.workflow.base_image_inspect['Id']
    except (KeyError, docker.errors.NotFound):
        base_image_id = ""

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id,
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        labels["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })
    osbs.set_annotations_on_build(build_id, labels)
    return labels
def run(self):
    """Store build metadata as annotations (and labels) on the OpenShift build.

    Returns {"annotations": ..., "labels": ...}, or None when the build
    json is malformed.
    """
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    # BUG FIX: subscripting the inspect dict raises KeyError, never
    # docker.errors.NotFound, so the old `except docker.errors.NotFound`
    # let a missing 'Id' key crash the plugin. Catch both: NotFound can
    # still come from the base_image_inspect attribute access itself.
    try:
        base_image_id = self.workflow.base_image_inspect['Id']
    except (KeyError, docker.errors.NotFound):
        base_image_id = ""

    annotations = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}
def run(self):
    """Send collected build metadata to OpenShift as build annotations.

    Reads the build object from $BUILD; returns the annotations dict, or
    None when the environment or build json is unusable.
    """
    try:
        build_json = json.loads(os.environ["BUILD"])
    except KeyError:
        self.log.error("No $BUILD env variable. Probably not running in build container.")
        return

    metadata = build_json.get("metadata", {})
    kwargs = {}
    if "namespace" in metadata:
        kwargs["namespace"] = metadata["namespace"]

    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept
    # everything from specific IP and will set specific X-Remote-User for
    # such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    builder = self.workflow.builder
    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": self.workflow.base_image_inspect["Id"],
        "base-image-name": builder.base_image.to_str(),
        "image-id": builder.image_id,
        "digests": json.dumps(self.get_digests()),
    }

    exported = self.workflow.exported_image_sequence
    if exported:
        newest = exported[-1]
        values = [newest.get(field)
                  for field in ("path", "size", "md5sum", "sha256sum")]
        # looks like openshift can't handle value being None (null in json),
        # so tar metadata is only attached when every field is known
        if all(value is not None for value in values):
            tar_path, tar_size, tar_md5sum, tar_sha256sum = values
            labels["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def run(self):
    """Record build metadata on the OpenShift build via annotations.

    The build object comes from the $BUILD environment variable; returns
    the annotations dict, or None on a missing/malformed build json.
    """
    raw_build = os.environ.get("BUILD")
    if raw_build is None:
        self.log.error("No $BUILD env variable. Probably not running in build container.")
        return
    build_json = json.loads(raw_build)

    metadata = build_json.get("metadata", {})
    kwargs = {'namespace': metadata['namespace']} if 'namespace' in metadata else {}

    if "name" not in metadata:
        self.log.error("malformed build json")
        return
    build_id = metadata["name"]
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept
    # everything from specific IP and will set specific X-Remote-User for
    # such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    # source may lack a commit_id attribute (e.g. non-git sources)
    commit_id = getattr(self.workflow.source, "commit_id", "")

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": self.workflow.base_image_inspect['Id'],
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id,
        "digests": json.dumps(self.get_digests()),
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    exported_images = self.workflow.exported_image_sequence
    if exported_images:
        newest = exported_images[-1]
        tar_path = newest.get("path")
        tar_size = newest.get("size")
        tar_md5sum = newest.get("md5sum")
        tar_sha256sum = newest.get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if all(v is not None for v in (tar_path, tar_size, tar_md5sum, tar_sha256sum)):
        labels["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def run(self):
    """Store build metadata as annotations (and labels) on the OpenShift build.

    Returns {"annotations": ..., "labels": ...}, or None when the build
    json is malformed.
    """
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    try:
        base_image_id = self.workflow.base_image_inspect['Id']
    except KeyError:
        base_image_id = ""

    # BUG FIX: the Dockerfile was previously read with a bare
    # open(...).read(), leaking the file handle; a context manager
    # guarantees it is closed.
    with open(self.workflow.builder.df_path) as dockerfile:
        dockerfile_contents = dockerfile.read()

    annotations = {
        "dockerfile": dockerfile_contents,
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
    if isinstance(help_result, dict) and 'help_file' in help_result and \
            'status' in help_result:
        if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
            annotations['help_file'] = json.dumps(None)
        elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
            annotations['help_file'] = json.dumps(help_result['help_file'])
        else:
            self.log.error("Unknown result from add_help plugin: %s", help_result)

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    annotations.update(self.get_config_map())
    self.apply_build_result_annotations(annotations)

    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}
def run(self):
    """Gather build artifact metadata and store it as OpenShift build annotations."""
    try:
        build_json = json.loads(os.environ["BUILD"])
    except KeyError:
        self.log.error(
            "No $BUILD env variable. Probably not running in build container."
        )
        return

    metadata = build_json.get("metadata", {})
    kwargs = {}
    if 'namespace' in metadata:
        kwargs['namespace'] = metadata['namespace']

    if "name" not in metadata:
        self.log.error("malformed build json")
        return
    build_id = metadata["name"]
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept
    # everything from specific IP and will set specific X-Remote-User for
    # such requests
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    def _tagged(image, registry):
        # qualify the image reference with the registry it was pushed to
        ref = image.copy()
        ref.registry = registry.uri
        return ref.to_str()

    tag_conf = self.workflow.tag_conf
    registries = self.workflow.push_conf.all_registries
    repositories = {
        # usually repositories formed from NVR labels;
        # these should be used for pulling and layering
        "primary": [_tagged(image, registry)
                    for registry in registries
                    for image in tag_conf.primary_images],
        # unique unpredictable repositories
        "unique": [_tagged(image, registry)
                   for registry in registries
                   for image in tag_conf.unique_images],
    }

    try:
        commit_id = self.workflow.source.lg.commit_id
    except AttributeError:
        commit_id = ""

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(repositories),
        "commit_id": commit_id,
    }

    if self.workflow.exported_image_sequence:
        newest = self.workflow.exported_image_sequence[-1]
        values = [newest.get(key)
                  for key in ("path", "size", "md5sum", "sha256sum")]
        # looks like openshift can't handle value being None (null in json)
        if all(v is not None for v in values):
            tar_path, tar_size, tar_md5sum, tar_sha256sum = values
            labels["tar_metadata"] = json.dumps({
                "size": tar_size,
                "md5sum": tar_md5sum,
                "sha256sum": tar_sha256sum,
                "filename": os.path.basename(tar_path),
            })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def run(self):
    """Store collected build metadata on the OpenShift build.

    Metadata is attached as build annotations; a smaller set of labels is
    applied separately via make_labels(). Returns
    {"annotations": ..., "labels": ...}, or None when the build json
    carries no build name.
    """
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)
    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth, verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)
    # non-git sources may not expose a commit_id
    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""
    # base image may be absent (e.g. built FROM scratch — TODO confirm);
    # fall back to empty strings for both name and id
    base_image = self.workflow.builder.base_image
    if base_image is not None:
        base_image_name = base_image.to_str()
        try:
            base_image_id = self.workflow.base_image_inspect['Id']
        except KeyError:
            base_image_id = ""
    else:
        base_image_name = ""
        base_image_id = ""
    # builder may lack df_path, in which case record an empty Dockerfile
    try:
        with open(self.workflow.builder.df_path) as f:
            dockerfile_contents = f.read()
    except AttributeError:
        dockerfile_contents = ""
    annotations = {
        "dockerfile": dockerfile_contents,
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": base_image_name,
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }
    # record the add_help plugin outcome: null when no help file was found,
    # the file name when one was generated
    help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
    if isinstance(
        help_result, dict
    ) and 'help_file' in help_result and 'status' in help_result:
        if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
            annotations['help_file'] = json.dumps(None)
        elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
            annotations['help_file'] = json.dumps(help_result['help_file'])
        else:
            self.log.error("Unknown result from add_help plugin: %s", help_result)
    # pulp push result is (top_layer, ...); the top layer becomes the
    # v1 image id annotation
    pulp_push_results = self.workflow.postbuild_results.get(
        PLUGIN_PULP_PUSH_KEY)
    if pulp_push_results:
        top_layer, _ = pulp_push_results
        annotations['v1-image-id'] = top_layer
    media_types = []
    if pulp_push_results:
        media_types += [MEDIA_TYPE_DOCKER_V1]
    # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
    pulp_pull_results = (
        self.workflow.postbuild_results.get(PulpPullPlugin.key) or
        self.workflow.exit_results.get(PulpPullPlugin.key))
    # a failed pulp_pull stores its exception as the result; ignore it
    if isinstance(pulp_pull_results, Exception):
        pulp_pull_results = None
    if pulp_pull_results:
        media_types += pulp_pull_results
    if media_types:
        # deduplicate and sort for a stable annotation value
        annotations['media-types'] = json.dumps(
            sorted(list(set(media_types))))
    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get(
            "md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get(
            "sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })
    annotations.update(self.get_config_map())
    self.apply_build_result_annotations(annotations)
    # For arrangement version 4 onwards (where group_manifests
    # runs in the orchestrator build), restore the repositories
    # metadata which orchestrate_build adjusted.
    if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
        annotations['repositories'] = json.dumps(self.get_repositories())
    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        # dump the payload for debugging before propagating the failure
        self.log.debug("annotations: %r", annotations)
        raise
    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise
    return {"annotations": annotations, "labels": labels}
def run(self):
    """Attach collected build metadata to the OpenShift build.

    Annotations carry the bulk of the metadata; labels from make_labels()
    are applied separately. Returns {"annotations": ..., "labels": ...},
    or None when the build json has no build name.
    """
    metadata = get_build_json().get("metadata", {})
    if "name" not in metadata:
        self.log.error("malformed build json")
        return
    build_id = metadata["name"]
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept
    # everything from specific IP and will set specific X-Remote-User for
    # such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    # non-git sources may not expose a commit_id
    commit_id = getattr(self.workflow.source, "commit_id", "")

    base_image = self.workflow.builder.base_image
    base_image_name = ""
    base_image_id = ""
    if base_image is not None:
        base_image_name = base_image.to_str()
        try:
            base_image_id = self.workflow.base_image_inspect['Id']
        except KeyError:
            base_image_id = ""

    try:
        with open(self.workflow.builder.df_path) as dockerfile:
            dockerfile_contents = dockerfile.read()
    except AttributeError:
        # builder has no df_path; record an empty Dockerfile
        dockerfile_contents = ""

    annotations = {
        "dockerfile": dockerfile_contents,
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": base_image_name,
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
    if (isinstance(help_result, dict) and
            'help_file' in help_result and 'status' in help_result):
        status = help_result['status']
        if status == AddHelpPlugin.NO_HELP_FILE_FOUND:
            annotations['help_file'] = json.dumps(None)
        elif status == AddHelpPlugin.HELP_GENERATED:
            annotations['help_file'] = json.dumps(help_result['help_file'])
        else:
            self.log.error("Unknown result from add_help plugin: %s", help_result)

    media_types = []
    pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
    if pulp_push_results:
        top_layer, _ = pulp_push_results
        annotations['v1-image-id'] = top_layer
        media_types.append(MEDIA_TYPE_DOCKER_V1)

    # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
    pulp_pull_results = (self.workflow.postbuild_results.get(PulpPullPlugin.key) or
                         self.workflow.exit_results.get(PulpPullPlugin.key))
    # a failed pulp_pull stores its exception as the result; skip it
    if pulp_pull_results and not isinstance(pulp_pull_results, Exception):
        media_types.extend(pulp_pull_results)
    if media_types:
        annotations['media-types'] = json.dumps(sorted(set(media_types)))

    exported = self.workflow.exported_image_sequence
    tar_info = exported[-1] if exported else {}
    tar_fields = [tar_info.get(name)
                  for name in ("path", "size", "md5sum", "sha256sum")]
    # looks like that openshift can't handle value being None (null in json)
    if all(field is not None for field in tar_fields):
        tar_path, tar_size, tar_md5sum, tar_sha256sum = tar_fields
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    annotations.update(self.get_config_map())
    self.apply_build_result_annotations(annotations)

    # For arrangement version 4 onwards (where group_manifests runs in the
    # orchestrator build), restore the repositories metadata which
    # orchestrate_build adjusted.
    if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
        annotations['repositories'] = json.dumps(self.get_repositories())

    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}