def _update_extra(self, extra):
    """Populate the koji build-metadata ``extra`` dict for a container image build.

    Fills in isolation status, filesystem koji task id, parent image data,
    ODCS compose info, yum repourls, flatpak metadata, and build kind/engine/
    subtype markers, then delegates to the various ``set_*`` helpers.

    :param extra: dict, koji build metadata; must already contain
        ``extra['image']`` and ``extra['osbs_build']['subtypes']``
        (a list) — both are mutated in place
    :raises RuntimeError: if the workflow source is not a git source
    """
    # Koji container builds must come from git so the build source is traceable.
    if not isinstance(self.workflow.source, GitSource):
        raise RuntimeError('git source required')
    # EAFP lookup: missing/odd user_params shapes all default to "not isolated".
    try:
        isolated = self.workflow.user_params['isolated']
    except (IndexError, AttributeError, KeyError):
        isolated = False
    self.log.info("build is isolated: %r", isolated)
    extra['image']['isolated'] = isolated
    # Only record the filesystem task id when the add_filesystem plugin ran.
    fs_koji_task_id = self._filesystem_koji_task_id
    if fs_koji_task_id is not None:
        extra['filesystem_koji_task_id'] = fs_koji_task_id
    extra['image'].update(get_parent_image_koji_data(self.workflow))
    # NOTE(review): assumes resolve_composes always ran (result is subscripted
    # without a None check, unlike older variants of this method) — confirm.
    resolve_comp_result = self.workflow.data.prebuild_results.get(
        PLUGIN_RESOLVE_COMPOSES_KEY)
    if resolve_comp_result['composes']:
        extra['image']['odcs'] = {
            'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
            'signing_intent': resolve_comp_result['signing_intent'],
            'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
        }
    if self.workflow.data.all_yum_repourls:
        extra['image']['yum_repourls'] = self.workflow.data.all_yum_repourls
    # Flatpak builds additionally record compose metadata and a 'flatpak' subtype.
    if is_flatpak_build(self.workflow):
        flatpak_util = FlatpakUtil(
            workflow_config=self.workflow.conf,
            source_config=self.workflow.source.config,
            composes=resolve_comp_result['composes'])
        flatpak_compose_info = flatpak_util.get_flatpak_compose_info()
        if flatpak_compose_info:
            koji_metadata = flatpak_compose_info.koji_metadata()
            extra['image'].update(koji_metadata)
            extra['osbs_build']['subtypes'].append('flatpak')
    self.set_help(extra)
    self.set_operators_metadata(extra)
    self.set_pnc_build_metadata(extra)
    self.set_remote_sources_metadata(extra)
    self.set_remote_source_file_metadata(extra)
    self.set_go_metadata(extra)
    self.set_group_manifest_info(extra)
    extra['osbs_build']['kind'] = KOJI_KIND_IMAGE_BUILD
    # OSBS2 TBD
    extra['osbs_build']['engine'] = 'podman'
    if has_operator_appregistry_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_APPREGISTRY)
    if has_operator_bundle_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_BUNDLE)
    if self.userdata:
        extra['custom_user_metadata'] = self.userdata
def isolated_from_scratch_build(self):
    """Reject isolated builds of "FROM scratch" images.

    Operator bundle images are the one permitted exception: they may be
    both isolated and built from scratch.

    :raises RuntimeError: when a non-bundle "FROM scratch" build is isolated
    """
    # Guard clauses: any one of these conditions makes the build acceptable.
    if not self.workflow.data.dockerfile_images.base_from_scratch:
        return
    if not is_isolated_build(self.workflow):
        return
    if has_operator_bundle_manifest(self.workflow):
        return
    raise RuntimeError('"FROM scratch" image build cannot be isolated '
                       '(except operator bundle images)')
def run(self) -> Optional[str]:
    """
    Run the plugin.

    This plugin extracts the operator manifest files from an image,
    saves them as a zip archive, and returns its path

    :return: str, path to operator manifests zip file, or None when the
        image is not an operator (bundle or appregistry) build
    """
    if not (has_operator_bundle_manifest(self.workflow)
            or has_operator_appregistry_manifest(self.workflow)):
        self.log.info(
            "Operator manifests label not set in Dockerfile. Skipping")
        return None
    platforms = get_platforms(self.workflow.data)
    # Manifests are platform-independent; any single-platform image will do.
    image: ImageName = self.workflow.data.tag_conf.get_unique_images_with_platform(
        platforms[0])[0]
    tmp_dir = tempfile.mkdtemp(
        dir=self.workflow.build_dir.any_platform.path)
    # FIX: ensure the scratch dir is removed even when extraction,
    # CSV verification, or zipping raises — previously it leaked on error.
    try:
        manifests_dir = os.path.join(tmp_dir, MANIFESTS_DIR_NAME)
        os.mkdir(manifests_dir)
        self.workflow.imageutil.extract_file_from_image(
            image, IMG_MANIFESTS_PATH, manifests_dir)
        if has_operator_bundle_manifest(self.workflow):
            self._verify_csv(manifests_dir)
        manifests_zipfile_path = (self.workflow.build_dir.any_platform.path /
                                  OPERATOR_MANIFESTS_ARCHIVE)
        with zipfile.ZipFile(manifests_zipfile_path, 'w') as archive:
            # Store each file with a path relative to the manifests dir root.
            for root, _, files in os.walk(manifests_dir):
                for f in files:
                    filedir = os.path.relpath(root, manifests_dir)
                    filepath = os.path.join(filedir, f)
                    archive.write(os.path.join(root, f), filepath,
                                  zipfile.ZIP_DEFLATED)
            manifest_files = archive.namelist()
            self.log.debug("Archiving operator manifests: %s", manifest_files)
    finally:
        shutil.rmtree(tmp_dir)
    return str(manifests_zipfile_path)
def appregistry_bundle_label_mutually_exclusive(self):
    """Fail when both operator label flavours are present.

    Labels com.redhat.com.delivery.appregistry and
    com.redhat.delivery.operator.bundle are mutually exclusive;
    a Dockerfile may declare at most one of them.

    :raises ValueError: when both labels are specified
    """
    msg = ("only one of labels com.redhat.com.delivery.appregistry "
           "and com.redhat.delivery.operator.bundle is allowed")
    self.log.debug("Running check: %s", msg)
    has_appregistry = has_operator_appregistry_manifest(self.workflow)
    has_bundle = has_operator_bundle_manifest(self.workflow)
    if has_appregistry and has_bundle:
        raise ValueError(msg)
def should_run(self):
    """
    Determine if this is an operator manifest bundle build

    :return: bool, should plugin run?
    """
    # Early-out guard: skip (with a log line) unless the bundle label is set.
    if not has_operator_bundle_manifest(self.workflow):
        self.log.info("Not an operator manifest bundle build, skipping plugin")
        return False
    return True
def operator_bundle_from_scratch(self):
    """Only from scratch image can be used for operator bundle build

    :raises ValueError: when a bundle build is not a single-stage
        "FROM scratch" build
    """
    msg = "Operator bundle build can be only 'FROM scratch' build (single stage)"
    self.log.debug("Running check: %s", msg)
    if not has_operator_bundle_manifest(self.workflow):
        return
    df_images = self.workflow.data.dockerfile_images
    # Equivalent (by De Morgan) to: base_from_scratch and single parent.
    single_stage_scratch = (df_images.base_from_scratch
                            and len(df_images.original_parents) <= 1)
    if not single_stage_scratch:
        raise ValueError(msg)
def operator_bundle_from_scratch(self):
    """Only from scratch image can be used for operator bundle build

    :raises ValueError: when a bundle build is not a single-stage
        "FROM scratch" build
    """
    msg = "Operator bundle build can be only 'FROM scratch' build (single stage)"
    self.log.debug("Running check: %s", msg)
    if not has_operator_bundle_manifest(self.workflow):
        return
    builder = self.workflow.builder
    # Equivalent (by De Morgan) to the original "not A or B > 1" check.
    single_stage = len(builder.parents_ordered) <= 1
    if not (builder.base_from_scratch and single_stage):
        raise ValueError(msg)
def should_run(self):
    """
    Determine if this is an operator manifest bundle build

    :return: bool, should plugin run?
    """
    if not has_operator_bundle_manifest(self.workflow):
        self.log.info("Not an operator manifest bundle build, skipping plugin")
        return False
    # The reactor config must carry an operator_manifests section; without
    # it the plugin has nothing to work with.
    if self.workflow.conf.operator_manifests:
        return True
    self.log.warning(
        "operator_manifests configuration missing in reactor config map, aborting")
    return False
def should_run(self):
    """
    Check if the plugin should run or skip execution.

    :return: bool, False if plugin should skip execution
    """
    # This plugin only runs on workers, and only on the one platform that
    # was designated to upload the operator metadata.
    if self.is_in_orchestrator():
        self.log.warning("%s plugin set to run on orchestrator. Skipping",
                         self.key)
        return False
    if self.operator_manifests_extract_platform != self.platform:
        self.log.info("Only platform [%s] will upload operators metadata. Skipping",
                      self.operator_manifests_extract_platform)
        return False
    is_operator_build = (has_operator_bundle_manifest(self.workflow)
                         or has_operator_appregistry_manifest(self.workflow))
    if not is_operator_build:
        self.log.info("Operator manifests label not set in Dockerfile. Skipping")
        return False
    return True
def should_run(self):
    """
    Check if the plugin should run or skip execution.

    :return: bool, False if plugin should skip execution
    """
    # Appregistry publishing happens from the orchestrator only.
    if not self.is_in_orchestrator():
        self.log.warning("%s plugin set to run on worker. Skipping", self.key)
        return False
    omps_config = get_omps_config(self.workflow, None)
    if not omps_config:
        self.log.info("Integration with OMPS is not configured. Skipping")
        return False
    # Bundle-format operators must never be pushed to the appregistry.
    if has_operator_bundle_manifest(self.workflow):
        self.log.info("Operator bundle format is not compatible with appregistry."
                      " Skipping publishing into appregistry.")
        return False
    if not has_operator_appregistry_manifest(self.workflow):
        self.log.info("Not an operator build. Skipping")
        return False
    # Scratch, autorebuild, and isolated builds never publish.
    if is_scratch_build():
        self.log.info('Scratch build. Skipping')
        return False
    if is_rebuild(self.workflow):
        self.log.info('Autorebuild. Skipping')
        return False
    if is_isolated_build():
        self.log.info('Isolated build. Skipping')
        return False
    return True
def _update_extra(self, extra, metadata, worker_metadatas):
    """Populate the koji build-metadata ``extra`` dict for a container image build.

    Records autorebuild/isolation state, the triggering koji task, the
    filesystem koji task id, parent image data, flatpak and ODCS compose
    info, yum repourls, and build kind/engine/subtype markers, delegating
    the remainder to the ``set_*`` helpers.

    :param extra: dict, koji build metadata; must already contain
        ``extra['image']`` and ``extra['osbs_build']['subtypes']``
        (a list) — mutated in place
    :param metadata: dict, build metadata; ``metadata['labels']['isolated']``
        is consulted for isolation state
    :param worker_metadatas: per-worker metadata passed through to helpers
    :raises RuntimeError: if the workflow source is not a git source
    """
    extra['image']['autorebuild'] = self.rebuild
    # Koji container builds must come from git so the build source is traceable.
    if not isinstance(self.workflow.source, GitSource):
        raise RuntimeError('git source required')
    if self.workflow.triggered_after_koji_task:
        extra['image']['triggered_after_koji_task'] =\
            self.workflow.triggered_after_koji_task
    # EAFP: any missing/odd metadata shape defaults to "not isolated".
    try:
        isolated = str(metadata['labels']['isolated']).lower() == 'true'
    except (IndexError, AttributeError, KeyError):
        isolated = False
    self.log.info("build is isolated: %r", isolated)
    extra['image']['isolated'] = isolated
    # Pick up the filesystem koji task id if the add_filesystem plugin ran;
    # malformed results are logged but never fail the build.
    fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
    if fs_result is not None:
        try:
            fs_task_id = fs_result['filesystem-koji-task-id']
        except KeyError:
            self.log.error("%s: expected filesystem-koji-task-id in result",
                           AddFilesystemPlugin.key)
        else:
            try:
                task_id = int(fs_task_id)
            except ValueError:
                self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
            else:
                extra['filesystem_koji_task_id'] = task_id
    extra['image'].update(get_parent_image_koji_data(self.workflow))
    # Flatpak builds additionally record compose metadata and a 'flatpak' subtype.
    flatpak_compose_info = get_flatpak_compose_info(self.workflow)
    if flatpak_compose_info:
        koji_metadata = flatpak_compose_info.koji_metadata()
        koji_metadata['flatpak'] = True
        extra['image'].update(koji_metadata)
        extra['osbs_build']['subtypes'].append('flatpak')
    resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
    if resolve_comp_result:
        extra['image']['odcs'] = {
            'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
            'signing_intent': resolve_comp_result['signing_intent'],
            'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
        }
    if self.workflow.all_yum_repourls:
        extra['image']['yum_repourls'] = self.workflow.all_yum_repourls
    self.set_help(extra, worker_metadatas)
    self.set_operators_metadata(extra, worker_metadatas)
    self.set_remote_sources_metadata(extra)
    self.set_go_metadata(extra)
    self.set_group_manifest_info(extra, worker_metadatas)
    extra['osbs_build']['kind'] = KOJI_KIND_IMAGE_BUILD
    extra['osbs_build']['engine'] = self.workflow.builder.tasker.build_method
    if has_operator_appregistry_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_APPREGISTRY)
    if has_operator_bundle_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_BUNDLE)
def run(self):
    """
    Run the plugin.

    This plugin extracts the operator manifest files from an image,
    saves them as a zip archive, and returns its path

    :return: str, path to operator manifests zip file (None when the
        plugin should not run)
    :raises RuntimeError: when extraction fails or the manifests dir is empty
    """
    if not self.should_run():
        return
    manifests_archive_dir = tempfile.mkdtemp()
    image = self.workflow.image
    # As in flatpak_create_oci, we specify command to prevent possible docker daemon errors.
    container_dict = self.tasker.create_container(image, command=['/bin/bash'])
    container_id = container_dict['Id']
    try:
        bits, _ = self.tasker.get_archive(container_id, IMG_MANIFESTS_PATH)
    except APIError as ex:
        msg = ('Could not extract operator manifest files. '
               'Is there a %s path in the image?' % (IMG_MANIFESTS_PATH))
        self.log.debug('Error while trying to extract %s from image: %s',
                       IMG_MANIFESTS_PATH, ex)
        self.log.error(msg)
        raise RuntimeError('%s %s' % (msg, ex)) from ex
    except Exception as ex:
        raise RuntimeError('%s' % ex) from ex
    finally:
        # Best-effort cleanup: a failed removal must not mask the real error.
        try:
            self.tasker.remove_container(container_id)
        except Exception as ex:
            self.log.warning('Failed to remove container %s: %s',
                             container_id, ex)
    # Spool the tar stream to a temp file, then unpack it.
    with tempfile.NamedTemporaryFile() as extracted_file:
        for chunk in bits:
            extracted_file.write(chunk)
        extracted_file.flush()
        # FIX: close the tar archive when done instead of leaking the handle.
        with tarfile.TarFile(extracted_file.name) as tar_archive:
            tar_archive.extractall(manifests_archive_dir)
    manifests_path = os.path.join(manifests_archive_dir, MANIFESTS_DIR_NAME)
    if has_operator_bundle_manifest(self.workflow):
        self._verify_csv(manifests_path)
    manifests_zipfile_path = os.path.join(manifests_archive_dir,
                                          OPERATOR_MANIFESTS_ARCHIVE)
    with zipfile.ZipFile(manifests_zipfile_path, 'w') as archive:
        # Store each file with a path relative to the manifests dir root.
        for root, _, files in os.walk(manifests_path):
            for f in files:
                filedir = os.path.relpath(root, manifests_path)
                filepath = os.path.join(filedir, f)
                archive.write(os.path.join(root, f), filepath,
                              zipfile.ZIP_DEFLATED)
        manifest_files = archive.namelist()
        if not manifest_files:
            self.log.error('Empty operator manifests directory')
            raise RuntimeError('Empty operator manifests directory')
    self.log.debug("Archiving operator manifests: %s", manifest_files)
    shutil.rmtree(manifests_path)
    return manifests_zipfile_path
def get_build(self, metadata, worker_metadatas):
    """Assemble the koji build dict for either an image build or a source build.

    For regular image builds, NVR comes from Dockerfile labels and ``extra``
    is populated with autorebuild/isolation state, parent image data, flatpak
    and ODCS info, and subtype markers. For source container builds
    (``self.source_build``), NVR and source URL come from the fetch_sources
    plugin results instead.

    :param metadata: dict, build metadata; its labels supply 'isolated' and
        'koji-task-id'
    :param worker_metadatas: per-worker metadata passed through to helpers
    :return: dict, koji build info (name/version/release/source, start/end
        times, owner, and the assembled ``extra``)
    :raises RuntimeError: if an image build's source is not a git source
    """
    start_time = int(atomic_reactor_start_time)
    extra = {'image': {}, 'osbs_build': {'subtypes': []}}
    if not self.source_build:
        # NVR for image builds comes from the Dockerfile labels.
        labels = Labels(df_parser(self.workflow.builder.df_path,
                                  workflow=self.workflow).labels)
        _, component = labels.get_name_and_value(Labels.LABEL_TYPE_COMPONENT)
        _, version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)
        source = self.workflow.source
        if not isinstance(source, GitSource):
            raise RuntimeError('git source required')
        extra['image']['autorebuild'] = is_rebuild(self.workflow)
        if self.workflow.triggered_after_koji_task:
            extra['image']['triggered_after_koji_task'] =\
                self.workflow.triggered_after_koji_task
        # EAFP: any missing/odd metadata shape defaults to "not isolated".
        try:
            isolated = str(metadata['labels']['isolated']).lower() == 'true'
        except (IndexError, AttributeError, KeyError):
            isolated = False
        self.log.info("build is isolated: %r", isolated)
        extra['image']['isolated'] = isolated
        # Pick up the filesystem koji task id if the add_filesystem plugin
        # ran; malformed results are logged but never fail the build.
        fs_result = self.workflow.prebuild_results.get(AddFilesystemPlugin.key)
        if fs_result is not None:
            try:
                fs_task_id = fs_result['filesystem-koji-task-id']
            except KeyError:
                self.log.error("%s: expected filesystem-koji-task-id in result",
                               AddFilesystemPlugin.key)
            else:
                try:
                    task_id = int(fs_task_id)
                except ValueError:
                    self.log.error("invalid task ID %r", fs_task_id, exc_info=1)
                else:
                    extra['filesystem_koji_task_id'] = task_id
        extra['image'].update(get_parent_image_koji_data(self.workflow))
        # Flatpak builds additionally record compose metadata and subtype.
        flatpak_compose_info = get_flatpak_compose_info(self.workflow)
        if flatpak_compose_info:
            koji_metadata = flatpak_compose_info.koji_metadata()
            koji_metadata['flatpak'] = True
            extra['image'].update(koji_metadata)
            extra['osbs_build']['subtypes'].append('flatpak')
        resolve_comp_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        if resolve_comp_result:
            extra['image']['odcs'] = {
                'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
                'signing_intent': resolve_comp_result['signing_intent'],
                'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
            }
        if self.workflow.all_yum_repourls:
            extra['image']['yum_repourls'] = self.workflow.all_yum_repourls
        self.set_help(extra, worker_metadatas)
        self.set_operators_metadata(extra, worker_metadatas)
        self.set_remote_sources_metadata(extra)
        self.set_go_metadata(extra)
        self.set_group_manifest_info(extra, worker_metadatas)
        extra['osbs_build']['kind'] = KOJI_KIND_IMAGE_BUILD
        extra['osbs_build']['engine'] = self.workflow.builder.tasker.build_method
        if has_operator_appregistry_manifest(self.workflow):
            extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_APPREGISTRY)
        if has_operator_bundle_manifest(self.workflow):
            extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_BUNDLE)
    else:
        # Source container build: metadata comes from the fetch_sources plugin.
        source_result = self.workflow.prebuild_results[PLUGIN_FETCH_SOURCES_KEY]
        extra['image']['sources_for_nvr'] = source_result['sources_for_nvr']
        extra['image']['sources_signing_intent'] = source_result['signing_intent']
        extra['osbs_build']['kind'] = KOJI_KIND_IMAGE_SOURCE_BUILD
        extra['osbs_build']['engine'] = KOJI_SOURCE_ENGINE
    # Record the koji task that created this build configuration, if any.
    koji_task_id = metadata.get('labels', {}).get('koji-task-id')
    if koji_task_id is not None:
        self.log.info("build configuration created by Koji Task ID %s",
                      koji_task_id)
        try:
            extra['container_koji_task_id'] = int(koji_task_id)
        except ValueError:
            self.log.error("invalid task ID %r", koji_task_id, exc_info=1)
    koji_task_owner = get_koji_task_owner(self.session, koji_task_id).get('name')
    extra['submitter'] = self.session.getLoggedInUser()['name']
    self.set_media_types(extra, worker_metadatas)
    build = {
        'start_time': start_time,
        'end_time': int(time.time()),
        'extra': extra,
        'owner': koji_task_owner,
    }
    # NVR/source differ between source container builds and image builds.
    if self.source_build:
        build.update({
            'name': self.workflow.koji_source_nvr['name'],
            'version': self.workflow.koji_source_nvr['version'],
            'release': self.workflow.koji_source_nvr['release'],
            'source': self.workflow.koji_source_source_url,
        })
    else:
        build.update({
            'name': component,
            'version': version,
            'release': release,
            'source': "{0}#{1}".format(source.uri, source.commit_id),
        })
    return build