def run(self): """ run the plugin """ if not is_flatpak_build(self.workflow): self.log.info('not flatpak build, skipping plugin') return resolve_comp_result = self.workflow.data.prebuild_results.get( PLUGIN_RESOLVE_COMPOSES_KEY) flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf, source_config=self.workflow.source.config, composes=resolve_comp_result['composes']) compose_info = flatpak_util.get_flatpak_compose_info() source = flatpak_util.get_flatpak_source_info() builder = FlatpakBuilder(source, None, None) builder.precheck() flatpak_update = functools.partial(self.update_dockerfile, builder, compose_info) self.workflow.build_dir.for_each_platform(flatpak_update) create_files = functools.partial( self.create_includepkgs_file_and_cleanupscript, builder) self.workflow.build_dir.for_all_platforms_copy(create_files)
def test_flatpak_create_dockerfile(workflow, source_dir, config_name, override_base_image,
                                   breakage):
    config = CONFIGS[config_name]

    modules = None
    if breakage == 'no_modules':
        modules = []
        expected_exception = "a module is required for Flatpaks"
    elif breakage == 'multiple_modules':
        modules = ['eog:f28:20170629213428', 'flatpak-common:f28:123456']
        expected_exception = None  # Just a warning
    else:
        assert breakage is None
        expected_exception = None

    data = yaml.safe_load(config['container_yaml'])
    if override_base_image is not None:
        data['flatpak']['base_image'] = override_base_image
    if modules is not None:
        data['compose']['modules'] = modules
    container_yaml = yaml.dump(data)

    platforms = ["x86_64", "s390x"]

    mock_workflow(workflow, source_dir, container_yaml, platforms)

    base_image = "registry.fedoraproject.org/fedora:latest"

    reactor_config = {
        'version': 1,
        'flatpak': {'base_image': base_image},
        'source_registry': {'url': 'source_registry'},
    }

    runner = (MockEnv(workflow)
              .for_plugin(FlatpakCreateDockerfilePlugin.key)
              .set_reactor_config(reactor_config)
              .create_runner())

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        flatpak_util = FlatpakUtil(workflow_config=None, source_config=workflow.source.config)
        source_spec = flatpak_util.get_flatpak_source_spec()
        assert source_spec == config['source_spec']

        expect_base_image = override_base_image if override_base_image else base_image

        for platform in platforms:
            build_dir = BuildDir(workflow.build_dir.path / platform, platform)
            df = build_dir.dockerfile_path.read_text("utf-8")

            assert "FROM " + expect_base_image in df
            assert 'name="{}"'.format(config['name']) in df
            assert 'com.redhat.component="{}"'.format(config['component']) in df
            assert "RUN rm -f /etc/yum.repos.d/*" in df
            assert "ADD atomic-reactor-repos/* /etc/yum.repos.d/" in df
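# Illustrative sketch (assumption, not taken from the test fixtures): a minimal
# container.yaml of the shape the test above manipulates, limited to the keys it
# touches (compose.modules, flatpak.base_image) plus the name and component
# fields that flatpak_create_dockerfile reads. The values are made up.
import yaml

EXAMPLE_CONTAINER_YAML = """
compose:
    modules:
    - eog:f28:20170629213428
flatpak:
    name: eog
    component: eog
    base_image: registry.fedoraproject.org/fedora:latest
"""

data = yaml.safe_load(EXAMPLE_CONTAINER_YAML)
assert data['compose']['modules'] == ['eog:f28:20170629213428']
assert data['flatpak']['base_image'] == 'registry.fedoraproject.org/fedora:latest'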
def _update_extra(self, extra):
    if not isinstance(self.workflow.source, GitSource):
        raise RuntimeError('git source required')

    try:
        isolated = self.workflow.user_params['isolated']
    except (IndexError, AttributeError, KeyError):
        isolated = False
    self.log.info("build is isolated: %r", isolated)
    extra['image']['isolated'] = isolated

    fs_koji_task_id = self._filesystem_koji_task_id
    if fs_koji_task_id is not None:
        extra['filesystem_koji_task_id'] = fs_koji_task_id

    extra['image'].update(get_parent_image_koji_data(self.workflow))

    resolve_comp_result = self.workflow.data.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
    if resolve_comp_result['composes']:
        extra['image']['odcs'] = {
            'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
            'signing_intent': resolve_comp_result['signing_intent'],
            'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
        }
    if self.workflow.data.all_yum_repourls:
        extra['image']['yum_repourls'] = self.workflow.data.all_yum_repourls

    if is_flatpak_build(self.workflow):
        flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf,
                                   source_config=self.workflow.source.config,
                                   composes=resolve_comp_result['composes'])
        flatpak_compose_info = flatpak_util.get_flatpak_compose_info()
        if flatpak_compose_info:
            koji_metadata = flatpak_compose_info.koji_metadata()
            extra['image'].update(koji_metadata)
        extra['osbs_build']['subtypes'].append('flatpak')

    self.set_help(extra)
    self.set_operators_metadata(extra)
    self.set_pnc_build_metadata(extra)
    self.set_remote_sources_metadata(extra)
    self.set_remote_source_file_metadata(extra)

    self.set_go_metadata(extra)
    self.set_group_manifest_info(extra)
    extra['osbs_build']['kind'] = KOJI_KIND_IMAGE_BUILD
    # OSBS2 TBD
    extra['osbs_build']['engine'] = 'podman'
    if has_operator_appregistry_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_APPREGISTRY)
    if has_operator_bundle_manifest(self.workflow):
        extra['osbs_build']['subtypes'].append(KOJI_SUBTYPE_OP_BUNDLE)

    if self.userdata:
        extra['custom_user_metadata'] = self.userdata
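# Illustrative sketch (hypothetical values): the shape of the ODCS metadata that
# _update_extra() attaches under extra['image']['odcs'] when resolve_composes
# reported composes. The compose IDs and signing intent below are invented for
# the example; only the key layout mirrors the code above.
resolve_comp_result = {
    'composes': [{'id': 42}, {'id': 43}],
    'signing_intent': 'release',
    'signing_intent_overridden': False,
}

extra = {'image': {}}
if resolve_comp_result['composes']:
    extra['image']['odcs'] = {
        'compose_ids': [item['id'] for item in resolve_comp_result['composes']],
        'signing_intent': resolve_comp_result['signing_intent'],
        'signing_intent_overridden': resolve_comp_result['signing_intent_overridden'],
    }

assert extra['image']['odcs']['compose_ids'] == [42, 43]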
def run(self) -> Optional[Dict[str, Dict[str, Any]]]:
    if not is_flatpak_build(self.workflow):
        self.log.info('not flatpak build, skipping plugin')
        return None

    resolve_comp_result: Dict[str, Any] = self.workflow.data.plugins_results[
        PLUGIN_RESOLVE_COMPOSES_KEY]
    flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf,
                               source_config=self.workflow.source.config,
                               composes=resolve_comp_result['composes'])
    source = flatpak_util.get_flatpak_source_info()
    if not source:
        raise RuntimeError("flatpak_create_dockerfile must be run before flatpak_create_oci")

    # Build the flatpak image for each platform and return the per-platform results
    build_flatpak_image = functools.partial(self.build_flatpak_image, source)
    return self.workflow.build_dir.for_each_platform(build_flatpak_image)
def run(self): """ run the plugin """ if not is_flatpak_build(self.workflow): self.log.info('not flatpak build, skipping plugin') return flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf, source_config=self.workflow.source.config, composes=None) source_spec = flatpak_util.get_flatpak_source_spec() module_info = ModuleSpec.from_str(source_spec) # Load additional information from the flatpak section flatpak_yaml = self.workflow.source.config.flatpak base_image = flatpak_yaml.get('base_image', self.default_base_image) name = flatpak_yaml.get('name', module_info.name) component = flatpak_yaml.get('component', module_info.name) # Create the dockerfile def _create_dockerfile(build_dir: BuildDir) -> List[Path]: content = DOCKERFILE_TEMPLATE.format( name=name, component=component, cleanupscript=FLATPAK_CLEANUPSCRIPT_FILENAME, includepkgs=FLATPAK_INCLUDEPKGS_FILENAME, stream=module_info.stream.replace('-', '_'), base_image=base_image, relative_repos_path=RELATIVE_REPOS_PATH, rpm_qf_args=rpm_qf_args(), yum_repos_dir=YUM_REPOS_DIR) build_dir.dockerfile_path.write_text(content, "utf-8") return [build_dir.dockerfile_path] created_files = self.workflow.build_dir.for_all_platforms_copy( _create_dockerfile) dockerfile_path = created_files[0] self.workflow.reset_dockerfile_images(str(dockerfile_path))
def test_flatpak_update_dockerfile(workflow, build_dir, config_name, breakage):
    config = CONFIGS[config_name]

    container_yaml = config['container_yaml']

    workflow = mock_workflow(workflow, build_dir, container_yaml)

    if breakage == 'branch_mismatch':
        config = deepcopy(config)
        base_module = config['modules'][config['base_module']]
        base_module['metadata'] = base_module['metadata'].replace('branch: f28',
                                                                  'branch: MISMATCH')

        expected_exception = "Mismatch for 'branch'"
    elif breakage == 'no_compose':
        config = deepcopy(config)
        config['odcs_composes'] = []

        expected_exception = "Can't find main module"
    else:
        assert breakage is None
        expected_exception = None

    mock_koji_session(config)

    # composes run by resolve_composes plugin
    setup_flatpak_composes(workflow, config)

    secrets_path = build_dir / "secret"
    secrets_path.mkdir()
    secrets_path.joinpath("token").write_text("green_eggs_and_ham", "utf-8")

    rcm = {
        'version': 1,
        'odcs': {
            'api_url': ODCS_URL,
            'auth': {'openidc_dir': secrets_path},
            'signing_intents': [
                {'name': 'unsigned', 'keys': []},
                {'name': 'release', 'keys': ['R123', 'R234']},
                {'name': 'beta', 'keys': ['R123', 'B456', 'B457']},
            ],
            'default_signing_intent': 'unsigned'
        },
        'koji': {'auth': {}, 'hub_url': 'https://koji.example.com/hub'}
    }

    runner = (MockEnv(workflow)
              .for_plugin(FlatpakUpdateDockerfilePlugin.key)
              .set_reactor_config(rcm)
              .create_runner())

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        assert os.path.exists(workflow.build_dir.any_platform.dockerfile_path)
        df = workflow.build_dir.any_platform.dockerfile.content

        m = re.search(r'module enable\s*(.*?)\s*$', df, re.MULTILINE)
        assert m
        enabled_modules = sorted(m.group(1).split())

        if config_name == 'app':
            assert enabled_modules == ['eog:f28', 'flatpak-runtime:f28']
        else:
            assert enabled_modules == ['flatpak-runtime:f28']

        includepkgs_path = os.path.join(workflow.build_dir.any_platform.path,
                                        'atomic-reactor-includepkgs')
        assert os.path.exists(includepkgs_path)
        with open(includepkgs_path) as f:
            includepkgs = f.read()
            assert 'librsvg2' in includepkgs
            if config_name == 'app':
                assert 'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64' in includepkgs

        assert os.path.exists(os.path.join(workflow.build_dir.any_platform.path, 'cleanup.sh'))

        resolve_comp_result = workflow.data.plugins_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
        flatpak_util = FlatpakUtil(workflow_config=workflow.conf,
                                   source_config=workflow.source.config,
                                   composes=resolve_comp_result['composes'])

        compose_info = flatpak_util.get_flatpak_compose_info()
        assert compose_info.source_spec == config['source_spec']

        if config_name == 'app':
            assert compose_info.main_module.name == 'eog'
            assert compose_info.main_module.stream == 'f28'
            assert compose_info.main_module.version == '20170629213428'
            assert (compose_info.main_module.mmd.get_summary("C") ==
                    'Eye of GNOME Application Module')
            assert compose_info.main_module.rpms == [
                'eog-0:3.28.3-1.module_2123+73a9ef6f.src.rpm',
                'eog-0:3.28.3-1.module_2123+73a9ef6f.x86_64.rpm',
                'eog-0:3.28.3-1.module_2123+73a9ef6f.ppc64le.rpm',
            ]

        source_info = flatpak_util.get_flatpak_source_info()
        assert source_info.base_module.name == config['base_module']