def _load_compose_info(self):
    """Find the compose containing the main module and record it on the workflow.

    Raises RuntimeError if no module compose contains a module matching the
    name and stream of the flatpak source spec.
    """
    spec_str = get_flatpak_source_spec(self.workflow)
    # flatpak_create_dockerfile must have run already and set the source spec
    assert spec_str is not None
    wanted = ModuleSpec.from_str(spec_str)

    resolve_result = self.workflow.prebuild_results.get(PLUGIN_RESOLVE_COMPOSES_KEY)
    if resolve_result:
        # Orchestrator: compose info comes straight from the resolve_composes plugin
        composes = resolve_result['composes']
    else:
        # Worker: resolve_composes doesn't run, so load the compose info ourselves
        assert self.compose_ids
        composes = self._load_composes()

    for compose in composes:
        if compose['source_type'] != SOURCE_TYPE_MODULE:
            continue
        members = [ModuleSpec.from_str(piece) for piece in compose['source'].split()]
        for candidate in members:
            if candidate.name == wanted.name and candidate.stream == wanted.stream:
                set_flatpak_compose_info(self.workflow, self._build_compose_info(members))
                return

    self.log.debug('Compose info: %s', composes)
    raise RuntimeError("Can't find main module %s in compose result" % spec_str)
def run(self):
    """ run the plugin """
    self._load_source_spec()

    source_spec = get_flatpak_source_spec(self.workflow)
    module_info = ModuleSpec.from_str(source_spec)

    # The flatpak section of container.yaml may override module-derived defaults
    flatpak_yaml = self.workflow.source.config.flatpak
    base_image = flatpak_yaml.get('base_image', self.default_base_image)
    name = flatpak_yaml.get('name', module_info.name)
    component = flatpak_yaml.get('component', module_info.name)

    # Render the Dockerfile from the template and write it into the build dir
    content = DOCKERFILE_TEMPLATE.format(
        name=name,
        component=component,
        cleanupscript=FLATPAK_CLEANUPSCRIPT_FILENAME,
        includepkgs=FLATPAK_INCLUDEPKGS_FILENAME,
        stream=module_info.stream.replace('-', '_'),
        base_image=base_image,
        relative_repos_path=RELATIVE_REPOS_PATH,
        rpm_qf_args=rpm_qf_args(),
        yum_repos_dir=YUM_REPOS_DIR)

    df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)
    with open(df_path, 'w') as fp:
        fp.write(content)

    self.workflow.builder.set_df_path(df_path)
def _resolve_modules(self, compose_source):
    """Map module name -> ModuleInfo for every module spec in *compose_source*."""
    koji_session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

    resolved = {}
    for spec_str in compose_source.strip().split():
        spec = ModuleSpec.from_str(spec_str)
        build, rpm_list = get_koji_module_build(koji_session, spec)

        # The returned RPM list covers every architecture and includes SRPMs
        rpms = []
        for rpm in rpm_list:
            rpms.append('{name}-{epochnum}:{version}-{release}.{arch}.rpm'
                        .format(epochnum=rpm['epoch'] or 0, **rpm))

        # strict=False - don't break if new fields are added
        mmd = Modulemd.ModuleStream.read_string(
            build['extra']['typeinfo']['module']['modulemd_str'],
            strict=False)
        # Normalize to a version 2 modulemd document
        mmd = mmd.upgrade(Modulemd.ModuleStreamVersionEnum.TWO)

        resolved[spec.name] = ModuleInfo(spec.name, spec.stream, spec.version,
                                         mmd, rpms)

    return resolved
def test_with_context(self):
    """A module spec that carries a context is resolved via a direct getBuild NVR lookup."""
    module_str = 'eog:my-stream:20180821163756:775baa8e'
    module_koji_nvr = 'eog-my_stream-20180821163756.775baa8e'

    koji_return = {
        'build_id': 1138198,
        'name': 'eog',
        'version': 'my_stream',
        'release': '20180821163756.775baa8e',
        'extra': {
            'typeinfo': {
                'module': {'modulemd_str': 'document: modulemd\nversion: 2'},
            },
        },
    }

    spec = ModuleSpec.from_str(module_str)

    session = flexmock()
    session.should_receive('getBuild').with_args(module_koji_nvr).and_return(koji_return)
    self.mock_get_rpms(session)

    get_koji_module_build(session, spec)
def _resolve_modules(self, compose_source):
    """Resolve each module spec in *compose_source* to a ModuleInfo via Koji."""
    koji_session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

    result = {}
    for item in compose_source.strip().split():
        spec = ModuleSpec.from_str(item)
        build, rpm_list = get_koji_module_build(koji_session, spec)

        # rpm_list spans all architectures and includes source RPMs
        rpms = ['{name}-{epochnum}:{version}-{release}.{arch}.rpm'
                .format(epochnum=rpm['epoch'] or 0, **rpm)
                for rpm in rpm_list]

        parsed = Modulemd.objects_from_string(
            build['extra']['typeinfo']['module']['modulemd_str'])
        assert len(parsed) == 1
        mmd = parsed[0]
        assert isinstance(mmd, Modulemd.Module)
        # Normalize to a version 2 modulemd document
        mmd.upgrade()

        result[spec.name] = ModuleInfo(spec.name, spec.stream, spec.version,
                                       mmd, rpms)

    return result
def _resolve_modules(self, compose_source):
    """Build a name -> ModuleInfo dict for the modules listed in *compose_source*."""
    session = get_koji_session(self.workflow, fallback=NO_FALLBACK)

    modules_by_name = {}
    for module in compose_source.strip().split():
        spec = ModuleSpec.from_str(module)
        build, rpm_list = get_koji_module_build(session, spec)

        # The RPM list returned by Koji includes SRPMs and every architecture
        rpms = []
        for rpm in rpm_list:
            rpms.append('{name}-{epochnum}:{version}-{release}.{arch}.rpm'
                        .format(epochnum=rpm['epoch'] or 0, **rpm))

        modulemd_str = build['extra']['typeinfo']['module']['modulemd_str']
        parsed_objects = Modulemd.objects_from_string(modulemd_str)
        assert len(parsed_objects) == 1
        mmd = parsed_objects[0]
        assert isinstance(mmd, Modulemd.Module)
        # Make sure we have a version 2 modulemd file
        mmd.upgrade()

        modules_by_name[spec.name] = ModuleInfo(spec.name, spec.stream,
                                                spec.version, mmd, rpms)

    return modules_by_name
def get_flatpak_source_info(self) -> FlatpakSourceInfo:
    """Assemble a FlatpakSourceInfo from the flatpak config and resolved compose."""
    compose = self.get_flatpak_compose_info()
    profile = ModuleSpec.from_str(compose.source_spec).profile
    return FlatpakSourceInfo(self.source_config.flatpak,
                             compose.modules,
                             compose.main_module,
                             profile)
def _load_source(self):
    """Build a FlatpakSourceInfo from the resolved compose and store it on the workflow."""
    compose = get_flatpak_compose_info(self.workflow)
    profile = ModuleSpec.from_str(compose.source_spec).profile

    info = FlatpakSourceInfo(self.workflow.source.config.flatpak,
                             compose.modules,
                             compose.main_module,
                             profile)
    set_flatpak_source_info(self.workflow, info)
def get_flatpak_compose_info(self) -> ComposeInfo:
    """Find the module compose containing the main module and build its ComposeInfo.

    Raises RuntimeError if no compose matches the source spec's name and stream.
    """
    source_spec = self.get_flatpak_source_spec()
    wanted = ModuleSpec.from_str(source_spec)

    for compose in self.composes:
        if compose['source_type'] != SOURCE_TYPE_MODULE:
            continue
        members = [ModuleSpec.from_str(item) for item in compose['source'].split()]
        if any(m.name == wanted.name and m.stream == wanted.stream for m in members):
            resolved = self.resolve_modules(members)
            return self.build_compose_info(resolved, source_spec)

    logger.debug('Compose info: %s', self.composes)
    raise RuntimeError("Can't find main module %s in compose result" % source_spec)
def build_compose_info(self, modules, source_spec) -> ComposeInfo:
    """Validate the resolved main module against *source_spec* and wrap it all up."""
    wanted = ModuleSpec.from_str(source_spec)
    main_info = modules[wanted.name]

    # Resolution must return the stream (and the version, when pinned) we asked for
    assert main_info.stream == wanted.stream
    if wanted.version is not None:
        assert main_info.version == wanted.version

    return ComposeInfo(source_spec=source_spec,
                       main_module=main_info,
                       modules=modules)
def setup_flatpak_source_info(config=None):
    """Build a FlatpakSourceInfo test fixture from *config* (defaults to APP_CONFIG)."""
    if config is None:
        config = APP_CONFIG

    compose = setup_flatpak_compose_info(config)
    flatpak_yaml = yaml.safe_load(config['container_yaml'])['flatpak']
    profile = ModuleSpec.from_str(compose.source_spec).profile

    return FlatpakSourceInfo(flatpak_yaml, compose.modules,
                             compose.main_module, profile)
def setup_flatpak_source_info(workflow, config=APP_CONFIG):
    """Create a FlatpakSourceInfo fixture, attach it to *workflow*, and return it."""
    compose = setup_flatpak_compose_info(workflow, config)
    flatpak_yaml = yaml.safe_load(config['container_yaml'])['flatpak']
    profile = ModuleSpec.from_str(compose.source_spec).profile

    source = FlatpakSourceInfo(flatpak_yaml, compose.modules,
                               compose.base_module, profile)
    set_flatpak_source_info(workflow, source)
    return source
def test_with_context_without_build(self):
    """getBuild returning None for a context NVR raises 'No build found'."""
    module_str = 'eog:my-stream:20180821163756:775baa8e'
    nvr = 'eog-my_stream-20180821163756.775baa8e'

    spec = ModuleSpec.from_str(module_str)

    session = flexmock()
    session.should_receive('getBuild').with_args(nvr).and_return(None)

    with pytest.raises(Exception) as e:
        get_koji_module_build(session, spec)
    assert 'No build found' in str(e.value)
def _load_source(self):
    """Assemble a FlatpakSourceInfo from the previously-resolved module compose.

    Raises RuntimeError when resolve_module_compose has not run yet.
    """
    compose_info = get_compose_info(self.workflow)
    if compose_info is None:
        raise RuntimeError(
            "resolve_module_compose must be run before flatpak_create_dockerfile"
        )

    profile = ModuleSpec.from_str(compose_info.source_spec).profile
    flatpak_yaml = self.workflow.source.config.flatpak
    return FlatpakSourceInfo(flatpak_yaml, compose_info.modules,
                             compose_info.base_module, profile)
def _load_source(self):
    """Load the flatpak source info, requiring a previously-resolved compose."""
    compose_info = get_compose_info(self.workflow)
    if compose_info is None:
        raise RuntimeError(
            "resolve_module_compose must be run before flatpak_create_dockerfile")

    spec = ModuleSpec.from_str(compose_info.source_spec)
    return FlatpakSourceInfo(self.workflow.source.config.flatpak,
                             compose_info.modules,
                             compose_info.base_module,
                             spec.profile)
class MockConfiguration(object):
    # Minimal stand-in for the source-configuration object used by the code
    # under test. Attributes mirror the real configuration's interface.
    # NOTE(review): `additional_tags` is captured from the enclosing scope
    # (this class is defined inside a test helper) — confirm against caller.
    container = {
        'tags': additional_tags or [],
        'compose': {
            'modules': ['mod_name:mod_stream:mod_version']
        }
    }
    # First (and only) module spec string from the compose section
    module = container['compose']['modules'][0]
    # Parsed counterpart of `module`, as the real config exposes it
    container_module_specs = [ModuleSpec.from_str(module)]
    depth = 0

    def is_autorebuild_enabled(self):
        # Autorebuilds are never enabled in these tests
        return False
def __init__(self, is_flatpak=False, modules=None):
    """Mock source-config; *modules* is an optional list of module spec strings."""
    self.container = {'compose': {'modules': modules}}
    # Parse the specs the way the real configuration object does
    self.container_module_specs = [
        ModuleSpec.from_str(m) for m in (modules or [])
    ]
    self.depth = 0

    self.is_flatpak = is_flatpak
    self.flatpak_base_image = None
    self.flatpak_component = None
    self.flatpak_name = None

    self.git_uri = TEST_GIT_URI
    self.git_ref = TEST_GIT_REF
    self.git_branch = TEST_GIT_BRANCH
def render_modules_request(self):
    """Build the ODCS request payload for a module compose."""
    # In the Flatpak case the profile selects the packages installed into the
    # Flatpak, but ODCS doesn't understand profiles and they don't affect the
    # compose — strip them before sending the module list.
    stripped = [
        ModuleSpec.from_str(spec).to_str(include_profile=False)
        for spec in self.modules
    ]

    request = {
        'source_type': 'module',
        'source': ' '.join(stripped),
        'sigkeys': self.signing_intent['keys'],
    }
    if self.arches:
        request['arches'] = self.arches
    return request
def _build_compose_info(self, modules):
    """Resolve *modules* and wrap the result in a ComposeInfo for the main module."""
    spec_str = get_flatpak_source_spec(self.workflow)
    # flatpak_create_dockerfile must have run already and set the source spec
    assert spec_str is not None
    wanted = ModuleSpec.from_str(spec_str)

    resolved = self._resolve_modules(modules)
    main_info = resolved[wanted.name]
    # Resolution must return the stream (and version, when pinned) we asked for
    assert main_info.stream == wanted.stream
    if wanted.version is not None:
        assert main_info.version == wanted.version

    return ComposeInfo(source_spec=spec_str,
                       main_module=main_info,
                       modules=resolved)
def test_without_context(self, koji_return, should_raise):
    """Without a context, the build is located via getPackageID + listBuilds."""
    spec = ModuleSpec.from_str('eog:master:20180821163756')

    session = flexmock()
    session.should_receive('getPackageID').with_args('eog').and_return(303)
    session.should_receive('listBuilds').with_args(
        packageID=303, type='module',
        state=koji.BUILD_STATES['COMPLETE']).and_return(koji_return)

    if not should_raise:
        self.mock_get_rpms(session)
        get_koji_module_build(session, spec)
    else:
        with pytest.raises(Exception) as e:
            get_koji_module_build(session, spec)
        assert should_raise in str(e.value)
def render_modules_request(self):
    """Build the ODCS request payload for a module compose."""
    # The Flatpak profile only selects the packages installed into the Flatpak;
    # ODCS doesn't understand profiles and they don't affect the compose, so
    # drop them from every module spec.
    sources = []
    for spec_str in self.modules:
        sources.append(ModuleSpec.from_str(spec_str).to_str(include_profile=False))

    request = {
        'source_type': 'module',
        'source': ' '.join(sources),
        'sigkeys': self.signing_intent['keys'],
    }
    if self.module_resolve_tags:
        # For ODCS, modular_koji_tags means something different for
        # source_type=module than for other source types, hence the
        # distinct configuration key feeding it here.
        request['modular_koji_tags'] = self.module_resolve_tags
    if self.arches:
        request['arches'] = self.arches
    return request
def __init__(self, git_uri, git_ref, git_branch, depth):
    """Mock configuration capturing git coordinates; *depth* may be falsy.

    NOTE(review): `additional_tags` and `flatpak` are captured from the
    enclosing scope — confirm against the surrounding test helper.
    """
    module_specs = ['mod_name:mod_stream:mod_version']
    self.container = {
        'tags': additional_tags or [],
        'compose': {'modules': module_specs},
    }
    self.module = module_specs[0]
    self.container_module_specs = [ModuleSpec.from_str(self.module)]
    self.depth = int(depth) if depth else 0

    self.is_flatpak = flatpak
    self.flatpak_base_image = None
    self.flatpak_component = None
    self.flatpak_name = None

    self.git_uri = git_uri
    self.git_ref = git_ref
    self.git_branch = git_branch
def run(self):
    """ run the plugin """
    if not is_flatpak_build(self.workflow):
        self.log.info('not flatpak build, skipping plugin')
        return

    flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf,
                               source_config=self.workflow.source.config,
                               composes=None)
    module_info = ModuleSpec.from_str(flatpak_util.get_flatpak_source_spec())

    # The flatpak section of container.yaml may override module-derived defaults
    flatpak_yaml = self.workflow.source.config.flatpak
    base_image = flatpak_yaml.get('base_image', self.default_base_image)
    name = flatpak_yaml.get('name', module_info.name)
    component = flatpak_yaml.get('component', module_info.name)

    def _create_dockerfile(build_dir: BuildDir) -> List[Path]:
        # Render the Dockerfile template into this build dir
        content = DOCKERFILE_TEMPLATE.format(
            name=name,
            component=component,
            cleanupscript=FLATPAK_CLEANUPSCRIPT_FILENAME,
            includepkgs=FLATPAK_INCLUDEPKGS_FILENAME,
            stream=module_info.stream.replace('-', '_'),
            base_image=base_image,
            relative_repos_path=RELATIVE_REPOS_PATH,
            rpm_qf_args=rpm_qf_args(),
            yum_repos_dir=YUM_REPOS_DIR)
        build_dir.dockerfile_path.write_text(content, "utf-8")
        return [build_dir.dockerfile_path]

    created = self.workflow.build_dir.for_all_platforms_copy(_create_dockerfile)
    self.workflow.reset_dockerfile_images(str(created[0]))
def _resolve_compose(self):
    """Resolve (starting one if needed) the ODCS module compose and return its ComposeInfo.

    Raises RuntimeError when no module is configured or the compose did not
    finish in the "done" state.
    """
    odcs_config = get_config(self.workflow).get_odcs_config()
    odcs_client = get_odcs_session(self.workflow, self.odcs_fallback)
    self.read_configs_general()

    modules = self.data.get('modules', [])
    if not modules:
        raise RuntimeError('"compose" config has no modules, a module is required for Flatpaks')

    source_spec = modules[0]
    if len(modules) > 1:
        # BUG FIX: the original implicit string concatenation was missing a
        # space, logging "...multiple modules,using first module ..."
        self.log.info("compose config contains multiple modules, "
                      "using first module %s", source_spec)

    module = ModuleSpec.from_str(source_spec)
    self.log.info("Resolving module compose for name=%s, stream=%s, version=%s",
                  module.name, module.stream, module.version)

    # ODCS doesn't understand profiles, so strip it from the request
    noprofile_spec = module.to_str(include_profile=False)

    if self.compose_ids:
        if len(self.compose_ids) > 1:
            self.log.info("Multiple compose_ids, using first compose %d",
                          self.compose_ids[0])
        self.compose_id = self.compose_ids[0]

    if self.signing_intent_name is not None:
        signing_intent_name = self.signing_intent_name
    else:
        signing_intent_name = self.data.get('signing_intent',
                                            odcs_config.default_signing_intent)
    signing_intent = odcs_config.get_signing_intent_by_name(signing_intent_name)

    if self.compose_id is None:
        # No pre-existing compose was supplied; start a fresh one
        arches = sorted(get_platforms(self.workflow))
        self.compose_id = odcs_client.start_compose(source_type='module',
                                                    source=noprofile_spec,
                                                    sigkeys=signing_intent['keys'],
                                                    arches=arches)['id']

    compose_info = odcs_client.wait_for_compose(self.compose_id)
    if compose_info['state_name'] != "done":
        raise RuntimeError("Compose cannot be retrieved, state='%s'" %
                           compose_info['state_name'])

    compose_source = compose_info['source']
    self.log.info("Resolved list of modules: %s", compose_source)

    resolved_modules = self._resolve_modules(compose_source)
    base_module = resolved_modules[module.name]
    # Resolution must return the stream (and version, when pinned) we asked for
    assert base_module.stream == module.stream
    if module.version is not None:
        assert base_module.version == module.version

    return ComposeInfo(source_spec=source_spec,
                       compose_id=self.compose_id,
                       base_module=base_module,
                       modules=resolved_modules,
                       repo_url=compose_info['result_repo'] + '/$basearch/os/',
                       signing_intent=signing_intent_name,
                       signing_intent_overridden=self.signing_intent_name is not None)
def test_module_spec_to_str(self, as_str, as_str_no_profile):
    """Round-trip: parsing then serializing preserves the spec, with and without profile."""
    parsed = ModuleSpec.from_str(as_str)
    assert parsed.to_str() == as_str
    assert parsed.to_str(include_profile=False) == as_str_no_profile