def _load_compose_info(self):
    """Locate the ODCS compose containing the flatpak's main module and record it.

    Scans the available composes for a module compose whose source list
    includes the main module (matched by name and stream) from the flatpak
    source spec, then stores the built ComposeInfo on the workflow.

    Raises:
        RuntimeError: if no compose contains the main module.
    """
    source_spec = get_flatpak_source_spec(self.workflow)
    assert source_spec is not None  # flatpak_create_dockerfile must be run first

    wanted = ModuleSpec.from_str(source_spec)

    resolve_comp_result = self.workflow.prebuild_results.get(
        PLUGIN_RESOLVE_COMPOSES_KEY)
    if resolve_comp_result:
        # In the orchestrator, we can get the compose info directly from
        # the resolve_composes plugin
        all_composes = resolve_comp_result['composes']
    else:
        # But in a worker, resolve_composes doesn't run, so we have
        # to load the compose info ourselves
        assert self.compose_ids
        all_composes = self._load_composes()

    # Only module-type composes can contain the main module
    module_composes = (c for c in all_composes
                       if c['source_type'] == SOURCE_TYPE_MODULE)
    for compose in module_composes:
        specs = [ModuleSpec.from_str(token)
                 for token in compose['source'].split()]
        if any(s.name == wanted.name and s.stream == wanted.stream
               for s in specs):
            set_flatpak_compose_info(self.workflow,
                                     self._build_compose_info(specs))
            return

    # Dump what we saw to aid debugging before failing
    self.log.debug('Compose info: %s', all_composes)
    raise RuntimeError("Can't find main module %s in compose result" % source_spec)
def _build_compose_info(self, modules):
    """Resolve *modules* and assemble a ComposeInfo for the flatpak source spec.

    The main module (from the workflow's flatpak source spec) must be among
    the resolved modules with a matching stream (and version, when the spec
    pins one); these invariants are enforced with assertions.

    :param modules: module specs belonging to the selected compose
    :return: ComposeInfo with the main module and all resolved modules
    """
    source_spec = get_flatpak_source_spec(self.workflow)
    assert source_spec is not None  # flatpak_create_dockerfile must be run first

    requested = ModuleSpec.from_str(source_spec)
    resolved_modules = self._resolve_modules(modules)

    # The main module must resolve to exactly what the source spec asked for
    main_info = resolved_modules[requested.name]
    assert main_info.stream == requested.stream
    if requested.version is not None:
        assert main_info.version == requested.version

    return ComposeInfo(source_spec=source_spec,
                       main_module=main_info,
                       modules=resolved_modules)
def test_flatpak_create_dockerfile(tmpdir, docker_tasker, user_params,
                                   config_name, override_base_image, breakage):
    """Run the flatpak_create_dockerfile plugin and check the generated Dockerfile.

    Builds a workflow from a (possibly mutated) container.yaml, runs the
    plugin, and asserts either the expected failure message or the expected
    Dockerfile contents and recorded flatpak source spec.

    NOTE(review): config_name/override_base_image/breakage are presumably
    parametrized by a pytest decorator outside this view — confirm.
    """
    config = CONFIGS[config_name]

    # Optionally break the module list to exercise error/warning paths
    modules = None
    if breakage == 'no_modules':
        modules = []
        expected_exception = "a module is required for Flatpaks"
    elif breakage == 'multiple_modules':
        modules = ['eog:f28:20170629213428', 'flatpak-common:f28:123456']
        expected_exception = None  # Just a warning
    else:
        assert breakage is None
        expected_exception = None

    # Start from the canned container.yaml and apply test-case overrides
    data = yaml.safe_load(config['container_yaml'])
    if override_base_image is not None:
        data['flatpak']['base_image'] = override_base_image
    if modules is not None:
        data['compose']['modules'] = modules
    container_yaml = yaml.dump(data)

    workflow = mock_workflow(tmpdir, container_yaml)

    # No source spec should exist before the plugin has run
    source_spec = get_flatpak_source_spec(workflow)
    assert source_spec is None

    base_image = "registry.fedoraproject.org/fedora:latest"

    args = {
        'base_image': base_image,
    }

    # Provide the reactor config (default base image + source registry)
    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig({'version': 1,
                                           'flatpak': {'base_image': base_image},
                                           'source_registry': {'url': 'source_registry'}})
    }

    runner = PreBuildPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateDockerfilePlugin.key,
            'args': args
        }]
    )

    if expected_exception:
        # Failure path: the plugin error surfaces as PluginFailedException
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        # A Dockerfile must have been written for the flatpak build
        assert os.path.exists(workflow.builder.df_path)
        with open(workflow.builder.df_path) as f:
            df = f.read()

        # An explicit container.yaml base_image overrides the reactor default
        expect_base_image = override_base_image if override_base_image else base_image
        assert "FROM " + expect_base_image in df
        assert 'name="{}"'.format(config['name']) in df
        assert 'com.redhat.component="{}"'.format(config['component']) in df
        assert "RUN rm -f /etc/yum.repos.d/*" in df
        assert "ADD atomic-reactor-repos/* /etc/yum.repos.d/" in df

        # The plugin should now have recorded the flatpak source spec
        source_spec = get_flatpak_source_spec(workflow)
        assert source_spec == config['source_spec']