def dumps(self):
    """
    Generate modulemd yaml based on input parameters and return it as a string
    """
    mod_stream = Modulemd.ModuleStreamV2.new(self.name, self.stream)
    mod_stream.set_version(self.version)
    mod_stream.set_context(self.context)
    mod_stream.set_summary(self.summary)
    mod_stream.set_description(self.description)
    mod_stream.add_module_license(self.module_license)

    for pkglicense in self.licenses:
        mod_stream.add_content_license(pkglicense)

    for nevra in self.package_nevras:
        mod_stream.add_rpm_artifact(nevra)

    dependencies = Modulemd.Dependencies()
    for depname, depstream in self.requires.items():
        dependencies.add_runtime_stream(depname, depstream)
    mod_stream.add_dependencies(dependencies)

    profile = Modulemd.Profile.new("common")
    for pkgname in self.package_names:
        profile.add_rpm(pkgname)
    mod_stream.add_profile(profile)

    index = Modulemd.ModuleIndex.new()
    index.add_module_stream(mod_stream)
    return index.dump_to_string()
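For reference, here is a minimal, self-contained sketch of the same libmodulemd flow outside the class above: it builds a ModuleStreamV2, attaches a runtime dependency and a profile, and serializes the result through a ModuleIndex. The module name, stream, package, and license values are made up for illustration and are not part of the original code.

import gi
gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd

# Illustrative values only; a real module would supply its own metadata.
mod_stream = Modulemd.ModuleStreamV2.new("examplemodule", "rolling")
mod_stream.set_version(1)
mod_stream.set_context("deadbeef")
mod_stream.set_summary("Example module")
mod_stream.set_description("A longer description of the example module.")
mod_stream.add_module_license("MIT")
mod_stream.add_rpm_artifact("example-pkg-0:1.0-1.fc38.x86_64")

# Runtime dependency on another module stream.
dependencies = Modulemd.Dependencies()
dependencies.add_runtime_stream("platform", "f38")
mod_stream.add_dependencies(dependencies)

# A "common" profile listing the packages installed by default.
profile = Modulemd.Profile.new("common")
profile.add_rpm("example-pkg")
mod_stream.add_profile(profile)

# Serialization goes through a ModuleIndex, which yields the YAML document.
index = Modulemd.ModuleIndex.new()
index.add_module_stream(mod_stream)
print(index.dump_to_string())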
def test_v2_dependencies(self):
    stream = Modulemd.ModuleStreamV2.new()
    deps = Modulemd.Dependencies()
    deps.add_buildtime_stream("foo", "stable")
    deps.set_empty_runtime_dependencies_for_module("bar")
    stream.add_dependencies(deps)

    assert len(stream.get_dependencies()) == 1
    assert len(stream.get_dependencies()) == 1

    assert "foo" in stream.get_dependencies()[0].get_buildtime_modules()
    assert "stable" in stream.get_dependencies()[0].get_buildtime_streams(
        "foo"
    )
    assert "bar" in stream.get_dependencies()[0].get_runtime_modules()

    retrieved_deps = stream.get_dependencies()
    stream.clear_dependencies()
    self.assertEquals(len(retrieved_deps), 1)
    self.assertEquals(len(stream.get_dependencies()), 0)

    stream.add_dependencies(deps)
    self.assertEquals(len(stream.get_dependencies()), 1)

    stream.remove_dependencies(deps)
    self.assertEquals(len(stream.get_dependencies()), 0)
def test_copy(self):
    d_orig = Modulemd.Dependencies()
    d = d_orig.copy()
    assert d

    assert d.get_buildtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_buildtime_streams("foobar123")

    assert d.get_runtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_runtime_streams("foobar123")

    d_orig.add_buildtime_stream("buildmod1", "stream2")
    d_orig.add_buildtime_stream("buildmod1", "stream1")
    d_orig.set_empty_buildtime_dependencies_for_module("builddef")
    d_orig.add_runtime_stream("runmod1", "stream3")
    d_orig.add_runtime_stream("runmod1", "stream4")
    d_orig.set_empty_runtime_dependencies_for_module("rundef")

    d = d_orig.copy()
    assert d

    assert d.get_buildtime_modules() == ["builddef", "buildmod1"]
    assert d.get_buildtime_streams("builddef") == []
    assert d.get_buildtime_streams("buildmod1") == ["stream1", "stream2"]

    assert d.get_runtime_modules() == ["rundef", "runmod1"]
    assert d.get_runtime_streams("rundef") == []
    assert d.get_runtime_streams("runmod1") == ["stream3", "stream4"]
def test_v2_dependencies(self):
    stream = Modulemd.ModuleStreamV2.new()
    deps = Modulemd.Dependencies()
    deps.add_buildtime_stream('foo', 'stable')
    deps.set_empty_runtime_dependencies_for_module('bar')
    stream.add_dependencies(deps)

    assert len(stream.get_dependencies()) == 1
    assert len(stream.get_dependencies()) == 1

    assert 'foo' in stream.get_dependencies()[0].get_buildtime_modules()
    assert 'stable' in stream.get_dependencies()[0].get_buildtime_streams(
        'foo')
    assert 'bar' in stream.get_dependencies()[0].get_runtime_modules()
def test_constructor(self):
    # Test that the new() function works
    d = Modulemd.Dependencies.new()
    assert d

    assert d.get_buildtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_buildtime_streams("foobar123")

    assert d.get_runtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_runtime_streams("foobar123")

    # Test that keyword name is accepted
    d = Modulemd.Dependencies()
    assert d

    assert d.get_buildtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_buildtime_streams("foobar123")

    assert d.get_runtime_modules() == []
    with self.expect_signal(only_on_fatal_warnings=True):
        d.get_runtime_streams("foobar123")
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name, breakage,
                            mock_flatpak):
    if not mock_flatpak:
        # Check that we actually have flatpak available
        have_flatpak = False
        try:
            output = subprocess.check_output(['flatpak', '--version'],
                                             universal_newlines=True)
            m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
            if m and (int(m.group(1)), int(m.group(2)),
                      int(m.group(3))) >= (0, 9, 7):
                have_flatpak = True
        except (subprocess.CalledProcessError, OSError):
            pass

        if not have_flatpak:
            return

    config = CONFIGS[config_name]

    if mock_flatpak:
        (flexmock(subprocess)
         .should_receive("check_call")
         .replace_with(mocked_check_call))
        (flexmock(subprocess)
         .should_receive("check_output")
         .replace_with(mocked_check_output))

    workflow = DockerBuildWorkflow({"provider": "git", "uri": "asd"},
                                   TEST_IMAGE)
    setattr(workflow, 'builder', X)
    setattr(workflow.builder, 'tasker', docker_tasker)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']
    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'w') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])
        module_config = config['modules']['eog']

        mmd = Modulemd.Module.new_from_string(module_config['metadata'])
        # Clear out all dependencies. Setting via the property causes a crash
        # https://gitlab.gnome.org/GNOME/pygobject/issues/37
        # mmd.props.dependencies = [Modulemd.Dependencies()]
        mmd.set_dependencies([Modulemd.Dependencies()])
        module_config['metadata'] = mmd.dumps()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image, command=["/bin/bash"])
     .and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.d.wrapped)
     .should_receive('export')
     .with_args(CONTAINER_ID)
     .and_return(export_generator))
    (flexmock(docker_tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the expected files ended up in the flatpak
        if mock_flatpak:
            inspector = MockInspector(tmpdir, dir_metadata)
        else:
            inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
                   for l in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
                   for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
def update(mod_yaml, name=None, stream=None, version=None, context=None,
           arch=None, summary=None, description=None, module_licenses=None,
           content_licenses=None, rpms_nevras=None, requires=None,
           buildrequires=None, api=None, filters=None, profiles=None,
           components=None):
    """
    Transform a given modulemd YAML string into another, updated one. The
    input string remains unchanged.

    This function allows you to modify the specified modulemd attributes
    while leaving the rest of them as they are. For structured attributes,
    such as `module_licenses`, whose value is a list, the new value replaces
    the existing list instead of being appended to it.

    For the official documentation of the modulemd YAML format and its
    values, please see
    https://github.com/fedora-modularity/libmodulemd/blob/main/yaml_specs/modulemd_stream_v2.yaml
    It will help you better understand the parameters of this function.

    Args:
        mod_yaml (str): An input modulemd YAML
        name (str): The name of the module
        stream (str): Module update stream name
        version (int): Module version, integer, cannot be negative
        context (str): Module context flag
        arch (str): Module artifact architecture
        summary (str): A short summary describing the module
        description (str): A verbose description of the module
        module_licenses (list): A list of module licenses
        content_licenses (list): A list of licenses used by the packages in
            the module.
        rpms_nevras (list): RPM artifacts shipped with this module
        requires (dict): Module runtime dependencies represented as a `dict`
            with module names as keys and lists of streams as their values.
        buildrequires (dict): Module buildtime dependencies represented as a
            `dict` with module names as keys and lists of streams as their
            values.
        api (list): The module's public RPM-level API represented as a list
            of package names.
        filters (list): Module component filters represented as a list of
            package names.
        profiles (dict): A `dict` of profile names as keys and lists of
            package names as their values.
        components (list): Functional components of the module represented
            as a list of `dict`s, each describing one component with keys
            like `name`, `rationale`, `repository`, etc.

    Returns:
        An updated modulemd YAML represented as a string
    """
    mod_stream = _yaml2stream(mod_yaml)
    name = name or mod_stream.get_module_name()
    stream = stream or mod_stream.get_stream_name()
    mod_stream = _modulemd_read_packager_string(mod_yaml, name, stream)

    if version:
        mod_stream.set_version(version)
    if context:
        mod_stream.set_context(context)
    if arch:
        mod_stream.set_arch(arch)
    if summary:
        mod_stream.set_summary(summary)
    if description:
        mod_stream.set_description(description)

    if module_licenses:
        mod_stream.clear_module_licenses()
        for module_license in module_licenses:
            mod_stream.add_module_license(module_license)

    if content_licenses:
        mod_stream.clear_content_licenses()
        for content_license in content_licenses:
            mod_stream.add_content_license(content_license)

    if rpms_nevras:
        mod_stream.clear_rpm_artifacts()
        for nevra in rpms_nevras:
            mod_stream.add_rpm_artifact(nevra)

    if api:
        mod_stream.clear_rpm_api()
        for rpm in api:
            mod_stream.add_rpm_api(rpm)

    if filters:
        mod_stream.clear_rpm_filters()
        for rpm in filters:
            mod_stream.add_rpm_filter(rpm)

    if profiles:
        mod_stream.clear_profiles()
        for profile_name, rpms in profiles.items():
            profile = Modulemd.Profile.new(profile_name)
            for rpm in rpms:
                profile.add_rpm(rpm)
            mod_stream.add_profile(profile)

    if components:
        mod_stream.clear_rpm_components()
        for component in components:
            component_rpm = Modulemd.ComponentRpm.new(component.pop("name"))
            for key, value in component.items():
                component_rpm.set_property(key, value)
            mod_stream.add_component(component_rpm)

    # Updating dependencies is quite messy because AFAIK the only operations
    # that `libmodulemd` allows us to do are adding runtime/buildtime
    # dependencies one by one and dropping all of them at once.
    # We need to help ourselves a little and drop all runtime dependencies,
    # then re-populate them with the old ones if new ones weren't set.
    # Similarly for buildrequires.
    old_deps = Modulemd.Dependencies()

    # A module can contain multiple pairs of dependencies. If we want to
    # update both `requires` and `buildrequires` at the same time, we can
    # drop all current dependencies and just set new ones. If we want to
    # update only one of them, we get into an ambiguous situation, not
    # knowing which pair of dependencies we should update. Let's just raise
    # an exception.
    if (len(mod_stream.get_dependencies()) > 1
            and (requires or buildrequires)
            and not (requires and buildrequires)):
        raise AttributeError("Provided YAML contains multiple pairs of "
                             "dependencies. It is ambiguous which one to "
                             "update.")

    if mod_stream.get_dependencies():
        old_deps = mod_stream.get_dependencies()[0]
    new_deps = Modulemd.Dependencies()

    if requires:
        for depname, depstreams in requires.items():
            for depstream in depstreams:
                new_deps.add_runtime_stream(depname, depstream)
    else:
        for depname in old_deps.get_runtime_modules():
            for depstream in old_deps.get_runtime_streams(depname):
                new_deps.add_runtime_stream(depname, depstream)

    if buildrequires:
        for depname, depstreams in buildrequires.items():
            for depstream in depstreams:
                new_deps.add_buildtime_stream(depname, depstream)
    else:
        for depname in old_deps.get_buildtime_modules():
            for depstream in old_deps.get_buildtime_streams(depname):
                new_deps.add_buildtime_stream(depname, depstream)

    if requires or buildrequires:
        mod_stream.clear_dependencies()
        mod_stream.add_dependencies(new_deps)

    idx2 = Modulemd.ModuleIndex.new()
    idx2.add_module_stream(mod_stream)
    return idx2.dump_to_string()
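As a usage illustration of the drop-and-repopulate dependency pattern described in the comments above, here is a minimal self-contained sketch using only public libmodulemd calls. The module name, streams, and the `buildrequires` value are invented for the example; the real `update()` function above additionally merges licenses, profiles, components, and so on.

import gi
gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd

# A stream with one existing pair of dependencies (illustrative values).
mod_stream = Modulemd.ModuleStreamV2.new("examplemodule", "rolling")
mod_stream.set_summary("Example module")
mod_stream.set_description("Example module used to illustrate updates.")
mod_stream.add_module_license("MIT")

old_deps = Modulemd.Dependencies()
old_deps.add_runtime_stream("platform", "f38")
old_deps.add_buildtime_stream("platform", "f38")
mod_stream.add_dependencies(old_deps)

# "Update" only the buildtime side: keep runtime streams from the old pair,
# fill in the new buildtime streams, then swap the whole pair at once.
buildrequires = {"platform": ["f39"]}

current = mod_stream.get_dependencies()[0]
new_deps = Modulemd.Dependencies()
for depname in current.get_runtime_modules():
    for depstream in current.get_runtime_streams(depname):
        new_deps.add_runtime_stream(depname, depstream)
for depname, depstreams in buildrequires.items():
    for depstream in depstreams:
        new_deps.add_buildtime_stream(depname, depstream)

mod_stream.clear_dependencies()
mod_stream.add_dependencies(new_deps)

index = Modulemd.ModuleIndex.new()
index.add_module_stream(mod_stream)
print(index.dump_to_string())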
def test_flatpak_create_oci(tmpdir, docker_tasker, config_name,
                            flatpak_metadata, breakage):
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)),
                  int(m.group(3))) >= (0, 9, 7):
            have_flatpak = True
    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='flatpak not available')

    if not have_flatpak:
        return

    config = CONFIGS[config_name]

    workflow = DockerBuildWorkflow(TEST_IMAGE,
                                   source={"provider": "git", "uri": "asd"})
    setattr(workflow, 'builder', X)
    X.df_path = write_docker_file(config, str(tmpdir))
    setattr(workflow.builder, 'tasker', docker_tasker)

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
    os.mkdir(filesystem_dir)

    filesystem_contents = config['filesystem_contents']
    for path, contents in filesystem_contents.items():
        parts = path.split(':', 1)
        path = parts[0]
        mode = parts[1] if len(parts) == 2 else None

        fullpath = os.path.join(filesystem_dir, path[1:])
        parent_dir = os.path.dirname(fullpath)
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        if contents is None:
            os.mkdir(fullpath)
        else:
            with open(fullpath, 'wb') as f:
                f.write(contents)

        if mode is not None:
            os.chmod(fullpath, int(mode, 8))

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])
        module_config = config['modules']['eog']

        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'],
                                                strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    filesystem_tar = os.path.join(filesystem_dir, 'tar')
    with open(filesystem_tar, "wb") as f:
        with tarfile.TarFile(fileobj=f, mode='w') as tf:
            for f in os.listdir(filesystem_dir):
                tf.add(os.path.join(filesystem_dir, f), f)

    export_stream = open(filesystem_tar, "rb")

    def stream_to_generator(s):
        while True:
            # Yield small chunks to test the StreamAdapter code better
            buf = s.read(100)
            if len(buf) == 0:
                return
            yield buf

    export_generator = stream_to_generator(export_stream)

    (flexmock(docker_tasker.tasker)
     .should_receive('export_container')
     .with_args(CONTAINER_ID)
     .and_return(export_generator))
    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('create_container')
     .with_args(workflow.image, command=["/bin/bash"])
     .and_return({'Id': CONTAINER_ID}))
    (flexmock(docker_tasker.tasker.d.wrapped)
     .should_receive('remove_container')
     .with_args(CONTAINER_ID, force=False))

    setup_flatpak_source_info(workflow, config)

    runner = PrePublishPluginsRunner(
        docker_tasker,
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        runner.run()

        dir_metadata = workflow.exported_image_sequence[-2]
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        tar_metadata = workflow.exported_image_sequence[-1]
        assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR

        # Check that the correct labels and annotations were written
        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels['com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, since we don't
        # require such a new Flatpak, skip remaining checks in the label-only case
        if flatpak_metadata == 'labels':
            return

        inspector = DefaultInspector(tmpdir, dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        components = {c['name'] for c in workflow.image_components}  # noqa:E501; pylint: disable=not-an-iterable
        for n in config['expected_components']:
            assert n in components
        for n in config['unexpected_components']:
            assert n not in components

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
                   for l in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
                   for l in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines
mmd.set_description(mmd.get_summary())

artifacts = Modulemd.SimpleSet()
for rpm in rpms:
    # mmd.add_module_component(rpm.rsplit("-", 2)[0], "")
    artifacts.add(rpm[:-4])
mmd.set_rpm_artifacts(artifacts)

for profile_name in profiles:
    profile = Modulemd.Profile()
    profile.set_name(profile_name)
    profile_rpms = Modulemd.SimpleSet()
    profile_rpms.set(profiles[profile_name]["rpms"])
    profile.set_rpms(profile_rpms)
    mmd.add_profile(profile)

if name == "httpd":
    dependencies = Modulemd.Dependencies()
    if stream == "2.4":
        dependencies.add_requires_single("base-runtime", "f26")
    elif stream == "2.2":
        dependencies.add_requires("base-runtime", [])
    mmd.add_dependencies(dependencies)

# iterate through all deps and create context hash in a repeatable manner
context_hash = hashlib.sha256()
for dependencies in mmd.peek_dependencies():
    for dep_name, dep_streams in dependencies.peek_requires().items():
        if dep_streams:
            for dep_stream in dep_streams.get():
                context_hash.update("%s:%s" % (dep_name, dep_stream))
        else:
            context_hash.update(dep_name)
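The excerpt above is written against the old libmodulemd 1.x API (Modulemd.SimpleSet, peek_dependencies(), peek_requires(), add_requires_single()), which is not available in the 2.x API used by the other examples here. Purely as a point of comparison, below is a hedged sketch of the same repeatable context-hash idea expressed with the 2.x API; the module values are invented, the sorting is added for determinism, and in Python 3 the hash input has to be encoded to bytes.

import hashlib

import gi
gi.require_version("Modulemd", "2.0")
from gi.repository import Modulemd

# Build a v2 stream with runtime requirements (illustrative values only).
mmd = Modulemd.ModuleStreamV2.new("httpd", "2.4")
deps = Modulemd.Dependencies()
deps.add_runtime_stream("base-runtime", "f26")
deps.set_empty_runtime_dependencies_for_module("platform")
mmd.add_dependencies(deps)

# Iterate through all dependency pairs and hash them in a repeatable order.
context_hash = hashlib.sha256()
for dependencies in mmd.get_dependencies():
    for dep_name in sorted(dependencies.get_runtime_modules()):
        dep_streams = dependencies.get_runtime_streams(dep_name)
        if dep_streams:
            for dep_stream in sorted(dep_streams):
                context_hash.update(
                    ("%s:%s" % (dep_name, dep_stream)).encode("utf-8"))
        else:
            context_hash.update(dep_name.encode("utf-8"))

print(context_hash.hexdigest())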
def test_flatpak_create_oci(workflow, config_name, flatpak_metadata, breakage):
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)),
                  int(m.group(3))) >= (0, 9, 7):
            have_flatpak = True
    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='flatpak not available')

    if not have_flatpak:
        return

    # Check if we have skopeo
    try:
        subprocess.check_output(['skopeo', '--version'])
    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='skopeo not available')

    config = CONFIGS[config_name]

    platforms = ['x86_64', 'aarch64', 's390x', 'ppc64le']
    workflow.user_params['flatpak'] = True

    write_docker_file(config, workflow.source.path)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)

    mock_extract_filesystem_call = functools.partial(mock_extract_filesystem,
                                                     config)
    (flexmock(ImageUtil)
     .should_receive('extract_filesystem_layer')
     .replace_with(mock_extract_filesystem_call))

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])
        module_config = config['modules']['eog']

        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'],
                                                strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    runner = PostBuildPluginsRunner(
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    setup_flatpak_composes(workflow)

    source = setup_flatpak_source_info(config)
    (flexmock(FlatpakUtil)
     .should_receive('get_flatpak_source_info')
     .and_return(source))

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        builder = FlatpakBuilder(source, workflow.build_dir.any_platform.path,
                                 'var/tmp/flatpak-build',
                                 parse_manifest=parse_rpm_output,
                                 flatpak_metadata=FLATPAK_METADATA_ANNOTATIONS)
        with NamedTemporaryFile(dir=workflow.build_dir.any_platform.path) as f:
            f.write(config['filesystem_contents']['/var/tmp/flatpak-build.rpm_qf'])
            f.flush()

            expected_components = builder.get_components(f.name)

        results = runner.run()
        x86_64_results = results[FlatpakCreateOciPlugin.key][platforms[0]]
        dir_metadata = x86_64_results['metadata']
        components = x86_64_results['components']

        assert components == expected_components
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        # Check that the correct labels and annotations were written
        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels['com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, and Flatpak
        # versions 1.6 and later require labels to be present. Skip the remaining
        # checks unless we have both annotations and labels.
        if flatpak_metadata != 'both':
            return

        inspector = DefaultInspector(str(workflow.build_dir.any_platform.path),
                                     dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', line)
                   for line in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', line)
                   for line in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file(
                '/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines