def run(self):
    self.source = get_flatpak_source_info(self.workflow)
    if self.source is None:
        raise RuntimeError(
            "flatpak_create_dockerfile must be run before flatpak_create_oci")

    self.builder = FlatpakBuilder(self.source, self.workflow.source.workdir,
                                  'var/tmp/flatpak-build',
                                  parse_manifest=parse_rpm_output)

    tarred_filesystem, manifest = self._export_filesystem()
    self.log.info('filesystem tarfile written to %s', tarred_filesystem)
    self.log.info('manifest written to %s', manifest)

    image_components = self.builder.get_components(manifest)
    self.workflow.image_components = image_components

    ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

    metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI image is available as %s', outfile)

    metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI tarfile is available as %s', tarred_outfile)

def run(self): """ run the plugin """ if not is_flatpak_build(self.workflow): self.log.info('not flatpak build, skipping plugin') return resolve_comp_result = self.workflow.data.prebuild_results.get( PLUGIN_RESOLVE_COMPOSES_KEY) flatpak_util = FlatpakUtil(workflow_config=self.workflow.conf, source_config=self.workflow.source.config, composes=resolve_comp_result['composes']) compose_info = flatpak_util.get_flatpak_compose_info() source = flatpak_util.get_flatpak_source_info() builder = FlatpakBuilder(source, None, None) builder.precheck() flatpak_update = functools.partial(self.update_dockerfile, builder, compose_info) self.workflow.build_dir.for_each_platform(flatpak_update) create_files = functools.partial( self.create_includepkgs_file_and_cleanupscript, builder) self.workflow.build_dir.for_all_platforms_copy(create_files)
def run(self):
    source = get_flatpak_source_info(self.workflow)
    if source is None:
        raise RuntimeError(
            "flatpak_create_dockerfile must be run before flatpak_create_oci")

    self.builder = FlatpakBuilder(source, self.workflow.source.workdir,
                                  'var/tmp/flatpak-build',
                                  parse_manifest=parse_rpm_output,
                                  flatpak_metadata=self.flatpak_metadata)

    df_labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels
    self.builder.add_labels(df_labels)

    tarred_filesystem, manifest = self._export_filesystem()
    self.log.info('filesystem tarfile written to %s', tarred_filesystem)
    self.log.info('manifest written to %s', manifest)

    image_components = self.builder.get_components(manifest)
    self.workflow.image_components = image_components

    ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

    self.log.info('Marking filesystem image "%s" for removal',
                  self.workflow.builder.image_id)
    defer_removal(self.workflow, self.workflow.builder.image_id)

    image_id = self._get_oci_image_id(outfile)
    self.log.info('New OCI image ID is %s', image_id)
    self.workflow.builder.image_id = image_id

    labels = Labels(df_labels)
    _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
    _, image_version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
    _, image_release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

    name = '{}-{}'.format(self.key, image_name)
    tag = '{}-{}'.format(image_version, image_release)
    # The OCI id is tracked by the builder. The image will be removed in the exit phase
    # No need to mark it for removal after pushing to the local storage
    self._copy_oci_to_local_storage(outfile, name, tag)

    metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI image is available as %s', outfile)

    metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI tarfile is available as %s', tarred_outfile)

def run(self):
    self.source = get_flatpak_source_info(self.workflow)
    if self.source is None:
        raise RuntimeError("flatpak_create_dockerfile must be run before flatpak_create_oci")

    self.builder = FlatpakBuilder(self.source, self.workflow.source.workdir,
                                  'var/tmp/flatpak-build',
                                  parse_manifest=parse_rpm_output)

    tarred_filesystem, manifest = self._export_filesystem()
    self.log.info('filesystem tarfile written to %s', tarred_filesystem)
    self.log.info('manifest written to %s', manifest)

    image_components = self.builder.get_components(manifest)
    self.workflow.image_components = image_components

    ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

    metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI image is available as %s', outfile)

    metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
    metadata['ref_name'] = ref_name
    self.workflow.exported_image_sequence.append(metadata)
    self.log.info('OCI tarfile is available as %s', tarred_outfile)

def run(self): """ run the plugin """ if not is_flatpak_build(self.workflow): self.log.info('not flatpak build, skipping plugin') return self._load_compose_info() compose_info = get_flatpak_compose_info(self.workflow) self._load_source() source = get_flatpak_source_info(self.workflow) builder = FlatpakBuilder(source, None, None) builder.precheck() # Update the dockerfile # We need to enable all the modules other than the platform pseudo-module enable_modules_str = ' '.join(builder.get_enable_modules()) install_packages_str = ' '.join(builder.get_install_packages()) replacements = { '@ENABLE_MODULES@': enable_modules_str, '@INSTALL_PACKAGES@': install_packages_str, '@RELEASE@': compose_info.main_module.version, } dockerfile = df_parser(self.workflow.builder.df_path, workflow=self.workflow) content = dockerfile.content # Perform the substitutions; simple approach - should be efficient enough for old, new in replacements.items(): content = content.replace(old, new) dockerfile.content = content # Create a file describing which packages from the base yum repositories are included includepkgs = builder.get_includepkgs() includepkgs_path = os.path.join(self.workflow.builder.df_dir, FLATPAK_INCLUDEPKGS_FILENAME) with open(includepkgs_path, 'w') as f: f.write('includepkgs = ' + ','.join(includepkgs) + '\n') # Create the cleanup script cleanupscript = os.path.join(self.workflow.builder.df_dir, FLATPAK_CLEANUPSCRIPT_FILENAME) with open(cleanupscript, 'w') as f: f.write(builder.get_cleanup_script()) os.chmod(cleanupscript, 0o0500)
def build_flatpak_image(self, source, build_dir: BuildDir) -> Dict[str, Any]:
    builder = FlatpakBuilder(source, build_dir.path, 'var/tmp/flatpak-build',
                             parse_manifest=parse_rpm_output,
                             flatpak_metadata=self.flatpak_metadata)

    df_labels = build_dir.dockerfile_with_parent_env(
        self.workflow.imageutil.base_image_inspect()).labels
    builder.add_labels(df_labels)

    tmp_dir = tempfile.mkdtemp(dir=build_dir.path)
    image_filesystem = self.workflow.imageutil.extract_filesystem_layer(
        str(build_dir.exported_squashed_image), str(tmp_dir))
    build_dir.exported_squashed_image.unlink()

    filesystem_path = os.path.join(tmp_dir, image_filesystem)
    with open(filesystem_path, 'rb') as f:
        # this part is 'not ideal' but this function seems to be a prerequisite
        # for building flatpak image since it does the setup for it
        flatpak_filesystem, flatpak_manifest = builder._export_from_stream(f)
    os.remove(filesystem_path)

    self.log.info('filesystem tarfile written to %s', flatpak_filesystem)

    image_rpm_components = builder.get_components(flatpak_manifest)

    ref_name, outfile, outfile_tarred = builder.build_container(flatpak_filesystem)
    os.remove(outfile_tarred)

    metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
    metadata['ref_name'] = ref_name

    cmd = ['skopeo', 'copy',
           'oci:{path}:{ref_name}'.format(**metadata),
           '--format=v2s2',
           'docker-archive:{}'.format(str(build_dir.exported_squashed_image))]
    try:
        retries.run_cmd(cmd)
    except subprocess.CalledProcessError as e:
        self.log.error("skopeo copy failed with output:\n%s", e.output)
        raise RuntimeError("skopeo copy failed with output:\n{}".format(e.output)) from e

    self.log.info('OCI image is available as %s', outfile)

    shutil.rmtree(tmp_dir)

    self.workflow.data.image_components[build_dir.platform] = image_rpm_components

    return metadata

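# Optional sanity-check sketch, assuming skopeo is on PATH: after the skopeo copy
# above, the resulting docker-archive tarball can be inspected directly. The archive
# path below is hypothetical; the fields read from the output are used defensively
# via .get() since their presence can vary by transport.
import json
import subprocess


def inspect_docker_archive(archive_path):
    # 'docker-archive:' is a standard skopeo transport for docker-save-style tarballs.
    cmd = ['skopeo', 'inspect', 'docker-archive:{}'.format(archive_path)]
    return json.loads(subprocess.check_output(cmd))


# Example (hypothetical path):
# info = inspect_docker_archive('/tmp/exported-squashed-image.tar')
# print(info.get('Digest'), info.get('Labels'), len(info.get('Layers', [])))
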
def build_flatpak_image(self, source, build_dir: BuildDir) -> Dict[str, Any]:
    builder = FlatpakBuilder(source, build_dir.path, 'var/tmp/flatpak-build',
                             parse_manifest=parse_rpm_output,
                             flatpak_metadata=self.flatpak_metadata)

    df_labels = build_dir.dockerfile_with_parent_env(
        self.workflow.imageutil.base_image_inspect()).labels
    builder.add_labels(df_labels)

    tmp_dir = tempfile.mkdtemp(dir=build_dir.path)
    image_filesystem = self.workflow.imageutil.extract_filesystem_layer(
        str(build_dir.exported_squashed_image), str(tmp_dir))
    filesystem_path = os.path.join(tmp_dir, image_filesystem)
    with open(filesystem_path, 'rb') as f:
        # this part is 'not ideal' but this function seems to be a prerequisite
        # for building flatpak image since it does the setup for it
        flatpak_filesystem, flatpak_manifest = builder._export_from_stream(f)

    self.log.info('filesystem tarfile written to %s', flatpak_filesystem)

    image_rpm_components = builder.get_components(flatpak_manifest)

    ref_name, outfile, _ = builder.build_container(flatpak_filesystem)

    metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
    metadata['ref_name'] = ref_name

    self.log.info('OCI image is available as %s', outfile)

    shutil.rmtree(tmp_dir)

    return {'metadata': metadata, 'components': image_rpm_components}

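# Hedged usage sketch: this variant of build_flatpak_image returns a per-platform
# dict of {'metadata': ..., 'components': ...}, so plugin results end up being
# consumed roughly like this. The values are illustrative placeholders; the access
# pattern mirrors the test further down in this section.
results = {'flatpak_create_oci': {'x86_64': {'metadata': {'type': 'oci', 'path': '...'},
                                             'components': []}}}
x86_64_results = results['flatpak_create_oci']['x86_64']
dir_metadata = x86_64_results['metadata']
components = x86_64_results['components']
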
def run(self): """ run the plugin """ source = self._load_source() set_flatpak_source_info(self.workflow, source) builder = FlatpakBuilder(source, None, None) builder.precheck() # Create the dockerfile module_info = source.base_module # We need to enable all the modules other than the platform pseudo-module modules_str = ' '.join(builder.get_enable_modules()) install_packages_str = ' '.join(builder.get_install_packages()) name = source.flatpak_yaml.get('name', module_info.name) component = source.flatpak_yaml.get('component', module_info.name) df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME) with open(df_path, 'w') as fp: fp.write(DOCKERFILE_TEMPLATE.format(name=name, component=component, stream=module_info.stream.replace('-', '_'), version=module_info.version, base_image=self.base_image, modules=modules_str, packages=install_packages_str, rpm_qf_args=rpm_qf_args())) self.workflow.builder.set_df_path(df_path) includepkgs = builder.get_includepkgs() includepkgs_path = os.path.join(self.workflow.builder.df_dir, 'atomic-reactor-includepkgs') with open(includepkgs_path, 'w') as f: f.write('includepkgs = ' + ','.join(includepkgs) + '\n') # Create the cleanup script cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh") with open(cleanupscript, 'w') as f: f.write(builder.get_cleanup_script()) os.chmod(cleanupscript, 0o0755) # Add a yum-repository pointing to the compose repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format( name=module_info.name, stream=module_info.stream, version=module_info.version) compose_info = get_compose_info(self.workflow) repo = { 'name': repo_name, 'baseurl': compose_info.repo_url, 'enabled': 1, 'gpgcheck': 0, } path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)
class FlatpakCreateOciPlugin(PrePublishPlugin):
    key = 'flatpak_create_oci'
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow):
        """
        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        """
        super(FlatpakCreateOciPlugin, self).__init__(tasker, workflow)

    def _export_container(self, container_id):
        export_generator = self.tasker.d.export(container_id)
        export_stream = StreamAdapter(export_generator)

        outfile, manifestfile = self.builder._export_from_stream(export_stream)

        return outfile, manifestfile

    def _export_filesystem(self):
        image = self.workflow.image
        self.log.info("Creating temporary docker container")
        # The command here isn't used, since we only use the container for export,
        # but (in some circumstances) the docker daemon will error out if no
        # command is specified.
        container_dict = self.tasker.d.create_container(image, command=["/bin/bash"])
        container_id = container_dict['Id']

        try:
            return self._export_container(container_id)
        finally:
            self.log.info("Cleaning up docker container")
            self.tasker.d.remove_container(container_id)

    def run(self):
        self.source = get_flatpak_source_info(self.workflow)
        if self.source is None:
            raise RuntimeError(
                "flatpak_create_dockerfile must be run before flatpak_create_oci")

        self.builder = FlatpakBuilder(self.source, self.workflow.source.workdir,
                                      'var/tmp/flatpak-build',
                                      parse_manifest=parse_rpm_output)

        tarred_filesystem, manifest = self._export_filesystem()
        self.log.info('filesystem tarfile written to %s', tarred_filesystem)
        self.log.info('manifest written to %s', manifest)

        image_components = self.builder.get_components(manifest)
        self.workflow.image_components = image_components

        ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

        metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI image is available as %s', outfile)

        metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI tarfile is available as %s', tarred_outfile)

def run(self): """ run the plugin """ source = self._load_source() set_flatpak_source_info(self.workflow, source) builder = FlatpakBuilder(source, None, None) builder.precheck() # Create the dockerfile module_info = source.base_module # We need to enable all the modules other than the platform pseudo-module modules_str = ' '.join(builder.get_enable_modules()) install_packages_str = ' '.join(builder.get_install_packages()) df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME) with open(df_path, 'w') as fp: fp.write( DOCKERFILE_TEMPLATE.format(name=module_info.name, stream=module_info.stream, version=module_info.version, base_image=self.base_image, modules=modules_str, packages=install_packages_str, rpm_qf_args=rpm_qf_args())) self.workflow.builder.set_df_path(df_path) includepkgs = builder.get_includepkgs() includepkgs_path = os.path.join(self.workflow.builder.df_dir, 'atomic-reactor-includepkgs') with open(includepkgs_path, 'w') as f: f.write('includepkgs = ' + ','.join(includepkgs) + '\n') # Create the cleanup script cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh") with open(cleanupscript, 'w') as f: f.write(builder.get_cleanup_script()) os.chmod(cleanupscript, 0o0755) # Add a yum-repository pointing to the compose repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format( name=module_info.name, stream=module_info.stream, version=module_info.version) compose_info = get_compose_info(self.workflow) repo = { 'name': repo_name, 'baseurl': compose_info.repo_url, 'enabled': 1, 'gpgcheck': 0, } path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False) override_build_kwarg(self.workflow, 'module_compose_id', compose_info.compose_id)
class FlatpakCreateOciPlugin(PrePublishPlugin):
    key = 'flatpak_create_oci'
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow):
        """
        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        """
        super(FlatpakCreateOciPlugin, self).__init__(tasker, workflow)

    def _export_container(self, container_id):
        export_generator = self.tasker.d.export(container_id)
        export_stream = StreamAdapter(export_generator)

        outfile, manifestfile = self.builder._export_from_stream(export_stream)

        return outfile, manifestfile

    def _export_filesystem(self):
        image = self.workflow.image
        self.log.info("Creating temporary docker container")
        # The command here isn't used, since we only use the container for export,
        # but (in some circumstances) the docker daemon will error out if no
        # command is specified.
        container_dict = self.tasker.d.create_container(image, command=["/bin/bash"])
        container_id = container_dict['Id']

        try:
            return self._export_container(container_id)
        finally:
            self.log.info("Cleaning up docker container")
            self.tasker.d.remove_container(container_id)

    def run(self):
        self.source = get_flatpak_source_info(self.workflow)
        if self.source is None:
            raise RuntimeError("flatpak_create_dockerfile must be run before flatpak_create_oci")

        self.builder = FlatpakBuilder(self.source, self.workflow.source.workdir,
                                      'var/tmp/flatpak-build',
                                      parse_manifest=parse_rpm_output)

        tarred_filesystem, manifest = self._export_filesystem()
        self.log.info('filesystem tarfile written to %s', tarred_filesystem)
        self.log.info('manifest written to %s', manifest)

        image_components = self.builder.get_components(manifest)
        self.workflow.image_components = image_components

        ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

        metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI image is available as %s', outfile)

        metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI tarfile is available as %s', tarred_outfile)

class FlatpakCreateOciPlugin(PrePublishPlugin):
    key = 'flatpak_create_oci'
    is_allowed_to_fail = False

    def __init__(self, tasker, workflow):
        """
        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        """
        super(FlatpakCreateOciPlugin, self).__init__(tasker, workflow)
        self.builder = None
        self.flatpak_metadata = get_flatpak_metadata(workflow, FLATPAK_METADATA_ANNOTATIONS)

    def _export_container(self, container_id):
        export_generator = self.tasker.export_container(container_id)
        export_stream = StreamAdapter(export_generator)

        outfile, manifestfile = self.builder._export_from_stream(export_stream)

        return outfile, manifestfile

    def _export_filesystem(self):
        image = self.workflow.image
        self.log.info("Creating temporary docker container")
        # The command here isn't used, since we only use the container for export,
        # but (in some circumstances) the docker daemon will error out if no
        # command is specified.
        container_dict = self.tasker.create_container(image, command=["/bin/bash"])
        container_id = container_dict['Id']

        try:
            return self._export_container(container_id)
        finally:
            self.log.info("Cleaning up docker container")
            self.tasker.remove_container(container_id)

    def _get_oci_image_id(self, oci_path):
        cmd = ['skopeo', 'inspect', '--raw', 'oci:{}'.format(oci_path)]
        raw_manifest = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        oci_image_manifest = json.loads(raw_manifest)
        return oci_image_manifest['config']['digest']

    def _copy_oci_to_local_storage(self, oci_path, name, tag):
        """Copy OCI image to internal container storage

        The internal storage to copy the image to is defined by the workflow.

        :param oci_path: str, path to OCI directory
        :param name: str, name to be given to the image in the internal storage
        :param tag: str, tag to apply to name in the internal storage
        """
        skopeo_copy_dst = '{}:{}:{}'.format(self.workflow.storage_transport, name, tag)
        cmd = ['skopeo', 'copy', 'oci:{}'.format(oci_path), skopeo_copy_dst]
        self.log.info("Copying built image to internal container image storage as %s:%s",
                      name, tag)
        self.log.info("Calling: %s", ' '.join(cmd))
        try:
            subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            self.log.error("image copy failed with output:\n%s", e.output)
            raise

    def run(self):
        source = get_flatpak_source_info(self.workflow)
        if source is None:
            raise RuntimeError(
                "flatpak_create_dockerfile must be run before flatpak_create_oci")

        self.builder = FlatpakBuilder(source, self.workflow.source.workdir,
                                      'var/tmp/flatpak-build',
                                      parse_manifest=parse_rpm_output,
                                      flatpak_metadata=self.flatpak_metadata)

        df_labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow).labels
        self.builder.add_labels(df_labels)

        tarred_filesystem, manifest = self._export_filesystem()
        self.log.info('filesystem tarfile written to %s', tarred_filesystem)
        self.log.info('manifest written to %s', manifest)

        image_components = self.builder.get_components(manifest)
        self.workflow.image_components = image_components

        ref_name, outfile, tarred_outfile = self.builder.build_container(tarred_filesystem)

        self.log.info('Marking filesystem image "%s" for removal',
                      self.workflow.builder.image_id)
        defer_removal(self.workflow, self.workflow.builder.image_id)

        image_id = self._get_oci_image_id(outfile)
        self.log.info('New OCI image ID is %s', image_id)
        self.workflow.builder.image_id = image_id

        labels = Labels(df_labels)
        _, image_name = labels.get_name_and_value(Labels.LABEL_TYPE_NAME)
        _, image_version = labels.get_name_and_value(Labels.LABEL_TYPE_VERSION)
        _, image_release = labels.get_name_and_value(Labels.LABEL_TYPE_RELEASE)

        name = '{}-{}'.format(self.key, image_name)
        tag = '{}-{}'.format(image_version, image_release)
        # The OCI id is tracked by the builder. The image will be removed in the exit phase
        # No need to mark it for removal after pushing to the local storage
        self._copy_oci_to_local_storage(outfile, name, tag)

        metadata = get_exported_image_metadata(outfile, IMAGE_TYPE_OCI)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI image is available as %s', outfile)

        metadata = get_exported_image_metadata(tarred_outfile, IMAGE_TYPE_OCI_TAR)
        metadata['ref_name'] = ref_name
        self.workflow.exported_image_sequence.append(metadata)
        self.log.info('OCI tarfile is available as %s', tarred_outfile)

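# Minimal sketch of what _get_oci_image_id relies on: 'skopeo inspect --raw oci:PATH'
# prints the raw OCI image manifest, and the digest of its config blob is what
# container engines report as the image ID. The manifest below is a made-up example;
# only the structure matters here.
import json

sample_raw_manifest = json.dumps({
    "schemaVersion": 2,
    "config": {
        "mediaType": "application/vnd.oci.image.config.v1+json",
        "digest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
        "size": 1509,
    },
    "layers": [],
})


def image_id_from_raw_manifest(raw_manifest):
    # Same parsing step as _get_oci_image_id, without invoking skopeo;
    # json.loads accepts either str or the bytes returned by check_output.
    return json.loads(raw_manifest)['config']['digest']


assert image_id_from_raw_manifest(sample_raw_manifest).startswith('sha256:')
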
def test_flatpak_create_oci(workflow, config_name, flatpak_metadata, breakage):
    # Check that we actually have flatpak available
    have_flatpak = False
    try:
        output = subprocess.check_output(['flatpak', '--version'],
                                         universal_newlines=True)
        m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
        if m and (int(m.group(1)), int(m.group(2)), int(m.group(3))) >= (0, 9, 7):
            have_flatpak = True
    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='flatpak not available')

    if not have_flatpak:
        return

    # Check if we have skopeo
    try:
        subprocess.check_output(['skopeo', '--version'])
    except (subprocess.CalledProcessError, OSError):
        pytest.skip(msg='skopeo not available')

    config = CONFIGS[config_name]

    platforms = ['x86_64', 'aarch64', 's390x', 'ppc64le']
    workflow.user_params['flatpak'] = True
    write_docker_file(config, workflow.source.path)
    workflow.build_dir.init_build_dirs(platforms, workflow.source)

    mock_extract_filesystem_call = functools.partial(mock_extract_filesystem, config)
    (flexmock(ImageUtil)
     .should_receive('extract_filesystem_layer')
     .replace_with(mock_extract_filesystem_call))

    make_and_store_reactor_config_map(workflow, flatpak_metadata)

    if breakage == 'no_runtime':
        # Copy the parts of the config we are going to change
        config = dict(config)
        config['modules'] = dict(config['modules'])
        config['modules']['eog'] = dict(config['modules']['eog'])

        module_config = config['modules']['eog']

        mmd = Modulemd.ModuleStream.read_string(module_config['metadata'], strict=True)
        mmd.clear_dependencies()
        mmd.add_dependencies(Modulemd.Dependencies())
        mmd_index = Modulemd.ModuleIndex.new()
        mmd_index.add_module_stream(mmd)
        module_config['metadata'] = mmd_index.dump_to_string()

        expected_exception = 'Failed to identify runtime module'
    else:
        assert breakage is None
        expected_exception = None

    runner = PostBuildPluginsRunner(
        workflow,
        [{
            'name': FlatpakCreateOciPlugin.key,
            'args': {}
        }]
    )

    setup_flatpak_composes(workflow)
    source = setup_flatpak_source_info(config)
    (flexmock(FlatpakUtil)
     .should_receive('get_flatpak_source_info')
     .and_return(source))

    if expected_exception:
        with pytest.raises(PluginFailedException) as ex:
            runner.run()
        assert expected_exception in str(ex.value)
    else:
        builder = FlatpakBuilder(source, workflow.build_dir.any_platform.path,
                                 'var/tmp/flatpak-build',
                                 parse_manifest=parse_rpm_output,
                                 flatpak_metadata=FLATPAK_METADATA_ANNOTATIONS)
        with NamedTemporaryFile(dir=workflow.build_dir.any_platform.path) as f:
            f.write(config['filesystem_contents']['/var/tmp/flatpak-build.rpm_qf'])
            f.flush()
            expected_components = builder.get_components(f.name)

        results = runner.run()
        x86_64_results = results[FlatpakCreateOciPlugin.key][platforms[0]]
        dir_metadata = x86_64_results['metadata']
        components = x86_64_results['components']
        assert components == expected_components
        assert dir_metadata['type'] == IMAGE_TYPE_OCI

        # Check that the correct labels and annotations were written
        labels, annotations = load_labels_and_annotations(dir_metadata)

        if config_name == 'app':
            assert labels['name'] == 'eog'
            assert labels['com.redhat.component'] == 'eog'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170629213428'
        elif config_name == 'runtime':  # runtime
            assert labels['name'] == 'flatpak-runtime'
            assert labels['com.redhat.component'] == 'flatpak-runtime-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'
        else:
            assert labels['name'] == 'flatpak-sdk'
            assert labels['com.redhat.component'] == 'flatpak-sdk-container'
            assert labels['version'] == 'f28'
            assert labels['release'] == '20170701152209'

        if flatpak_metadata == 'annotations':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert 'org.flatpak.ref' not in labels
        elif flatpak_metadata == 'labels':
            assert 'org.flatpak.ref' not in annotations
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']
        elif flatpak_metadata == 'both':
            assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
            assert labels.get('org.flatpak.ref') == config['expected_ref_name']

        # Check that the expected files ended up in the flatpak

        # Flatpak versions before 1.6 require annotations to be present, and Flatpak
        # versions 1.6 and later require labels to be present. Skip the remaining
        # checks unless we have both annotations and labels.
        if flatpak_metadata != 'both':
            return

        inspector = DefaultInspector(str(workflow.build_dir.any_platform.path), dir_metadata)

        files = inspector.list_files()
        assert sorted(files) == config['expected_contents']

        metadata_lines = inspector.cat_file('/metadata').split('\n')
        assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', line)
                   for line in metadata_lines)
        assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', line)
                   for line in metadata_lines)

        if config_name == 'app':
            # Check that the desktop file was rewritten
            output = inspector.cat_file('/export/share/applications/org.gnome.eog.desktop')
            lines = output.split('\n')
            assert 'Icon=org.gnome.eog' in lines

            assert 'name=org.gnome.eog' in metadata_lines
            assert 'tags=Viewer' in metadata_lines
            assert 'command=eog2' in metadata_lines
        elif config_name == 'runtime':  # runtime
            # Check that permissions have been normalized
            assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
            assert inspector.get_file_perms('/files/bin/mount') == '-00755'
            assert inspector.get_file_perms('/files/share/foo') == 'd00755'

            assert 'name=org.fedoraproject.Platform' in metadata_lines
        else:  # SDK
            assert 'name=org.fedoraproject.Sdk' in metadata_lines