def test_rpm_qf_args(tags, separator, expected):
    """Verify rpm_qf_args builds *expected* from the optional tags/separator."""
    call_kwargs = {
        key: value
        for key, value in (('tags', tags), ('separator', separator))
        if value is not None
    }
    assert rpm_qf_args(**call_kwargs) == expected
# Example #2
# 0
def test_rpm_qf_args(tags, separator, expected):
    """Check rpm_qf_args output for the given (possibly omitted) arguments."""
    call_args = {}
    if tags is not None:
        call_args.update(tags=tags)
    if separator is not None:
        call_args.update(separator=separator)
    assert rpm_qf_args(**call_args) == expected
    def run(self):
        """
        run the plugin

        Prepares the inputs for a flatpak container build:
        - writes a Dockerfile rendered from DOCKERFILE_TEMPLATE,
        - writes an executable cleanup.sh script,
        - registers a yum repo pointing at the module compose,
        - records the compose ID as a build kwarg for later steps.
        """

        # Load the flatpak source description (module compose + flatpak JSON).
        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        # Create the dockerfile

        # Runtimes install the module's 'runtime' profile; applications
        # install the 'default' profile.
        if source.runtime:
            profile = 'runtime'
        else:
            profile = 'default'

        module_info = source.compose.base_module

        # Space-separated package list substituted into the template.
        packages = ' '.join(module_info.mmd.profiles[profile].rpms)

        df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                                stream=module_info.stream,
                                                version=module_info.version,
                                                base_image=self.base_image,
                                                packages=packages,
                                                rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Create the cleanup script

        # One shell command per line, copied verbatim from the flatpak JSON.
        cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            for line in source.flatpak_json.get('cleanup-commands', []):
                f.write(line)
                f.write("\n")
        # The script is executed inside the container, so it must be executable.
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        # Make the compose ID available to downstream build steps.
        override_build_kwarg(self.workflow, 'module_compose_id', source.compose.compose_id)
# Example #4
# 0
    def gather_output(self):
        """Run the rpm query in a container, retrying until output appears.

        Makes up to five attempts, recording each container ID for later
        cleanup. Returns the first non-empty log output; raises
        RuntimeError when every attempt produced nothing.
        """
        attempts_left = 5
        while attempts_left > 0:
            attempts_left -= 1
            cid = self.tasker.run(
                self.image_id,
                command=rpm_qf_args(),
                create_kwargs={"entrypoint": "/bin/rpm", "user": "******"},
                start_kwargs={},
            )
            self._container_ids.append(cid)
            self.tasker.wait(cid)
            result = self.tasker.logs(cid, stream=False)
            if result:
                return result

        raise RuntimeError('Unable to gather list of installed packages in container')
    def gather_output(self):
        """Collect the rpm query output from a throwaway container.

        Retries the run up to five attempts, since the container logs can
        come back empty; every started container ID is remembered in
        ``self._container_ids``. Raises RuntimeError on total failure.
        """
        max_attempts = 5
        for _attempt in range(max_attempts):
            container_id = self.tasker.run(
                self.image_id,
                command=rpm_qf_args(),
                create_kwargs={"entrypoint": "/bin/rpm", "user": "******"},
                start_kwargs={},
            )
            self._container_ids.append(container_id)
            self.tasker.wait(container_id)
            logs = self.tasker.logs(container_id, stream=False)
            if logs:
                return logs

        raise RuntimeError('Unable to gather list of installed packages in container')
# Example #6
# 0
    def run(self):
        """
        List installed RPM packages in the image.

        Runs ``rpm`` inside a container created from the image, optionally
        filters out autogenerated gpg-pubkey entries, stores the parsed
        component list on the workflow and returns the raw output lines.
        Returns None without doing anything if the component list is
        already filled in.
        """
        # If another component has already filled in the image component list, skip
        if self.workflow.image_components is not None:
            return None

        # Force the entrypoint to /bin/rpm so the image's own entrypoint
        # cannot interfere with the query.
        container_id = self.tasker.run(
            self.image_id,
            command=rpm_qf_args(),
            create_kwargs={
                "entrypoint": "/bin/rpm",
                "user": "******"
            },
            start_kwargs={},
        )
        self.tasker.wait(container_id)
        plugin_output = self.tasker.logs(container_id, stream=False)

        # gpg-pubkey are autogenerated packages by rpm when you import a gpg key
        # these are of course not signed, let's ignore those by default
        if self.ignore_autogenerated_gpg_keys:
            self.log.debug("ignore rpms 'gpg-pubkey'")
            plugin_output = [
                x for x in plugin_output
                if not x.startswith("gpg-pubkey" + self.sep)
            ]

        # Capture the volume list before removing the container.
        volumes = self.tasker.get_volumes_for_container(container_id)

        # Cleanup is best-effort: failing to remove the container or its
        # volumes must not fail the build.
        try:
            self.tasker.remove_container(container_id)
        except APIError:
            self.log.warning("error removing container (ignored):",
                             exc_info=True)

        for volume_name in volumes:
            try:
                self.tasker.remove_volume(volume_name)
            except APIError:
                self.log.warning("error removing volume (ignored):",
                                 exc_info=True)

        self.workflow.image_components = parse_rpm_output(plugin_output)

        return plugin_output
    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.

        Runs ``/bin/rpm`` with a query format covering the tags below and
        parses each output line.

        :return: parsed component list from parse_rpm_output
        :raises RuntimeError: if the rpm command exits non-zero
        """

        # Tags queried per package; the ':pgpsig' suffix selects the pgpsig
        # output format for the signature tags.
        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        cmd = "/bin/rpm " + rpm_qf_args(tags)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
            # getstatusoutput() merges stderr into the captured output, so
            # use it for error reporting too. Previously 'stderr' was left
            # undefined on this path, raising NameError instead of the
            # intended RuntimeError when the command failed.
            stderr = output
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return parse_rpm_output(output.splitlines(), tags)
    def get_rpms(self):
        """
        Build a list of installed RPMs in the format required for the
        metadata.

        Queries the local rpm database via ``/bin/rpm`` with the query
        tags listed below and parses the output lines.

        :return: parsed component list from parse_rpm_output
        :raises RuntimeError: if the rpm command exits non-zero
        """

        # Query tags; 'TAG:pgpsig' requests pgpsig formatting for signatures.
        tags = [
            'NAME',
            'VERSION',
            'RELEASE',
            'ARCH',
            'EPOCH',
            'SIGMD5',
            'SIGPGP:pgpsig',
            'SIGGPG:pgpsig',
        ]

        cmd = "/bin/rpm " + rpm_qf_args(tags)
        try:
            # py3
            (status, output) = subprocess.getstatusoutput(cmd)
            # Bug fix: 'stderr' must be defined on the py3 path too —
            # getstatusoutput() interleaves stderr with stdout, so report
            # the merged output. Without this the failure branch below
            # raised NameError under py3.
            stderr = output
        except AttributeError:
            # py2
            with open('/dev/null', 'r+') as devnull:
                p = subprocess.Popen(cmd,
                                     shell=True,
                                     stdin=devnull,
                                     stdout=subprocess.PIPE,
                                     stderr=devnull)

                (stdout, stderr) = p.communicate()
                status = p.wait()
                output = stdout.decode()

        if status != 0:
            self.log.debug("%s: stderr output: %s", cmd, stderr)
            raise RuntimeError("%s: exit code %s" % (cmd, status))

        return parse_rpm_output(output.splitlines(), tags)
# Example #9
# 0
    def run(self):
        """
        List installed RPM packages in the image.

        Runs ``rpm`` in a container created from the image, filters out
        autogenerated gpg-pubkey entries when configured to, stores the
        parsed components on the workflow and returns the raw output.
        Skips (returns None) if the component list is already set.
        """
        # If another component has already filled in the image component list, skip
        if self.workflow.image_components is not None:
            return None

        # Entrypoint is forced to /bin/rpm so the image's own entrypoint
        # cannot interfere with the query.
        container_id = self.tasker.run(
            self.image_id,
            command=rpm_qf_args(),
            create_kwargs={"entrypoint": "/bin/rpm"},
            start_kwargs={},
        )
        self.tasker.wait(container_id)
        plugin_output = self.tasker.logs(container_id, stream=False)

        # gpg-pubkey are autogenerated packages by rpm when you import a gpg key
        # these are of course not signed, let's ignore those by default
        if self.ignore_autogenerated_gpg_keys:
            self.log.debug("ignore rpms 'gpg-pubkey'")
            plugin_output = [x for x in plugin_output if not x.startswith("gpg-pubkey" + self.sep)]

        # Capture volume list before the container is removed.
        volumes = self.tasker.get_volumes_for_container(container_id)

        # Cleanup is best-effort; failures are logged and ignored.
        try:
            self.tasker.remove_container(container_id)
        except APIError:
            self.log.warning("error removing container (ignored):",
                             exc_info=True)

        for volume_name in volumes:
            try:
                self.tasker.remove_volume(volume_name)
            except APIError:
                self.log.warning("error removing volume (ignored):", exc_info=True)

        self.workflow.image_components = parse_rpm_output(plugin_output)

        return plugin_output
    def run(self):
        """
        run the plugin

        Prepares the inputs for a flatpak container build using a
        FlatpakBuilder: writes a Dockerfile, an includepkgs file limiting
        the installable package set, an executable cleanup.sh script, and
        registers a yum repo pointing at the compose.
        """

        # Load the flatpak source description.
        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        builder = FlatpakBuilder(source, None, None)

        # Validate the source before generating any files.
        builder.precheck()

        # Create the dockerfile

        module_info = source.base_module

        # We need to enable all the modules other than the platform pseudo-module
        modules_str = ' '.join(builder.get_enable_modules())

        install_packages_str = ' '.join(builder.get_install_packages())

        # flatpak.yaml may override the flatpak name/component; the module
        # name is the fallback for both.
        name = source.flatpak_yaml.get('name', module_info.name)
        component = source.flatpak_yaml.get('component', module_info.name)

        df_path = os.path.join(self.workflow.builder.df_dir, DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            # Stream names use '_' in the template ('-' is replaced).
            fp.write(DOCKERFILE_TEMPLATE.format(name=name,
                                                component=component,
                                                stream=module_info.stream.replace('-', '_'),
                                                version=module_info.version,
                                                base_image=self.base_image,
                                                modules=modules_str,
                                                packages=install_packages_str,
                                                rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Restrict the set of packages dnf may install to exactly those
        # the builder computed.
        includepkgs = builder.get_includepkgs()
        includepkgs_path = os.path.join(self.workflow.builder.df_dir, 'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir, "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            f.write(builder.get_cleanup_script())
        # The script runs inside the container, so it must be executable.
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        compose_info = get_compose_info(self.workflow)

        repo = {
            'name': repo_name,
            'baseurl': compose_info.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)
# Example #11
# 0
    def run(self):
        """
        run the plugin

        Prepares the inputs for a flatpak container build: validates that
        runtime metadata in container.yaml matches the module xmd, writes
        a Dockerfile, an includepkgs file restricting installable
        packages, an executable cleanup.sh script, registers a yum repo
        for the compose and records the compose ID as a build kwarg.

        :raises RuntimeError: if container.yaml and the module xmd disagree
        """

        # Load the flatpak source description (compose + flatpak yaml).
        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        module_info = source.compose.base_module

        # For a runtime, certain information is duplicated between the container.yaml
        # and the modulemd, check that it matches
        if source.runtime:
            flatpak_yaml = source.flatpak_yaml
            flatpak_xmd = module_info.mmd.props.xmd['flatpak']

            def check(condition, what):
                # Raise with a description of the mismatched field.
                # (Fixed typo in the message: "betweeen" -> "between".)
                if not condition:
                    raise RuntimeError(
                        "Mismatch for {} between module xmd and container.yaml"
                        .format(what))

            check(flatpak_yaml['branch'] == flatpak_xmd['branch'], "'branch'")
            check(source.profile in flatpak_xmd['runtimes'], 'profile name')

            profile_xmd = flatpak_xmd['runtimes'][source.profile]

            check(flatpak_yaml['id'] == profile_xmd['id'], "'id'")
            check(
                flatpak_yaml.get('runtime',
                                 None) == profile_xmd.get('runtime', None),
                "'runtime'")
            check(
                flatpak_yaml.get('sdk', None) == profile_xmd.get('sdk', None),
                "'sdk'")

        # Create the dockerfile

        # Packages from the selected module profile, space-separated for
        # the template's install line.
        install_packages = module_info.mmd.peek_profiles()[
            source.profile].props.rpms.get()
        install_packages_str = ' '.join(install_packages)

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           packages=install_packages_str,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # For a runtime, we want to make sure that the set of RPMs that is installed
        # into the filesystem is *exactly* the set that is listed in the runtime
        # profile. Requiring the full listed set of RPMs to be listed makes it
        # easier to catch unintentional changes in the package list that might break
        # applications depending on the runtime. It also simplifies the checking we
        # do for application flatpaks, since we can simply look at the runtime
        # modulemd to find out what packages are present in the runtime.
        #
        # For an application, we want to make sure that each RPM that is installed
        # into the filesystem is *either* an RPM that is part of the 'runtime'
        # profile of the base runtime, or from a module that was built with
        # flatpak-rpm-macros in the install root and, thus, prefix=/app.
        #
        # We achieve this by restricting the set of available packages in the dnf
        # configuration to just the ones that we want.
        #
        # The advantage of doing this upfront, rather than just checking after the
        # fact is that this makes sure that when a application is being installed,
        # we don't get a different package to satisfy a dependency than the one
        # in the runtime - e.g. aajohan-comfortaa-fonts to satisfy font(:lang=en)
        # because it's alphabetically first.

        if not source.runtime:
            runtime_module = source.runtime_module
            runtime_profile = runtime_module.mmd.peek_profiles()['runtime']
            available_packages = sorted(runtime_profile.props.rpms.get())

            for m in source.app_modules:
                # Strip off the '.rpm' suffix from the filename to get something
                # that DNF can parse.
                available_packages.extend(x[:-4] for x in m.rpms)
        else:
            base_module = source.compose.base_module
            runtime_profile = base_module.mmd.peek_profiles()['runtime']
            available_packages = sorted(runtime_profile.props.rpms.get())

        includepkgs_path = os.path.join(self.workflow.builder.df_dir,
                                        'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(available_packages) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            cleanup_commands = source.flatpak_yaml.get('cleanup-commands')
            if cleanup_commands is not None:
                f.write(cleanup_commands.rstrip())
                f.write("\n")
        # Must be executable: the script runs inside the container.
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        # Make the compose ID available to downstream build steps.
        override_build_kwarg(self.workflow, 'module_compose_id',
                             source.compose.compose_id)
    def run(self):
        """
        run the plugin

        Prepares the inputs for a flatpak container build: writes a
        Dockerfile rendered from DOCKERFILE_TEMPLATE, writes an
        executable cleanup.sh script, registers a yum repo pointing at
        the module compose, and records the compose ID as a build kwarg.
        """

        # Load the flatpak source description (compose + flatpak JSON).
        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        # Create the dockerfile

        # Runtimes install the 'runtime' module profile; applications
        # install the 'default' profile.
        if source.runtime:
            profile = 'runtime'
        else:
            profile = 'default'

        module_info = source.compose.base_module

        # Space-separated package list substituted into the template.
        packages = ' '.join(module_info.mmd.profiles[profile].rpms)

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           packages=packages,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Create the cleanup script

        # One shell command per line, copied verbatim from the flatpak JSON.
        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            for line in source.flatpak_json.get('cleanup-commands', []):
                f.write(line)
                f.write("\n")
        # The script runs inside the container, so it must be executable.
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        repo = {
            'name': repo_name,
            'baseurl': source.compose.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = os.path.join(YUM_REPOS_DIR, repo_name + '.repo')
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        # Make the compose ID available to downstream build steps.
        override_build_kwarg(self.workflow, 'module_compose_id',
                             source.compose.compose_id)
    def run(self):
        """
        run the plugin

        Prepares the inputs for a flatpak container build using a
        FlatpakBuilder: writes a Dockerfile, an includepkgs file
        restricting the installable package set, an executable
        cleanup.sh script, registers a yum repo for the compose, and
        records the compose ID as a build kwarg.
        """

        # Load the flatpak source description.
        source = self._load_source()

        set_flatpak_source_info(self.workflow, source)

        builder = FlatpakBuilder(source, None, None)

        # Validate the source before generating any files.
        builder.precheck()

        # Create the dockerfile

        module_info = source.base_module

        # We need to enable all the modules other than the platform pseudo-module
        modules_str = ' '.join(builder.get_enable_modules())

        install_packages_str = ' '.join(builder.get_install_packages())

        df_path = os.path.join(self.workflow.builder.df_dir,
                               DOCKERFILE_FILENAME)
        with open(df_path, 'w') as fp:
            fp.write(
                DOCKERFILE_TEMPLATE.format(name=module_info.name,
                                           stream=module_info.stream,
                                           version=module_info.version,
                                           base_image=self.base_image,
                                           modules=modules_str,
                                           packages=install_packages_str,
                                           rpm_qf_args=rpm_qf_args()))

        self.workflow.builder.set_df_path(df_path)

        # Restrict the set of packages dnf may install to exactly those
        # the builder computed.
        includepkgs = builder.get_includepkgs()
        includepkgs_path = os.path.join(self.workflow.builder.df_dir,
                                        'atomic-reactor-includepkgs')
        with open(includepkgs_path, 'w') as f:
            f.write('includepkgs = ' + ','.join(includepkgs) + '\n')

        # Create the cleanup script

        cleanupscript = os.path.join(self.workflow.builder.df_dir,
                                     "cleanup.sh")
        with open(cleanupscript, 'w') as f:
            f.write(builder.get_cleanup_script())
        # The script runs inside the container, so it must be executable.
        os.chmod(cleanupscript, 0o0755)

        # Add a yum-repository pointing to the compose

        repo_name = 'atomic-reactor-module-{name}-{stream}-{version}'.format(
            name=module_info.name,
            stream=module_info.stream,
            version=module_info.version)

        compose_info = get_compose_info(self.workflow)

        repo = {
            'name': repo_name,
            'baseurl': compose_info.repo_url,
            'enabled': 1,
            'gpgcheck': 0,
        }

        path = YumRepo(os.path.join(YUM_REPOS_DIR, repo_name)).dst_filename
        self.workflow.files[path] = render_yum_repo(repo, escape_dollars=False)

        # Make the compose ID available to downstream build steps.
        override_build_kwarg(self.workflow, 'module_compose_id',
                             compose_info.compose_id)