Example #1
    def run(self, extcmds):
        """Execute the command."""
        self.doCheck(self.base.basecmd, extcmds)

        path = self.parse_extcmds(extcmds)

        try:
            packages = parse_kickstart_packages(path)
        except pykickstart.errors.KickstartError:
            raise dnf.exceptions.Error(_('file cannot be parsed: %s') % path)
        group_names = [group.name for group in packages.groupList]

        if group_names:
            self.base.read_comps()
        try:
            self.base.install_grouplist(group_names)
        except dnf.exceptions.Error:
            are_groups_installed = False
        else:
            are_groups_installed = True

        are_packages_installed = False
        for pattern in packages.packageList:
            try:
                self.base.install(pattern)
            except dnf.exceptions.MarkingError:
                logger.info(_('No package %s available.'), pattern)
            else:
                are_packages_installed = True

        if not are_groups_installed and not are_packages_installed:
            raise dnf.exceptions.Error(_('Nothing to do.'))
Example #2
    def configure(self, args):
        # setup sack and populate it with enabled repos
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True

        # Set up the ArgumentParser for this util.
        # Only add options that dnf itself does not already use.
        self.parser = dnfpluginscore.ArgumentParser(self.aliases[0])
        self.parser.add_argument('packages', nargs='+',
                                 help=_('packages to download'))
        self.parser.add_argument("--source", action='store_true',
                                 help=_('download the src.rpm instead'))
        self.parser.add_argument(
            '--destdir',
            help=_('download path, default is current dir'))
        self.parser.add_argument(
            '--resolve', action='store_true',
            help=_('resolve and download needed dependencies'))

        # parse the options/args
        # list available options/args on errors & exit
        self.opts = self.parser.parse_args(args)

        # show util help & exit
        if self.opts.help_cmd:
            print(self.parser.format_help())
            return

        if self.opts.source:
            dnfpluginscore.lib.enable_source_repos(self.base.repos)
Example #3
    def migrate_groups(self):
        yum_exec = "/usr/bin/yum-deprecated"
        if not os.path.exists(yum_exec):
            yum_exec = "/usr/bin/yum"
        logger.info(_("Migrating groups data..."))

        try:
            installed = self.get_yum_installed_groups(yum_exec)
        except subprocess.CalledProcessError:
            logger.warning(_("Execution of Yum failed. "
                             "Could not retrieve installed groups."))
            return
        if not installed:
            logger.info(_("No groups to migrate from Yum"))
            return

        # mark installed groups in dnf
        group_cmd = dnf.cli.commands.group.GroupCommand(self.cli)
        group_cmd._grp_setup()
        for group in installed:
            try:
                group_cmd._mark_install([group])
            except dnf.exceptions.CompsError as e:
                # skips not found groups, i.e. after fedup
                # when the group name changes / disappears in new distro
                logger.warning("%s, %s", dnf.i18n.ucd(e)[:-1], _("skipping."))
Example #4
    def _update(self, cache_only):
        """ update entitlement certificates """
        logger.info(_('Updating Subscription Management repositories.'))

        # XXX: Importing inline as you must be root to read the config file
        from subscription_manager.identity import ConsumerIdentity

        cert_file = str(ConsumerIdentity.certpath())
        key_file = str(ConsumerIdentity.keypath())

        identity = inj.require(inj.IDENTITY)

        # In containers we have no identity, but we may have entitlements inherited
        # from the host, which need to generate a redhat.repo.
        if identity.is_valid():
            try:
                connection.UEPConnection(cert_file=cert_file, key_file=key_file)
            # FIXME: catchall exception
            except Exception:
                # log
                logger.info(_("Unable to connect to Subscription Management Service"))
                return
        else:
            logger.info(_("Unable to read consumer identity"))

        if config.in_container():
            logger.info(_("Subscription Manager is operating in container mode."))

        if not cache_only:
            cert_action_invoker = EntCertActionInvoker()
            cert_action_invoker.update()

        repo_action_invoker = RepoActionInvoker(cache_only=cache_only)
        repo_action_invoker.update()
Example #5
 def _download_repo(self, project_name, repo_filename, chroot=None):
     if chroot is None:
         chroot = self._guess_chroot(self.chroot_config)
     short_chroot = '-'.join(chroot.split('-')[:2])
     #http://copr.fedorainfracloud.org/coprs/larsks/rcm/repo/epel-7-x86_64/
     api_path = "/coprs/{0}/repo/{1}/".format(project_name, short_chroot)
     try:
         f = self.base.urlopen(self.copr_url + api_path, mode='w+')
     except IOError as e:
         if os.path.exists(repo_filename):
             os.remove(repo_filename)
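         # A 404 for the repo file can mean either a project with no builds
         # yet or a project that does not exist at all; probe the project
         # page to distinguish the two cases.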
         if '404' in str(e):
             if PY3:
                 import urllib.request
                 try:
                     res = urllib.request.urlopen(self.copr_url + "/coprs/" + project_name)
                     status_code = res.getcode()
                 except urllib.error.HTTPError as e:
                     status_code = e.getcode()
             else:
                 import urllib
                 res = urllib.urlopen(self.copr_url + "/coprs/" + project_name)
                 status_code = res.getcode()
             if str(status_code) != '404':
                 raise dnf.exceptions.Error(_("This repository does not have"\
                     " any builds yet so you cannot enable it now."))
             else:
                 raise dnf.exceptions.Error(_("Such repository does not exist."))
         raise
     shutil.copy2(f.name, repo_filename)
     os.chmod(repo_filename, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
Example #6
    def _cmd_enable(self, chroot):
        self._need_root()
        msg = _("""
You are about to enable a Playground repository.

Do you want to continue?""")
        self._ask_user(msg)
        api_url = "{0}/api/playground/list/".format(
            self.copr_url)
        f = self.base.urlopen(api_url, mode="w+")
        output = self._get_data(f)
        f.close()
        if output["output"] != "ok":
            raise dnf.cli.CliError(_("Unknown response from server."))
        for repo in output["repos"]:
            project_name = "{0}/{1}".format(repo["username"],
                                            repo["coprname"])
            repo_filename = "{}/_playground_{}.repo".format(self.base.conf.get_reposdir, project_name.replace("/", "-"))
            try:
                if chroot not in repo["chroots"]:
                    continue
                api_url = "{0}/api/coprs/{1}/detail/{2}/".format(
                    self.copr_url, project_name, chroot)
                f = self.base.urlopen(api_url, mode='w+')
                output2 = self._get_data(f)
                f.close()
                if (output2 and ("output" in output2)
                        and (output2["output"] == "ok")):
                    self._download_repo(project_name, repo_filename, chroot)
            except dnf.exceptions.Error:
                # likely 404 and that repo does not exist
                pass
Example #7
    def _src_deps(self, src_fn):
        fd = os.open(src_fn, os.O_RDONLY)
        if self.cli.nogpgcheck:
            self.rpm_ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
        try:
            h = self.rpm_ts.hdrFromFdno(fd)
        except rpm.error as e:
            if str(e) == 'public key not available':
                logger.error("Error: public key not available, add "
                             "'--nogpgcheck' option to ignore package sign")
                return
            elif str(e) == 'error reading package header':
                e = _("Failed to open: '%s', not a valid source rpm file.") % (
                      src_fn,)
            raise dnf.exceptions.Error(e)
        os.close(fd)
        ds = h.dsFromHeader('requirename')
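        # Track whether every requirement could be marked for installation;
        # rpmlib() pseudo-dependencies are skipped.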
        done = True
        for dep in ds:
            reldep_str = self._rpm_dep2reldep_str(dep)
            if reldep_str.startswith('rpmlib('):
                continue
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)
Example #8
 def set_argparser(parser):
     parser.add_argument(
         "--norepos", action="store_true", default=False,
         help=_("do not attempt to dump the repository contents."))
     parser.add_argument(
         "filename", nargs="?",
         help=_("optional name of dump file"))
Example #9
 def _ask_user(cls, question):
     answer = raw_input(question).lower()
     answer = _(answer)
     while not ((answer in yes) or (answer in no)):
         answer = raw_input(question).lower()
         answer = _(answer)
     if answer in yes:
         return
     else:
         raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))
Example #10
 def set_argparser(parser):
     parser.add_argument("-o", "--old", action="store_true",
                         help=_("Print the older packages"))
     parser.add_argument("-n", "--new", action="store_true",
                         help=_("Print the newest packages"))
     parser.add_argument("-s", "--space", action="store_true",
                         help=_("Space separated output, not newline"))
     parser.add_argument("-k", "--keep", action="store", metavar="KEEP",
                         help=_("Newest N packages to keep - defaults to 1"),
                         default=1, type=int)
     parser.add_argument("path", action="store",
                         help=_("Path to directory"))
Example #11
 def set_argparser(parser):
     parser.add_argument("--arch", default=[], action="append", dest='arches',
                         help=_("check packages of the given archs, can be "
                                "specified multiple times"))
     parser.add_argument("--check", default=[], action="append",
                         help=_("Specify repositories to check"))
     parser.add_argument("-n", "--newest", action="store_true",
                         help=_("Check only the newest packages in the "
                                "repos"))
     parser.add_argument("--pkg", default=[], action="append",
                         help=_("Check closure for this package only"),
                         dest="pkglist")
Example #12
 def doCheck(self, basecmd, extcmds):
     """Verify that conditions are met so that this command can run."""
     dnf.cli.commands.checkGPGKey(self.base, self.cli)
     try:
         self.parse_extcmds(extcmds)
     except ValueError:
         logger.critical(
             _('Error: Requires exactly one path to a kickstart file'))
         dnf.cli.commands.err_mini_usage(self.cli, basecmd)
         raise dnf.cli.CliError(
             _('exactly one path to a kickstart file required'))
     dnf.cli.commands.checkEnabledRepo(self.base, extcmds)
Example #13
 def set_argparser(parser):
     parser.add_argument('packages', nargs='+',
                         help=_('packages to download'))
     target = parser.add_mutually_exclusive_group()
     target.add_argument("--source", action='store_true',
                         help=_('download the src.rpm instead'))
     target.add_argument("--debuginfo", action='store_true',
                         help=_('download the -debuginfo package instead'))
     parser.add_argument('--destdir',
                         help=_('download path, default is current dir'))
     parser.add_argument('--resolve', action='store_true',
                         help=_('resolve and download needed dependencies'))
Example #14
 def _get_query_source(self, pkg_spec):
     """Return a query to match a source rpm file name."""
     pkg_spec = pkg_spec[:-4]  # skip the .rpm
     nevra = hawkey.split_nevra(pkg_spec)
     q = self.base.sack.query()
     q = q.available()
     q = q.latest()
     q = q.filter(name=nevra.name, version=nevra.version,
                  release=nevra.release, arch=nevra.arch)
     if len(q.run()) == 0:
         msg = _("No package ") + pkg_spec + _(" available.")
         raise dnf.exceptions.PackageNotFoundError(msg)
     return q
Example #15
 def _spec_deps(self, spec_fn):
     try:
         spec = rpm.spec(spec_fn)
     except ValueError:
         msg = _("Failed to open: '%s', not a valid spec file.") % spec_fn
         raise dnf.exceptions.Error(msg)
     for dep in rpm.ds(spec.sourceHeader, 'requires'):
         reldep_str = self._rpm_dep2reldep_str(dep)
         try:
             self.base.install(reldep_str)
         except dnf.exceptions.MarkingError:
             msg = _("No matching package to install: '%s'") % reldep_str
             raise dnf.exceptions.Error(msg)
Example #16
 def getcomps(self):
     for repo in self.base.repos.iter_enabled():
         comps_fn = repo.metadata._comps_fn
         if comps_fn:
             if not os.path.exists(repo.pkgdir):
                 try:
                     os.makedirs(repo.pkgdir)
                 except IOError:
                     logger.error(_("Could not make repository directory: %s"), repo.pkgdir)
                     sys.exit(1)
             dest = os.path.join(self._repo_base_path[repo.id], 'comps.xml')
             dnf.yum.misc.decompress(comps_fn, dest=dest)
             logger.info(_("comps.xml for repository %s saved"), repo.id)
Example #17
def parse_arguments(args):
    parser = dnfpluginscore.ArgumentParser(Py3QueryCommand.aliases[0])

    parser.add_argument('--output', '-o', metavar='FILE', action='store',
                        help=_('write output to the given file'))

    parser.add_argument('--no-bz', dest='fetch_bugzilla', action='store_false',
                        default=True, help=_("Don't get Bugzilla links"))

    parser.add_argument('--qrepo', dest='py3query_repo', default='rawhide',
                        help=_("Repo to use for the query"))

    return parser.parse_args(args), parser
Example #18
    def transaction(self):
        main, crepo = self.main, self.crepo

        if not main["enabled"]:
            return

        repodir = main["repodir"]
        if not os.path.exists(repodir):
            try:
                os.makedirs(repodir, mode=0o755)
            except OSError as e:
                self.logger.error("local: " + _(
                    "Unable to create a directory '{}' due to '{}'").format(repodir, ucd(e)))
                return
        elif not os.path.isdir(repodir):
            self.logger.error(
                "local: " + _("'{}' is not a directory").format(repodir))
            return

        needs_rebuild = False
        for pkg in self.base.transaction.install_set:
            path = pkg.localPkg()
            if os.path.dirname(path) == repodir:
                continue
            self.logger.debug(
                "local: " + _("Copying '{}' to local repo").format(path))
            try:
                shutil.copy2(path, repodir)
                needs_rebuild = True
            except IOError:
                self.logger.error(
                    "local: " + _("Can't write file '{}'").format(os.path.join(
                        repodir, os.path.basename(path))))

        if not crepo["enabled"] or not needs_rebuild:
            return

        args = ["createrepo_c", "--update", "--unique-md-filenames"]
        if crepo["verbose"]:
            args.append("--verbose")
        elif crepo["quiet"]:
            args.append("--quiet")
        if crepo["cachedir"] is not None:
            args.append("--cachedir")
            args.append(crepo["cachedir"])
        args.append(repodir)
        self.logger.debug("local: " + _("Rebuilding local repo"))
        p = subprocess.Popen(args, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        for line in p.stdout:
            print(line.decode().rstrip("\n"))
Example #19
    def _get_query(self, pkg_spec):
        """Return a query to match a pkg_spec."""
        if os.path.exists(pkg_spec):
            pkg = self.base.add_remote_rpm(pkg_spec)
            pkg_spec = "{0.name}-{0.epoch}:{0.version}-{0.release}.{0.arch}".format(pkg)

        subj = dnf.subject.Subject(pkg_spec)
        q = subj.get_best_query(self.base.sack)
        q = q.available()
        q = q.latest()
        if len(q.run()) == 0:
            msg = _("No package ") + pkg_spec + _(" available.")
            raise dnf.exceptions.PackageNotFoundError(msg)
        return q
Example #20
    def _spec_deps(self, spec_fn):
        try:
            spec = rpm.spec(spec_fn)
        except ValueError:
            msg = _("Failed to open: '%s', not a valid spec file.") % spec_fn
            raise dnf.exceptions.Error(msg)
        done = True
        for dep in rpm.ds(spec.sourceHeader, 'requires'):
            reldep_str = self._rpm_dep2reldep_str(dep)
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)
Example #21
    def _ask_user(self, question):
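        # Honor assumeyes/assumeno from the configuration before prompting
        # interactively.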
        if self.base.conf.assumeyes and not self.base.conf.assumeno:
            return
        elif self.base.conf.assumeno and not self.base.conf.assumeyes:
            raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))

        answer = None
        while not ((answer in YES) or (answer in NO)):
            answer = ucd(input(question)).lower()
            answer = _(answer)
        if answer in YES:
            return
        else:
            raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))
Example #22
 def set_argparser(parser):
     filter_group = parser.add_mutually_exclusive_group()
     filter_group.add_argument(
         '--since', metavar="DATE", default=None,
         type=validate_date,
         help=_('show changelog entries since DATE. To avoid ambiguity, '
                'YYYY-MM-DD format is recommended.'))
     filter_group.add_argument(
         '--count', default=None, type=int,
         help=_('show given number of changelog entries per package'))
     filter_group.add_argument(
         '--upgrades', default=False, action='store_true',
         help=_('show only new changelog entries for packages that provide an '
                'upgrade for some of the already installed packages.'))
     parser.add_argument("package", nargs='*', metavar=_('PACKAGE'))
Example #23
    def run(self):
        """Execute the util action here."""

        if self.opts.set_enabled and self.opts.set_disabled:
            logger.error(
                _("Error: Trying to enable and disable repos at the same time."))
            self.opts.set_enabled = self.opts.set_disabled = False
        if self.opts.set_enabled and not self.opts.repo:
            logger.error(_("Error: Trying to enable already enabled repos."))
            self.opts.set_enabled = False

        if self.opts.add_repo:
            self.add_repo()
        else:
            self.modify_repo()
Example #24
    def _download_repo(self, project_name, repo_filename, chroot=None):
        if chroot is None:
            chroot = self._guess_chroot(self.chroot_config)
        short_chroot = '-'.join(chroot.split('-')[:2])
        #http://copr.fedorainfracloud.org/coprs/larsks/rcm/repo/epel-7-x86_64/
        api_path = "/coprs/{0}/repo/{1}/".format(project_name, short_chroot)

        try:
            f = self.base.urlopen(self.copr_url + api_path, mode='w+')
        except IOError as e:
            if os.path.exists(repo_filename):
                os.remove(repo_filename)
            if '404' in str(e):
                if PY3:
                    import urllib.request
                    try:
                        res = urllib.request.urlopen(self.copr_url + "/coprs/" + project_name)
                        status_code = res.getcode()
                    except urllib.error.HTTPError as e:
                        status_code = e.getcode()
                else:
                    import urllib
                    res = urllib.urlopen(self.copr_url + "/coprs/" + project_name)
                    status_code = res.getcode()
                if str(status_code) != '404':
                    raise dnf.exceptions.Error(_("This repository does not have"\
                        " any builds yet so you cannot enable it now."))
                else:
                    raise dnf.exceptions.Error(_("Such repository does not exist."))
            raise

        # Only the first line of the downloaded file is inspected: if it is a
        # "[copr:...]" section header, derive the repo file name from it and
        # place it under the configured reposdir.
        for line in f:
            if re.match(r"\[copr:", line):
                repo_filename = os.path.join(self.base.conf.get_reposdir,
                                             "_" + line[1:-2] + ".repo")
            break

        # if using default hub, remove possible old repofile
        if self.copr_url == self.default_url:
            # copr:hub:user:project.repo => _copr_user_project.repo
            old_repo_filename = repo_filename.replace("_copr:", "_copr", 1)\
                .replace(self.copr_hostname, "").replace(":", "_", 1).replace(":", "-")\
                .replace("group_", "@")
            if os.path.exists(old_repo_filename):
                os.remove(old_repo_filename)

        shutil.copy2(f.name, repo_filename)
        os.chmod(repo_filename, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
Example #25
    def configure(self):
        raw_config = ConfigParser()
        filepath = os.path.join(os.path.expanduser("~"), ".config", "copr")
        if raw_config.read(filepath):
            if PY3:
                self.copr_url = raw_config["copr-cli"].get("copr_url", None)
            else:
                self.copr_url = raw_config.get("copr-cli", "copr_url", None)
            if self.copr_url != "https://copr.fedorainfracloud.org":
                print(_("Warning: we are using non-standard Copr URL '{}'.").format(self.copr_url))


        # Useful for forcing a distribution
        copr_plugin_config = ConfigParser()
        config_file = None
        for path in self.base.conf.pluginconfpath:
            test_config_file = '{}/{}.conf'.format(path, PLUGIN_CONF)
            if os.path.isfile(test_config_file):
                config_file = test_config_file

        if config_file is not None:
            copr_plugin_config.read(config_file)
            if copr_plugin_config.has_option('main', 'distribution') and copr_plugin_config.has_option('main', 'releasever'):
                distribution = copr_plugin_config.get('main', 'distribution')
                releasever = copr_plugin_config.get('main', 'releasever')
                self.chroot_config = [distribution, releasever]
            else:
                self.chroot_config = [False, False]
Example #26
    def set_argparser(parser):
        parser.add_argument('subcommand', nargs=1,
                            choices=['help', 'enable', 'disable',
                                     'remove', 'list', 'search'])

        list_option = parser.add_mutually_exclusive_group()
        list_option.add_argument('--installed', action='store_true',
                                 help=_('List all installed Copr repositories (default)'))
        list_option.add_argument('--enabled', action='store_true',
                                 help=_('List enabled Copr repositories'))
        list_option.add_argument('--disabled', action='store_true',
                                 help=_('List disabled Copr repositories'))
        list_option.add_argument('--available-by-user', metavar='NAME',
                                 help=_('List available Copr repositories by user NAME'))

        parser.add_argument('arg', nargs='*')
Example #27
    def _cmd_enable(self, chroot):
        self._need_root()
        self._ask_user("""
You are about to enable a Playground repository.

Do you want to continue? [y/N]: """)
        api_url = "{0}/api/playground/list/".format(
            self.copr_url)
        f = dnfpluginscore.lib.urlopen(self, None, api_url)
        output = self._get_data(f)
        f.close()
        if output["output"] != "ok":
            raise dnf.cli.CliError(_("Unknown response from server."))
        for repo in output["repos"]:
            project_name = "{0}/{1}".format(repo["username"],
                                            repo["coprname"])
            repo_filename = "/etc/yum.repos.d/_playground_{}.repo" \
                    .format(project_name.replace("/", "-"))
            try:
                # Checking whether the repo exists first would require a
                # second API call, so just try to fetch its detail.
                api_url = "{0}/api/coprs/{1}/detail/{2}/".format(
                    self.copr_url, project_name, chroot)
                f = dnfpluginscore.lib.urlopen(self, None, api_url)
                output2 = self._get_data(f)
                f.close()
                if (output2 and ("output" in output2)
                        and (output2["output"] == "ok")):
                    self._download_repo(project_name, repo_filename, chroot)
            except dnf.exceptions.Error:
                # likely 404 and that repo does not exist
                pass
Example #28
 def run(self):
     subcommand = self.opts.subcommand[0]
     chroot = self._guess_chroot(self.chroot_config)
     if subcommand == "enable":
         self._cmd_enable(chroot)
         logger.info(_("Playground repositories successfully enabled."))
     elif subcommand == "disable":
         self._cmd_disable()
         logger.info(_("Playground repositories successfully disabled."))
     elif subcommand == "upgrade":
         self._cmd_disable()
         self._cmd_enable(chroot)
         logger.info(_("Playground repositories successfully updated."))
     else:
         raise dnf.exceptions.Error(
             _('Unknown subcommand {}.').format(subcommand))
Example #29
    def run(self):
        """Execute the util action here."""

        if self.opts.source:
            pkgs = self._get_pkg_objs_source(self.opts.packages)
        elif self.opts.debuginfo:
            pkgs = self._get_pkg_objs_debuginfo(self.opts.packages)
        else:
            pkgs = self._get_pkg_objs_rpms(self.opts.packages)

        # If user asked for just urls then print them and we're done
        if self.opts.url:
            for pkg in pkgs:
                url = pkg.remote_location(schemes=self.opts.urlprotocols)
                if url:
                    print(url)
                else:
                    msg = _("Failed to get mirror for package: %s") % pkg.name
                    raise dnf.exceptions.Error(msg)
            return
        else: 
            locations = self._do_downloads(pkgs)  # download rpms

        if self.opts.destdir:
            dest = self.opts.destdir
        else:
            dest = dnf.i18n.ucd(os.getcwd())

        self._copy_packages(dest, locations)
Example #30
    def run(self, args):
        (opts, parser) = parse_arguments(args)

        if opts.help_cmd:
            print(parser.format_help())
            return

        if opts.querytags:
            print(_('Available query-tags: use --queryformat ".. %{tag} .."'))
            print(QUERY_TAGS)
            return

        q = self.base.sack.query().available()
        if opts.key:
            if set(opts.key) & set('*[?'):  # is pattern ?
                fdict = {'name__glob': opts.key}
            else:  # substring
                fdict = {'name': opts.key}
            q = q.filter(hawkey.ICASE, **fdict)
        if opts.repoid:
            q = q.filter(reponame=opts.repoid)
        if opts.arch:
            q = q.filter(arch=opts.arch)
        if opts.whatprovides:
            q = self.by_provides(self.base.sack, [opts.whatprovides], q)
        if opts.whatrequires:
            q = self.by_requires(self.base.sack, opts.whatrequires, q)
        fmt_fn = build_format_fn(opts)
        self.show_packages(q, fmt_fn)
Example #31
class ConfigManagerCommand(dnf.cli.Command):

    aliases = ['config-manager']
    summary = _('manage dnf configuration options and repositories')

    @staticmethod
    def set_argparser(parser):
        parser.add_argument(
            'crepo', nargs='*', metavar='repo',
            help=_('repo to modify'))
        parser.add_argument(
            '--save', default=False, action='store_true',
            help=_('save the current options (useful with --setopt)'))
        parser.add_argument(
            '--set-enabled', default=False, action='store_true',
            help=_('enable the specified repos (automatically saves)'))
        parser.add_argument(
            '--set-disabled', default=False, action='store_true',
            help=_('disable the specified repos (automatically saves)'))
        parser.add_argument(
            '--add-repo', default=[], action='append', metavar='URL',
            help=_('add (and enable) the repo from the specified file or url'))
        parser.add_argument(
            '--dump', default=False, action='store_true',
            help=_('print current configuration values to stdout'))
        parser.add_argument(
            '--dump-variables', default=False, action='store_true',
            help=_('print variable values to stdout'))

    def configure(self):
        # setup sack and populate it with enabled repos
        demands = self.cli.demands
        demands.available_repos = True

        if (self.opts.save or self.opts.set_enabled or
                self.opts.set_disabled or self.opts.add_repo):
            demands.root_user = True


    def run(self):
        """Execute the util action here."""

        if self.opts.set_enabled and self.opts.set_disabled:
            logger.error(
                _("Error: Trying to enable and disable repos at the same time."))
            self.opts.set_enabled = self.opts.set_disabled = False
        if self.opts.set_enabled and not self.opts.crepo:
            logger.error(_("Error: Trying to enable already enabled repos."))
            self.opts.set_enabled = False

        if self.opts.add_repo:
            self.add_repo()
        else:
            self.modify_repo()

    def modify_repo(self):
        """ process --set-enabled, --set-disabled and --setopt options """

        sbc = self.base.conf
        modify = {}
        if hasattr(self.opts, 'main_setopts') and self.opts.main_setopts:
            modify = dict(self.opts.main_setopts._get_kwargs())
        if self.opts.dump_variables:
            for name, val in self.base.conf.substitutions.items():
                print("%s = %s" % (name, val))
        if not self.opts.crepo or 'main' in self.opts.crepo:
            if self.opts.save and modify:
                # modify [main] in dnf.conf
                self.base.conf.write_raw_configfile(dnf.const.CONF_FILENAME, 'main', sbc.substitutions, modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('main'))
                for name, val in modify.items():
                    sbc._set_value(name, val)
                print(self.base.conf.dump())

        if self.opts.set_enabled or self.opts.set_disabled:
            self.opts.save = True

        if self.opts.crepo:
            matched = []
            for name in self.opts.crepo:
                matched.extend(self.base.repos.get_matching(name))
        else:
            return

        if not matched:
            raise dnf.exceptions.Error(_("No matching repo to modify: %s.")
                                       % ', '.join(self.opts.crepo))
        for repo in sorted(matched):
            repo_modify = dict(modify)  # create local copy
            if self.opts.set_enabled:
                repo_modify['enabled'] = 1
            elif self.opts.set_disabled:
                repo_modify['enabled'] = 0
            if (hasattr(self.opts, 'repo_setopts')
                    and repo.id in self.opts.repo_setopts):
                repo_modify.update(self.opts.repo_setopts[repo.id]._get_kwargs())
            if self.opts.save and repo_modify:
                self.base.conf.write_raw_configfile(repo.repofile, repo.id, sbc.substitutions, repo_modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('repo: ' + repo.id))
                for name, val in repo_modify.items():
                    opt = repo._get_option(name)
                    if opt:
                        opt._set(val)
                print(repo.dump())

    def add_repo(self):
        """ process --add-repo option """

        # Get the reposdir location
        myrepodir = self.base.conf.get_reposdir
        errors_count = 0

        for url in self.opts.add_repo:
            if dnf.pycomp.urlparse.urlparse(url).scheme == '':
                url = 'file://' + os.path.abspath(url)
            logger.info(_('Adding repo from: %s'), url)
            if url.endswith('.repo'):
                # .repo file - download, put into reposdir and enable it
                destname = os.path.basename(url)
                destname = os.path.join(myrepodir, destname)
                try:
                    f = self.base.urlopen(url, mode='w+')
                    shutil.copy2(f.name, destname)
                    os.chmod(destname, 0o644)
                    f.close()
                except IOError as e:
                    errors_count += 1
                    logger.error(e)
                    continue
            else:
                # just url to repo, create .repo file on our own
                repoid = sanitize_url_to_fs(url)
                reponame = 'created by dnf config-manager from %s' % url
                destname = os.path.join(myrepodir, "%s.repo" % repoid)
                content = "[%s]\nname=%s\nbaseurl=%s\nenabled=1\n" % \
                                                (repoid, reponame, url)
                if not save_to_file(destname, content):
                    continue
        if errors_count:
            raise dnf.exceptions.Error(P_("Configuration of repo failed",
                                          "Configuration of repos failed", errors_count))
Example #32
class DebugRestoreCommand(dnf.cli.Command):

    aliases = ("debug-restore", )
    summary = _("restore packages recorded in debug-dump file")

    def configure(self):
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True
        self.cli.demands.root_user = True

    @staticmethod
    def set_argparser(parser):
        parser.add_argument(
            "--output",
            action="store_true",
            help=_("output commands that would be run to stdout."))
        parser.add_argument(
            "--install-latest",
            action="store_true",
            help=_("Install the latest version of recorded packages."))
        parser.add_argument(
            "--ignore-arch",
            action="store_true",
            help=_("Ignore architecture and install missing packages matching "
                   "the name, epoch, version and release."))
        parser.add_argument("--filter-types",
                            metavar="[install, remove, replace]",
                            default="install, remove, replace",
                            help=_("limit to specified type"))
        parser.add_argument("filename", nargs=1, help=_("name of dump file"))

    def run(self):
        """Execute the command action here."""
        if self.opts.filter_types:
            self.opts.filter_types = set(
                self.opts.filter_types.replace(",", " ").split())

        installed = self.base.sack.query().installed()
        dump_pkgs = self.read_dump_file(self.opts.filename[0])

        self.process_installed(installed, dump_pkgs, self.opts)

        self.process_dump(dump_pkgs, self.opts)

        if not self.opts.output:
            self.base.resolve()
            self.base.do_transaction()

    def process_installed(self, installed, dump_pkgs, opts):
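        # Compare each installed package with the dump: unchanged packages are
        # dropped from the dump, changed ones become 'replace' actions, and
        # packages missing from the dump are scheduled for removal.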
        for pkg in sorted(installed):
            filtered = False
            spec = pkgspec(pkg)
            action, dn, da, de, dv, dr = dump_pkgs.get(
                (pkg.name, pkg.arch), [None, None, None, None, None, None])
            dump_naevr = (dn, da, de, dv, dr)
            if pkg.pkgtup == dump_naevr:
                # package unchanged
                del dump_pkgs[(pkg.name, pkg.arch)]
            else:
                if action == "install":
                    # already have some version
                    dump_pkgs[(pkg.name, pkg.arch)][0] = "replace"
                    if "replace" not in opts.filter_types:
                        filtered = True
                else:
                    if "remove" not in opts.filter_types:
                        filtered = True
                if not filtered:
                    if opts.output:
                        print("remove    %s" % spec)
                    else:
                        self.base.package_remove(pkg)

    def process_dump(self, dump_pkgs, opts):
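        # Whatever remains in the dump still needs to be installed (or
        # replaced); honor --install-latest, --ignore-arch and --filter-types.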
        for (action, n, a, e, v, r) in sorted(dump_pkgs.values()):
            filtered = False
            if opts.ignore_arch:
                arch = ""
            else:
                arch = "." + a
            if opts.install_latest and action == "install":
                pkg_spec = "%s%s" % (n, arch)
                if "install" not in opts.filter_types:
                    filtered = True
            else:
                pkg_spec = pkgtup2spec(n, arch, e, v, r)
                if (action == "replace"
                        and "replace" not in opts.filter_types):
                    filtered = True
            if not filtered:
                if opts.output:
                    print("install   %s" % pkg_spec)
                else:
                    try:
                        self.base.install(pkg_spec)
                    except dnf.exceptions.MarkingError:
                        logger.error(_("Package %s is not available"),
                                     pkg_spec)

    @staticmethod
    def read_dump_file(filename):
        if filename.endswith(".gz"):
            fobj = gzip.GzipFile(filename)
        else:
            fobj = open(filename)

        if ucd(fobj.readline()) != DEBUG_VERSION:
            logger.error(_("Bad dnf debug file: %s"), filename)
            raise dnf.exceptions.Error

        # Skip the leading sections of the dump until the %%%%RPMDB marker,
        # then collect the indented package lines that follow it.
        skip = True
        pkgs = {}
        for line in fobj:
            line = ucd(line)
            if skip:
                if line == "%%%%RPMDB\n":
                    skip = False
                continue

            if not line or line[0] != " ":
                break

            pkg_spec = line.strip()
            nevra = hawkey.split_nevra(pkg_spec)
            pkgs[(nevra.name, nevra.arch)] = [
                "install",
                ucd(nevra.name),
                ucd(nevra.arch),
                ucd(nevra.epoch),
                ucd(nevra.version),
                ucd(nevra.release)
            ]

        return pkgs
Example #33
class DebugDumpCommand(dnf.cli.Command):

    aliases = ("debug-dump", )
    summary = _("dump information about installed rpm packages to file")

    def __init__(self, cli):
        super(DebugDumpCommand, self).__init__(cli)
        self.dump_file = None

    def configure(self):
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True

    @staticmethod
    def set_argparser(parser):
        parser.add_argument(
            "--norepos",
            action="store_true",
            default=False,
            help=_("do not attempt to dump the repository contents."))
        parser.add_argument("filename",
                            nargs="?",
                            help=_("optional name of dump file"))

    def run(self):
        """create debug txt file and compress it, if no filename specified
           use dnf_debug_dump-<timestamp>.txt.gz by default"""

        filename = self.opts.filename
        if not filename:
            now = time.strftime("%Y-%m-%d_%T", time.localtime(time.time()))
            filename = "dnf_debug_dump-%s-%s.txt.gz" % (os.uname()[1], now)

        filename = os.path.abspath(filename)
        if filename.endswith(".gz"):
            self.dump_file = gzip.GzipFile(filename, "w")
        else:
            self.dump_file = open(filename, "w")

        self.write(DEBUG_VERSION)
        self.dump_system_info()
        self.dump_dnf_config_info()
        self.dump_rpm_problems()
        self.dump_packages(not self.opts.norepos)
        self.dump_rpmdb_versions()
        self.dump_file.close()

        print(_("Output written to: %s") % filename)

    def write(self, msg):
        if dnf.pycomp.PY3 and isinstance(self.dump_file, gzip.GzipFile):
            msg = bytes(msg, "utf8")
        dnf.pycomp.write_to_file(self.dump_file, msg)

    def dump_system_info(self):
        self.write("%%%%SYSTEM INFO\n")
        uname = os.uname()
        self.write("  uname: %s, %s\n" % (uname[2], uname[4]))
        self.write("  rpm ver: %s\n" % rpm.__version__)
        self.write("  python ver: %s\n" % sys.version.replace("\n", ""))
        return

    def dump_dnf_config_info(self):
        var = self.base.conf.substitutions
        plugins = ",".join([p.name for p in self.base._plugins.plugins])
        self.write("%%%%DNF INFO\n")
        self.write("  arch: %s\n" % var["arch"])
        self.write("  basearch: %s\n" % var["basearch"])
        self.write("  releasever: %s\n" % var["releasever"])
        self.write("  dnf ver: %s\n" % dnf.const.VERSION)
        self.write("  enabled plugins: %s\n" % plugins)
        self.write("  global excludes: %s\n" %
                   ",".join(self.base.conf.excludepkgs))
        return

    def dump_rpm_problems(self):
        self.write("%%%%RPMDB PROBLEMS\n")
        (missing, conflicts) = rpm_problems(self.base)
        self.write("".join([
            "Package %s requires %s\n" % (ucd(pkg), ucd(req))
            for (req, pkg) in missing
        ]))
        self.write("".join([
            "Package %s conflicts with %s\n" % (ucd(pkg), ucd(conf))
            for (conf, pkg) in conflicts
        ]))

    def dump_packages(self, load_repos):
        q = self.base.sack.query()
        # packages from rpmdb
        self.write("%%%%RPMDB\n")
        for p in sorted(q.installed()):
            self.write("  %s\n" % pkgspec(p))

        if not load_repos:
            return

        self.write("%%%%REPOS\n")
        available = q.available()
        for repo in sorted(self.base.repos.iter_enabled(), key=lambda x: x.id):
            try:
                url = None
                if repo.metalink is not None:
                    url = repo.metalink
                elif repo.mirrorlist is not None:
                    url = repo.mirrorlist
                elif len(repo.baseurl) > 0:
                    url = repo.baseurl[0]
                self.write("%%%s - %s\n" % (repo.id, url))
                self.write("  excludes: %s\n" % ",".join(repo.excludepkgs))
                for po in sorted(available.filter(reponame=repo.id)):
                    self.write("  %s\n" % pkgspec(po))

            except dnf.exceptions.Error as e:
                self.write("Error accessing repo %s: %s\n" % (repo, str(e)))
                continue
        return

    def dump_rpmdb_versions(self):
        self.write("%%%%RPMDB VERSIONS\n")
        version = self.base.sack._rpmdb_version()
        self.write("  all: %s\n" % version)
        return
Example #34

from __future__ import absolute_import
from __future__ import unicode_literals
from dnfpluginscore import _, logger

import dnf
import dnf.cli
import dnf.exceptions
import fnmatch
import hawkey
import os
import tempfile
import time

NOT_READABLE = _('Unable to read version lock configuration: %s')
NO_LOCKLIST = _('Locklist not set')
ADDING_SPEC = _('Adding versionlock on:')
EXCLUDING_SPEC = _('Adding exclude on:')
DELETING_SPEC = _('Deleting versionlock for:')
NOTFOUND_SPEC = _('No package found for:')
NO_VERSIONLOCK = _('Excludes from versionlock plugin were not applied')
APPLY_LOCK = _(
    'Versionlock plugin: number of lock rules from file "{}" applied: {}')
APPLY_EXCLUDE = _(
    'Versionlock plugin: number of exclude rules from file "{}" applied: {}')
NEVRA_ERROR = _('Versionlock plugin: could not parse pattern:')

locklist_fn = None

Example #35
from subscription_manager import injection as inj
from subscription_manager.repolib import RepoActionInvoker
from subscription_manager.hwprobe import ClassicCheck
from subscription_manager.utils import chroot
from subscription_manager.injectioninit import init_dep_injection
from subscription_manager import logutil
from rhsm import connection
from rhsm import config

from dnfpluginscore import _, logger
import dnf

expired_warning = _("""
*** WARNING ***
The subscription for following product(s) has expired:
%s
You no longer have access to the repositories that provide these products.  It is important that you apply an active subscription in order to resume access to security and other critical updates. If you don't have other active subscriptions, you can renew the expired subscription.  """
                    )

not_registered_warning = _(
    "This system is not registered to Red Hat Subscription Management. You can use subscription-manager to register."
)

no_subs_warning = _(
    "This system is registered to Red Hat Subscription Management, but is not receiving updates. You can use subscription-manager to assign subscriptions."
)


class SubscriptionManager(dnf.Plugin):
    name = 'subscription-manager'
Example #36
class VersionLockCommand(dnf.cli.Command):

    aliases = ("versionlock", )
    summary = _("control package version locks")
    usage = "[add|exclude|list|delete|clear] [<package-nevr-spec>]"

    @staticmethod
    def set_argparser(parser):
        parser.add_argument("subcommand",
                            nargs='?',
                            metavar="[add|exclude|list|delete|clear]")
        parser.add_argument("package",
                            nargs='*',
                            metavar="[<package-nevr-spec>]")

    def configure(self):
        self.cli.demands.sack_activation = True
        self.cli.demands.available_repos = True

    def run(self):
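        # Default to 'list'; an unrecognized first argument is treated as a
        # package spec for an implicit 'add'.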
        cmd = 'list'
        if self.opts.subcommand:
            if self.opts.subcommand not in ALL_CMDS:
                cmd = 'add'
                self.opts.package.insert(0, self.opts.subcommand)
            elif self.opts.subcommand in EXC_CMDS:
                cmd = 'exclude'
            elif self.opts.subcommand in DEL_CMDS:
                cmd = 'delete'
            else:
                cmd = self.opts.subcommand

        if cmd == 'add':
            _write_locklist(self.base, self.opts.package, True,
                            "\n# Added locks on %s\n" % time.ctime(),
                            ADDING_SPEC, '')
        elif cmd == 'exclude':
            _write_locklist(self.base, self.opts.package, False,
                            "\n# Added exclude on %s\n" % time.ctime(),
                            EXCLUDING_SPEC, '!')
        elif cmd == 'list':
            for pat in _read_locklist():
                logger.info(pat)
        elif cmd == 'clear':
            with open(locklist_fn, 'w') as f:
                # open in write mode truncates file
                pass
        elif cmd == 'delete':
            dirname = os.path.dirname(locklist_fn)
            (out, tmpfilename) = tempfile.mkstemp(dir=dirname, suffix='.tmp')
            locked_specs = _read_locklist()
            count = 0
            with os.fdopen(out, 'w', -1) as out:
                for ent in locked_specs:
                    if _match(ent, self.opts.package):
                        logger.info("%s %s", DELETING_SPEC, ent)
                        count += 1
                        continue
                    out.write(ent)
                    out.write('\n')
            if not count:
                os.unlink(tmpfilename)
            else:
                os.chmod(tmpfilename, 0o644)
                os.rename(tmpfilename, locklist_fn)
Example #37
    def modify_repo(self):
        """ process --set-enabled, --set-disabled and --setopt options """

        matching_repos = []  # list of matched repositories
        not_matching_repos_id = set()  # IDs of not matched repositories

        def match_repos(key, add_matching_repos):
            matching = self.base.repos.get_matching(key)
            if not matching:
                not_matching_repos_id.add(key)
            elif add_matching_repos:
                matching_repos.extend(matching)

        if self.opts.crepo:
            for name in self.opts.crepo:
                match_repos(name, True)
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, False)
        else:
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, True)

        if not_matching_repos_id:
            raise dnf.exceptions.Error(
                _("No matching repo to modify: %s.") %
                ', '.join(not_matching_repos_id))

        sbc = self.base.conf
        modify = {}
        if hasattr(self.opts, 'main_setopts') and self.opts.main_setopts:
            modify = self.opts.main_setopts
        if self.opts.dump_variables:
            for name, val in self.base.conf.substitutions.items():
                print("%s = %s" % (name, val))
        if not self.opts.crepo or 'main' in self.opts.crepo:
            if self.opts.save and modify:
                # modify [main] in global configuration file
                self.base.conf.write_raw_configfile(
                    self.base.conf.config_file_path, 'main', sbc.substitutions,
                    modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('main'))
                print(self.base.conf.dump())

        if not matching_repos:
            return

        if self.opts.set_enabled or self.opts.set_disabled:
            self.opts.save = True

        for repo in sorted(matching_repos):
            repo_modify = {}
            if self.opts.set_enabled:
                repo_modify['enabled'] = "1"
            elif self.opts.set_disabled:
                repo_modify['enabled'] = "0"
            if hasattr(self.opts, 'repo_setopts'):
                for repoid, setopts in self.opts.repo_setopts.items():
                    if fnmatch.fnmatch(repo.id, repoid):
                        repo_modify.update(setopts)
            if self.opts.save and repo_modify:
                self.base.conf.write_raw_configfile(repo.repofile, repo.id,
                                                    sbc.substitutions,
                                                    repo_modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('repo: ' + repo.id))
                print(repo.dump())
Example #38
class ConfigManagerCommand(dnf.cli.Command):

    aliases = ['config-manager']
    summary = _('manage {prog} configuration options and repositories').format(
        prog=dnf.util.MAIN_PROG)

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('crepo',
                            nargs='*',
                            metavar='repo',
                            help=_('repo to modify'))
        parser.add_argument(
            '--save',
            default=False,
            action='store_true',
            help=_('save the current options (useful with --setopt)'))
        parser.add_argument(
            '--add-repo',
            default=[],
            action='append',
            metavar='URL',
            help=_('add (and enable) the repo from the specified file or url'))
        parser.add_argument(
            '--dump',
            default=False,
            action='store_true',
            help=_('print current configuration values to stdout'))
        parser.add_argument('--dump-variables',
                            default=False,
                            action='store_true',
                            help=_('print variable values to stdout'))
        enable_group = parser.add_mutually_exclusive_group()
        enable_group.add_argument("--set-enabled",
                                  default=False,
                                  dest="set_enabled",
                                  action="store_true",
                                  help=_("enable repos (automatically saves)"))
        enable_group.add_argument(
            "--set-disabled",
            default=False,
            dest="set_disabled",
            action="store_true",
            help=_("disable repos (automatically saves)"))

    def configure(self):
        # setup sack and populate it with enabled repos
        demands = self.cli.demands
        demands.available_repos = True

        # if no argument was passed then error
        if (not (self.opts.add_repo != [] or self.opts.save or self.opts.dump
                 or self.opts.dump_variables or self.opts.set_disabled
                 or self.opts.set_enabled)):
            self.cli.optparser.error(
                _("one of the following arguments is required: {}").format(
                    ' '.join([
                        "--save", "--add-repo", "--dump", "--dump-variables",
                        "--set-enabled", "--enable", "--set-disabled",
                        "--disable"
                    ])))

        # warn with hint if --enablerepo or --disablerepo argument was passed
        if self.opts.repos_ed != []:
            logger.warning(
                _("Warning: --enablerepo/--disablerepo arguments have no meaning"
                  "with config manager. Use --set-enabled/--set-disabled instead."
                  ))

        if (self.opts.save or self.opts.set_enabled or self.opts.set_disabled
                or self.opts.add_repo):
            demands.root_user = True

    def run(self):
        """Execute the util action here."""
        if self.opts.add_repo:
            self.add_repo()
        else:
            self.modify_repo()

    def modify_repo(self):
        """ process --set-enabled, --set-disabled and --setopt options """

        matching_repos = []  # list of matched repositories
        not_matching_repos_id = set()  # IDs of not matched repositories

        def match_repos(key, add_matching_repos):
            matching = self.base.repos.get_matching(key)
            if not matching:
                not_matching_repos_id.add(key)
            elif add_matching_repos:
                matching_repos.extend(matching)

        if self.opts.crepo:
            for name in self.opts.crepo:
                match_repos(name, True)
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, False)
        else:
            if hasattr(self.opts, 'repo_setopts'):
                for name in self.opts.repo_setopts.keys():
                    match_repos(name, True)

        if not_matching_repos_id:
            raise dnf.exceptions.Error(
                _("No matching repo to modify: %s.") %
                ', '.join(not_matching_repos_id))

        sbc = self.base.conf
        modify = {}
        if hasattr(self.opts, 'main_setopts') and self.opts.main_setopts:
            modify = self.opts.main_setopts
        if self.opts.dump_variables:
            for name, val in self.base.conf.substitutions.items():
                print("%s = %s" % (name, val))
        if not self.opts.crepo or 'main' in self.opts.crepo:
            if self.opts.save and modify:
                # modify [main] in global configuration file
                self.base.conf.write_raw_configfile(
                    self.base.conf.config_file_path, 'main', sbc.substitutions,
                    modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('main'))
                print(self.base.conf.dump())

        if not matching_repos:
            return

        if self.opts.set_enabled or self.opts.set_disabled:
            self.opts.save = True

        for repo in sorted(matching_repos):
            repo_modify = {}
            if self.opts.set_enabled:
                repo_modify['enabled'] = "1"
            elif self.opts.set_disabled:
                repo_modify['enabled'] = "0"
            if hasattr(self.opts, 'repo_setopts'):
                for repoid, setopts in self.opts.repo_setopts.items():
                    if fnmatch.fnmatch(repo.id, repoid):
                        repo_modify.update(setopts)
            if self.opts.save and repo_modify:
                self.base.conf.write_raw_configfile(repo.repofile, repo.id,
                                                    sbc.substitutions,
                                                    repo_modify)
            if self.opts.dump:
                print(self.base.output.fmtSection('repo: ' + repo.id))
                print(repo.dump())

    def add_repo(self):
        """ process --add-repo option """

        # Get the reposdir location
        myrepodir = self.base.conf.get_reposdir
        errors_count = 0

        for url in self.opts.add_repo:
            if dnf.pycomp.urlparse.urlparse(url).scheme == '':
                url = 'file://' + os.path.abspath(url)
            logger.info(_('Adding repo from: %s'), url)
            if url.endswith('.repo'):
                # .repo file - download, put into reposdir and enable it
                destname = os.path.basename(url)
                destname = os.path.join(myrepodir, destname)
                try:
                    f = self.base.urlopen(url, mode='w+')
                    shutil.copy2(f.name, destname)
                    os.chmod(destname, 0o644)
                    f.close()
                except IOError as e:
                    errors_count += 1
                    logger.error(e)
                    continue
            else:
                # just url to repo, create .repo file on our own
                repoid = sanitize_url_to_fs(url)
                reponame = 'created by {} config-manager from {}'.format(
                    dnf.util.MAIN_PROG, url)
                destname = os.path.join(myrepodir, "%s.repo" % repoid)
                content = "[%s]\nname=%s\nbaseurl=%s\nenabled=1\n" % \
                                                (repoid, reponame, url)
                if not save_to_file(destname, content):
                    errors_count += 1
                    continue
        if errors_count:
            raise dnf.exceptions.Error(
                P_("Configuration of repo failed",
                   "Configuration of repos failed", errors_count))
Example #39
0
    def getcomps(self, repo):
        comps_fn = repo._repo.getCompsFn()
        if comps_fn:
            dest = os.path.join(self.metadata_target(repo), 'comps.xml')
            dnf.yum.misc.decompress(comps_fn, dest=dest)
            logger.info(_("comps.xml for repository %s saved"), repo.id)
Example #40
0
class RepoSyncCommand(dnf.cli.Command):
    aliases = ('reposync', )
    summary = _('download all packages from remote repo')

    def __init__(self, cli):
        super(RepoSyncCommand, self).__init__(cli)

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('-a',
                            '--arch',
                            dest='arches',
                            default=[],
                            action=OptionParser._SplitCallback,
                            metavar='[arch]',
                            help=_('download only packages for this ARCH'))
        parser.add_argument(
            '--delete',
            default=False,
            action='store_true',
            help=_('delete local packages no longer present in repository'))
        parser.add_argument('-m',
                            '--downloadcomps',
                            default=False,
                            action='store_true',
                            help=_('also download comps.xml'))
        parser.add_argument('--download-metadata',
                            default=False,
                            action='store_true',
                            help=_('download all the metadata.'))
        parser.add_argument('-n',
                            '--newest-only',
                            default=False,
                            action='store_true',
                            help=_('download only newest packages per-repo'))
        parser.add_argument('-p',
                            '--download-path',
                            default='./',
                            help=_('where to store downloaded repositories'))
        parser.add_argument(
            '--metadata-path',
            help=_('where to store downloaded repository metadata. '
                   'Defaults to the value of --download-path.'))
        parser.add_argument('--source',
                            default=False,
                            action='store_true',
                            help=_('operate on source packages'))

    def configure(self):
        demands = self.cli.demands
        demands.available_repos = True
        demands.sack_activation = True

        repos = self.base.repos

        if self.opts.repo:
            repos.all().disable()
            for repoid in self.opts.repo:
                try:
                    repo = repos[repoid]
                except KeyError:
                    raise dnf.cli.CliError("Unknown repo: '%s'." % repoid)
                repo.enable()

        if self.opts.source:
            repos.enable_source_repos()

        for repo in repos.iter_enabled():
            repo._repo.expire()
            repo.deltarpm = False

    def run(self):
        self.base.conf.keepcache = True
        for repo in self.base.repos.iter_enabled():
            if self.opts.download_metadata:
                self.download_metadata(repo)
            if self.opts.downloadcomps:
                self.getcomps(repo)
            self.download_packages(repo)

    def repo_target(self, repo):
        return _pkgdir(self.opts.destdir or self.opts.download_path, repo.id)

    def metadata_target(self, repo):
        if self.opts.metadata_path:
            return _pkgdir(self.opts.metadata_path, repo.id)
        else:
            return self.repo_target(repo)

    def pkg_download_path(self, pkg):
        repo_target = self.repo_target(pkg.repo)
        pkg_download_path = os.path.realpath(
            os.path.join(repo_target, pkg.location))
        # join() ensures repo_target ends with a path separator (otherwise the
        # check would pass if pkg_download_path was a "sibling" path component
        # of repo_target that has the same prefix).
        if not pkg_download_path.startswith(os.path.join(repo_target, '')):
            raise dnf.exceptions.Error(
                _("Download target '{}' is outside of download path '{}'.").
                format(pkg_download_path, repo_target))
        return pkg_download_path

    def delete_old_local_packages(self, packages_to_download):
        download_map = dict()
        for pkg in packages_to_download:
            download_map[(pkg.repo.id, os.path.basename(pkg.location))] = 1
        # delete any *.rpm file, that is not going to be downloaded from repository
        for repo in self.base.repos.iter_enabled():
            repo_target = self.repo_target(repo)
            if os.path.exists(repo_target):
                for filename in os.listdir(repo_target):
                    path = os.path.join(repo_target, filename)
                    if filename.endswith('.rpm') and os.path.isfile(path):
                        if (repo.id, filename) not in download_map:
                            try:
                                os.unlink(path)
                                logger.info(_("[DELETED] %s"), path)
                            except OSError:
                                logger.error(_("failed to delete file %s"),
                                             path)

    def getcomps(self, repo):
        comps_fn = repo._repo.getCompsFn()
        if comps_fn:
            dest = os.path.join(self.metadata_target(repo), 'comps.xml')
            dnf.yum.misc.decompress(comps_fn, dest=dest)
            logger.info(_("comps.xml for repository %s saved"), repo.id)

    def download_metadata(self, repo):
        repo_target = self.metadata_target(repo)
        repo._repo.downloadMetadata(repo_target)
        return True

    def get_pkglist(self, repo):
        query = self.base.sack.query().available().filterm(reponame=repo.id)
        if self.opts.newest_only:
            query = query.latest()
        if self.opts.source:
            query.filterm(arch='src')
        elif self.opts.arches:
            query.filterm(arch=self.opts.arches)
        return query

    def download_packages(self, repo):
        base = self.base
        pkglist = self.get_pkglist(repo)

        remote_pkgs, local_repository_pkgs = base._select_remote_pkgs(pkglist)
        if remote_pkgs:
            progress = base.output.progress
            if progress is None:
                progress = dnf.callback.NullDownloadProgress()
            drpm = dnf.drpm.DeltaInfo(base.sack.query().installed(), progress,
                                      0)
            payloads = [
                RPMPayloadLocation(pkg, progress, self.pkg_download_path(pkg))
                for pkg in remote_pkgs
            ]
            base._download_remote_payloads(payloads, drpm, progress, None)
        if local_repository_pkgs:
            for pkg in local_repository_pkgs:
                pkg_path = os.path.join(pkg.repo.pkgdir,
                                        pkg.location.lstrip("/"))
                target_dir = os.path.dirname(self.pkg_download_path(pkg))
                dnf.util.ensure_dir(target_dir)
                shutil.copy(pkg_path, target_dir)
        if self.opts.delete:
            self.delete_old_local_packages(pkglist)
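repo_target() and metadata_target() above rely on a module-level _pkgdir() helper that is outside this excerpt. A plausible sketch, assuming it simply resolves the intermediate directory against the current working directory:

def _pkgdir(intermediate, target):
    # Return the absolute, symlink-resolved path '<cwd>/<intermediate>/<target>'.
    cwd = dnf.i18n.ucd(os.getcwd())
    return os.path.realpath(os.path.join(cwd, intermediate, target))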
Example #41
0
class DownloadCommand(dnf.cli.Command):

    aliases = ['download']
    summary = _('Download package to current directory')

    def __init__(self, cli):
        super(DownloadCommand, self).__init__(cli)
        self.opts = None
        self.parser = None

    @staticmethod
    def set_argparser(parser):
        parser.add_argument('packages',
                            nargs='+',
                            help=_('packages to download'))
        parser.add_argument("--source",
                            action='store_true',
                            help=_('download the src.rpm instead'))
        parser.add_argument("--debuginfo",
                            action='store_true',
                            help=_('download the -debuginfo package instead'))
        parser.add_argument(
            "--debugsource",
            action='store_true',
            help=_('download the -debugsource package instead'))
        parser.add_argument(
            "--arch",
            '--archlist',
            dest='arches',
            default=[],
            action=OptionParser._SplitCallback,
            metavar='[arch]',
            help=_("limit  the  query to packages of given architectures."))
        parser.add_argument('--resolve',
                            action='store_true',
                            help=_('resolve and download needed dependencies'))
        parser.add_argument(
            '--alldeps',
            action='store_true',
            help=_('when running with --resolve, download all dependencies '
                   '(do not exclude already installed ones)'))
        parser.add_argument('--url',
                            '--urls',
                            action='store_true',
                            dest='url',
                            help=_('print list of urls where the rpms '
                                   'can be downloaded instead of downloading'))
        parser.add_argument('--urlprotocols',
                            action='append',
                            choices=['http', 'https', 'rsync', 'ftp'],
                            default=[],
                            help=_('when running with --url, '
                                   'limit to specific protocols'))

    def configure(self):
        # setup sack and populate it with enabled repos
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True
        if self.opts.resolve and self.opts.alldeps:
            demands.load_system_repo = False

        if self.opts.source:
            self.base.repos.enable_source_repos()

        if self.opts.debuginfo or self.opts.debugsource:
            self.base.repos.enable_debug_repos()

        if self.opts.destdir:
            self.base.conf.destdir = self.opts.destdir
        else:
            self.base.conf.destdir = dnf.i18n.ucd(os.getcwd())

    def run(self):
        """Execute the util action here."""

        if (not self.opts.source and not self.opts.debuginfo
                and not self.opts.debugsource):
            pkgs = self._get_pkg_objs_rpms(self.opts.packages)
        else:
            pkgs = []
            if self.opts.source:
                pkgs.extend(self._get_pkg_objs_source(self.opts.packages))

            if self.opts.debuginfo:
                pkgs.extend(self._get_pkg_objs_debuginfo(self.opts.packages))

            if self.opts.debugsource:
                pkgs.extend(self._get_pkg_objs_debugsource(self.opts.packages))

        # If user asked for just urls then print them and we're done
        if self.opts.url:
            for pkg in pkgs:
                # command line repo packages do not have .remote_location
                if pkg.repoid != hawkey.CMDLINE_REPO_NAME:
                    url = pkg.remote_location(schemes=self.opts.urlprotocols)
                    if url:
                        print(url)
                    else:
                        msg = _(
                            "Failed to get mirror for package: %s") % pkg.name
                        if self.base.conf.strict:
                            raise dnf.exceptions.Error(msg)
                        logger.warning(msg)
            return
        else:
            self._do_downloads(pkgs)  # download rpms

    def _do_downloads(self, pkgs):
        """
        Perform the download for a list of packages
        """
        pkg_dict = {}
        for pkg in pkgs:
            pkg_dict.setdefault(str(pkg), []).append(pkg)

        to_download = []
        cmdline = []
        for pkg_list in pkg_dict.values():
            pkgs_cmdline = [
                pkg for pkg in pkg_list
                if pkg.repoid == hawkey.CMDLINE_REPO_NAME
            ]
            if pkgs_cmdline:
                cmdline.append(pkgs_cmdline[0])
                continue
            pkg_list.sort(key=lambda x: (x.repo.priority, x.repo.cost))
            to_download.append(pkg_list[0])
        if to_download:
            self.base.download_packages(to_download, self.base.output.progress)
        if cmdline:
            # command line repo packages are either local files or already downloaded urls
            # just copy them to the destination
            for pkg in cmdline:
                # python<3.4 shutil module does not raise SameFileError, check manually
                src = pkg.localPkg()
                dst = os.path.join(self.base.conf.destdir,
                                   os.path.basename(src))
                if os.path.exists(dst) and os.path.samefile(src, dst):
                    continue
                shutil.copy(src, self.base.conf.destdir)
        locations = sorted([pkg.localPkg() for pkg in to_download + cmdline])
        return locations

    def _get_pkg_objs_rpms(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the rpms
        to download.
        """
        if self.opts.resolve:
            pkgs = self._get_packages_with_deps(pkg_specs)
        else:
            pkgs = self._get_packages(pkg_specs)
        return pkgs

    def _get_pkg_objs_source(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the source
        rpms to download.
        """
        pkgs = self._get_pkg_objs_rpms(pkg_specs)
        source_pkgs = self._get_source_packages(pkgs)
        pkgs = set(self._get_packages(source_pkgs, source=True))
        return pkgs

    def _get_pkg_objs_debuginfo(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the debuginfo
        rpms to download.
        """
        dbg_pkgs = set()
        q = self.base.sack.query().available()

        for pkg in self._get_packages(pkg_specs):
            for dbg_name in [pkg.debug_name, pkg.source_debug_name]:
                dbg_available = q.filter(name=dbg_name,
                                         epoch=int(pkg.epoch),
                                         version=pkg.version,
                                         release=pkg.release,
                                         arch=pkg.arch)

                if not dbg_available:
                    continue

                for p in dbg_available:
                    dbg_pkgs.add(p)

                break

        return dbg_pkgs

    def _get_pkg_objs_debugsource(self, pkg_specs):
        """
        Return a list of dnf.Package objects that represent the debugsource
        rpms to download.
        """
        dbg_pkgs = set()
        q = self.base.sack.query().available()

        for pkg in self._get_packages(pkg_specs):
            dbg_available = q.filter(name=pkg.debugsource_name,
                                     epoch=int(pkg.epoch),
                                     version=pkg.version,
                                     release=pkg.release,
                                     arch=pkg.arch)

            for p in dbg_available:
                dbg_pkgs.add(p)

        return dbg_pkgs

    def _get_packages(self, pkg_specs, source=False):
        """Get packages matching pkg_specs."""
        func = self._get_query_source if source else self._get_query
        queries = []
        for pkg_spec in pkg_specs:
            try:
                queries.append(func(pkg_spec))
            except dnf.exceptions.PackageNotFoundError as e:
                logger.error(dnf.i18n.ucd(e))
                if self.base.conf.strict:
                    logger.error(_("Exiting due to strict setting."))
                    raise dnf.exceptions.Error(e)

        pkgs = list(itertools.chain(*queries))
        return pkgs

    def _get_packages_with_deps(self, pkg_specs, source=False):
        """Get packages matching pkg_specs and the deps."""
        pkgs = self._get_packages(pkg_specs)
        goal = hawkey.Goal(self.base.sack)
        for pkg in pkgs:
            goal.install(pkg)
        rc = goal.run()
        if rc:
            new_pkgs = goal.list_installs() + goal.list_upgrades()
            for pkg in pkgs:
                if pkg not in new_pkgs:
                    new_pkgs += [pkg]
            return new_pkgs
        else:
            msg = [_('Error in resolve of packages:')]
            logger.warning("\n    ".join(msg + [str(pkg) for pkg in pkgs]))
            logger.warning(
                dnf.util._format_resolve_problems(goal.problem_rules()))
            return []

    @staticmethod
    def _get_source_packages(pkgs):
        """Get list of source rpm names for a list of packages."""
        source_pkgs = set()
        for pkg in pkgs:
            if pkg.sourcerpm:
                source_pkgs.add(pkg.sourcerpm)
                logger.debug('  --> Package : %s Source : %s', str(pkg),
                             pkg.sourcerpm)
            elif pkg.arch == 'src':
                source_pkgs.add("%s-%s.src.rpm" % (pkg.name, pkg.evr))
            else:
                logger.info(_("No source rpm defined for %s"), str(pkg))
        return list(source_pkgs)

    def _get_query(self, pkg_spec):
        """Return a query to match a pkg_spec."""
        schemes = dnf.pycomp.urlparse.urlparse(pkg_spec)[0]
        is_url = schemes and schemes in ('http', 'ftp', 'file', 'https')
        if is_url or (pkg_spec.endswith('.rpm') and os.path.isfile(pkg_spec)):
            pkgs = self.base.add_remote_rpms(
                [pkg_spec], progress=self.base.output.progress)
            return self.base.sack.query().filterm(pkg=pkgs)
        subj = dnf.subject.Subject(pkg_spec)
        q = subj.get_best_query(self.base.sack, with_src=self.opts.source)
        q = q.available()
        q = q.latest()
        if self.opts.arches:
            q = q.filter(arch=self.opts.arches)
        if len(q.run()) == 0:
            msg = _("No package %s available.") % (pkg_spec)
            raise dnf.exceptions.PackageNotFoundError(msg)
        return q

    def _get_query_source(self, pkg_spec):
        """Return a query to match a source rpm file name."""
        pkg_spec = pkg_spec[:-4]  # skip the .rpm
        subj = dnf.subject.Subject(pkg_spec)
        for nevra_obj in subj.get_nevra_possibilities():
            tmp_query = nevra_obj.to_query(self.base.sack).available()
            if tmp_query:
                return tmp_query.latest()

        msg = _("No package %s available.") % (pkg_spec)
        raise dnf.exceptions.PackageNotFoundError(msg)
Example #42
0
def macro_def(arg):
    arglist = arg.split(None, 1) if arg else []
    if len(arglist) < 2:
        msg = _("'%s' is not of the format 'MACRO EXPR'") % arg
        raise dnfpluginscore.argparse.ArgumentTypeError(msg)
    return arglist
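macro_def() is intended to serve as an argparse type, so each --define value arrives pre-split into a [MACRO, EXPR] pair that can later be pushed with rpm.addMacro(), as Example #45 does. A hedged sketch of that wiring (the parser and option name here are illustrative, and it assumes dnfpluginscore.argparse is the standard argparse module, so the ArgumentTypeError it raises is the one argparse expects):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-D', '--define', action='append', default=[],
                    metavar="'MACRO EXPR'", type=macro_def,
                    help='define the rpm MACRO with value EXPR '
                         'for the spec file parser')

opts = parser.parse_args(['--define', 'dist .fc99', '--define', 'rhel 0'])
print(opts.define)  # [['dist', '.fc99'], ['rhel', '0']]
# Each pair can then be pushed with rpm.addMacro(*pair) before parsing a spec.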
Example #43
0
def _enable_sub_repos(repos, sub_name_fn):
    for repo in repos.iter_enabled():
        for found in repos.get_matching(sub_name_fn(repo.id)):
            if not found.enabled:
                logger.info(_('enabling %s repository'), found.id)
                found.enable()
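In the plugin library this helper typically backs small convenience wrappers that map every enabled repository id onto its source or debuginfo counterpart. A minimal sketch of such wrappers (assumed; a real implementation may also handle repo id suffixes such as '-rpms'):

def enable_source_repos(repos):
    # 'fedora' -> 'fedora-source', 'updates' -> 'updates-source', ...
    _enable_sub_repos(repos, lambda repoid: repoid + '-source')


def enable_debug_repos(repos):
    # 'fedora' -> 'fedora-debuginfo', ...
    _enable_sub_repos(repos, lambda repoid: repoid + '-debuginfo')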
Example #44
0
class Py3QueryCommand(dnf.cli.Command):
    """The util command there is extending the dnf command line."""
    aliases = ('py3query', )
    summary = _('query the python3 porting status')
    usage = _('[OPTIONS] [KEYWORDS]')

    def configure(self):
        """Setup the demands."""
        demands = self.cli.demands
        demands.sack_activation = True
        demands.available_repos = True

    @staticmethod
    def set_argparser(parser):
        """Parse command line arguments."""
        parser.add_argument('--output',
                            '-o',
                            metavar='FILE',
                            action='store',
                            help=_('write output to the given file'))

        parser.add_argument('--no-bz',
                            dest='fetch_bugzilla',
                            action='store_false',
                            default=True,
                            help=_("Don't get Bugzilla links"))

        parser.add_argument('--qrepo',
                            dest='py3query_repo',
                            action='append',
                            help=_("Repo(s) to use for the query"))

        parser.add_argument('--repo-groups',
                            dest='repo_groups_file',
                            default=None,
                            metavar='FILE',
                            action='store',
                            help=_("Optional filename of a 'groups.json' file "
                                   "that will record which package comes from "
                                   "which repositories"))

    def run(self):
        reponames = self.opts.py3query_repo
        if not reponames:
            reponames = ['rawhide']
        self.base_query = self.base.sack.query()
        self.pkg_query = self.base_query.filter(reponame=list(reponames))
        source_reponames = [n + '-source' for n in reponames]
        self.src_query = self.base_query.filter(
            reponame=source_reponames).filter(arch=['src'])

        # python_versions: {package: set of Python versions}
        python_versions = collections.defaultdict(set)
        # rpm_pydeps: {package: set of dep names}
        rpm_pydeps = collections.defaultdict(set)
        # dep_versions: {dep name: Python version}
        dep_versions = collections.defaultdict(set)
        for n, seeds in SEED_PACKAGES.items():
            provides = sorted(self.all_provides(reponames, seeds), key=str)

            # This effectively includes packages that still need
            # Python 3.4 while Rawhide only provides Python 3.5
            provides += sorted(seeds)

            for dep in progressbar(provides,
                                   'Getting py{} requires'.format(n)):
                dep_versions[str(dep)] = n
                for pkg in self.whatrequires(dep, self.pkg_query):
                    python_versions[pkg].add(n)
                    rpm_pydeps[pkg].add(str(dep))

        # Add packages with 'python?' as a component of their name, if they
        # haven't been added as dependencies
        for name, version in {
                'python': 0,
                'python2': 2,
                'python3': 3,
        }.items():
            for pattern in '{}-*', '*-{}', '*-{}-*':
                name_glob = pattern.format(name)
                query = self.pkg_query.filter(name__glob=name_glob)
                message = 'Getting {} packages'.format(name_glob)
                for pkg in progressbar(query, message):
                    if pkg.sourcerpm.startswith('mingw-'):
                        # Ignore mingw packages
                        continue
                    if pkg not in python_versions:
                        python_versions[pkg].add(version)

        # srpm_names: {package: srpm name}
        # by_srpm_name: {srpm name: set of packages}
        srpm_names = {}
        by_srpm_name = collections.defaultdict(set)
        # repo_srpms: {repo name: set of srpm names}
        repo_srpms = {}
        for pkg in progressbar(python_versions.keys(), 'Getting SRPMs'):
            srpm_name = get_srpm_name(pkg)
            srpm_names[pkg] = srpm_name
            by_srpm_name[srpm_name].add(pkg)
            repo_srpms.setdefault(pkg.reponame, set()).add(srpm_name)

        # deps_of_pkg: {package: set of packages}
        deps_of_pkg = collections.defaultdict(set)
        # build_deps_of_srpm: {srpm: set of packages}
        build_deps_of_srpm = collections.defaultdict(set)
        # requirers_of_pkg: {package: set of srpm}
        requirers_of_pkg = collections.defaultdict(set)
        # build_requirers_of_pkg: {pkg: set of srpm}
        build_requirers_of_pkg = collections.defaultdict(set)
        # all_provides: {provide_name: package}
        all_provides = {
            str(r).split()[0]: p
            for p in python_versions for r in p.provides
            if not str(r).startswith(PROVIDES_BLACKLIST)
        }
        for pkg in progressbar(sorted(python_versions.keys()),
                               'Getting requirements'):
            if python_versions[pkg] == {3}:
                continue
            reqs = set()
            build_reqs = set()
            for provide in pkg.provides:
                reqs.update(self.whatrequires(provide, self.pkg_query))
                build_reqs.update(self.whatrequires(provide, self.src_query))

            for req in reqs:
                if req in python_versions.keys():
                    deps_of_pkg[req].add(pkg)
                # Both Python and non-Python packages here.
                requirers_of_pkg[pkg].add(req)

            for req in build_reqs:
                if req.name in by_srpm_name.keys():
                    build_deps_of_srpm[req.name].add(pkg)
                # Both Python and non-Python packages here.
                build_requirers_of_pkg[pkg].add(req)

        # unversioned_requirers: {srpm_name: set of srpm_names}
        unversioned_requirers = collections.defaultdict(set)
        for pkg in progressbar(
                set.union(*requirers_of_pkg.values(),
                          *build_requirers_of_pkg.values()),
                'Processing packages with ambiguous dependencies'):
            # Ignore packages that are:
            if (python_versions.get(pkg) == {3} or  # Python 3 only
                    pkg.name.endswith('-doc')):  # Documentation
                continue
            for require in (pkg.requires + pkg.requires_pre + pkg.recommends +
                            pkg.suggests + pkg.supplements + pkg.enhances):
                require = str(require).split()[0]
                requirement = all_provides.get(require)
                if (is_unversioned(require) and requirement
                        and not (require.endswith('-doc')
                                 or python_versions.get(requirement) == {3})
                        and require not in NAME_NOTS
                        and require != 'python-unversioned-command'):
                    requirement_srpm_name = get_srpm_name(requirement)
                    requirer_srpm_name = get_srpm_name(pkg)
                    unversioned_requirers[requirement_srpm_name].add(
                        requirer_srpm_name)

        # json_output: {srpm name: info}
        json_output = dict()
        for name in progressbar(by_srpm_name, 'Generating output'):
            pkgs = sorted(by_srpm_name[name])
            r = json_output[name] = {}
            r['rpms'] = {
                format_rpm_name(p): {
                    'py_deps':
                    {str(d): dep_versions[d]
                     for d in rpm_pydeps[p]},
                    'non_python_requirers': {
                        'build_time':
                        sorted(
                            get_srpm_names(build_requirers_of_pkg[p]) -
                            by_srpm_name.keys()),
                        'run_time':
                        sorted(
                            get_srpm_names(requirers_of_pkg[p]) -
                            by_srpm_name.keys()),
                    },
                    'almost_leaf': (
                        # is Python 2 and is not required by anything EXCEPT
                        # sibling subpackages
                        2 in python_versions[p] and
                        not get_srpm_names(build_requirers_of_pkg[p]
                                           | requirers_of_pkg[p]) - {name}),
                    'legacy_leaf': (
                        # is Python 2 and is not required by anything
                        2 in python_versions[p]
                        and not get_srpm_names(build_requirers_of_pkg[p]
                                               | requirers_of_pkg[p])),
                }
                for p in pkgs
            }
            set_status(r, pkgs, python_versions)

            r['deps'] = sorted(
                set(srpm_names[d] for p in pkgs
                    for d in deps_of_pkg.get(p, '') if srpm_names[d] != name))
            r['build_deps'] = sorted(
                set(srpm_names[d] for d in build_deps_of_srpm.get(name, '')
                    if srpm_names[d] != name))
            if unversioned_requirers.get(name):
                r['unversioned_requirers'] = sorted(
                    unversioned_requirers[name])

        # add Bugzilla links
        if self.opts.fetch_bugzilla:
            bar = iter(
                progressbar(['connecting', 'tracker', 'individual'],
                            'Getting bugs'))

            next(bar)
            bz = bugzilla.RHBugzilla(BUGZILLA_URL)

            next(bar)
            include_fields = [
                'id', 'depends_on', 'blocks', 'component', 'status',
                'resolution', 'last_change_time'
            ]
            trackers = bz.getbugs(TRACKER_BUG_IDS,
                                  include_fields=include_fields)
            all_ids = [b for t in trackers for b in t.depends_on]

            next(bar)
            bugs = bz.getbugs(all_ids, include_fields=include_fields)
            bar.close()

            def bug_namegetter(bug):
                return '{bug.id} {bug.status} {bug.component}'.format(bug=bug)

            rank = [
                'NEW', 'ASSIGNED', 'POST', 'MODIFIED', 'ON_QA', 'VERIFIED',
                'RELEASE_PENDING', 'CLOSED'
            ]

            def key(bug):
                return rank.index(bug.status), bug.last_change_time

            bugs = sorted(bugs, key=key)

            for bug in progressbar(bugs,
                                   'Merging bugs',
                                   namegetter=bug_namegetter):
                r = json_output.get(bug.component, {})
                if 'bug' in r.get('links', {}):
                    continue
                status = bug.status
                if bug.resolution:
                    status += ' ' + bug.resolution
                # Let's get the datetime of the last comment and convert to string
                last_change_datetime = time.strftime(
                    '%Y-%m-%d %H:%M:%S', bug.last_change_time.timetuple())
                r.setdefault('links', {})['bug'] = [
                    bug.weburl, status, last_change_datetime
                ]

                for tb in bug.blocks:
                    if tb in ADDITIONAL_TRACKER_BUGS:
                        r.setdefault('tracking_bugs',
                                     []).append(BUGZILLA_BUG_URL.format(tb))

                if (any(tb in bug.blocks for tb in MISPACKAGED_TRACKER_BUG_IDS)
                        and r.get('status') == 'idle'
                        and status not in NOTABUG_STATUSES):
                    r['status'] = "mispackaged"
                    r['note'] = ('There is a problem in Fedora packaging, '
                                 'not necessarily with the software itself. '
                                 'See the linked Fedora bug.')

        # Print out output

        if self.opts.output:
            with open(self.opts.output, 'w') as f:
                json.dump(json_output, f, indent=2, sort_keys=True)
        else:
            json.dump(json_output, sys.stdout, indent=2, sort_keys=True)
            sys.stdout.flush()

        # Write out a groups.json
        if self.opts.repo_groups_file:
            output = {
                repo_name: {
                    'name': repo_name,
                    'packages': sorted(srpm_names)
                }
                for repo_name, srpm_names in repo_srpms.items()
            }
            with open(self.opts.repo_groups_file, 'w') as f:
                json.dump(output, f, indent=2, sort_keys=True)

    def all_provides(self, reponames, seeds):
        pkgs = set()
        for seed in seeds:
            query = dnf.subject.Subject(seed, ignore_case=True).get_best_query(
                self.base.sack, with_provides=False)
            query = query.filter(reponame=list(reponames))
            pkgs.update(query.run())
        provides = set()
        for pkg in sorted(pkgs):
            provides.update(pkg.provides)
        return provides

    def whatrequires(self, dep, query):
        query = query.filter(requires=dep)
        return set(query)
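The py3query command above routes every loop through a progressbar() helper that this excerpt does not define. A minimal stand-in (an assumption; the real plugin presumably wraps a proper progress library) that supports the namegetter keyword and the iter()/next()/close() usage seen in run() could be:

import sys

def progressbar(iterable, msg, namegetter=str):
    # Yield items one by one while printing simple progress to stderr.
    items = list(iterable)
    for index, item in enumerate(items, 1):
        sys.stderr.write('\r{}: {}/{} {}'.format(
            msg, index, len(items), namegetter(item)))
        sys.stderr.flush()
        yield item
    sys.stderr.write('\n')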
Example #45
0
class BuildDepCommand(dnf.cli.Command):

    aliases = ('builddep',)
    msg = "Install build dependencies for package or spec file"
    summary = _(msg)
    usage = _("[PACKAGE|PACKAGE.spec]")

    def __init__(self, args):
        super(BuildDepCommand, self).__init__(args)
        self.rpm_ts = rpm.TransactionSet()
        self.opts = None

    def configure(self, args):
        demands = self.cli.demands
        demands.available_repos = True
        demands.resolving = True
        demands.root_user = True
        demands.sack_activation = True

        (self.opts, parser) = parse_arguments(args)

        if self.opts.help_cmd:
            print(parser.format_help())
            return

        # enable source repos only if needed
        if not (self.opts.spec or self.opts.srpm):
            for pkgspec in self.opts.packages:
                if not (pkgspec.endswith('.src.rpm')
                        or pkgspec.endswith('nosrc.rpm')
                        or pkgspec.endswith('.spec')):
                    dnfpluginscore.lib.enable_source_repos(self.base.repos)
                    break

    @sink_rpm_logging()
    def run(self, args):
        if self.opts.help_cmd:
            return

        # Push user-supplied macro definitions for spec parsing
        for macro in self.opts.define:
            rpm.addMacro(macro[0], macro[1])

        pkg_errors = False
        for pkgspec in self.opts.packages:
            try:
                if self.opts.srpm:
                    self._src_deps(pkgspec)
                elif self.opts.spec:
                    self._spec_deps(pkgspec)
                elif pkgspec.endswith('.src.rpm') or pkgspec.endswith('nosrc.rpm'):
                    self._src_deps(pkgspec)
                elif pkgspec.endswith('.spec'):
                    self._spec_deps(pkgspec)
                else:
                    self._remote_deps(pkgspec)
            except dnf.exceptions.Error as e:
                logger.error(e)
                pkg_errors = True

        # Pop user macros so they don't affect future rpm calls
        for macro in self.opts.define:
            rpm.delMacro(macro[0])

        if pkg_errors:
            raise dnf.exceptions.Error(_("Some packages could not be found."))

    @staticmethod
    def _rpm_dep2reldep_str(rpm_dep):
        return rpm_dep.DNEVR()[2:]

    def _install(self, reldep_str):
        try:
            self.base.install(reldep_str)
        except dnf.exceptions.MarkingError:
            msg = _("No matching package to install: '%s'")
            logger.warning(msg, reldep_str)
            return False
        return True

    def _src_deps(self, src_fn):
        fd = os.open(src_fn, os.O_RDONLY)
        if self.cli.nogpgcheck:
            self.rpm_ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
        try:
            h = self.rpm_ts.hdrFromFdno(fd)
        except rpm.error as e:
            if str(e) == 'public key not available':
                logger.error("Error: public key not available, add "
                             "'--nogpgcheck' option to ignore package sign")
                return
            elif str(e) == 'error reading package header':
                e = _("Failed to open: '%s', not a valid source rpm file.") % (
                      src_fn,)
            raise dnf.exceptions.Error(e)
        os.close(fd)
        ds = h.dsFromHeader('requirename')
        done = True
        for dep in ds:
            reldep_str = self._rpm_dep2reldep_str(dep)
            if reldep_str.startswith('rpmlib('):
                continue
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)

    def _spec_deps(self, spec_fn):
        try:
            spec = rpm.spec(spec_fn)
        except ValueError:
            msg = _("Failed to open: '%s', not a valid spec file.") % spec_fn
            raise dnf.exceptions.Error(msg)
        done = True
        for dep in rpm.ds(spec.sourceHeader, 'requires'):
            reldep_str = self._rpm_dep2reldep_str(dep)
            done &= self._install(reldep_str)

        if not done:
            err = _("Not all dependencies satisfied")
            raise dnf.exceptions.Error(err)

    def _remote_deps(self, package):
        available = dnf.subject.Subject(package).get_best_query(
                        self.base.sack).filter(arch__neq="src")
        sourcenames = list({dnfpluginscore.lib.package_source_name(pkg)
                           for pkg in available})
        pkgs = self.base.sack.query().available().filter(
                name=(sourcenames + [package]), arch="src").latest().run()
        if not pkgs:
            raise dnf.exceptions.Error(_('no package matched: %s') % package)
        self.base.download_packages(pkgs)
        for pkg in pkgs:
            self._src_deps(pkg.localPkg())