def __init__(self, base):
    """Wire the CLI to *base* and register all built-in commands."""
    self._system_cachedir = None
    self.demands = dnf.cli.demand.DemandSheet()  # :cli
    self.logger = logging.getLogger("dnf")
    self.command = None
    self.base = base
    self.cli_commands = {}
    self.nogpgcheck = False
    # Register every built-in command class in a single pass; the order
    # matches the original one-by-one registration.
    builtin_commands = (
        dnf.cli.commands.install.InstallCommand,
        dnf.cli.commands.upgrade.UpgradeCommand,
        dnf.cli.commands.UpgradeToCommand,
        dnf.cli.commands.InfoCommand,
        dnf.cli.commands.ListCommand,
        dnf.cli.commands.EraseCommand,
        dnf.cli.commands.group.GroupCommand,
        dnf.cli.commands.MakeCacheCommand,
        dnf.cli.commands.CleanCommand,
        dnf.cli.commands.ProvidesCommand,
        dnf.cli.commands.CheckUpdateCommand,
        dnf.cli.commands.SearchCommand,
        dnf.cli.commands.RepoListCommand,
        dnf.cli.commands.RepoPkgsCommand,
        dnf.cli.commands.HelpCommand,
        dnf.cli.commands.reinstall.ReinstallCommand,
        dnf.cli.commands.downgrade.DowngradeCommand,
        dnf.cli.commands.HistoryCommand,
        dnf.cli.commands.distrosync.DistroSyncCommand,
    )
    for command_cls in builtin_commands:
        self.register_command(command_cls)
def test_setup_verbose(self):
    """Verbose setup(): DEBUG goes to stdout, WARNING and above to stderr."""
    dnf_logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (out, err):
        self.logging.setup(logging.DEBUG, logging.WARNING, self.logdir)
        self._bench(dnf_logger)
    # debug + info land on stdout, warning + error on stderr.
    self.assertEqual(out.getvalue(), "d\ni\n")
    self.assertEqual(err.getvalue(), "w\ne\n")
def test_setup(self):
    """Default _setup(): INFO reaches stdout, ERROR reaches stderr."""
    dnf_logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (out, err):
        self.logging._setup(logging.INFO, logging.ERROR, self.logdir)
        self._bench(dnf_logger)
    # Only the info message hits stdout, only the error hits stderr.
    self.assertEqual(out.getvalue(), "i\n")
    self.assertEqual(err.getvalue(), "e\n")
def test_setup_verbose(self):
    """Verbose _setup(): DEBUG goes to stdout, WARNING and above to stderr."""
    dnf_logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (out, err):
        self.logging._setup(logging.DEBUG, logging.WARNING, self.logdir)
        self._bench(dnf_logger)
    # debug + info land on stdout, warning + error on stderr.
    self.assertEqual(out.getvalue(), "d\ni\n")
    self.assertEqual(err.getvalue(), "w\ne\n")
def test_setup(self):
    """Default setup(): INFO reaches stdout, ERROR reaches stderr."""
    dnf_logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (out, err):
        self.logging.setup(logging.INFO, logging.ERROR, self.logdir)
        self._bench(dnf_logger)
    # Only the info message hits stdout, only the error hits stderr.
    self.assertEqual(out.getvalue(), "i\n")
    self.assertEqual(err.getvalue(), "e\n")
def configure_dnf_logging():
    """Configure the DNF logging."""
    # Librepo: drop whatever handlers are attached and install ours.
    librepo_log = libdnf.repo.LibrepoLog
    librepo_log.removeAllHandlers()
    librepo_log.addHandler(DNF_LIBREPO_LOG)
    # DNF: raise the logger to the custom DDEBUG level.
    logging.getLogger(DNF_LOGGER).setLevel(dnf.logging.DDEBUG)
def test_setup_only_once(self):
    """A repeated setup() call must not attach duplicate handlers."""
    dnf_logger = logging.getLogger("dnf")
    self.assertLength(dnf_logger.handlers, 0)
    self.logging.setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG, self.logdir)
    handlers_after_first = len(dnf_logger.handlers)
    self.assertGreater(handlers_after_first, 0)
    self.logging.setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG, self.logdir)
    # The handler count is unchanged after the second call.
    self.assertEqual(handlers_after_first, len(dnf_logger.handlers))
def test_setup_only_once(self):
    """A repeated _setup() call must not attach duplicate handlers."""
    dnf_logger = logging.getLogger("dnf")
    self.assertLength(dnf_logger.handlers, 0)
    self.logging._setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG, self.logdir)
    handlers_after_first = len(dnf_logger.handlers)
    self.assertGreater(handlers_after_first, 0)
    self.logging._setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG, self.logdir)
    # The handler count is unchanged after the second call.
    self.assertEqual(handlers_after_first, len(dnf_logger.handlers))
def configure_dnf_logging():
    """Configure the DNF logging."""
    # Librepo still needs our handler even though it has a separate logger:
    # DNF registers callbacks that require the librepo log to be written
    # through here so that it can process that log.
    librepo_log = libdnf.repo.LibrepoLog
    librepo_log.removeAllHandlers()
    librepo_log.addHandler(DNF_LIBREPO_LOG)
    # DNF: raise the logger to the custom DDEBUG level.
    logging.getLogger(DNF_LOGGER).setLevel(dnf.logging.DDEBUG)
def test_file_logging(self):
    """Messages below the console threshold must still land in dnf.log."""
    # log nothing to the console:
    self.logging.setup(dnf.logging.SUPERCRITICAL, dnf.logging.SUPERCRITICAL,
                       self.logdir)
    logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (stdout, stderr):
        logger.info("i")
        logger.critical("c")
        self.assertEqual(stdout.getvalue(), '')
        self.assertEqual(stderr.getvalue(), '')
    # yet the file should contain both the entries:
    logfile = os.path.join(self.logdir, "dnf.log")
    self.assertFile(logfile)
    with open(logfile) as f:
        msgs = map(operator.attrgetter("message"),
                   map(_split_logfile_entry, f.readlines()))
        # Materialize the map: on Python 3 assertSequenceEqual requires a
        # real sequence (a map object is a one-shot iterator without len()),
        # and this matches the sibling tests in this file.
        self.assertSequenceEqual(list(msgs),
                                 [dnf.const.LOG_MARKER, 'i', 'c'])
def test_rpm_logging(self):
    """The dnf.rpm logger writes to dnf.rpm.log and never to the console."""
    # log everything to the console:
    self.logging._setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG,
                        self.logdir)
    rpm_logger = logging.getLogger("dnf.rpm")
    with support.patch_std_streams() as (out, err):
        rpm_logger.info('rpm transaction happens.')
        # rpm logger never outputs to the console:
        self.assertEqual(out.getvalue(), "")
        self.assertEqual(err.getvalue(), "")
    logfile = os.path.join(self.logdir, "dnf.rpm.log")
    self.assertFile(logfile)
    with open(logfile) as f:
        entries = [_split_logfile_entry(line) for line in f.readlines()]
    messages = [entry.message for entry in entries]
    self.assertSequenceEqual(
        messages, [dnf.const.LOG_MARKER, 'rpm transaction happens.'])
def test_file_logging(self):
    """Console-suppressed messages must still be written to dnf.log."""
    # log nothing to the console:
    self.logging._setup(dnf.logging.SUPERCRITICAL, dnf.logging.SUPERCRITICAL,
                        self.logdir)
    dnf_logger = logging.getLogger("dnf")
    with support.patch_std_streams() as (out, err):
        dnf_logger.info("i")
        dnf_logger.critical("c")
        self.assertEqual(out.getvalue(), '')
        self.assertEqual(err.getvalue(), '')
    # yet the file should contain both the entries:
    logfile = os.path.join(self.logdir, "dnf.log")
    self.assertFile(logfile)
    with open(logfile) as f:
        entries = [_split_logfile_entry(line) for line in f.readlines()]
    messages = [entry.message for entry in entries]
    self.assertSequenceEqual(messages, [dnf.const.LOG_MARKER, 'i', 'c'])
def test_rpm_logging(self):
    """Messages on the dnf.rpm logger go to dnf.rpm.log, not the console."""
    # log everything to the console:
    self.logging.setup(dnf.logging.SUBDEBUG, dnf.logging.SUBDEBUG,
                       self.logdir)
    logger = logging.getLogger("dnf.rpm")
    with support.patch_std_streams() as (stdout, stderr):
        logger.info('rpm transaction happens.')
        # rpm logger never outputs to the console:
        self.assertEqual(stdout.getvalue(), "")
        self.assertEqual(stderr.getvalue(), "")
    logfile = os.path.join(self.logdir, "dnf.rpm.log")
    self.assertFile(logfile)
    with open(logfile) as f:
        msgs = map(operator.attrgetter("message"),
                   map(_split_logfile_entry, f.readlines()))
        # Materialize the map: on Python 3 assertSequenceEqual requires a
        # real sequence (a map object is a one-shot iterator without len()),
        # and this matches the sibling tests in this file.
        self.assertSequenceEqual(
            list(msgs), [dnf.const.LOG_MARKER, 'rpm transaction happens.'])
def __init__(self, base, searchpath, optparser=None, types=None,
             pluginconfpath=None, disabled=None, enabled=None):
    '''Initialise the instance.

    @param base: The base object the plugins will operate on.
    @param searchpath: A list of paths to look for plugin modules.
    @param optparser: The OptionParser instance for this run (optional).
        Use to allow plugins to extend command line options.
    @param types: A sequence specifying the types of plugins to load.
        This should be a sequence containing one or more of the TYPE_...
        constants. If None (the default), all plugins will be loaded.
    @param pluginconfpath: A list of paths to look for plugin configuration
        files. Defaults to "/etc/yum/pluginconf.d".
    @param disabled: Names of plugins that must not be loaded (optional).
    @param enabled: Names of plugins allowed to load (optional).
    '''
    # Fall back to the stock yum plugin-config directory.
    if not pluginconfpath:
        pluginconfpath = ['/etc/yum/pluginconf.d']
    self.searchpath = searchpath
    self.pluginconfpath = pluginconfpath
    # NOTE(review): 'weakref' is called directly here; presumably an alias
    # of weakref.ref/proxy imported elsewhere in this file — confirm.
    self.base = weakref(base)
    self.optparser = optparser
    self.cmdline = (None, None)
    self.logger = logging.getLogger("dnf")
    self.disabledPlugins = disabled
    self.enabledPlugins = enabled
    # Normalize 'types' to a tuple of TYPE_... constants.
    if types is None:
        types = ALL_TYPES
    if not isinstance(types, (list, tuple)):
        types = (types,)
    # Identity comparison: TYPE_INTERFACE is a distinct deprecated sentinel
    # that may compare equal to TYPE_INTERACTIVE by value.
    if id(TYPE_INTERFACE) in [id(t) for t in types]:
        self.logger.info(
            'Deprecated constant TYPE_INTERFACE during plugin '
            'initialization.\nPlease use TYPE_INTERACTIVE instead.')
    self._importplugins(types)
    self.cmdlines = {}
    # Call close handlers when yum exit's
    atexit.register(self.run, 'close')
    # Let plugins register custom config file options
    self.run('config')
string.hexdigits) # Regex patterns matching any filename that is repo-specific cache data of a # particular type. The filename is expected to not contain the base cachedir # path components. CACHE_FILES = { 'metadata': r'^%s\/.*(xml(\.gz|\.xz|\.bz2)?|asc|cachecookie|%s)$' % (_CACHEDIR_RE, _MIRRORLIST_FILENAME), 'packages': r'^%s\/%s\/.+rpm$' % (_CACHEDIR_RE, _PACKAGES_RELATIVE_DIR), 'dbcache': r'^.+(solv|solvx)$', } logger = logging.getLogger("dnf") def repo_id_invalid(repo_id): # :api """Return index of an invalid character in the repo ID (if present).""" invalids = (i for i, c in enumerate(repo_id) if c not in _REPOID_CHARS) return dnf.util.first(invalids) def _user_pass_str(user, password): if user is None: return None user = dnf.pycomp.urllib_quote(user) password = '' if password is None else dnf.pycomp.urllib_quote(password) return '%s:%s' % (user, password)
import hawkey
import logging
import librepo
import operator
import os
import shutil
import string
import time
import types

# Repository layout constants: well-known subdirectory and file names.
_METADATA_RELATIVE_DIR = "repodata"
_METALINK_FILENAME = "metalink.xml"
_MIRRORLIST_FILENAME = "mirrorlist"
# Checksum algorithms this module recognizes, strongest first.
_RECOGNIZED_CHKSUMS = ['sha512', 'sha256']

# Module-level logger shared by the whole file.
logger = logging.getLogger("dnf")


def repo_id_invalid(repo_id):
    """Return index of an invalid character in the repo ID (if present).

    :api
    """
    allowed_chars = ''.join((string.ascii_letters, string.digits, '-_.:'))
    invalids = (index for index, char in enumerate(repo_id)
                if char not in allowed_chars)
    # dnf.util.first() yields the first offending index, or None when clean.
    return dnf.util.first(invalids)


def _user_pass_str(user, password):
    # Percent-encode credentials for embedding in a URL; a None user means
    # no credentials at all.
    if user is None:
        return None
    user = dnf.pycomp.urllib_quote(user)
    password = '' if password is None else dnf.pycomp.urllib_quote(password)
    # NOTE(review): this chunk appears truncated here — the function's final
    # return statement is not visible in this view.
import dnf.plugin
import dnf.persistor
import dnf.rpm
import dnf.sack
import dnf.util
import dnf.yum.config
import dnf.yum.misc
import hawkey
import logging
import operator
import os
import re
import sys
import time

# Module-level logger shared by the output helpers in this file.
logger = logging.getLogger('dnf')


def _add_pkg_simple_list_lens(data, pkg, indent=''):
    """ Get the length of each pkg's column. Add that to data.
        This "knows" about simpleList and printVer. """
    # Width of "name.arch" plus any indent prefix (the +1 is the dot).
    na = len(pkg.name) + 1 + len(pkg.arch) + len(indent)
    ver = len(pkg.evr)
    rid = len(pkg.reponame)
    # Count, per column, how many packages need each observed width.
    for (d, v) in (('na', na), ('ver', ver), ('rid', rid)):
        data[d].setdefault(v, 0)
        data[d][v] += 1


def _list_cmd_calc_columns(output, ypl):
    """ Work out the dynamic size of the columns to pass to fmtColumns. """
    # NOTE(review): this chunk ends here — the rest of this function's body
    # is not visible in this view.
import dnf.plugin
import dnf.persistor
import dnf.rpm
import dnf.sack
import dnf.util
import dnf.yum.config
import dnf.yum.misc
import hawkey
import logging
import operator
import os
import re
import sys
import time

# Module-level logger shared by the output helpers in this file.
logger = logging.getLogger('dnf')


def _add_pkg_simple_list_lens(data, pkg, indent=''):
    """ Get the length of each pkg's column. Add that to data.
        This "knows" about simpleList and printVer. """
    # Width of "name.arch" plus any indent prefix (the +1 is the dot).
    na = len(pkg.name) + 1 + len(pkg.arch) + len(indent)
    ver = len(pkg.evr)
    rid = len(pkg.reponame)
    # Count, per column, how many packages need each observed width.
    for (d, v) in (('na', na), ('ver', ver), ('rid', rid)):
        data[d].setdefault(v, 0)
        data[d][v] += 1


def _list_cmd_calc_columns(output, ypl):
    """ Work out the dynamic size of the columns to pass to fmtColumns. """
    # NOTE(review): this chunk ends here — the rest of this function's body
    # is not visible in this view.
def __init__(self, parent, base, conf):
    """Hold references to the owning object, the dnf base and its config."""
    self.logger = logging.getLogger("dnf")
    self._conf = conf
    self._base = base
    self._parent = parent
def __init__(self):
    """Initialise the CLI-flavoured base object."""
    # handle sigquit early on, before any further setup runs
    signal.signal(signal.SIGQUIT, sigquit)
    dnf.Base.__init__(self)
    # Output helper bound to this base's configuration (self.conf is
    # presumably populated by dnf.Base.__init__ above — confirm).
    self.output = output.Output(self, self.conf)
    self.logger = logging.getLogger("dnf")
def drop_all_handlers():
    """Detach every handler from the 'dnf' and 'dnf.rpm' loggers."""
    for name in ('dnf', 'dnf.rpm'):
        log = logging.getLogger(name)
        # Iterate over a snapshot so removal does not disturb the walk.
        for handler in list(log.handlers):
            log.removeHandler(handler)
def drop_all_handlers():
    """Remove all handlers attached to the 'dnf' and 'dnf.rpm' loggers."""
    for name in ('dnf', 'dnf.rpm'):
        target = logging.getLogger(name)
        # Copy the handler list first; removeHandler mutates it in place.
        for handler in list(target.handlers):
            target.removeHandler(handler)