def __init__(self, stream, term=None, forcetty=False, encoding=None):
    """Initialize a terminfo-backed formatter wrapping *stream*.

    Looks up the terminal's capabilities via curses/terminfo and caches
    the escape sequences needed for bold/underline/color output.

    :type stream: file-like object.
    :param stream: stream to output to, defaulting to :py:class:`sys.stdout`.
        Must have a real file descriptor (``fileno()`` is called on it).
    :type term: string.
    :param term: terminal type, pulled from the environment if omitted.
    :type forcetty: bool
    :param forcetty: force output of colors even if the wrapped stream is
        not a tty.
    :param encoding: passed through to :py:class:`PlainTextFormatter`.
    :raises TerminfoDisabled: if no usable terminfo entry exists or the
        entry cannot set fg/bg colors.
    """
    PlainTextFormatter.__init__(self, stream, encoding=encoding)
    fd = stream.fileno()
    if term is None:
        # We only apply the remapping if we are guessing the
        # terminal type from the environment. If we get a term
        # type passed explicitly we just use it as-is (if the
        # caller wants the remap just doing the
        # term_alternates lookup there is easy enough.)
        term_env = os.environ.get('TERM')
        term_alt = self.term_alternates.get(term_env)
        # Try the remapped alternate first, then the raw $TERM value;
        # the first one curses accepts wins (break).
        for term in (term_alt, term_env):
            if term is not None:
                try:
                    curses.setupterm(fd=fd, term=term)
                except curses.error:
                    pass
                else:
                    break
        else:
            # for/else: no candidate succeeded.
            raise TerminfoDisabled(
                'no terminfo entries, not even for "dumb"?')
    else:
        # TODO maybe do something more useful than raising curses.error
        # if term is not in the terminfo db here?
        curses.setupterm(fd=fd, term=term)
    self._term = term
    # Terminal width in columns, from the terminfo 'cols' capability.
    self.width = curses.tigetnum('cols')
    try:
        self.reset = TerminfoReset(curses.tigetstr('sgr0'))
        self.bold = TerminfoMode(curses.tigetstr('bold'))
        self.underline = TerminfoMode(curses.tigetstr('smul'))
        self._color_reset = curses.tigetstr('op')
        self._set_color = (curses.tigetstr('setaf'), curses.tigetstr('setab'))
    except (_BogusTerminfo, curses.error):
        # presumably a py2/py3 exception-chaining helper — re-raise as our
        # own error type with the original as cause. TODO confirm semantics.
        compatibility.raise_from(TerminfoHatesOurTerminal(self._term))
    if not all(self._set_color):
        raise TerminfoDisabled(
            'setting background/foreground colors is not supported')
    # NOTE(review): result discarded — appears to probe that tparm works
    # with this terminal's setaf string so failures surface here rather
    # than at first colored write; verify.
    curses.tparm(self._set_color[0], curses.COLOR_WHITE)
    # [fg, bg]
    self._current_colors = [None, None]
    self._modes = set()
    self._pos = 0
    self._fg_cache = defaultdictkey(partial(TerminfoColor, 0))
    self._bg_cache = defaultdictkey(partial(TerminfoColor, 1))
def __init__(self, options, iuse_handler):
    """Set up per-repo checksum expectations and the fetchables IUSE filter.

    :param options: passed through to the parent class.
    :param iuse_handler: provides the filter used on 'fetchables' attributes.
    """
    super().__init__(options)

    def _manifest_hashes(attr):
        # Build a per-repo lookup for the named manifest hash attribute;
        # repos lacking a config yield an empty frozenset.
        def lookup(repo):
            if hasattr(repo, 'config'):
                return frozenset(getattr(repo.config.manifests, attr))
            return frozenset()
        return lookup

    self.preferred_checksums = mappings.defaultdictkey(
        _manifest_hashes('hashes'))
    self.required_checksums = mappings.defaultdictkey(
        _manifest_hashes('required_hashes'))
    self.seen_checksums = {}
    self.iuse_filter = iuse_handler.get_filter('fetchables')
def __init__(self, **kwargs):
    """Initialize merge-observer state, defaulting verbosity to 0."""
    if "verbosity" not in kwargs:
        kwargs["verbosity"] = 0
    super().__init__(**kwargs)
    # per-package data mapping; unseen keys default to 0
    self.package_data = defaultdictkey(lambda _key: 0)
    # total download size for all pkgs to be merged
    self.download_size = 0
def initialize_cache(package, force=False):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: the plugin package (module) to scan; its directory is
        walked for ``*.py`` plugin modules.
    :param force: when true, ignore stored cache entries and rescan every
        module, cleaning old cache files first.
    :return: immutable mapping of plugin key -> sorted plugin data.
    """
    # package plugin cache, see above.
    package_cache = defaultdict(set)
    modpath = os.path.dirname(package.__file__)
    modlist = listdir_files(modpath)
    stored_cache_name = pjoin(modpath, CACHE_FILENAME)
    stored_cache = _read_cache_file(package, stored_cache_name)
    if force:
        _clean_old_caches(modpath)
    # Directory cache, mapping modulename to
    # (mtime, set([keys]))
    modlist = set(x for x in modlist
                  if os.path.splitext(x)[1] == ".py" and x != "__init__.py")
    # NOTE(review): cache_stale is set below but never read afterwards;
    # staleness is re-derived from the set comparison at the end.
    cache_stale = False
    # Hunt for modules.
    actual_cache = defaultdict(set)
    # Memoize mtime lookups per file path.
    mtime_cache = mappings.defaultdictkey(lambda x: int(os.path.getmtime(x)))
    for modfullname in sorted(modlist):
        modname = os.path.splitext(modfullname)[0]
        # It is an actual module. Check if its cache entry is valid.
        mtime = mtime_cache[pjoin(modpath, modfullname)]
        vals = stored_cache.get((modname, mtime))
        if vals is None or force:
            # Cache entry is stale.
            # NOTE(review): stored_cache is keyed on (modname, mtime)
            # tuples, so this bare-modname lookup always hits the default;
            # the logged "stored" value is therefore always 0 — verify.
            logger.debug(
                "stale because of %s: actual %s != stored %s",
                modname, mtime, stored_cache.get(modname, (0, ()))[0])
            cache_stale = True
            # NOTE(review): `entries` is never used below.
            entries = []
            qualname = ".".join((package.__name__, modname))
            try:
                module = import_module(qualname)
            except ImportError:
                # This is a serious problem, but if we blow up
                # here we cripple pkgcore entirely which may make
                # fixing the problem impossible. So be noisy but
                # try to continue.
                logger.exception("plugin import failed for %s processing %s",
                                 package.__name__, modname)
                continue
            registry = getattr(module, PLUGIN_ATTR, {})
            vals = set()
            for key, plugs in registry.iteritems():
                for idx, plug_name in enumerate(plugs):
                    if isinstance(plug_name, basestring):
                        # string entry: resolve it to the actual object.
                        plug = _process_plugin(
                            package, _plugin_data(key, 0, qualname, plug_name))
                    else:
                        plug = plug_name
                    if plug is None:
                        # import failure, ignore it, error already logged
                        continue
                    priority = getattr(plug, "priority", 0)
                    if not isinstance(priority, int):
                        logger.error(
                            "ignoring plugin %s: has a non integer priority: %s",
                            plug, priority)
                        continue
                    if plug_name is plug:
                        # this means it's an object, rather than a string; store
                        # the offset.
                        plug_name = idx
                    data = _plugin_data(key, priority, qualname, plug_name)
                    vals.add(data)
        # Record the (possibly freshly scanned) entry and fold it into the
        # per-key package cache.
        actual_cache[(modname, mtime)] = vals
        for data in vals:
            package_cache[data.key].add(data)
    if force or set(stored_cache) != set(actual_cache):
        logger.debug("updating cache %r for new plugins", stored_cache_name)
        _write_cache_file(stored_cache_name, actual_cache)
    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.iteritems())
def get_plugins(key, package=plugins):
    """Yield every enabled plugin registered under ``key``.

    Plugins with a C{disabled} attribute evaluating to C{True} are skipped.
    """
    entries = _global_cache[package].get(key, ())
    for candidate in _process_plugins(package, entries, filter_disabled=True):
        yield candidate


def get_plugin(key, package=plugins):
    """Get a single plugin matching this key.

    This assumes all plugins for this key have a priority attribute.
    If any of them do not the AttributeError is not stopped.

    :return: highest-priority plugin or None if no plugin available.
    """
    entries = _global_cache[package].get(key, ())
    # The first plugin yielded is the highest-priority one.
    return next(
        iter(_process_plugins(package, entries, filter_disabled=True)), None)


# Global plugin cache. Mapping of package to package cache, which is a
# mapping of plugin key to a list of module names; populated lazily by
# initialize_cache on first access.
_global_cache = mappings.defaultdictkey(initialize_cache)
def initialize_cache(package, force=False):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: plugin package; every directory on ``package.__path__``
        is scanned, earlier paths shadowing later ones.
    :param force: when true, ignore stored cache entries and rescan,
        cleaning old cache files first.
    :return: immutable mapping of plugin key -> sorted plugin data.
    """
    # package plugin cache, see above.
    package_cache = collections.defaultdict(set)
    # Module filenames already handled by an earlier __path__ entry;
    # later entries must not shadow them.
    seen_modnames = set()
    for path in package.__path__:
        # Check if the path actually exists first.
        try:
            modlist = listdir_files(path)
        except OSError as e:
            if e.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            continue
        stored_cache_name = pjoin(path, CACHE_FILENAME)
        stored_cache = _read_cache_file(package, stored_cache_name)
        if force:
            _clean_old_caches(path)
        # Directory cache, mapping modulename to
        # (mtime, set([keys]))
        modlist = set(
            x for x in modlist
            if os.path.splitext(x)[1] == '.py' and x != '__init__.py')
        modlist.difference_update(seen_modnames)
        # NOTE(review): cache_stale is assigned but never read; staleness
        # is re-derived from the set comparison below.
        cache_stale = False
        # Hunt for modules.
        actual_cache = collections.defaultdict(set)
        # Memoize mtime lookups per file path.
        mtime_cache = mappings.defaultdictkey(
            lambda x: int(os.path.getmtime(x)))
        for modfullname in sorted(modlist):
            modname = os.path.splitext(modfullname)[0]
            # It is an actual module. Check if its cache entry is valid.
            mtime = mtime_cache[pjoin(path, modfullname)]
            vals = stored_cache.get((modname, mtime))
            if vals is None or force:
                # Cache entry is stale.
                # NOTE(review): stored_cache is keyed on (modname, mtime)
                # tuples, so this bare-modname lookup always yields the
                # default — the logged "stored" value is always 0; verify.
                logger.debug('stale because of %s: actual %s != stored %s',
                             modname, mtime,
                             stored_cache.get(modname, (0, ()))[0])
                cache_stale = True
                # NOTE(review): `entries` is never used below.
                entries = []
                qualname = '.'.join((package.__name__, modname))
                try:
                    module = modules.load_module(qualname)
                except modules.FailedImport:
                    # This is a serious problem, but if we blow up
                    # here we cripple pkgcore entirely which may make
                    # fixing the problem impossible. So be noisy but
                    # try to continue.
                    logger.exception(
                        'plugin import failed for %s processing %s',
                        package.__name__, modname)
                    continue
                registry = getattr(module, PLUGIN_ATTR, {})
                vals = set()
                for key, plugs in registry.iteritems():
                    for idx, plug_name in enumerate(plugs):
                        if isinstance(plug_name, basestring):
                            # string entry: resolve it to the actual object.
                            plug = _process_plugin(
                                package,
                                _plugin_data(key, 0, qualname, plug_name))
                        else:
                            plug = plug_name
                        if plug is None:
                            # import failure, ignore it, error already logged
                            continue
                        priority = getattr(plug, 'priority', 0)
                        if not isinstance(priority, int):
                            logger.error(
                                "ignoring plugin %s: has a non integer priority: %s",
                                plug, priority)
                            continue
                        if plug_name is plug:
                            # this means it's an object, rather than a string; store
                            # the offset.
                            plug_name = idx
                        data = _plugin_data(key, priority, qualname, plug_name)
                        vals.add(data)
            # Record the (possibly freshly scanned) entry, mark the module
            # as seen, and fold it into the per-key package cache.
            actual_cache[(modname, mtime)] = vals
            seen_modnames.add(modfullname)
            for data in vals:
                package_cache[data.key].add(data)
        if force or set(stored_cache) != set(actual_cache):
            logger.debug('updating cache %r for new plugins',
                         stored_cache_name)
            _write_cache_file(stored_cache_name, actual_cache)
    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.iteritems())
Plugins with a C{disabled} attribute evaluating to C{True} are skipped. """ cache = _global_cache[package] for plug in _process_plugins(package, cache.get(key, ()), filter_disabled=True): yield plug def get_plugin(key, package=plugins): """Get a single plugin matching this key. This assumes all plugins for this key have a priority attribute. If any of them do not the AttributeError is not stopped. :return: highest-priority plugin or None if no plugin available. """ cache = _global_cache[package] for plug in _process_plugins(package, cache.get(key, ()), filter_disabled=True): # first returned will be the highest. return plug return None # Global plugin cache. Mapping of package to package cache, which is a # mapping of plugin key to a list of module names. _global_cache = mappings.defaultdictkey(initialize_cache)
def initialize_cache(package, force=False):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: the plugin package (module) to scan; its directory is
        walked for ``*.py`` plugin modules.
    :param force: when true, ignore stored cache entries and rescan,
        cleaning old cache files first.
    :return: immutable mapping of plugin key -> sorted plugin data.
    :raises ImportError: if a plugin module fails to import (unlike other
        variants of this function, the import here is unguarded).
    """
    # package plugin cache, see above.
    package_cache = defaultdict(set)
    modpath = os.path.dirname(package.__file__)
    modlist = listdir_files(modpath)
    stored_cache_name = pjoin(modpath, CACHE_FILENAME)
    stored_cache = _read_cache_file(package, stored_cache_name)
    if force:
        _clean_old_caches(modpath)
    # Directory cache, mapping modulename to
    # (mtime, set([keys]))
    modlist = set(x for x in modlist
                  if os.path.splitext(x)[1] == '.py' and x != '__init__.py')
    # NOTE(review): cache_stale is assigned but never read; staleness is
    # re-derived from the set comparison at the end.
    cache_stale = False
    # Hunt for modules.
    actual_cache = defaultdict(set)
    # Memoize mtime lookups per file path.
    mtime_cache = mappings.defaultdictkey(lambda x: int(os.path.getmtime(x)))
    for modfullname in sorted(modlist):
        modname = os.path.splitext(modfullname)[0]
        # It is an actual module. Check if its cache entry is valid.
        mtime = mtime_cache[pjoin(modpath, modfullname)]
        vals = stored_cache.get((modname, mtime))
        if vals is None or force:
            # Cache entry is stale.
            # NOTE(review): stored_cache is keyed on (modname, mtime)
            # tuples, so this bare-modname lookup always hits the default;
            # the logged "stored" value is always 0 — verify.
            logger.debug('stale because of %s: actual %s != stored %s',
                         modname, mtime,
                         stored_cache.get(modname, (0, ()))[0])
            cache_stale = True
            # NOTE(review): `entries` is never used below.
            entries = []
            qualname = '.'.join((package.__name__, modname))
            module = import_module(qualname)
            registry = getattr(module, PLUGIN_ATTR, {})
            vals = set()
            for key, plugs in registry.iteritems():
                for idx, plug_name in enumerate(plugs):
                    if isinstance(plug_name, basestring):
                        # string entry: resolve it to the actual object.
                        plug = _process_plugin(
                            package, _plugin_data(key, 0, qualname, plug_name))
                    else:
                        plug = plug_name
                    if plug is None:
                        # import failure, ignore it, error already logged
                        continue
                    priority = getattr(plug, 'priority', 0)
                    if not isinstance(priority, int):
                        logger.error(
                            "ignoring plugin %s: has a non integer priority: %s",
                            plug, priority)
                        continue
                    if plug_name is plug:
                        # this means it's an object, rather than a string; store
                        # the offset.
                        plug_name = idx
                    data = _plugin_data(key, priority, qualname, plug_name)
                    vals.add(data)
        # Record the (possibly freshly scanned) entry and fold it into the
        # per-key package cache.
        actual_cache[(modname, mtime)] = vals
        for data in vals:
            package_cache[data.key].add(data)
    if force or set(stored_cache) != set(actual_cache):
        logger.debug('updating cache %r for new plugins', stored_cache_name)
        _write_cache_file(stored_cache_name, actual_cache)
    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.iteritems())
def __init__(self, options, iuse_handler):
    """Set up per-repo required checksums and the fetchables IUSE filter.

    :param options: passed through to base.Template.
    :param iuse_handler: provides the filter used on 'fetchables' attributes.
    """
    base.Template.__init__(self, options)

    def _required_hashes(repo):
        # Repos lacking a config expose no manifest hash requirements.
        if hasattr(repo, 'config'):
            return frozenset(repo.config.manifests.hashes)
        return frozenset()

    self.required_checksums = mappings.defaultdictkey(_required_hashes)
    self.seen_checksums = {}
    self.iuse_filter = iuse_handler.get_filter('fetchables')
def __init__(self, stream, term=None, forcetty=False, encoding=None):
    """Initialize a terminfo-backed formatter wrapping *stream*.

    Queries terminfo via curses for the escape sequences used for
    reset/bold/underline/color output and caches them on the instance.

    :type stream: file-like object.
    :param stream: stream to output to, defaulting to :py:class:`sys.stdout`.
        Must have a real file descriptor (``fileno()`` is called on it).
    :type term: string.
    :param term: terminal type, pulled from the environment if omitted.
    :type forcetty: bool
    :param forcetty: force output of colors even if the wrapped stream is
        not a tty.
    :param encoding: passed through to :py:class:`PlainTextFormatter`.
    :raises TerminfoDisabled: if no usable terminfo entry exists or the
        entry cannot set fg/bg colors.
    """
    PlainTextFormatter.__init__(self, stream, encoding=encoding)
    fd = stream.fileno()
    if term is None:
        # We only apply the remapping if we are guessing the
        # terminal type from the environment. If we get a term
        # type passed explicitly we just use it as-is (if the
        # caller wants the remap just doing the
        # term_alternates lookup there is easy enough.)
        term_env = os.environ.get('TERM')
        term_alt = self.term_alternates.get(term_env)
        # Try the remapped alternate first, then raw $TERM; the first
        # candidate curses accepts wins (break).
        for term in (term_alt, term_env):
            if term is not None:
                try:
                    curses.setupterm(fd=fd, term=term)
                except curses.error:
                    pass
                else:
                    break
        else:
            # for/else: every candidate failed.
            raise TerminfoDisabled(
                'no terminfo entries, not even for "dumb"?')
    else:
        # TODO maybe do something more useful than raising curses.error
        # if term is not in the terminfo db here?
        curses.setupterm(fd=fd, term=term)
    self._term = term
    # Terminal width in columns, from the terminfo 'cols' capability.
    self.width = curses.tigetnum('cols')
    try:
        self.reset = TerminfoReset(curses.tigetstr('sgr0'))
        self.bold = TerminfoMode(curses.tigetstr('bold'))
        self.underline = TerminfoMode(curses.tigetstr('smul'))
        self._color_reset = curses.tigetstr('op')
        self._set_color = (
            curses.tigetstr('setaf'), curses.tigetstr('setab'))
    except (_BogusTerminfo, curses.error):
        # presumably a py2/py3 exception-chaining helper — re-raise as our
        # own error type with the original as cause. TODO confirm semantics.
        compatibility.raise_from(TerminfoHatesOurTerminal(self._term))
    if not all(self._set_color):
        raise TerminfoDisabled(
            'setting background/foreground colors is not supported')
    # NOTE(review): result discarded — appears to probe that tparm works
    # with this terminal's setaf string so failures surface here rather
    # than at first colored write; verify.
    curses.tparm(self._set_color[0], curses.COLOR_WHITE)
    # [fg, bg]
    self._current_colors = [None, None]
    self._modes = set()
    self._pos = 0
    self._fg_cache = defaultdictkey(partial(TerminfoColor, 0))
    self._bg_cache = defaultdictkey(partial(TerminfoColor, 1))
def initialize_cache(package, force=False, cache_dir=None):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: the plugin package (module) to scan; its directory is
        walked for ``*.py`` plugin modules.
    :param force: when true, rescan everything, clean old cache files, and
        regenerate the cache (selecting a writable location if possible).
    :param cache_dir: explicit cache directory; when None one is chosen
        from user/system/module locations as described inline below.
    :return: immutable mapping of plugin key -> sorted plugin data.
    """
    modpath = os.path.dirname(package.__file__)
    pkgpath = os.path.dirname(os.path.dirname(modpath))
    # Default ownership/permissions used if a cache dir must be created;
    # -1 uid/gid means "leave as the current user".
    uid = gid = -1
    mode = 0o755

    if cache_dir is None:
        if not force:
            # use user-generated caches if they exist, fallback to module cache
            if os.path.exists(pjoin(const.USER_CACHE_PATH, CACHE_FILENAME)):
                cache_dir = const.USER_CACHE_PATH
            elif os.path.exists(pjoin(const.SYSTEM_CACHE_PATH, CACHE_FILENAME)):
                cache_dir = const.SYSTEM_CACHE_PATH
                uid = os_data.portage_uid
                gid = os_data.portage_gid
                mode = 0o775
            else:
                cache_dir = modpath
        else:
            # generate module cache when running from git repo, otherwise
            # create system/user cache
            if pkgpath == sys.path[0]:
                cache_dir = modpath
            elif os_data.uid in (os_data.root_uid, os_data.portage_uid):
                cache_dir = const.SYSTEM_CACHE_PATH
                uid = os_data.portage_uid
                gid = os_data.portage_gid
                mode = 0o775
            else:
                cache_dir = const.USER_CACHE_PATH

    # put pkgcore consumer plugins (e.g. pkgcheck) inside pkgcore cache dir
    if cache_dir in (const.SYSTEM_CACHE_PATH, const.USER_CACHE_PATH):
        chunks = package.__name__.split('.', 1)
        if chunks[0] != os.path.basename(cache_dir):
            cache_dir = pjoin(cache_dir, chunks[0])

    # package plugin cache, see above.
    package_cache = defaultdict(set)
    stored_cache_name = pjoin(cache_dir, CACHE_FILENAME)
    stored_cache = _read_cache_file(package, stored_cache_name)

    if force:
        _clean_old_caches(cache_dir)

    # Directory cache, mapping modulename to
    # (mtime, set([keys]))
    modlist = listdir_files(modpath)
    modlist = set(x for x in modlist
                  if os.path.splitext(x)[1] == '.py' and x != '__init__.py')

    # NOTE(review): cache_stale is assigned but never read; staleness is
    # re-derived from the set comparison at the end.
    cache_stale = False
    # Hunt for modules.
    actual_cache = defaultdict(set)
    # Memoize mtime lookups per file path.
    mtime_cache = mappings.defaultdictkey(lambda x: int(os.path.getmtime(x)))
    for modfullname in sorted(modlist):
        modname = os.path.splitext(modfullname)[0]
        # It is an actual module. Check if its cache entry is valid.
        mtime = mtime_cache[pjoin(modpath, modfullname)]
        vals = stored_cache.get((modname, mtime))
        if vals is None or force:
            # Cache entry is stale.
            # NOTE(review): stored_cache is keyed on (modname, mtime)
            # tuples, so this bare-modname lookup always hits the default;
            # the logged "stored" value is always 0 — verify.
            logger.debug('stale because of %s: actual %s != stored %s',
                         modname, mtime,
                         stored_cache.get(modname, (0, ()))[0])
            cache_stale = True
            # NOTE(review): `entries` is never used below.
            entries = []
            qualname = '.'.join((package.__name__, modname))
            module = import_module(qualname)
            registry = getattr(module, PLUGIN_ATTR, {})
            vals = set()
            for key, plugs in registry.items():
                for idx, plug_name in enumerate(plugs):
                    if isinstance(plug_name, str):
                        # string entry: resolve it to the actual object.
                        plug = _process_plugin(
                            package, _plugin_data(key, 0, qualname, plug_name))
                    else:
                        plug = plug_name
                    if plug is None:
                        # import failure, ignore it, error already logged
                        continue
                    priority = getattr(plug, 'priority', 0)
                    if not isinstance(priority, int):
                        logger.error(
                            "ignoring plugin %s: has a non integer priority: %s",
                            plug, priority)
                        continue
                    if plug_name is plug:
                        # this means it's an object, rather than a string; store
                        # the offset.
                        plug_name = idx
                    data = _plugin_data(key, priority, qualname, plug_name)
                    vals.add(data)
        # Record the (possibly freshly scanned) entry and fold it into the
        # per-key package cache.
        actual_cache[(modname, mtime)] = vals
        for data in vals:
            package_cache[data.key].add(data)

    if force or set(stored_cache) != set(actual_cache):
        logger.debug('updating cache %r for new plugins', stored_cache_name)
        ensure_dirs(cache_dir, uid=uid, gid=gid, mode=mode)
        _write_cache_file(stored_cache_name, actual_cache, uid=uid, gid=gid)

    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.items())
def __init__(self, options):
    """Set up per-repo required checksum tracking.

    :param options: passed through to base.Template.
    """
    base.Template.__init__(self, options)
    self.seen_checksums = {}

    def _required_hashes(repo):
        # Repos lacking a config expose no manifest hash requirements.
        if hasattr(repo, 'config'):
            return frozenset(repo.config.manifests.hashes)
        return frozenset()

    self.required_checksums = mappings.defaultdictkey(_required_hashes)