def reload_config(self):
    """Reload the pkgcore configuration and refresh the default domain.

    Only PORTAGE_CONFIGROOT is honoured as a config-root override;
    any other non-empty config_root is rejected.
    """
    env_root = os.environ.get('PORTAGE_CONFIGROOT', '')
    # Reject a config_root unless it is empty or matches the env override.
    if self.config_root not in ('', env_root):
        raise NotImplementedError(
            'pkgcore supports only PORTAGE_CONFIGROOT')
    config = load_config()
    self._domain = config.get_default('domain')
def find_repository(path: Path, conf_path: typing.Optional[Path] = None) -> RepoTuple:
    """Find an ebuild repository in specified `path`.

    Find an ebuild repository in specified `path`, and return initiated
    a tuple of (domain, repo object).  If `conf_path` is specified,
    it overrides config location.
    """
    location = None if conf_path is None else str(conf_path)
    config = load_config(location=location)
    domain = config.get_default('domain')

    # Configured repositories are matched explicitly; longest locations
    # are tried first so nested repos resolve to the innermost one.
    by_location_length = sorted(domain.ebuild_repos_raw,
                                key=lambda r: len(r.location))
    for repo in reversed(by_location_length):
        # Walk upwards from `path` until the filesystem root
        # (the root is its own parent).
        current = path
        while not current.samefile(current / '..'):
            if current.samefile(repo.location):
                return RepoTuple(domain, repo)
            current = current / '..'

    # Not a configured repository -- fall back to unconfigured repo search.
    unconfigured = domain.find_repo(str(path), config=config, configure=False)
    return RepoTuple(domain, unconfigured)
def test_load_config(self):
    """load_config honours user config, prepends, and the skip flag."""
    user_conf = self.user_config.name

    config = load_config(user_conf_file=user_conf)
    self.assertEqual(config.foo['foo'], ((), {}))

    # Test user config overrides system config.
    config = load_config(user_conf_file=user_conf,
                         system_conf_file=self.system_config.name)
    self.assertEqual(config.foo['foo'], ((), {}))

    # Test prepends.
    prepends = [{'myfoo': basics.HardCodedConfigSection({'inherit': ['foo']})}]
    config = load_config(user_conf_file=user_conf, prepend_sources=prepends)
    self.assertEqual(config.foo['myfoo'], ((), {}))

    # Test disabling loading.
    config = load_config(user_conf_file=user_conf, skip_config_files=True)
    self.assertRaises(KeyError, operator.getitem, config.foo, 'foo')
def store_config(namespace, attr):
    """Build a config manager from parsed CLI options and store it.

    Converts `namespace.new_config` and `namespace.add_config` into
    config sections, appends them to the loaded config, and stores the
    resulting manager on `namespace` under `attr`.
    """
    # list() keeps the result subscriptable: map() is lazy on Python 3,
    # so the bare map object used previously could not be indexed.
    configs = list(map(_convert_config_mods,
                       [namespace.new_config, namespace.add_config]))
    # add necessary inherits for add_config: each added section
    # implicitly inherits the section of the same name.
    # (.items() instead of the py2-only .iteritems() -- identical
    # iteration semantics, works on both Python 2 and 3.)
    for key, vals in configs[1].items():
        vals.setdefault('inherit', key)
    configs = [{section: basics.ConfigSectionFromStringDict(vals)
                for section, vals in d.items()}
               for d in configs if d]
    config = load_config(
        skip_config_files=namespace.empty_config,
        debug=getattr(namespace, 'debug', False),
        append_sources=tuple(configs))
    setattr(namespace, attr, config)
def test_load_config(self):
    """Exercise load_config: user/system precedence, prepends, skipping."""
    kwargs = {'user_conf_file': self.user_config.name}

    mgr = load_config(**kwargs)
    self.assertEqual(mgr.foo['foo'], ((), {}))

    # The user config wins over the system config.
    mgr = load_config(system_conf_file=self.system_config.name, **kwargs)
    self.assertEqual(mgr.foo['foo'], ((), {}))

    # Prepended sources show up in the resulting manager.
    source = {'myfoo': basics.HardCodedConfigSection({'inherit': ['foo']})}
    mgr = load_config(prepend_sources=[source], **kwargs)
    self.assertEqual(mgr.foo['myfoo'], ((), {}))

    # With file loading disabled, 'foo' must not be present.
    mgr = load_config(skip_config_files=True, **kwargs)
    self.assertRaises(KeyError, operator.getitem, mgr.foo, 'foo')
def load_config(self):
    """Override this if you need a different way of loading config.

    Builds config sections from the --new-config / --add-config data
    and returns a config manager that includes them.
    """
    # This makes mixing --new-config and --add-config sort of
    # work. Not sure if that is a good thing, but detecting and
    # erroring is about as much work as making it mostly work :)
    # (.items() instead of the py2-only .iteritems(): identical
    # iteration semantics on Python 2, and also works on Python 3.)
    new_config = {
        name: basics.ConfigSectionFromStringDict(val)
        for name, val in self.new_config.items()}
    add_config = {}
    for name, config in self.add_config.items():
        # Each added section implicitly inherits the section it extends.
        config.setdefault('inherit', name)
        add_config[name] = basics.ConfigSectionFromStringDict(config)
    # Triggers failures if these get mucked with after this point
    # (instead of silently ignoring).
    self.add_config = self.new_config = None
    return load_config(
        debug=self.debug,
        append_sources=(new_config, add_config),
        skip_config_files=self.empty_config)
def store_config(namespace, attr, global_config=()):
    """Build a config manager from parsed CLI options and store it.

    Converts `namespace.new_config` and `namespace.add_config` into
    config sections, combines them with `global_config`, and stores the
    resulting manager on `namespace` under `attr`.
    """
    # list() keeps the result subscriptable: map() is lazy on Python 3,
    # so the bare map object used previously could not be indexed.
    configs = list(map(
        _convert_config_mods,
        [namespace.new_config, namespace.add_config]))
    # add necessary inherits for add_config: each added section
    # implicitly inherits the section of the same name.
    # (.items() instead of the py2-only .iteritems() -- same iteration
    # semantics, works on both Python 2 and 3.)
    for key, vals in configs[1].items():
        vals.setdefault('inherit', key)
    configs = [{section: basics.ConfigSectionFromStringDict(vals)
                for section, vals in d.items()}
               for d in configs if d]
    config = load_config(
        skip_config_files=namespace.empty_config,
        prepend_sources=tuple(global_config),
        append_sources=tuple(configs),
        location=namespace.override_config,
        **vars(namespace))
    setattr(namespace, attr, config)
def store_config(namespace, attr, global_config=()):
    """Build a config manager from parsed CLI options and store it on
    `namespace` under `attr`, combining user sections with `global_config`.
    """
    # Materialize as a list: map() is lazy on Python 3, so the result
    # could not be indexed as configs[1] below.
    configs = list(map(_convert_config_mods,
                       [namespace.new_config, namespace.add_config]))
    # add necessary inherits for add_config: each added section
    # implicitly inherits the section of the same name.
    # (.items() replaces py2-only .iteritems(); iteration is identical.)
    for key, vals in configs[1].items():
        vals.setdefault('inherit', key)
    configs = [{
        section: basics.ConfigSectionFromStringDict(vals)
        for section, vals in d.items()
    } for d in configs if d]
    config = load_config(skip_config_files=namespace.empty_config,
                         prepend_sources=tuple(global_config),
                         append_sources=tuple(configs),
                         location=namespace.override_config,
                         **vars(namespace))
    setattr(namespace, attr, config)
# hpy is imported for interactive heap inspection -- it is not used in
# this visible snippet, presumably intended for a REPL/profiling session.
from guppy import hpy
from pkgcore.config import load_config

# Load the default pkgcore configuration manager.
c = load_config()
def __init__(self):
    # Load the pkgcore configuration and keep the domain collection around.
    self._config = load_config()
    self._domains = self._config.domain
    # 'livefs domain' is presumably the running system's domain -- the
    # helper's semantics are defined elsewhere; TODO confirm.
    self._set_default_domain('livefs domain')
    self._get_repos()
#!/usr/bin/env python from pkgcore.config import load_config from pkgcore.util.file_type import file_identifier import re debug_paths = ["/usr/lib%s/debug" % (x, ) for x in ("64", "32", "")] fi = file_identifier() vdbs = load_config().get_default("domain").vdb for repo in vdbs: for pkg in repo: contents = getattr(pkg, 'contents', ()) if not contents: continue files = contents.iterfiles() for obj in files: res = fi(obj.location) if res is None: # nonexistant file. continue if res.startswith("ELF "): break else: # no elf objects continue for path in debug_paths: if path in contents: break else:
#!/usr/bin/env python3 from pkgcore.config import load_config from pkgcore.util.file_type import file_identifier debug_paths = ["/usr/lib/debug"] fi = file_identifier() vdbs = load_config().get_default("domain").all_installed_repos for pkg in sorted(vdbs): contents = getattr(pkg, 'contents', ()) if not contents: continue files = contents.iterfiles() for obj in files: res = fi(obj.location) if res is None: # nonexistent file. continue if res.startswith("ELF "): break else: # no elf objects continue for path in debug_paths: if path in contents: break else: # no debug bits, but is elf.
#!/usr/bin/env python from pkgcore.config import load_config from pkgcore.util.file_type import file_identifier import re debug_paths = ["/usr/lib/debug"] fi = file_identifier() vdbs = load_config().get_default("domain").vdb for repo in vdbs: for pkg in repo: contents = getattr(pkg, 'contents', ()) if not contents: continue files = contents.iterfiles() for obj in files: res = fi(obj.location) if res is None: # nonexistant file. continue if res.startswith("ELF "): break else: # no elf objects continue for path in debug_paths: if path in contents: break else:
def main() -> int:
    """CLI interface for kuroneko scraper.

    Loads the bug database (local file or remote URL with caching),
    matches it against installed packages from the pkgcore vdb, and
    prints the applicable bugs.  Returns the process exit status.
    """
    colorama.init()
    argp = argparse.ArgumentParser()
    # Exactly one of --database / --database-url may be given.
    db_source = argp.add_mutually_exclusive_group()
    db_source.add_argument('-d', '--database',
                           type=argparse.FileType('r'),
                           help='Use bug database from specified json file '
                                '(if not specified, database will be fetched '
                                'from --database-url)')
    db_source.add_argument('--database-url',
                           default=DEFAULT_DB_URL,
                           help=f'Fetch bug database from specified URL '
                                f'(default: {DEFAULT_DB_URL})')
    argp.add_argument('--cache-file',
                      help=f'File used to store a cached copy of bug database '
                           f'(default: {DEFAULT_CACHE_PATH})')
    argp.add_argument('-q', '--quiet',
                      action='store_true',
                      help='Disable progress messages')
    args = argp.parse_args()

    # load the database
    db = Database()
    if args.database is not None:
        # A local file was passed explicitly on the command line.
        if not args.quiet:
            print(f'Using local database {args.database.name}',
                  file=sys.stderr)
    else:
        # Remote database, fetched through an on-disk cache.
        if not args.quiet:
            print(f'Using remote database {args.database_url}',
                  file=sys.stderr)
        if args.cache_file is None:
            os.makedirs(XDG_CACHE_HOME, exist_ok=True)
            args.cache_file = DEFAULT_CACHE_PATH
        args.database = cached_get(args.database_url, args.cache_file)
        if not args.quiet:
            # NOTE(review): a BytesIO result appears to indicate fresh
            # data was fetched rather than the cache reused -- confirm
            # against cached_get's contract.
            if isinstance(args.database, io.BytesIO):
                print('Database update fetched', file=sys.stderr)
            else:
                print('Local cache is up-to-date', file=sys.stderr)
    db.load(args.database)
    args.database.close()

    # initialize pkgcore
    config = load_config()
    domain = config.get_default('domain')
    vdb = domain.repos_raw['vdb']

    # do a quick search for vulnerable packages
    restrict = packages_to_restriction(db)
    if not args.quiet:
        print('Searching for vulnerable packages', file=sys.stderr)
    vulnerable = vdb.match(restrict)

    # match vulnerable packages to bugs
    if not args.quiet:
        print(file=sys.stderr)
    first_one = True
    for pkg in vulnerable:
        for bug_pkg, bug in find_applicable_bugs(pkg, db):
            # Blank line between entries, but not before the first one.
            if first_one:
                first_one = False
            else:
                print()
            print_bug(bug, bug_pkg, pkg.cpvstr)
    return 0