def package_keywords_splitter(iterable):
    """Parse '<atom> <keyword>...' lines, logging and skipping malformed atoms.

    Yields (atom, keywords_tuple, line, lineno, path) per valid line.
    """
    for line, lineno, path in iterable:
        fields = line.split()
        try:
            parsed = atom(fields[0])
        except ebuild_errors.MalformedAtom as e:
            logger.error(f'{path!r}, line {lineno}: parsing error: {e}')
        else:
            yield parsed, tuple(fields[1:]), line, lineno, path
def project(self):
    """Return the referenced subproject, or None (logged) if unknown."""
    known = self._projects_xml.projects
    if self._ref in known:
        return known[self._ref]
    logger.error(
        f'projects.xml: subproject {self._ref!r} does not exist')
    return None
def _expand_groups(self, groups):
    """Iteratively expand '@group' references in license groups, in place.

    ``groups`` maps group names to whitespace-separated license strings;
    each value is first split into a token list.  Tokens starting with
    '@' name another group and are replaced by that group's current
    contents.  Passes repeat until one makes no replacement, so nested
    references resolve; empty/unknown references and direct
    self-references are logged and dropped.
    """
    keep_going = True
    # normalize every raw string value to a token list up front
    for k, v in groups.items():
        groups[k] = v.split()
    while keep_going:
        keep_going = False
        for k, v in groups.items():
            # group already fully expanded; nothing to do this pass
            if not any(x[0] == '@' for x in v):
                continue
            keep_going = True
            l = []
            for v2 in v:
                if v2[0] == '@':
                    v2 = v2[1:]
                    if not v2 or v2 not in groups:
                        logger.error(
                            f"invalid license group reference: {v2!r} in {self}"
                        )
                        continue
                    elif v2 == k:
                        # only direct self-reference is caught here; longer
                        # cycles collapse to this case on later passes
                        logger.error(
                            f"cyclic license group references for {v2!r} in {self}"
                        )
                        continue
                    l.extend(groups[v2])
                else:
                    l.append(v2)
            groups[k] = l
def parse(profiles_base, repo_id, known_status=None, known_arch=None):
    """Return the mapping of arches to profiles for a repo."""
    found = []
    fp = pjoin(profiles_base, 'profiles.desc')
    try:
        for lineno, line in iter_read_bash(fp, enum_line=True):
            fields = line.split()
            # each valid line is exactly three whitespace-separated fields
            if len(fields) != 3:
                logger.error(
                    f"{repo_id}::profiles/profiles.desc, "
                    f"line {lineno}: invalid profile line format: "
                    "should be 'arch profile status'")
                continue
            arch, profile, status = fields
            if known_status is not None and status not in known_status:
                logger.warning(
                    f"{repo_id}::profiles/profiles.desc, "
                    f"line {lineno}: unknown profile status: {status!r}")
            if known_arch is not None and arch not in known_arch:
                logger.warning(f"{repo_id}::profiles/profiles.desc, "
                               f"line {lineno}: unknown arch: {arch!r}")
            # normalize the profile path in case of doubled/extra slashes
            path = '/'.join(part for part in profile.split('/') if part)
            deprecated = os.path.exists(
                os.path.join(profiles_base, path, 'deprecated'))
            found.append(
                _KnownProfile(profiles_base, arch, path, status, deprecated))
    except FileNotFoundError:
        logger.debug(
            f"No profile descriptions found at {repo_id}::profiles/profiles.desc"
        )
    return frozenset(found)
def parent_paths(self, data):
    """Resolve 'parent' file entries into absolute profile paths.

    :param data: iterable of (line, lineno, path) tuples read from the
        profile's 'parent' file.
    :return: tuple of (absolute_path, line, lineno) entries.

    When the repo declares the 'portage-2' profile format, a line may be
    qualified as 'repo_id:profile/path'; unqualified lines resolve
    relative to this profile's own directory.
    """
    repo_config = self.repoconfig
    if repo_config is not None and 'portage-2' in repo_config.profile_formats:
        l = []
        for line, lineno, path in data:
            repo_id, separator, profile_path = line.partition(':')
            if separator:
                if repo_id:
                    try:
                        location = self._repo_map[repo_id]
                    except KeyError:
                        # check if requested repo ID matches the current
                        # repo which could be the case when running against
                        # unconfigured, external repos.
                        if repo_id == repo_config.repo_id:
                            location = repo_config.location
                        else:
                            logger.error(
                                f'repo {repo_config.repo_id!r}: '
                                f"'{self.name}/parent' (line {lineno}), "
                                f'bad profile parent {line!r}: '
                                f'unknown repo {repo_id!r}'
                            )
                            continue
                    l.append((abspath(pjoin(location, 'profiles', profile_path)),
                              line, lineno))
                # NOTE(review): lines like ':path' (empty repo id before the
                # colon) fall through here with nothing appended — confirm
                # that silently dropping them is intended.
            else:
                # no colon: repo_id holds the entire line here
                l.append((abspath(pjoin(self.path, repo_id)), line, lineno))
        return tuple(l)
    # non-portage-2 repos: every line is a plain relative profile path
    return tuple((abspath(pjoin(self.path, line)), line, lineno)
                 for line, lineno, path in data)
def arches_desc(self):
    """Arch stability status (GLEP 72).

    See https://www.gentoo.org/glep/glep-0072.html for more details.
    """
    fp = pjoin(self.profiles_base, 'arches.desc')
    statuses = {'stable': set(), 'transitional': set(), 'testing': set()}
    try:
        for lineno, line in iter_read_bash(fp, enum_line=True):
            fields = line.split()
            if len(fields) != 2:
                logger.error(f"{self.repo_id}::profiles/arches.desc, "
                             f"line {lineno}: invalid line format: "
                             "should be '<arch> <status>'")
                continue
            arch, status = fields
            if arch not in self.known_arches:
                logger.warning(f"{self.repo_id}::profiles/arches.desc, "
                               f"line {lineno}: unknown arch: {arch!r}")
            elif status not in statuses:
                logger.warning(
                    f"{self.repo_id}::profiles/arches.desc, "
                    f"line {lineno}: unknown status: {status!r}")
            else:
                statuses[status].add(arch)
    except FileNotFoundError:
        # no arches.desc is fine; every status set stays empty
        pass
    return mappings.ImmutableDict(statuses)
def _process_plugin(package, plug, filter_disabled=False):
    """Resolve a cached plugin entry to the actual plugin object.

    :param package: plugin package the cache entry belongs to.
    :param plug: cache entry whose ``target`` is either a dotted path
        (str) to load directly, or an index (int) into the registry of
        the module named by ``plug.source``.
    :param filter_disabled: if True, return None for disabled plugins.
    :return: the resolved plugin, or None for stale/invalid entries.
    """
    if isinstance(plug.target, str):
        plug = modules.load_any(plug.target)
    elif isinstance(plug.target, int):
        module = modules.load_any(plug.source)
        plugs = getattr(module, PLUGIN_ATTR, {})
        plugs = plugs.get(plug.key, [])
        if len(plugs) <= plug.target:
            # Fixed: this is not an exception handler, so logger.exception
            # would attach a bogus "NoneType: None" traceback; use
            # logger.error like the unknown-target branch below.
            logger.error(
                "plugin cache for %s, %s, %s is somehow wrong; no item at position %s",
                package.__name__, plug.source, plug.key, plug.target)
            return None
        plug = plugs[plug.target]
    else:
        logger.error(
            "package %s, plug %s; non int, non string. wtf?",
            package.__name__, plug)
        return None

    if filter_disabled:
        if getattr(plug, 'disabled', False):
            logger.debug("plugin %s is disabled, skipping", plug)
            return None
        f = getattr(plug, '_plugin_disabled_check', None)
        if f is not None and f():
            logger.debug("plugin %s is disabled, skipping", plug)
            return None

    return plug
def _write_cache_file(path, data, uid=-1, gid=-1):
    """Write a new cache file."""
    cachefile = None
    try:
        try:
            cachefile = AtomicWriteFile(
                path, binary=False, perms=0o664, uid=uid, gid=gid)
            cachefile.write(CACHE_HEADER + "\n")
            for (module, mtime), plugs in sorted(
                    data.items(), key=operator.itemgetter(0)):
                entries = ':'.join(
                    f'{plug.key},{plug.priority},{plug.target}'
                    for plug in sort_plugs(plugs))
                cachefile.write(f'{module}:{mtime}:{entries}\n')
            cachefile.close()
        except EnvironmentError as e:
            # We cannot write a new cache. We should log this
            # since it will have a performance impact.
            # Use error, not exception for this one: the traceback
            # is not necessary and too alarming.
            logger.error('Cannot write cache for %s: %s. '
                         'Try running pplugincache.', path, e)
    finally:
        # discard() after a successful close() is a no-op; on failure it
        # removes the partially-written temp file
        if cachefile is not None:
            cachefile.discard()
def update_mtime(path, timestamp=None):
    """Set the atime/mtime of ``path``, defaulting to the current time.

    :param path: filesystem path to touch.
    :param timestamp: seconds since the epoch; ``None`` means now.
    Failures are logged rather than raised.
    """
    if timestamp is None:
        timestamp = time.time()
    logger.debug(f"updating vdb timestamp for {path!r}")
    try:
        os.utime(path, (timestamp, timestamp))
    except EnvironmentError as e:
        # fixed log grammar: was "failed updated vdb timestamp"
        logger.error(f"failed updating vdb timestamp for {path!r}: {e}")
def _clean_old_caches(path):
    """Remove stale plugin cache files left behind by older versions."""
    for name in ('plugincache2',):
        target = pjoin(path, name)
        try:
            unlink_if_exists(target)
        except EnvironmentError as e:
            logger.error(
                "attempting to clean old plugin cache %r failed with %s",
                target, e)
def _scan_directory(path):
    """Return validly-named update files in ``path``, sorted by their
    regex groups (group 2 first, then group 1); bad names are logged."""
    entries = []
    for filename in listdir_files(path):
        match = valid_updates_re.match(filename)
        if match is None:
            logger.error(f'incorrectly named update file: {filename!r}')
            continue
        entries.append(((match.group(2), match.group(1)), filename))
    entries.sort(key=itemgetter(0))
    return [name for _, name in entries]
def eapi(self, data):
    """Parse an 'eapi' file's data, falling back to EAPI 0 when empty.

    Multiple non-empty lines are logged but the first is still used.
    """
    lines = [x[0].strip() for x in data]
    lines = [x for x in lines if x]
    if not lines:
        logger.error(f'{self.name}/eapi, empty file')
        return get_eapi('0')
    if len(lines) > 1:
        logger.error(f'{self.name}/eapi, multiple lines detected')
    return get_eapi(lines[0])
def category_dirs(self):
    """Visible category directories under the repo base, as a frozenset."""
    try:
        # hidden dirs and known non-category dirs are filtered out;
        # names are interned since they recur heavily across the repo
        candidates = (x for x in listdir_dirs(self.base)
                      if not x.startswith('.'))
        candidates = filterfalse(
            self.false_categories.__contains__, candidates)
        return frozenset(map(intern, candidates))
    except EnvironmentError as e:
        logger.error(f"failed listing categories: {e}")
        return ()
def groups(self):
    """Return the mapping of defined license groups to licenses for a repo."""
    try:
        raw = read_dict(self.license_groups_path, splitter=' ')
    except EnvironmentError:
        # missing/unreadable file: behave as if no groups are defined
        return mappings.ImmutableDict()
    except BashParseError as pe:
        logger.error(f"failed parsing license_groups: {pe}")
        return mappings.ImmutableDict()
    self._expand_groups(raw)
    return mappings.ImmutableDict(
        (group, frozenset(licenses)) for group, licenses in raw.items())
def _parse_xml(self, source=None):
    """Parse a metadata.xml source into maintainer/use/longdesc attributes.

    Populates ``_maintainers``, ``_longdescription`` and ``_local_use``
    on self; on XML syntax errors the attributes are reset to empty
    defaults and the error is logged instead of raised.
    """
    if source is None:
        source = self._source.bytes_fileobj()
    try:
        tree = etree.parse(source)
    except etree.XMLSyntaxError as e:
        self._maintainers = ()
        self._local_use = mappings.ImmutableDict()
        self._longdescription = None
        self._source = None
        logger.error(e)
        return

    # TODO: handle i18n properly
    maintainers = []
    for x in tree.findall("maintainer"):
        name = email = description = None
        for e in x:
            if e.tag == "name":
                name = e.text
            elif e.tag == "email":
                email = e.text
            elif e.tag == 'description' and e.get('lang', 'en') == 'en':
                description = e.text
        maintainers.append(
            Maintainer(name=name, email=email,
                       description=description, maint_type=x.get('type')))

    self._maintainers = tuple(maintainers)

    # Could be unicode!
    self._longdescription = None
    for x in tree.findall("longdescription"):
        # only the English description is kept
        if x.get('lang', 'en') != 'en':
            continue
        # collapse all internal whitespace to single spaces
        longdesc = ''.join(x.itertext())
        if longdesc:
            self._longdescription = ' '.join(longdesc.split())
        break

    self._source = None

    # lang="" is property of <use/>
    self._local_use = mappings.ImmutableDict()
    for x in tree.findall("use"):
        if x.get('lang', 'en') != 'en':
            continue
        self._local_use = mappings.ImmutableDict(
            (e.attrib['name'], ' '.join(''.join(e.itertext()).split()))
            for e in x.findall('flag')
            if 'name' in e.attrib)
        break
def _write_data(self):
    """Atomically rewrite the serialized cache, discarding partial writes."""
    handler = None
    try:
        handler = AtomicWriteFile(self._location)
        self._serialize_to_handle(list(self.data.items()), handler)
        handler.close()
    except PermissionError as e:
        logger.error(
            f'failed writing binpkg cache to {self._location!r}: {e}')
    finally:
        # after a successful close() this discard() is a no-op; on any
        # failure it drops the temp file instead of the existing cache
        if handler is not None:
            handler.discard()
def parents(self):
    """Instantiate this profile's parent nodes, logging and skipping broken ones."""
    kls = getattr(self, 'parent_node_kls', self.__class__)
    nodes = []
    for path, line, lineno in self.parent_paths:
        try:
            nodes.append(kls(path))
        except ProfileError as e:
            repo_id = self.repoconfig.repo_id
            logger.error(
                f"repo {repo_id!r}: '{self.name}/parent' (line {lineno}), "
                f'bad profile parent {line!r}: {e.error}'
            )
    return tuple(nodes)
def deprecated(self, data):
    """Parse a 'deprecated' file into (replacement, message), or None."""
    if data is None:
        return None
    lines = iter(readlines_utf8(data[0]))
    try:
        replacement = next(lines).strip()
    except StopIteration:
        # only an empty file hits this: no replacement profile listed,
        # which means the file is badly formatted.
        logger.error(
            f"deprecated profile missing replacement: '{self.name}/deprecated'")
        return None
    msg = "\n".join(x.lstrip("#").strip() for x in lines)
    return (replacement, msg)
def f(node):
    """Recursively yield every resolvable ancestor of ``node``, then node itself."""
    for path, line, lineno in node.parent_paths:
        try:
            parent = self._node_kls._autodetect_and_create(path)
        except ProfileError as e:
            repo_id = node.repoconfig.repo_id
            logger.error(
                f"repo {repo_id!r}: '{self.name}/parent' (line {lineno}), "
                f'bad profile parent {line!r}: {e.error}'
            )
            continue
        yield from f(parent)
    yield node
def _read_cache_file(package, cache_path):
    """Read an existing cache file.

    Returns a mapping of (module_name, mtime) -> set of _plugin_data
    entries.  A wrong header or any parse failure logs a warning and
    yields an empty result so the cache gets regenerated.
    """
    stored_cache = {}
    cache_data = list(readlines_ascii(cache_path, True, True, False))
    if len(cache_data) >= 1:
        if cache_data[0] != CACHE_HEADER:
            logger.warning(
                "plugin cache has a wrong header: %r, regenerating",
                cache_data[0])
            cache_data = []
        else:
            # drop the header line before parsing entries
            cache_data = cache_data[1:]
    if not cache_data:
        return {}
    try:
        for line in cache_data:
            # each line: "<module>:<mtime>:<key,prio,target>[:...]"
            module, mtime, entries = line.split(':', 2)
            mtime = int(mtime)
            # Needed because ''.split(':') == [''], not []
            if not entries:
                entries = set()
            else:
                # flatten to one comma-separated list of key/prio/target triples
                entries = entries.replace(':', ',').split(',')
                if not len(entries) % 3 == 0:
                    logger.error(
                        "failed reading cache %s; entries field isn't "
                        "divisable by 3: %r", cache_path, entries)
                    continue
                entries = iter(entries)
                def f(val):
                    # targets are ints (registry index) or dotted paths
                    if val.isdigit():
                        val = int(val)
                    return val
                # zip of the same iterator consumes it three items at a time
                entries = set(
                    _plugin_data(
                        key, int(priority),
                        f'{package.__name__}.{module}', f(target))
                    for (key, priority, target)
                    in zip(entries, entries, entries))
            stored_cache[(module, mtime)] = entries
    except IGNORED_EXCEPTIONS:
        raise
    except Exception as e:
        logger.warning(
            "failed reading cache; exception %s, regenerating.", e)
        stored_cache.clear()
    return stored_cache
def _parse_atom_negations(self, data): """Parse files containing optionally negated package atoms.""" neg, pos = [], [] for line, lineno, path in data: if line[0] == '-': line = line[1:] if not line: logger.error(f"{path!r}, line {lineno}: '-' negation without an atom") continue l = neg else: l = pos try: l.append(self.eapi_atom(line)) except ebuild_errors.MalformedAtom as e: logger.error(f'{path!r}, line {lineno}: parsing error: {e}') return tuple(neg), tuple(pos)
def _parse_package_use(self, data):
    """Parse package.use-style data into a cat/pkg -> chunked-USE mapping.

    Each line is '<atom> <flag>...'; malformed atoms and lines missing
    flags are logged and skipped.
    """
    d = defaultdict(list)
    # split the data down ordered cat/pkg lines
    for line, lineno, path in data:
        l = line.split()
        try:
            a = self.eapi_atom(l[0])
        except ebuild_errors.MalformedAtom as e:
            # fixed: include file/line context like every sibling parser
            # (package_keywords_splitter, _parse_atom_negations) does
            logger.error(f'{path!r}, line {lineno}: parsing error: {e}')
            continue
        if len(l) == 1:
            logger.error(f'{path!r}, line {lineno}: missing USE flag(s): {line!r}')
            continue
        d[a.key].append(misc.chunked_data(a, *split_negations(l[1:])))

    return ImmutableDict((k, misc._build_cp_atom_payload(v, atom(k)))
                         for k, v in d.items())
def _split_use_desc_file(self, name, converter, matcher=True):
    """Yield parsed entries from a use.desc-style profiles file.

    Each line is '<key> <value> - <description>'; ``converter`` maps the
    raw key, and with ``matcher`` set the converted key is split into a
    (key[0], (key[1], desc)) pair.  Parse failures are logged per line.
    """
    line = None
    fp = pjoin(self.profiles_base, name)
    try:
        for line in iter_read_bash(fp):
            try:
                key, val = line.split(None, 1)
                key = converter(key)
                if matcher:
                    result = key[0], (key[1], val.split('-', 1)[1].strip())
                else:
                    result = key, val.split('-', 1)[1].strip()
                yield result
            except ValueError as e:
                logger.error(f'failed parsing {fp!r}, line {line!r}: {e}')
    except FileNotFoundError:
        # missing file: simply yield nothing
        pass
    except ValueError as e:
        logger.error(f'failed parsing {fp!r}: {e}')
def _preload_eclass(self, ec_file, async_req=False): """Preload an eclass into a bash function. Avoids the cost of going to disk on inherit. Preloading eutils (which is heavily inherited) speeds up regen times for example. :param ec_file: filepath of eclass to preload :return: boolean, True for success """ if not os.path.exists(ec_file): logger.error(f"failed: {ec_file}") return False self.write(f"preload_eclass {ec_file}") if self.expect("preload_eclass succeeded", async_req=async_req, flush=True): return True return False
def shutdown_all_processors():
    """Kill all known processors."""
    try:
        # drain the active pool first, then the inactive one
        for pool in (active_ebp_list, inactive_ebp_list):
            while pool:
                try:
                    pool.pop().shutdown_processor(
                        ignore_keyboard_interrupt=True)
                except EnvironmentError:
                    # a processor that died already is fine
                    pass
    except Exception as e:
        traceback.print_exc()
        logger.error(e)
        raise
def sandbox_summary(self, move_log=False):
    """If the instance is sandboxed, print the sandbox access summary.

    :param move_log: location to move the sandbox log to if a failure occurred
    :return: 1 if violations were reported, else 0.
    """
    if not os.path.exists(self.__sandbox_log):
        self.write("end_sandbox_summary")
        return 0
    with open(self.__sandbox_log, "r") as f:
        violations = [x.strip() for x in f if x.strip()]
    if not violations:
        self.write("end_sandbox_summary")
        return 0
    if not move_log:
        move_log = self.__sandbox_log
    elif move_log != self.__sandbox_log:
        # bugfix: the destination must be opened for writing; the previous
        # default read-only mode made myf.write() raise
        # io.UnsupportedOperation. Append so existing content is kept.
        with open(move_log, "a") as myf:
            for x in violations:
                myf.write(x + "\n")
    # XXX this is fugly, use a colorizer or something
    # (but it is better than "from output import red" (portage's output))
    def red(text):
        return '\x1b[31;1m%s\x1b[39;49;00m' % (text, )

    self.write(
        red("--------------------------- ACCESS VIOLATION SUMMARY "
            "---------------------------") + "\n")
    self.write(red(f"LOG FILE = \"{move_log}\"") + "\n\n")
    for x in violations:
        self.write(x + "\n")
    self.write(
        red("-----------------------------------------------------"
            "---------------------------") + "\n")
    self.write("end_sandbox_summary")
    try:
        os.remove(self.__sandbox_log)
    except (IOError, OSError) as e:
        logger.error(f"exception caught when cleansing sandbox_log={e}")
    return 1
def _get_packages(self, category):
    """Return package names merged under ``category``, caching versions.

    Side effect: records (category, package) -> [fullver, ...] into
    ``self._versions_tmp_cache``.

    :raise KeyError: the category directory could not be listed.
    :raise InvalidCPV: a merged pkg dir has a missing or non-standard
        (-scm/-try) version component.
    """
    cpath = pjoin(self.location, category.lstrip(os.path.sep))
    l = set()
    d = {}
    bad = False
    try:
        for x in listdir_dirs(cpath):
            # skip temp/lock/in-progress dirs left behind by the pkg manager
            if x.startswith(".tmp.") or x.endswith(".lockfile") \
                    or x.startswith("-MERGING-"):
                continue
            try:
                pkg = VersionedCPV(f'{category}/{x}')
            except InvalidCPV:
                bad = True
            # when bad is True, pkg is unbound; the short-circuit below
            # prevents that from raising NameError
            if bad or not pkg.fullver:
                if '-scm' in x:
                    bad = 'scm'
                elif '-try' in x:
                    bad = 'try'
                else:
                    raise InvalidCPV(f'{category}/{x}', 'no version component')
                logger.error(
                    f'merged -{bad} pkg detected: {category}/{x}. '
                    f'throwing exception due to -{bad} not being a valid'
                    ' version component.  Silently ignoring that '
                    'specific version is not viable either since it '
                    'would result in pkgcore stomping whatever it was '
                    f'that -{bad} version merged.  '
                    'Use the offending pkg manager that merged it to '
                    'unmerge it.')
                raise InvalidCPV(
                    f'{category}/{x}', f'{bad} version component is not standard.')
            l.add(pkg.package)
            d.setdefault((category, pkg.package), []).append(pkg.fullver)
    except EnvironmentError as e:
        category = pjoin(self.location, category.lstrip(os.path.sep))
        raise KeyError(
            f'failed fetching packages for category {category}: {e}') from e

    self._versions_tmp_cache.update(d)
    return tuple(l)
def _parse_xml(self, source=None):
    """Parse projects.xml into an immutable email -> Project mapping.

    XML syntax errors are logged and yield an empty mapping.
    """
    if source is None:
        source = self._source.bytes_fileobj()
    try:
        tree = etree.parse(source)
    except etree.XMLSyntaxError as e:
        logger.error(f'failed parsing projects.xml: {e}')
        return mappings.ImmutableDict()

    projects = {}
    for node in tree.findall('project'):
        kwargs = {field: node.findtext(field)
                  for field in ('email', 'name', 'url', 'description')}

        members = []
        for m_node in node.findall('member'):
            m_kwargs = {field: m_node.findtext(field)
                        for field in ('email', 'name', 'role')}
            m_kwargs['is_lead'] = m_node.get('is-lead', '') == '1'
            try:
                members.append(ProjectMember(**m_kwargs))
            except ValueError:
                logger.error(
                    f"project {kwargs['email']} has <member/> with no email"
                )
        kwargs['members'] = members

        subprojects = []
        for sp_node in node.findall('subproject'):
            try:
                subprojects.append(Subproject(
                    ref=sp_node.get('ref'),
                    inherit_members=sp_node.get('inherit-members', '') == '1',
                    projects_xml=self))
            except ValueError:
                logger.error(
                    f"project {kwargs['email']} has <subproject/> with no ref"
                )
        kwargs['subprojects'] = subprojects

        projects[kwargs['email']] = Project(**kwargs)
    return mappings.ImmutableDict(projects)
def packages(self, data): repo_config = self.repoconfig # TODO: get profile-set support into PMS profile_set = repo_config is not None and 'profile-set' in repo_config.profile_formats sys, neg_sys, pro, neg_pro = [], [], [], [] neg_wildcard = False for line, lineno, path in data: try: if line[0] == '-': if line == '-*': neg_wildcard = True elif line[1] == '*': neg_sys.append(self.eapi_atom(line[2:])) elif profile_set: neg_pro.append(self.eapi_atom(line[1:])) else: logger.error( f'invalid line format, ' f'{self.name}/packages, line {lineno}: {line!r}' ) else: if line[0] == '*': sys.append(self.eapi_atom(line[1:])) elif profile_set: pro.append(self.eapi_atom(line)) else: logger.error( f'invalid line format, ' f'{self.name}/packages, line {lineno}: {line!r}' ) except ebuild_errors.MalformedAtom as e: logger.error(f'{self.name}/packages, line {lineno}: {e}') system = [tuple(neg_sys), tuple(sys)] profile = [tuple(neg_pro), tuple(pro)] if neg_wildcard: system.append(neg_wildcard) profile.append(neg_wildcard) return _Packages(tuple(system), tuple(profile))
def generic_handler(self, additional_commands=None):
    """Internal event handler responding to the running processor's requests.

    :type additional_commands: mapping from string to callable.
    :param additional_commands: Extra command handlers.
        Command names cannot have spaces.
        The callable is called with the processor as first arg, and
        remaining string (None if no remaining fragment) as second arg.
        If you need to split the args to command, whitespace splitting
        falls to your func.

    :raise UnhandledCommand: thrown when an unknown command is encountered.
    """
    # note that self is passed in. so... we just pass in the
    # unbound instance. Specifically, via digging through
    # __class__ if you don't do it, sandbox_summary (fex) cannot
    # be overridden, this func will just use this classes version.
    # so dig through self.__class__ for it. :P
    handlers = {"request_sandbox_summary": self.__class__.sandbox_summary}
    f = chuck_UnhandledCommand
    for x in ("prob", "env_receiving_failed", "failed"):
        handlers[x] = f
    del f

    handlers["phases"] = partial(
        chuck_StoppingCommand, lambda f: f.lower().strip() == "succeeded")

    handlers["SIGINT"] = chuck_KeyboardInterrupt
    handlers["SIGTERM"] = chuck_TermInterrupt
    handlers["dying"] = chuck_DyingInterrupt

    if additional_commands is not None:
        # validate before merging so a bad mapping fails fast
        for x in additional_commands:
            if not callable(additional_commands[x]):
                raise TypeError(additional_commands[x])
        handlers.update(additional_commands)

    self.lock()
    try:
        # drain any pending async expects before entering the loop
        if self._outstanding_expects:
            if not self._consume_async_expects():
                logger.error("error in daemon")
                raise UnhandledCommand("expects out of alignment")
        while True:
            line = self.read().strip()
            # split on first whitespace
            cmd, _, args_str = line.partition(' ')
            if not cmd:
                raise InternalError(
                    f"Expected command; instead got nothing from {line!r}")
            if cmd in handlers:
                args = []
                if args_str:
                    args.append(args_str)
                # TODO: handle exceptions raised from handlers better
                handlers[cmd](self, *args)
            else:
                logger.error(f"unhandled command {cmd!r}, line {line!r}")
                raise UnhandledCommand(line)
    except FinishedProcessing as fp:
        # a handler signaled normal completion; return its payload
        v = fp.val
        return v
    finally:
        self.unlock()