def get_eclass_data(self, inherits):
    """Return the cacheable entries from a list of inherited eclasses.

    Only make get_eclass_data calls for data you know came from this
    eclass_cache; otherwise be prepared to catch a KeyError for any
    requested eclass that is not known to this cache.
    """
    keys = OrderedFrozenSet(inherits)
    o = self._eclass_data_inst_cache.get(keys)
    if o is None:
        o = ImmutableDict((k, self.eclasses[k]) for k in keys)
        self._eclass_data_inst_cache[keys] = o
    return o
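# A minimal usage sketch (the cache instance and eclass names below are
# hypothetical, not defined in this module): repeated calls with the same
# inherit list return the same cached ImmutableDict instance.
#
#   data = cache.get_eclass_data(["multilib", "toolchain-funcs"])
#   assert data is cache.get_eclass_data(["multilib", "toolchain-funcs"])
#   cache.get_eclass_data(["no-such-eclass"])  # raises KeyError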
def paths(self):
    """Ordered set of all staged paths."""
    return OrderedFrozenSet(
        x.path for x in chain.from_iterable(self.data.values()))
def ebuild_changes(self):
    """Ordered set of all ebuild change objects."""
    return OrderedFrozenSet(x for x in self.pkg_changes if x.ebuild)
def pkg_changes(self):
    """Ordered set of all package change objects."""
    return OrderedFrozenSet(self.data.get(PkgChange, ()))
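# Rough illustration of the `self.data` shape these properties assume (the
# values below are hypothetical): keys are change classes such as PkgChange,
# values are iterables of change objects, e.g.
#
#   data = {PkgChange: [pkg_change1, pkg_change2], ...}
#
# `paths` flattens every staged change's path, while `pkg_changes` and
# `ebuild_changes` narrow down to package-level and ebuild-level changes.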
class EAPI(metaclass=klass.immutable_instance):

    known_eapis = WeakValueDictionary()
    unknown_eapis = WeakValueDictionary()

    def __init__(self, magic, parent=None, phases=(), default_phases=(),
                 mandatory_keys=(), dep_keys=(), metadata_keys=(), eclass_keys=(),
                 tracked_attributes=(), archive_exts=(), optionals=None,
                 ebd_env_options=None):
        sf = object.__setattr__
        sf(self, "_magic", str(magic))
        sf(self, "_parent", parent)
        sf(self, "phases", ImmutableDict(phases))
        sf(self, "phases_rev", ImmutableDict((v, k) for k, v in self.phases.items()))

        # We track the phases that have a default implementation- this is
        # primarily due to DEFINED_PHASES cache values not including it.
        sf(self, "default_phases", frozenset(default_phases))

        sf(self, "mandatory_keys", frozenset(mandatory_keys))
        sf(self, "dep_keys", frozenset(dep_keys))
        sf(self, "metadata_keys",
           self.mandatory_keys | self.dep_keys | frozenset(metadata_keys))
        # variables that eclasses have access to (used by pkgcheck eclass inherit checks)
        sf(self, "eclass_keys",
           self.mandatory_keys | self.dep_keys | frozenset(eclass_keys))
        sf(self, "tracked_attributes",
           frozenset(tracked_attributes) | frozenset(x.lower() for x in dep_keys))
        sf(self, "archive_exts", frozenset(archive_exts))

        if optionals is None:
            optionals = {}
        sf(self, 'options', _optionals_cls(optionals))
        if ebd_env_options is None:
            ebd_env_options = ()
        sf(self, "_ebd_env_options", ebd_env_options)

    @classmethod
    def register(cls, *args, **kwds):
        eapi = cls(*args, **kwds)
        pre_existing = cls.known_eapis.get(eapi._magic)
        if pre_existing is not None:
            raise ValueError(
                f"EAPI '{eapi}' is already known/instantiated- {pre_existing!r}")

        if (getattr(eapi.options, 'bash_compat', False)
                and bash_version() < eapi.options.bash_compat):
            # hard exit if the system doesn't have an adequate bash installed
            raise SystemExit(
                f"EAPI '{eapi}' requires >=bash-{eapi.options.bash_compat}, "
                f"system version: {bash_version()}")

        cls.known_eapis[eapi._magic] = eapi
        # generate EAPI bash libs when running from git repo
        eapi.bash_libs()
        return eapi

    @klass.jit_attr
    def is_supported(self):
        """Check if an EAPI is supported."""
        if EAPI.known_eapis.get(self._magic) is not None:
            if not self.options.is_supported:
                logger.warning(f"EAPI '{self}' isn't fully supported")
                sys.stderr.flush()
            return True
        return False

    @klass.jit_attr
    def bash_funcs_global(self):
        """Internally implemented global EAPI specific functions to skip when exporting."""
        # TODO: This is currently duplicated across EAPI objs, but
        # instead could be cached to a class attr.
        funcs = pjoin(const.EBD_PATH, '.generated', 'funcs', 'global')
        if not os.path.exists(funcs):
            # we're probably running in a cacheless git repo, so generate a cached version
            try:
                os.makedirs(os.path.dirname(funcs), exist_ok=True)
                with open(funcs, 'w') as f:
                    # check=True so a failed script raises CalledProcessError below
                    subprocess.run(
                        [pjoin(const.EBD_PATH, 'generate_global_func_list')],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f"failed to generate list of global EAPI '{self}' specific functions: {str(e)}")
        with open(funcs, 'r') as f:
            return frozenset(line.strip() for line in f)

    @klass.jit_attr
    def bash_funcs(self):
        """Internally implemented EAPI specific functions to skip when exporting."""
        funcs = pjoin(const.EBD_PATH, '.generated', 'funcs', self._magic)
        if not os.path.exists(funcs):
            # we're probably running in a cacheless git repo, so generate a cached version
            try:
                os.makedirs(os.path.dirname(funcs), exist_ok=True)
                with open(funcs, 'w') as f:
                    subprocess.run(
                        [pjoin(const.EBD_PATH, 'generate_eapi_func_list'), self._magic],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f"failed to generate list of EAPI '{self}' specific functions: {str(e)}")
        with open(funcs, 'r') as f:
            return frozenset(line.strip() for line in f)

    @klass.jit_attr
    def bash_cmds_internal(self):
        """EAPI specific commands for this EAPI."""
        cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'internal')
        if not os.path.exists(cmds):
            # we're probably running in a cacheless git repo, so generate a cached version
            try:
                os.makedirs(os.path.dirname(cmds), exist_ok=True)
                with open(cmds, 'w') as f:
                    subprocess.run(
                        [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-i', self._magic],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f'failed to generate list of EAPI {self} internal commands: {str(e)}')
        with open(cmds, 'r') as f:
            return frozenset(line.strip() for line in f)

    @klass.jit_attr
    def bash_cmds_deprecated(self):
        """EAPI specific commands deprecated for this EAPI."""
        cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'deprecated')
        if not os.path.exists(cmds):
            # we're probably running in a cacheless git repo, so generate a cached version
            try:
                os.makedirs(os.path.dirname(cmds), exist_ok=True)
                with open(cmds, 'w') as f:
                    subprocess.run(
                        [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-d', self._magic],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f'failed to generate list of EAPI {self} deprecated commands: {str(e)}')
        with open(cmds, 'r') as f:
            return frozenset(line.strip() for line in f)

    @klass.jit_attr
    def bash_cmds_banned(self):
        """EAPI specific commands banned for this EAPI."""
        cmds = pjoin(const.EBD_PATH, '.generated', 'cmds', self._magic, 'banned')
        if not os.path.exists(cmds):
            # we're probably running in a cacheless git repo, so generate a cached version
            try:
                os.makedirs(os.path.dirname(cmds), exist_ok=True)
                with open(cmds, 'w') as f:
                    subprocess.run(
                        [pjoin(const.EBD_PATH, 'generate_eapi_cmd_list'), '-b', self._magic],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f'failed to generate list of EAPI {self} banned commands: {str(e)}')
        with open(cmds, 'r') as f:
            return frozenset(line.strip() for line in f)

    def bash_libs(self):
        """Generate internally implemented EAPI specific bash libs required by the ebd."""
        eapi_global_lib = pjoin(const.EBD_PATH, '.generated', 'libs', self._magic, 'global')
        script = pjoin(const.EBD_PATH, 'generate_eapi_lib')
        # skip generation when installing as the install process takes care of it
        if not os.path.exists(script):
            return

        if not os.path.exists(eapi_global_lib):
            try:
                os.makedirs(os.path.dirname(eapi_global_lib), exist_ok=True)
                with open(eapi_global_lib, 'w') as f:
                    subprocess.run(
                        [script, '-s', 'global', self._magic],
                        cwd=const.EBD_PATH, stdout=f, check=True)
            except (IOError, subprocess.CalledProcessError) as e:
                raise Exception(
                    f"failed to generate EAPI '{self}' global lib: {str(e)}")

        for phase in self.phases.values():
            eapi_lib = pjoin(const.EBD_PATH, '.generated', 'libs', self._magic, phase)
            if not os.path.exists(eapi_lib):
                try:
                    os.makedirs(os.path.dirname(eapi_lib), exist_ok=True)
                    with open(eapi_lib, 'w') as f:
                        subprocess.run(
                            [script, '-s', phase, self._magic],
                            cwd=const.EBD_PATH, stdout=f, check=True)
                except (IOError, subprocess.CalledProcessError) as e:
                    raise Exception(
                        f"failed to generate EAPI '{self}' phase {phase} lib: {str(e)}")

    @klass.jit_attr
    def archive_exts_regex_pattern(self):
        """Regex pattern for supported archive extensions."""
        pattern = '|'.join(map(re.escape, self.archive_exts))
        if self.options.unpack_case_insensitive:
            return f'(?i:({pattern}))'
        return f'({pattern})'

    @klass.jit_attr
    def archive_exts_regex(self):
        """Regex matching strings ending with supported archive extensions."""
        return re.compile(rf'{self.archive_exts_regex_pattern}$')

    @klass.jit_attr
    def valid_slot_regex(self):
        """Regex matching valid SLOT values."""
        valid_slot = r'[A-Za-z0-9_][A-Za-z0-9+_.-]*'
        if self.options.sub_slotting:
            valid_slot += rf'(/{valid_slot})?'
        return re.compile(rf'^{valid_slot}$')

    @klass.jit_attr
    def atom_kls(self):
        return partial(atom.atom, eapi=self._magic)

    def interpret_cache_defined_phases(self, sequence):
        phases = set(sequence)
        if not self.options.trust_defined_phases_cache:
            if not phases:
                # run them all; cache was generated
                # by a pm that didn't support DEFINED_PHASES
                return frozenset(self.phases)
        phases.discard("-")
        return frozenset(phases)

    def __str__(self):
        return self._magic

    @klass.jit_attr
    def inherits(self):
        """Ordered set containing an EAPI's inheritance tree.

        Note that this assumes a simple, linear inheritance tree.
        """
        eapis = [self]
        eapi = self
        while eapi := eapi._parent:
            eapis.append(eapi)
        return OrderedFrozenSet(eapis)
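# A minimal registration sketch (the phase names and EAPI strings below are
# illustrative only, not the definitions used by this module):
#
#   eapi0 = EAPI.register("0", phases={"src_unpack": "unpack"})
#   eapi1 = EAPI.register("1", parent=eapi0)
#   assert EAPI.known_eapis["1"] is eapi1
#   assert [str(e) for e in eapi1.inherits] == ["1", "0"]
#
# register() rejects duplicate magic strings and exits early when the system
# bash is older than the EAPI's declared bash_compat requirement.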
def inherit(self):
    """Ordered set of directly inherited eclasses."""
    return OrderedFrozenSet(self.data.get("INHERIT", "").split())
def inherited(self):
    """Ordered set of all inherited eclasses."""
    return OrderedFrozenSet(self.data.get("_eclasses_", ()))
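# Example with hypothetical metadata: if self.data were
#   {"INHERIT": "multilib flag-o-matic",
#    "_eclasses_": ("multilib", "flag-o-matic", "toolchain-funcs")}
# then `inherit` would hold only the two directly named eclasses, while
# `inherited` would also include eclasses pulled in indirectly by them.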