def __init__(self, magic, phases, default_phases, metadata_keys, mandatory_keys,
             tracked_attributes, optionals, ebd_env_options=None):
    """Initialize an immutable EAPI definition.

    :param magic: EAPI identifier.
    :param phases: mapping of phase function name -> phase name.
    :param default_phases: phases that have a default implementation.
    :param metadata_keys: metadata keys this EAPI defines.
    :param mandatory_keys: metadata keys that must be present.
    :param tracked_attributes: attributes tracked for this EAPI.
    :param optionals: overrides layered on top of ``eapi_optionals``.
    :param ebd_env_options: mapping exported to the ebd environment,
        defaults to empty.
    """
    sf = object.__setattr__
    sf(self, "magic", magic)
    sf(self, "phases", mappings.ImmutableDict(phases))
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; items() is the equivalent here.
    sf(self, "phases_rev", mappings.ImmutableDict(
        (v, k) for k, v in self.phases.items()))
    # we track the phases that have a default implementation- this is
    # primarily due to DEFINED_PHASES cache values not including it.
    sf(self, "default_phases", frozenset(default_phases))
    sf(self, "mandatory_keys", frozenset(mandatory_keys))
    sf(self, "metadata_keys", frozenset(metadata_keys))
    sf(self, "tracked_attributes", frozenset(tracked_attributes))
    # layer caller-provided optionals over the EAPI-wide defaults
    d = dict(eapi_optionals)
    d.update(optionals)
    sf(self, 'options', optionals_cls(d))
    if ebd_env_options is None:
        ebd_env_options = {}
    sf(self, "ebd_env_options", mappings.ImmutableDict(ebd_env_options))
def __init__(self, magic, parent=None, phases=(), default_phases=(),
             metadata_keys=(), mandatory_keys=(), tracked_attributes=(),
             archive_suffixes=(), optionals=None, ebd_env_options=None):
    """Initialize an immutable EAPI object.

    :param magic: EAPI identifier, stringified and stored as ``_magic``.
    :param parent: EAPI this one inherits from, or None.
    :param phases: mapping of phase function name -> phase name.
    :param default_phases: phases that have a default implementation.
    :param metadata_keys: metadata keys this EAPI defines.
    :param mandatory_keys: metadata keys that must be present.
    :param tracked_attributes: attributes tracked for this EAPI.
    :param archive_suffixes: supported archive suffixes.
    :param optionals: optional feature settings, defaults to empty.
    :param ebd_env_options: option names exported to the ebd env.
    """
    set_attr = object.__setattr__
    set_attr(self, "_magic", str(magic))
    set_attr(self, "_parent", parent)
    phase_map = mappings.ImmutableDict(phases)
    set_attr(self, "phases", phase_map)
    # reverse mapping: phase function -> phase name
    set_attr(self, "phases_rev", mappings.ImmutableDict(
        (func, name) for name, func in phase_map.items()))
    # We track the phases that have a default implementation- this is
    # primarily due to DEFINED_PHASES cache values not including it.
    for attr, value in (("default_phases", default_phases),
                        ("mandatory_keys", mandatory_keys),
                        ("metadata_keys", metadata_keys),
                        ("tracked_attributes", tracked_attributes),
                        ("archive_suffixes", archive_suffixes)):
        set_attr(self, attr, frozenset(value))
    set_attr(self, 'options',
             _optionals_cls({} if optionals is None else optionals))
    set_attr(self, "_ebd_env_options",
             () if ebd_env_options is None else ebd_env_options)
def __init__(self, magic, parent, phases, default_phases, metadata_keys,
             mandatory_keys, tracked_attributes, archive_suffixes, optionals,
             ebd_env_options=None):
    """Initialize an immutable EAPI object.

    :param magic: EAPI identifier, stringified and stored as ``_magic``.
    :param parent: EAPI this one inherits from.
    :param phases: mapping of phase function name -> phase name.
    :param default_phases: phases that have a default implementation.
    :param metadata_keys: metadata keys this EAPI defines.
    :param mandatory_keys: metadata keys that must be present.
    :param tracked_attributes: attributes tracked for this EAPI.
    :param archive_suffixes: supported archive suffixes; also compiled
        into an alternation regex fragment (``archive_suffixes_re``).
    :param optionals: overrides layered on top of ``eapi_optionals``.
    :param ebd_env_options: mapping exported to the ebd environment,
        defaults to empty.
    """
    sf = object.__setattr__
    sf(self, "_magic", str(magic))
    sf(self, "_parent", parent)
    sf(self, "phases", mappings.ImmutableDict(phases))
    # BUG FIX: dict.iteritems() is Python 2 only and raises AttributeError
    # on Python 3; items() is the equivalent here.
    sf(self, "phases_rev", mappings.ImmutableDict(
        (v, k) for k, v in self.phases.items()))
    # We track the phases that have a default implementation- this is
    # primarily due to DEFINED_PHASES cache values not including it.
    sf(self, "default_phases", frozenset(default_phases))
    sf(self, "mandatory_keys", frozenset(mandatory_keys))
    sf(self, "metadata_keys", frozenset(metadata_keys))
    sf(self, "tracked_attributes", frozenset(tracked_attributes))
    sf(self, "archive_suffixes", frozenset(archive_suffixes))
    # regex alternation matching any supported archive suffix
    sf(self, "archive_suffixes_re",
       '(?:%s)' % '|'.join(map(re.escape, archive_suffixes)))
    # layer caller-provided optionals over the EAPI-wide defaults
    d = dict(eapi_optionals)
    d.update(optionals)
    sf(self, 'options', _optionals_cls(d))
    if ebd_env_options is None:
        ebd_env_options = {}
    sf(self, "ebd_env_options", mappings.ImmutableDict(ebd_env_options))
def types(self):
    """Mapping of type name -> immutable {section name: collapsed section}.

    Sections marked inherit-only are skipped.
    """
    by_type = defaultdict(dict)
    for name, sections in self.sections_lookup.items():
        if self._section_is_inherit_only(sections[0]):
            continue
        collapsed = self.collapse_named_section(name)
        by_type[collapsed.type.name][name] = collapsed
    return mappings.ImmutableDict(
        (type_name, mappings.ImmutableDict(named))
        for type_name, named in by_type.items())
def groups(self): """Return the mapping of defined license groups to licenses for a repo.""" try: d = read_dict(self.license_groups_path, splitter=' ') except EnvironmentError: return mappings.ImmutableDict() except BashParseError as pe: logger.error(f"failed parsing license_groups: {pe}") return mappings.ImmutableDict() self._expand_groups(d) return mappings.ImmutableDict((k, frozenset(v)) for (k, v) in d.items())
def groups(self):
    """Mapping of license group name -> tuple of member licenses.

    Returns an empty mapping when the file is missing or fails to parse.
    """
    try:
        d = read_dict(self.license_groups_path, splitter=' ')
    except EnvironmentError:
        # missing or unreadable file: no groups defined
        return mappings.ImmutableDict()
    except BashParseError as pe:
        logger.error("failed parsing license_groups: %s", pe)
        return mappings.ImmutableDict()
    # resolve nested group references in place
    self._expand_groups(d)
    # BUG FIX: dict.iteritems() doesn't exist on Python 3; items() is
    # the equivalent.
    return mappings.ImmutableDict(
        (k, tuple(v)) for (k, v) in d.items())
def _parse_xml(self, source=None):
    """Parse a metadata.xml file, populating maintainer/use/longdescription state.

    Sets ``self._maintainers``, ``self._longdescription`` and
    ``self._local_use``; on XML syntax errors all three fall back to empty
    values and the error is logged.

    :param source: optional file object; defaults to the bytes fileobj of
        the backing ``self._source``.
    """
    if source is None:
        source = self._source.bytes_fileobj()
    try:
        tree = etree.parse(source)
    except etree.XMLSyntaxError as e:
        # malformed XML: degrade to empty metadata rather than raising
        self._maintainers = ()
        self._local_use = mappings.ImmutableDict()
        self._longdescription = None
        self._source = None
        logger.error(e)
        return

    # TODO: handle i18n properly
    maintainers = []
    for x in tree.findall("maintainer"):
        name = email = description = None
        for e in x:
            if e.tag == "name":
                name = e.text
            elif e.tag == "email":
                email = e.text
            # only English descriptions are kept; no lang attr means English
            elif e.tag == 'description' and e.get('lang', 'en') == 'en':
                description = e.text
        maintainers.append(
            Maintainer(name=name, email=email, description=description,
                       maint_type=x.get('type')))
    self._maintainers = tuple(maintainers)

    # Could be unicode!
    self._longdescription = None
    # first English <longdescription/> wins; whitespace is normalized
    for x in tree.findall("longdescription"):
        if x.get('lang', 'en') != 'en':
            continue
        longdesc = ''.join(x.itertext())
        if longdesc:
            self._longdescription = ' '.join(longdesc.split())
        break

    self._source = None

    # lang="" is property of <use/>
    self._local_use = mappings.ImmutableDict()
    # first English <use/> block wins; flags without a name are dropped
    for x in tree.findall("use"):
        if x.get('lang', 'en') != 'en':
            continue
        self._local_use = mappings.ImmutableDict(
            (e.attrib['name'], ' '.join(''.join(e.itertext()).split()))
            for e in x.findall('flag')
            if 'name' in e.attrib)
        break
def _parse_xml(self, source=None):
    """Parse a metadata.xml file, populating maintainer/use/longdescription state.

    :param source: optional file object; defaults to the bytes fileobj of
        the backing ``self._source``.
    """
    if source is None:
        source = self._source.bytes_fileobj()
    tree = etree.parse(source)

    maintainers = []
    for node in tree.findall("maintainer"):
        fields = {"name": None, "email": None, "description": None}
        for child in node:
            if child.tag in fields:
                fields[child.tag] = child.text
        maintainers.append(Maintainer(**fields))
    self._maintainers = tuple(maintainers)

    # Could be unicode!
    longdesc = tree.findtext("longdescription")
    if longdesc:
        longdesc = ' '.join(longdesc.split())
    self._longdescription = longdesc
    self._source = None

    # flags without a name attribute are dropped; text whitespace normalized
    flags = {}
    for node in tree.findall('use/flag'):
        if 'name' in node.attrib:
            flags[node.attrib['name']] = ' '.join(
                ''.join(node.itertext()).split())
    self._local_use = mappings.ImmutableDict(flags)
def optimize(self, cache=None):
    """Collapse per-atom and global payload streams into minimal form.

    When the instance is frozen the results replace ``_dict`` and
    ``_global_settings`` with immutable equivalents; otherwise they are
    updated in place.

    :param cache: optional memoization mapping shared across calls; when
        given, payload construction goes through the cached builder.
    """
    payload_form = isinstance(self, PayloadDict)
    # BUG FIX: dict.iteritems() is Python 2 only; items() is equivalent.
    if cache is None:
        d_stream = ((k, _build_cp_atom_payload(v, atom.atom(k), False))
                    for k, v in self._dict.items())
        g_stream = (_build_cp_atom_payload(
            self._global_settings, packages.AlwaysTrue,
            payload_form=payload_form))
    else:
        d_stream = ((k, _cached_build_cp_atom_payload(
            cache, v, atom.atom(k), False))
            for k, v in self._dict.items())
        g_stream = (_cached_build_cp_atom_payload(
            cache, self._global_settings, packages.AlwaysTrue,
            payload_form=payload_form))
    if self.frozen:
        self._dict = mappings.ImmutableDict(d_stream)
        self._global_settings = tuple(g_stream)
    else:
        self._dict.update(d_stream)
        self._global_settings[:] = list(g_stream)
def test_invalid_operations(self):
    """ImmutableDict must reject every mutating operation."""
    d = mappings.ImmutableDict({1: -1, 2: -2})
    initial_hash = hash(d)

    # deletion and assignment are blocked for both present and absent keys
    for key in (1, 7):
        with pytest.raises(TypeError):
            del d[key]
        with pytest.raises(TypeError):
            d[key] = -key

    # mutating dict methods simply don't exist on the type
    for attempt in (lambda: d.clear(),
                    lambda: d.update({6: -6}),
                    lambda: d.pop(1),
                    lambda: d.popitem(),
                    lambda: d.setdefault(6, -6)):
        with pytest.raises(AttributeError):
            attempt()

    assert initial_hash == hash(d)
def arch_profiles(self):
    """Return the mapping of arches to profiles for a repo.

    Reads ``profiles.desc``; lines not matching 'key profile status' are
    logged and skipped.  A missing file yields an empty mapping.
    """
    d = mappings.defaultdict(list)
    fp = pjoin(self.profile_base, 'profiles.desc')
    try:
        for line in iter_read_bash(fp):
            l = line.split()
            try:
                key, profile, status = l
            except ValueError:
                logger.error(
                    "%s: line doesn't follow 'key profile status' form: %s",
                    fp, line)
                continue
            # Normalize the profile name on the offchance someone slipped an extra /
            # into it.
            d[key].append(
                _KnownProfile('/'.join(filter(None, profile.split('/'))), status))
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise
        logger.debug("No profile descriptions found at %r", fp)
    # BUG FIX: dict.iteritems() is Python 2 only; items() is equivalent.
    return mappings.ImmutableDict(
        (k, tuple(sorted(v))) for k, v in d.items())
def arches_desc(self):
    """Arch stability status (GLEP 72).

    See https://www.gentoo.org/glep/glep-0072.html for more details.
    """
    fp = pjoin(self.profiles_base, 'arches.desc')
    by_status = {'stable': set(), 'transitional': set(), 'testing': set()}
    try:
        for lineno, line in iter_read_bash(fp, enum_line=True):
            fields = line.split()
            if len(fields) != 2:
                logger.error(
                    f"{self.repo_id}::profiles/arches.desc, "
                    f"line {lineno}: invalid line format: "
                    "should be '<arch> <status>'")
                continue
            arch, status = fields
            if arch not in self.known_arches:
                logger.warning(
                    f"{self.repo_id}::profiles/arches.desc, "
                    f"line {lineno}: unknown arch: {arch!r}")
                continue
            if status not in by_status:
                logger.warning(
                    f"{self.repo_id}::profiles/arches.desc, "
                    f"line {lineno}: unknown status: {status!r}")
                continue
            by_status[status].add(arch)
    except FileNotFoundError:
        # no arches.desc file: all status sets stay empty
        pass
    return mappings.ImmutableDict(by_status)
def _parse_xml(self, source=None):
    """Parse projects.xml into a mapping of project email -> Project.

    :param source: optional file object; defaults to the bytes fileobj of
        the backing ``self._source``.
    :return: ImmutableDict keyed by project email; empty on XML syntax
        errors (logged).
    """
    if source is None:
        source = self._source.bytes_fileobj()
    try:
        tree = etree.parse(source)
    except etree.XMLSyntaxError as e:
        # malformed XML: log and degrade to an empty mapping
        logger.error(f'failed parsing projects.xml: {e}')
        return mappings.ImmutableDict()
    projects = {}
    for p in tree.findall('project'):
        kwargs = {}
        for k in ('email', 'name', 'url', 'description'):
            kwargs[k] = p.findtext(k)
        members = []
        for m in p.findall('member'):
            m_kwargs = {}
            for k in ('email', 'name', 'role'):
                m_kwargs[k] = m.findtext(k)
            # only an explicit is-lead="1" marks a lead member
            m_kwargs['is_lead'] = m.get('is-lead', '') == '1'
            try:
                members.append(ProjectMember(**m_kwargs))
            except ValueError:
                # a member without an email is invalid; skip it (logged)
                logger.error(
                    f"project {kwargs['email']} has <member/> with no email"
                )
        kwargs['members'] = members
        subprojects = []
        for sp in p.findall('subproject'):
            try:
                subprojects.append(
                    Subproject(ref=sp.get('ref'),
                               inherit_members=sp.get(
                                   'inherit-members', '') == '1',
                               projects_xml=self))
            except ValueError:
                # a subproject without a ref is invalid; skip it (logged)
                logger.error(
                    f"project {kwargs['email']} has <subproject/> with no ref"
                )
        kwargs['subprojects'] = subprojects
        projects[kwargs['email']] = Project(**kwargs)
    return mappings.ImmutableDict(projects)
def _parse_xml(self):
    """Parse the backing metadata.xml, degrading to empty metadata if absent."""
    try:
        MetadataXml._parse_xml(self, open(self._source, "rb", 32768))
    except FileNotFoundError:
        # missing metadata.xml: expose empty metadata instead of failing
        for attr, value in (("_maintainers", ()),
                            ("_local_use", mappings.ImmutableDict()),
                            ("_longdescription", None),
                            ("_source", None)):
            setattr(self, attr, value)
def ebd_env(self):
    """Dictionary of EAPI options passed to the ebd environment."""
    env = {
        f"PKGCORE_{opt.upper()}": str(getattr(self.options, opt)).lower()
        for opt in self._ebd_env_options
    }
    env["PKGCORE_EAPI_INHERITS"] = ' '.join(e._magic for e in self.inherits)
    env["EAPI"] = self._magic
    return mappings.ImmutableDict(env)
def groups(self):
    """License group mapping merged across all license instances."""
    merged = {}
    for instance in self._license_instances:
        for group, licenses in instance.groups.items():
            if group in merged:
                # in-place union with the already-collected members
                merged[group] |= licenses
            else:
                merged[group] = licenses
    return mappings.ImmutableDict(merged)
def _parse_xml(self):
    """Parse the backing metadata.xml, degrading to empty metadata if absent."""
    try:
        MetadataXml._parse_xml(self, open(self._source, "rb", 32768))
    # MODERNIZED: EnvironmentError + errno.ENOENT re-raise is the pre-3.3
    # idiom; FileNotFoundError is equivalent on Python 3 and matches the
    # handling used by the sibling implementations in this file.
    except FileNotFoundError:
        self._maintainers = ()
        self._local_use = mappings.ImmutableDict()
        self._longdescription = None
        self._source = None
def _render_config_stack(self, type_obj, config_stack):
    """Render every key in *config_stack* to its final typed value.

    Type names may carry prefixes: ``lazy_`` is stripped before rendering,
    ``ref:`` marks a single section reference and ``refs:`` a list of them;
    references are collapsed before being stored.

    NOTE(review): py2/py3-compat variant using the ``compatibility``
    module's exception helpers.

    :param type_obj: type descriptor supplying ``types``, ``required`` and
        ``allow_unknowns``.
    :param config_stack: stack of config layers to render from.
    :raises errors.ConfigurationError: for unknown keys (when disallowed),
        failed reference collapsing, or missing required settings.
    :return: ImmutableDict of key -> rendered value.
    """
    conf = {}
    for key in config_stack:
        typename = type_obj.types.get(key)
        if typename is None:
            if not type_obj.allow_unknowns:
                raise errors.ConfigurationError('Type of %r unknown' % (key, ))
            # unknown keys fall back to plain strings
            typename = 'str'
        is_ref = typename.startswith('ref:')
        is_refs = typename.startswith('refs:')
        if typename.startswith('lazy_'):
            typename = typename[5:]
        # list-like and string types merge prepended layers; strings are
        # rendered flat and joined afterwards
        if typename.startswith('refs:') or typename in ('list', 'str'):
            result = config_stack.render_prepends(
                self, key, typename, flatten=(typename != 'str'))
            if typename == 'str':
                result = ' '.join(result)
        else:
            result = config_stack.render_val(self, key, typename)
        if is_ref:
            # treat a single ref as a one-element ref list for collapsing
            result = [result]
            is_refs = True
        if is_refs:
            try:
                result = [ref.collapse() for ref in result]
            except compatibility.IGNORED_EXCEPTIONS:
                raise
            except Exception:
                compatibility.raise_from(
                    errors.ConfigurationError(
                        "Failed collapsing section key %r" % (key, )))
        if is_ref:
            # unwrap the single collapsed reference again
            result = result[0]
        conf[key] = result
    # Check if we got all values required to instantiate.
    missing = set(type_obj.required) - set(conf)
    if missing:
        raise errors.ConfigurationError(
            'type %s.%s needs settings for %s' % (
                type_obj.callable.__module__, type_obj.callable.__name__,
                ', '.join(repr(var) for var in missing)))
    return mappings.ImmutableDict(conf)
def _render_config_stack(self, type_obj, config_stack):
    """Render every key in *config_stack* to its final typed value.

    Type names may carry prefixes: ``lazy_`` is stripped before rendering,
    ``ref:`` marks a single section reference and ``refs:`` a list of them;
    references are collapsed before being stored.

    :param type_obj: type descriptor supplying ``types``, ``required`` and
        ``allow_unknowns``.
    :param config_stack: stack of config layers to render from.
    :raises errors.ConfigurationError: for unknown keys (when disallowed),
        failed reference collapsing, or missing required settings.
    :return: ImmutableDict of key -> rendered value.
    """
    rendered = {}
    for key in config_stack:
        typename = type_obj.types.get(key)
        if typename is None:
            if not type_obj.allow_unknowns:
                raise errors.ConfigurationError(f'Type of {key!r} unknown')
            # unknown keys fall back to plain strings
            typename = 'str'
        is_ref = typename.startswith('ref:')
        is_refs = typename.startswith('refs:')
        if typename.startswith('lazy_'):
            typename = typename[5:]
        # list-like and string types merge prepended layers; strings are
        # rendered flat and joined afterwards
        if typename.startswith('refs:') or typename in ('list', 'str'):
            value = config_stack.render_prepends(
                self, key, typename, flatten=(typename != 'str'))
            if typename == 'str':
                value = ' '.join(value)
        else:
            value = config_stack.render_val(self, key, typename)
        if is_ref:
            # treat a single ref as a one-element ref list for collapsing
            value = [value]
            is_refs = True
        if is_refs:
            try:
                value = [ref.collapse() for ref in value]
            except IGNORED_EXCEPTIONS:
                raise
            except Exception as e:
                raise errors.ConfigurationError(
                    f'Failed collapsing section key {key!r}') from e
        if is_ref:
            # unwrap the single collapsed reference again
            value = value[0]
        rendered[key] = value
    # Check if we got all values required to instantiate.
    missing = set(type_obj.required) - set(rendered)
    if missing:
        module = type_obj.callable.__module__
        name = type_obj.callable.__name__
        missing_vars = ', '.join(map(repr, missing))
        raise errors.ConfigurationError(
            f'type {module}.{name} needs settings for {missing_vars}')
    return mappings.ImmutableDict(rendered)
def use_expand_sort(self):
    """Mapping of USE_EXPAND sorting keys for the repo."""
    desc_base = pjoin(self.profiles_base, 'desc')
    try:
        files = listdir_files(desc_base)
    except FileNotFoundError:
        files = []
    result = {}
    for fname in files:
        # group name is the filename without its extension
        group = fname.split('.', 1)[0]
        entries = self._split_use_desc_file(
            f'desc/{fname}', lambda k: k, matcher=False)
        # flag -> position index, preserving file order
        result[group] = {entry[0]: idx for idx, entry in enumerate(entries)}
    return mappings.ImmutableDict(result)
def use_expand_desc(self):
    """USE_EXPAND settings for the repo."""
    desc_base = pjoin(self.profiles_base, 'desc')
    try:
        files = listdir_files(desc_base)
    except FileNotFoundError:
        files = []
    result = {}
    for fname in files:
        # group name is the filename without its extension
        group = fname.split('.', 1)[0]
        result[group] = tuple(self._split_use_desc_file(
            f'desc/{fname}', lambda k: f'{group}_{k}', matcher=False))
    return mappings.ImmutableDict(result)
def helpers(self):
    """Phase to directory mapping for EAPI specific helpers to add to $PATH."""
    paths = defaultdict(list)
    for eapi in self.inherits:
        paths['global'].append(pjoin(const.EBUILD_HELPERS_PATH, 'common'))
        helper_dir = pjoin(const.EBUILD_HELPERS_PATH, eapi._magic)
        for dirpath, _dirnames, filenames in os.walk(helper_dir):
            # only directories that actually contain helpers matter
            if not filenames:
                continue
            if dirpath == helper_dir:
                paths['global'].append(dirpath)
                continue
            # subdirectories are named after the phase they serve
            phase = os.path.basename(dirpath)
            if phase not in self.phases_rev:
                raise ValueError(f'unknown phase: {phase!r}')
            paths[phase].append(dirpath)
    return mappings.ImmutableDict((k, tuple(v)) for k, v in paths.items())
def test_init_dictmixin(self):
    """ImmutableDict should accept a DictMixin-based mapping on init."""
    d = MutableDict(baz="cat")
    e = mappings.ImmutableDict(d)
    # the source mapping holds the expected data...
    assert dict(d) == {'baz': 'cat'}
    # BUG FIX: the constructed ImmutableDict was never asserted against,
    # so the conversion under test went unverified.
    assert dict(e) == {'baz': 'cat'}
def updates(self):
    """Package updates for the repo defined in profiles/updates/*."""
    return mappings.ImmutableDict(
        pkg_updates.read_updates(pjoin(self.profiles_base, 'updates')))
def test_str(self):
    """str() of an ImmutableDict matches str() of an equal plain dict."""
    plain = {1: 1, 2: 2}
    frozen = mappings.ImmutableDict(plain)
    assert str(frozen) == str(plain)
def test_repr(self):
    """repr() of an ImmutableDict matches repr() of an equal plain dict."""
    plain = {1: 1, 2: 2}
    frozen = mappings.ImmutableDict(plain)
    assert repr(frozen) == repr(plain)
def _parse_xml(self):
    """Parse the source file, returning empty metadata when it's missing."""
    try:
        with open(self._source, "rb", 32768) as fileobj:
            return super()._parse_xml(fileobj)
    except FileNotFoundError:
        # no file on disk: behave as though the XML was empty
        return mappings.ImmutableDict()
def freeze(self):
    """Make the per-atom dict and the global settings immutable.

    Idempotent: once ``_dict`` is already an ImmutableDict nothing is done.
    """
    if not isinstance(self._dict, mappings.ImmutableDict):
        # BUG FIX: dict.iteritems() was removed in Python 3; items() is
        # the equivalent.
        self._dict = mappings.ImmutableDict(
            (k, tuple(v)) for k, v in self._dict.items())
        self._global_settings = tuple(self._global_settings)
def test_init_bad_data(self):
    """Non-mapping inputs must raise TypeError on construction."""
    bad_inputs = (range(10), list(range(10)), [([], 1)])
    for data in bad_inputs:
        with pytest.raises(TypeError):
            mappings.ImmutableDict(data)
def projects(self):
    """Parsed projects.xml content, or an empty mapping when sourceless."""
    if self._source is None:
        return mappings.ImmutableDict()
    return self._parse_xml()