def test_iter_stable_unique(self):
    """iter_stable_unique yields the same results as stable_unique and copes with unhashables."""
    # The iterator form must satisfy the whole stable_unique contract.
    self.test_stable_unique(
        lambda seq: list(sequences.iter_stable_unique(seq)))
    first = UnhashableComplex()
    data = [1, 2, 3, first, UnhashableComplex(), 4, 3, UnhashableComplex()]
    # Equal unhashable instances collapse to the first occurrence, order preserved.
    self.assertEqual(
        list(sequences.iter_stable_unique(data)), [1, 2, 3, first, 4])
def __new__(cls, name, bases, class_dict):
    """Collect registered descriptors into a ``_get_attr`` lookup table.

    Pops every ``cls.register`` instance out of the class namespace,
    records its ``functor`` under the attribute name, merges in the
    ``_get_attr`` tables inherited from *bases*, and installs
    ``dynamic_getattr_dict`` as ``__getattr__`` if the class didn't
    define one.
    """
    # Pull out registered attributes; list() snapshot since we pop while iterating.
    new_functions = {
        attr: class_dict.pop(attr).functor
        for attr, thing in list(class_dict.items())
        if isinstance(thing, cls.register)
    }
    # Gather inherited _get_attr mappings; later bases override earlier ones.
    existing = {}
    for base in bases:
        existing.update(getattr(base, '_get_attr', {}))
    slots = class_dict.get('__slots__', None)
    if slots is not None:
        # only add slots for new attr's; assume the layer above already slotted
        # if this layer is setting slots.
        class_dict['__slots__'] = tuple(
            sequences.iter_stable_unique(
                itertools.chain(slots, set(new_functions).difference(existing))))
    # Opt-out flag: when auto-inherit is disabled, start from an empty table
    # instead of the inherited one.
    d = existing if class_dict.pop('__DynamicGetattrSetter_auto_inherit__', True) else {}
    d.update(new_functions)
    # An explicit _get_attr in the class body takes highest precedence.
    d.update(class_dict.pop('_get_attr', {}))
    class_dict['_get_attr'] = d
    class_dict.setdefault('__getattr__', dynamic_getattr_dict)
    return type.__new__(cls, name, bases, class_dict)
def _validate_args(parser, namespace):
    """Reject incompatible option combinations and expand attr aliases."""
    # --no-version conflicts with several other options.
    if namespace.noversion:
        if namespace.contents:
            parser.error('both --no-version and --contents does not make sense')
        if namespace.min or namespace.max:
            parser.error('--no-version with --min or --max does not make sense')
        if namespace.print_revdep:
            parser.error('--print-revdep with --no-version does not make sense')

    if namespace.one_attr and namespace.print_revdep:
        parser.error('--print-revdep with --force-one-attr or --one-attr does not make sense')

    def expand(names):
        # Translate alias names ('all', 'allmetadata', ...) into concrete attrs.
        for name in names:
            if name == 'all':
                expansion = [a for a in printable_attrs if a != 'all']
            elif name == 'allmetadata':
                expansion = expand(metadata_attrs)
            elif name == 'alldepends':
                expansion = dep_attrs
            elif name == 'raw_alldepends':
                expansion = [f'raw_{a}' for a in dep_attrs]
            else:
                expansion = [name]
            yield from expansion

    # Verbose mode seeds a default attribute set before the user's choices.
    attrs = ['repo', 'description', 'homepage', 'license'] if namespace.verbosity > 0 else []
    attrs.extend(expand(namespace.attr))
    # finally, uniquify the attrs.
    namespace.attr = list(iter_stable_unique(attrs))
def regen_main(options, out, err):
    """Regenerate a repository cache.

    :param options: parsed argparse namespace (repos, cache_dir, threads,
        force, rsync, verbosity, use_local_desc, pkg_desc_index, ...)
    :param out: formatter for normal output
    :param err: formatter for error output
    :return: 0 on success, nonzero if any regen step failed
    """
    ret = []
    observer = observer_mod.formatter_output(out)
    for repo in iter_stable_unique(options.repos):
        if options.cache_dir is not None:
            # recreate new repo object with cache dir override
            cache = (md5_cache(
                pjoin(options.cache_dir.rstrip(os.sep), repo.repo_id)), )
            repo = ebuild_repo.tree(options.config, repo.config, cache=cache)

        # Skip repos that can't (or shouldn't) be regenerated.
        if not repo.operations.supports("regen_cache"):
            out.write(f"repo {repo} doesn't support cache regeneration")
            continue
        elif not getattr(repo, 'cache', False) and not options.force:
            out.write(f"skipping repo {repo}: cache disabled")
            continue

        start_time = time.time()
        ret.append(
            repo.operations.regen_cache(
                threads=options.threads, observer=observer, force=options.force,
                eclass_caching=(not options.disable_eclass_caching)))
        end_time = time.time()

        if options.verbosity > 0:
            out.write("finished %d nodes in %.2f seconds" % (
                len(repo), end_time - start_time))

        if options.rsync:
            # Refresh the rsync timestamp so clients see the repo as updated.
            timestamp = pjoin(repo.location, "metadata", "timestamp.chk")
            try:
                with open(timestamp, "w") as f:
                    f.write(
                        time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()))
            except IOError as e:
                # best-effort: report the failure but keep processing repos
                err.write(
                    f"Unable to update timestamp file {timestamp!r}: {e.strerror}"
                )
                ret.append(os.EX_IOERR)

        if options.use_local_desc:
            ret.append(update_use_local_desc(repo, observer))
        if options.pkg_desc_index:
            ret.append(update_pkg_desc_index(repo, observer))

    # any truthy status (failure) collapses to exit code 1
    return int(any(ret))
def sync_main(options, out, err):
    """Update local repos to match their remotes.

    :param options: parsed argparse namespace; ``options.repos`` yields
        (name, repo) pairs
    :param out: formatter for normal output
    :param err: formatter for error output
    :return: 0 if every syncable repo synced, 1 if any failed
    """
    succeeded, failed = [], []
    for repo_name, repo in iter_stable_unique(options.repos):
        # rewrite the name if it has the usual prefix
        if repo_name.startswith("conf:"):
            repo_name = repo_name[5:]

        if not repo.operations.supports("sync"):
            continue
        out.write(f"*** syncing {repo_name}")
        ret = False
        err_msg = ''
        # repo operations don't yet take an observer, thus flush
        # output to keep lines consistent.
        out.flush()
        err.flush()
        try:
            ret = repo.operations.sync(force=options.force, verbosity=options.verbosity)
        except OperationError as e:
            # only user-level errors are reported inline; anything else propagates
            exc = getattr(e, '__cause__', e)
            if not isinstance(exc, PkgcoreUserException):
                raise
            err_msg = f': {exc}'
        if not ret:
            out.write(f"!!! failed syncing {repo_name}{err_msg}")
            failed.append(repo_name)
        else:
            succeeded.append(repo_name)
            out.write(f"*** synced {repo_name}")
        out.flush()
        err.flush()

    total = len(succeeded) + len(failed)
    if total > 1:
        # multi-repo run: print a sorted summary
        results = []
        succeeded = ', '.join(sorted(succeeded))
        failed = ', '.join(sorted(failed))
        if succeeded:
            results.append(f"*** synced: {succeeded}")
        if failed:
            results.append(f"!!! failed: {failed}")
        results = "\n".join(results)
        out.write(f"\n*** sync results:\n{results}")

    # NOTE: `failed` may be a joined string here; truthiness still reflects failures.
    return 1 if failed else 0
def __init__(self, domain, pkg, verified_files, eclass_cache, observer=None,
             force_test=False, **kwargs):
    """
    :param pkg: :obj:`pkgcore.ebuild.ebuild_src.package`
        instance we'll be building
    :param eclass_cache: the :class:`pkgcore.ebuild.eclass_cache`
        we'll be using
    :param verified_files: mapping of fetchables mapped to their disk location
    :param observer: optional build observer; forwarded to the format.build base
    :param force_test: if true, run tests even when the 'test' USE flag is off
    """
    self._built_class = ebuild_built.fresh_built_package
    format.build.__init__(self, domain, pkg, verified_files, observer)
    domain_settings = self.domain.settings
    ebd.__init__(self, pkg, initial_env=domain_settings, **kwargs)

    self.env["FILESDIR"] = pjoin(os.path.dirname(pkg.ebuild.path), "files")
    self.eclass_cache = eclass_cache

    # Decide whether src_test runs: forced, FEATURES-enabled, and not RESTRICTed.
    self.run_test = force_test or self.feat_or_bool("test", domain_settings)
    self.allow_failed_test = self.feat_or_bool("test-fail-continue", domain_settings)
    if "test" in self.restrict:
        self.run_test = False
    elif not force_test and "test" not in pkg.use:
        if self.run_test:
            logger.warning(f"disabling test for {pkg} due to test use flag being disabled")
        self.run_test = False

    # XXX minor hack
    path = self.env["PATH"].split(os.pathsep)

    # Configure distcc/ccache: enabled only if the FEATURE is on and not RESTRICTed.
    for s, default in (("DISTCC", ".distcc"), ("CCACHE", "ccache")):
        b = (self.feat_or_bool(s, domain_settings)
             and s not in self.restrict)
        setattr(self, s.lower(), b)
        if b:
            # looks weird I realize, but
            # pjoin("/foor/bar", "/barr/foo") == "/barr/foo"
            # and pjoin("/foo/bar", ".asdf") == "/foo/bar/.asdf"
            self.env.setdefault(s + "_DIR", pjoin(self.domain.tmpdir, default))
            # gentoo bug 355283
            libdir = self.env.get("ABI")
            if libdir is not None:
                libdir = self.env.get(f"LIBDIR_{libdir}")
                if libdir is not None:
                    libdir = self.env.get(libdir)
                    if libdir is None:
                        libdir = "lib"
                    # NOTE(review): nesting reconstructed from collapsed source —
                    # path prepend assumed to require ABI and LIBDIR_<ABI> to be
                    # set; confirm against upstream.
                    path.insert(0, f"/usr/{libdir}/{s.lower()}/bin")
        else:
            # Feature disabled: scrub any stale env vars for it.
            for y in ("_PATH", "_DIR"):
                if s + y in self.env:
                    del self.env[s+y]
    self.env["PATH"] = os.pathsep.join(path)

    # ordering must match appearance order in SRC_URI per PMS
    self.env["A"] = ' '.join(iter_stable_unique(pkg.distfiles))

    if self.eapi.options.has_AA:
        # AA wants the raw (unfiltered) package's full distfile set.
        pkg = self.pkg
        while hasattr(pkg, '_raw_pkg'):
            pkg = getattr(pkg, '_raw_pkg')
        self.env["AA"] = ' '.join(set(iflatten_instance(pkg.distfiles)))

    if self.eapi.options.has_KV:
        self.env["KV"] = domain.KV

    if self.eapi.options.has_merge_type:
        self.env["MERGE_TYPE"] = "source"

    if self.eapi.options.has_portdir:
        self.env["PORTDIR"] = pkg.repo.location
        self.env["ECLASSDIR"] = eclass_cache.eclassdir

    if self.setup_is_for_src:
        self._init_distfiles_env()
def _parse_config(self):
    """Load data from the repo's metadata/layout.conf file.

    Parses layout.conf (missing file tolerated) and sets immutable
    attributes on self: repo_name, manifests, masters, aliases,
    deprecated/banned EAPIs, allowed PROPERTIES/RESTRICT values,
    cache_format, and profile_formats.
    """
    path = pjoin(self.location, self.layout_offset)
    # swallow_missing: an absent layout.conf yields an empty mapping
    data = read_dict(iter_read_bash(
        readlines(path, strip_whitespace=True, swallow_missing=True)),
        source_isiter=True, strip=True, filename=path, ignore_errors=True)

    # bypass the class's immutability for attribute assignment
    sf = object.__setattr__
    sf(self, 'repo_name', data.get('repo-name', None))

    # 'size' is always the first manifest hash; dedupe preserving order.
    hashes = data.get('manifest-hashes', '').lower().split()
    if hashes:
        hashes = ['size'] + hashes
        hashes = tuple(iter_stable_unique(hashes))
    else:
        hashes = self.default_hashes

    required_hashes = data.get('manifest-required-hashes', '').lower().split()
    if required_hashes:
        required_hashes = ['size'] + required_hashes
        required_hashes = tuple(iter_stable_unique(required_hashes))
    else:
        required_hashes = self.default_required_hashes

    manifest_policy = data.get('use-manifests', 'strict').lower()
    d = {
        'disabled': (manifest_policy == 'false'),
        'strict': (manifest_policy == 'strict'),
        'thin': (data.get('thin-manifests', '').lower() == 'true'),
        'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
        'hashes': hashes,
        'required_hashes': required_hashes,
    }
    sf(self, 'manifests', _immutable_attr_dict(d))
    masters = data.get('masters')
    _missing_masters = False
    if masters is None:
        # Non-empty repos are expected to declare masters explicitly.
        if not self.is_empty:
            logger.warning(
                f"{self.repo_id} repo at {self.location!r}, doesn't "
                "specify masters in metadata/layout.conf. Please explicitly "
                "set masters (use \"masters =\" if the repo is standalone).")
        _missing_masters = True
        masters = ()
    else:
        masters = tuple(iter_stable_unique(masters.split()))
    sf(self, '_missing_masters', _missing_masters)
    sf(self, 'masters', masters)

    # All known names/paths this repo may be referenced by; drop empties.
    aliases = data.get('aliases', '').split() + [
        self.config_name, self.repo_name, self.pms_repo_name, self.location]
    sf(self, 'aliases', tuple(filter(None, iter_stable_unique(aliases))))

    sf(self, 'eapis_deprecated',
       tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))
    sf(self, 'eapis_banned',
       tuple(iter_stable_unique(data.get('eapis-banned', '').split())))
    sf(self, 'properties_allowed',
       tuple(iter_stable_unique(data.get('properties-allowed', '').split())))
    sf(self, 'restrict_allowed',
       tuple(iter_stable_unique(data.get('restrict-allowed', '').split())))

    v = set(data.get('cache-formats', 'md5-dict').lower().split())
    if not v:
        v = [None]
    else:
        # sort into favored order
        v = [f for f in self.supported_cache_formats if f in v]
        if not v:
            logger.warning(f'unknown cache format: falling back to md5-dict format')
            v = ['md5-dict']
    # favored (first) supported format wins
    sf(self, 'cache_format', list(v)[0])

    profile_formats = set(data.get('profile-formats', 'pms').lower().split())
    if not profile_formats:
        logger.info(
            f"{self.repo_id!r} repo at {self.location!r} has explicitly "
            "unset profile-formats, defaulting to pms")
        profile_formats = {'pms'}
    unknown = profile_formats.difference(self.supported_profile_formats)
    if unknown:
        logger.info(
            "%r repo at %r has unsupported profile format%s: %s",
            self.repo_id, self.location, pluralism(unknown),
            ', '.join(sorted(unknown)))
        profile_formats.difference_update(unknown)
        # 'pms' is always available as the baseline format
        profile_formats.add('pms')
    sf(self, 'profile_formats', profile_formats)
def test_iter_stable_unique(self):
    """Exercise the iterator variant of stable_unique."""
    # Delegate the shared contract checks to the stable_unique suite.
    as_list = lambda items: list(sequences.iter_stable_unique(items))
    self.test_stable_unique(as_list)
    unhashable = UnhashableComplex()
    sample = [1, 2, 3, unhashable, UnhashableComplex(), 4, 3, UnhashableComplex()]
    # Duplicates (hashable and unhashable alike) are dropped, keeping first-seen order.
    expected = [1, 2, 3, unhashable, 4]
    self.assertEqual(list(sequences.iter_stable_unique(sample)), expected)