def desc(self):
    """Return a human-readable summary of the checksum conflict."""
    chksum_suffix = pluralism(self.chksums)
    pkg_suffix = pluralism(self.pkgs)
    chksum_list = ', '.join(self.chksums)
    pkg_list = ', '.join(self.pkgs)
    return (f'distfile {self.filename!r} has different checksum{chksum_suffix} '
            f'({chksum_list}) for package{pkg_suffix}: {pkg_list}')
def _setup_shared_opts(namespace):
    # Translate pkgset and exclude options into restriction objects shared
    # by multiple subcommands, stashing them on the namespace.
    namespace.exclude_restrict = None
    exclude_restrictions = []

    if namespace.pkgsets:
        disabled, enabled = namespace.pkgsets
        # error out on any set name not present in the config
        unknown_sets = set(disabled + enabled).difference(namespace.config.pkgset)
        if unknown_sets:
            argparser.error("unknown set%s: %s (available sets: %s)" % (
                pluralism(unknown_sets),
                ', '.join(sorted(map(repr, unknown_sets))),
                ', '.join(sorted(namespace.config.pkgset))))
        # disabled sets become exclusions; enabled sets become OR'd restricts
        for s in set(disabled):
            exclude_restrictions.extend(namespace.config.pkgset[s])
        for s in set(enabled):
            namespace.restrict.append(
                boolean.OrRestriction(*namespace.config.pkgset[s]))

    # handle command line and file excludes
    excludes = namespace.excludes if namespace.excludes is not None else []
    if namespace.exclude_file is not None:
        # NOTE(review): this extends namespace.excludes in place when it is
        # not None -- confirm callers don't rely on it staying unchanged
        excludes.extend(namespace.exclude_file.read().split('\n'))
    if excludes:
        exclude_restrictions.extend(convert_to_restrict(excludes, default=None))

    if exclude_restrictions:
        # negated OR applies the exclusions to the main restriction list
        namespace.restrict.append(
            boolean.OrRestriction(negate=True, *exclude_restrictions))
        namespace.exclude_restrict = boolean.OrRestriction(*exclude_restrictions)
def __call__(self, parser, namespace, values, option_string=None):
    """Expand keyword aliases, validate selections, and store them."""
    disabled, enabled = self.parse_values(values)

    def keywords_of(cls):
        # lazily yield keyword names whose result class derives from cls
        return (k for k, v in objects.KEYWORDS.items() if issubclass(v, cls))

    alias_map = {
        'error': keywords_of(results.Error),
        'warning': keywords_of(results.Warning),
        'info': keywords_of(results.Info),
    }

    # expand keyword aliases to keyword lists
    expand = lambda token: alias_map.get(token, [token])
    disabled = [kw for token in disabled for kw in expand(token)]
    enabled = [kw for token in enabled for kw in expand(token)]

    # validate selected keywords
    unknown_keywords = set(disabled + enabled) - set(objects.KEYWORDS)
    if unknown_keywords:
        unknown = ', '.join(map(repr, unknown_keywords))
        s = pluralism(unknown_keywords)
        raise argparse.ArgumentError(self, f'unknown keyword{s}: {unknown}')

    setattr(namespace, self.dest, (disabled, enabled))
def _setup_shared_opts(namespace):
    # Build shared restriction state from pkgset and exclude options.
    namespace.exclude_restrict = None
    exclude_restrictions = []

    if namespace.pkgsets:
        disabled, enabled = namespace.pkgsets
        # reject any set names missing from the configuration
        unknown_sets = set(disabled + enabled).difference(
            namespace.config.pkgset)
        if unknown_sets:
            argparser.error("unknown set%s: %s (available sets: %s)" % (
                pluralism(unknown_sets),
                ', '.join(sorted(map(repr, unknown_sets))),
                ', '.join(sorted(namespace.config.pkgset))))
        # disabled sets are excluded, enabled sets are OR'd into restrict
        for s in set(disabled):
            exclude_restrictions.extend(namespace.config.pkgset[s])
        for s in set(enabled):
            namespace.restrict.append(
                boolean.OrRestriction(*namespace.config.pkgset[s]))

    # handle command line and file excludes
    excludes = namespace.excludes if namespace.excludes is not None else []
    if namespace.exclude_file is not None:
        # NOTE(review): extends namespace.excludes in place when not None --
        # verify nothing else reads that list afterwards
        excludes.extend(namespace.exclude_file.read().split('\n'))
    if excludes:
        exclude_restrictions.extend(convert_to_restrict(excludes, default=None))

    if exclude_restrictions:
        # apply exclusions as a negated OR over the main restriction list
        namespace.restrict.append(
            boolean.OrRestriction(negate=True, *exclude_restrictions))
        namespace.exclude_restrict = boolean.OrRestriction(
            *exclude_restrictions)
def __init__(self, *args, **kwargs):
    # Pops its custom keywords (repo_type, allow_aliases, allow_name_lookup,
    # allow_external_repos) off kwargs before delegating to the parent action.
    if 'config_type' in kwargs:
        # config_type is derived from repo_type below; passing it is an error
        raise ValueError(
            "StoreRepoObject: config_type keyword is redundant: got %s"
            % (kwargs['config_type'], ))
    # class of repos this action accepts; must be a valid_repo_types key
    self.repo_type = kwargs.pop('repo_type', 'all')
    if self.repo_type not in self.valid_repo_types:
        raise argparse.ArgumentTypeError(
            f"unknown repo type: {self.repo_type!r}")
    self.repo_key = self.valid_repo_types[self.repo_type]
    # optional repo-type aliases; each must itself be a valid repo type
    self.allow_aliases = set(kwargs.pop("allow_aliases", ()))
    if self.allow_aliases:
        unknown_aliases = self.allow_aliases.difference(
            self.valid_repo_types)
        if unknown_aliases:
            raise argparse.ArgumentTypeError(
                'unknown repo alias%s: %s' % (
                    pluralism(unknown_aliases, plural='es'),
                    ', '.join(unknown_aliases)))
    # 'config' type repos resolve to raw repo_config objects
    if self.repo_type == 'config':
        kwargs['config_type'] = 'repo_config'
    else:
        kwargs['config_type'] = 'repo'
    self.allow_name_lookup = kwargs.pop("allow_name_lookup", True)
    self.allow_external_repos = kwargs.pop("allow_external_repos", False)
    super().__init__(*args, **kwargs)
def check_args(parser, namespace):
    """Resolve selected arches against the target repo and store the result."""
    selected = namespace.selected_arches
    target_repo = getattr(namespace, "target_repo", None)
    all_arches = target_repo.known_arches if target_repo is not None else set()

    if selected is None:
        selected = (set(), all_arches)
    disabled, enabled = selected
    if not enabled:
        # enable all non-prefix arches
        enabled = {arch for arch in all_arches if '-' not in arch}

    arches = set(enabled).difference(set(disabled))
    if all_arches:
        unknown_arches = arches.difference(all_arches)
        if unknown_arches:
            es = pluralism(unknown_arches, plural='es')
            unknown = ', '.join(unknown_arches)
            valid = ', '.join(sorted(all_arches))
            parser.error(
                f'unknown arch{es}: {unknown} (valid arches: {valid})')

    namespace.arches = tuple(sorted(arches))
def _validate(parser, namespace):
    # Cross-validate merge-related command line options and normalize
    # namespace.targets/namespace.sets for later use.
    # nothing to validate if listing pkgsets
    if namespace.list_sets:
        return

    if namespace.unmerge:
        if namespace.sets:
            parser.error("using sets with -C probably isn't wise, aborting")
        if not namespace.targets:
            parser.error("you must provide at least one atom")

    if namespace.clean:
        if namespace.sets or namespace.targets:
            parser.error(
                "--clean currently cannot be used w/ any sets or targets given"
            )
        # --clean implies a deep world/system scan without replacement
        namespace.sets = ('world', 'system')
        namespace.deep = True
        namespace.replace = False
        if namespace.usepkgonly or namespace.usepkg or namespace.source_only:
            parser.error(
                '--clean cannot be used with any of the following options: '
                '--usepkg --usepkgonly --source-only')
    elif namespace.usepkgonly and namespace.usepkg:
        parser.error('--usepkg is redundant when --usepkgonly is used')
    elif (namespace.usepkgonly or namespace.usepkg) and namespace.source_only:
        parser.error(
            "--source-only cannot be used with --usepkg nor --usepkgonly")
    elif namespace.nodeps and namespace.onlydeps:
        parser.error(
            "-O/--nodeps cannot be used with -o/--onlydeps (it's a no-op)")

    if namespace.sets:
        # resolve set names through the config, erroring on unknown ones
        unknown_sets = set(namespace.sets).difference(namespace.config.pkgset)
        if unknown_sets:
            parser.error("unknown set%s: %s (available sets: %s)" % (
                pluralism(unknown_sets),
                ', '.join(sorted(map(repr, unknown_sets))),
                ', '.join(sorted(namespace.config.pkgset))))
        namespace.sets = [(x, namespace.config.pkgset[x])
                          for x in namespace.sets]
    if namespace.upgrade:
        namespace.replace = False
    if not namespace.targets and not namespace.sets:
        parser.error('please specify at least one atom or nonempty set')
    if namespace.newuse:
        namespace.oneshot = True

    # At some point, fix argparse so this isn't necessary...
    def f(val):
        # normalize argparse values: None -> empty, single tuple -> list
        if val is None:
            return ()
        elif isinstance(val, tuple):
            return [val]
        return val
    namespace.targets = f(namespace.targets)
    namespace.sets = f(namespace.sets)
def attachments(self, ids, id_map=False, item_id=False, output_url=False, browser=False, **kw): """Get attachments from a service.""" # skip pulling data if we don't need it get_data = (not output_url and not browser) # extract attachment IDs to display if the service uses ID maps display_ids = [] if id_map: for id, a_ids in ids: if not a_ids: display_ids.append(f'from {self.service.item.type} {id}') else: display_ids.extend(f'{id}:{a}' for a in a_ids) else: display_ids = ids if item_id: request = self.service.AttachmentsRequest(ids=ids, get_data=get_data) item_str = f' from {self.service.item.type}' plural = '(s)' else: request = self.service.AttachmentsRequest(attachment_ids=ids, get_data=get_data) item_str = '' plural = pluralism(display_ids) self.log_t(f"Getting attachment{plural}{item_str}: {', '.join(map(str, display_ids))}") def _launch_browser(ids): urls = list(self.service.attachment_urls(ids)) self.log_t(f'Launching attachment{pluralism(ids)} in browser: {const.BROWSER}') self.log(urls, prefix=' - ') launch_browser(urls) if not item_id and (output_url or browser): if output_url: print(*self.service.attachment_urls(ids), sep='\n') elif browser: _launch_browser(ids) else: attachments = request.send() # Attachment requests yield lists of attachments -- each list # corresponds to the attachments for given item ID or a single list # of all attachments requested. attachments = chain.from_iterable(attachments) if output_url: ids = (x.id for x in attachments) print(*self._attachment_urls(ids), sep='\n') elif browser: _launch_browser(x.id for x in attachments) else: save_to = kw.get('save_to') if save_to is not None: os.makedirs(save_to, exist_ok=True) self._process_attachments(attachments, **kw)
def feed(self, pkg, reporter):
    """Report IUSE flags on the package that aren't recognized as valid."""
    if self.iuse_handler.ignore:
        return
    unknown_flags = pkg.iuse_stripped.difference(
        self.iuse_handler.allowed_iuse(pkg))
    if unknown_flags:
        reporter.add_report(MetadataError(
            pkg, "iuse",
            "iuse unknown flag%s: [ %s ]" % (
                pluralism(unknown_flags), ", ".join(unknown_flags))))
def _supported_eapis(self, block, tag, lineno):
    """Parse @SUPPORTED_EAPIS tag arguments."""
    eapis = self._tag_inline_list(block, tag, lineno)
    unrecognized = set(eapis) - self._known_eapis
    if unrecognized:
        # warn but don't fail; the full list is still returned
        suffix = pluralism(unrecognized)
        listing = ' '.join(sorted(unrecognized))
        logger.warning(
            f'{repr(tag)}, line {lineno}: unknown EAPI{suffix}: {listing}')
    return OrderedSet(eapis)
def __init__(self, raw_repo, domain, domain_settings):
    """
    :param raw_repo: :obj:`UnconfiguredTree` instance
    :param domain_settings: environment settings to bind
    """
    required_settings = {'USE', 'CHOST'}
    missing_settings = required_settings.difference(domain_settings)
    if missing_settings:
        s = pluralism(missing_settings)
        raise errors.InitializationError(
            f"{self.__class__} missing required setting{s}: "
            f"{', '.join(map(repr, missing_settings))}")
def check_args(cls, parser, namespace):
    # When --commits is used, derive scanning targets from staged git
    # changes instead of explicit command line targets.
    if namespace.commits:
        if namespace.targets:
            targets = ' '.join(namespace.targets)
            s = pluralism(namespace.targets)
            parser.error(
                f'--commits is mutually exclusive with target{s}: {targets}'
            )
        ref = namespace.commits
        repo = namespace.target_repo
        # limit the diff to package category dirs plus eclass/ if present
        targets = list(repo.category_dirs)
        if os.path.isdir(pjoin(repo.location, 'eclass')):
            targets.append('eclass')
        try:
            p = subprocess.run(
                ['git', 'diff', '--cached', ref, '--name-only'] + targets,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=repo.location, encoding='utf8')
        except FileNotFoundError:
            parser.error(
                'git not available to determine targets for --commits')
        if p.returncode != 0:
            # surface the first line of git's stderr as the error
            error = p.stderr.splitlines()[0]
            parser.error(f'failed running git: {error}')
        elif not p.stdout:
            # no changes exist, exit early
            parser.exit()

        # split changed paths into ebuild pkgs vs eclasses
        pkgs, eclasses = partition(
            p.stdout.splitlines(),
            predicate=lambda x: x.startswith('eclass/'))
        pkgs = sorted(cls._pkg_atoms(pkgs))
        eclasses = filter(None, (eclass_regex.match(x) for x in eclasses))
        eclasses = sorted(x.group('eclass') for x in eclasses)

        # build scoped restrictions for the changed pkgs and eclasses
        restrictions = []
        if pkgs:
            restrict = packages.OrRestriction(*pkgs)
            restrictions.append((base.package_scope, restrict))
        if eclasses:
            func = partial(cls._committed_eclass, frozenset(eclasses))
            restrict = values.AnyMatch(values.FunctionRestriction(func))
            restrictions.append((base.eclass_scope, restrict))

        # no pkgs or eclasses to check, exit early
        if not restrictions:
            parser.exit()

        # stash uncommitted changes for the duration of the scan
        namespace.contexts.append(GitStash(parser, repo))
        namespace.restrictions = restrictions
def desc(self):
    """Render the unstated flag report, including profile context if set."""
    parts = [f'attr({self.attr})']
    if self.profile is not None:
        if self.num_profiles is not None:
            num_profiles = f' ({self.num_profiles} total)'
        else:
            num_profiles = ''
        parts.append(f'profile {self.profile!r}{num_profiles}')
    suffix = pluralism(self.flags)
    flag_list = ', '.join(self.flags)
    parts += [f'unstated flag{suffix}', f'[ {flag_list} ]']
    return ': '.join(parts)
def desc(self):
    """List each deprecated eclass along with its migration path."""
    entries = []
    for old_eclass, new_eclass in self.eclasses:
        # a falsy replacement means the eclass has no successor
        update_path = f'migrate to {new_eclass}' if new_eclass else 'no replacement'
        entries.append(f'{old_eclass} ({update_path})')
    es = pluralism(entries, plural='es')
    listing = ', '.join(entries)
    return f'uses deprecated eclass{es}: [ {listing} ]'
def _setup_arches(namespace, attr):
    # Resolve the arch sets to display from repo data combined with the
    # arch/prefix/stable options, storing the results on the namespace.
    # NOTE(review): the attr parameter is unused in this body -- confirm
    # whether callers still need it
    default_repo = namespace.config.get_default('repo')

    try:
        known_arches = {arch for r in namespace.repo.trees
                        for arch in r.config.known_arches}
    except AttributeError:
        try:
            # binary/vdb repos use known arches from the default repo
            known_arches = default_repo.config.known_arches
        except AttributeError:
            # TODO: remove fallback for tests after fixing default repo pull
            # from faked config
            known_arches = set()

    arches = known_arches
    if namespace.arch is not None:
        # apply explicit -/+ arch selections, rejecting unknown names
        disabled_arches, enabled_arches = namespace.arch
        disabled_arches = set(disabled_arches)
        enabled_arches = set(enabled_arches)
        unknown_arches = (disabled_arches.difference(known_arches)
                          | enabled_arches.difference(known_arches))
        if unknown_arches:
            unknown = ', '.join(map(repr, sorted(unknown_arches)))
            known = ', '.join(sorted(known_arches))
            es = pluralism(unknown_arches, plural='es')
            showkw.error(f'unknown arch{es}: {unknown} (choices: {known})')
        if enabled_arches:
            arches = arches.intersection(enabled_arches)
        if disabled_arches:
            arches = arches - disabled_arches

    # prefix arches contain '-' in their names; shown only with --prefix
    prefix_arches = set(x for x in arches if '-' in x)
    native_arches = arches.difference(prefix_arches)
    arches = native_arches
    if namespace.prefix:
        arches = arches.union(prefix_arches)
    if namespace.stable:
        try:
            stable_arches = {arch for r in namespace.repo.trees
                             for arch in r.config.profiles.arches('stable')}
        except AttributeError:
            # binary/vdb repos use stable arches from the default repo
            stable_arches = default_repo.config.profiles.arches('stable')
        arches = arches.intersection(stable_arches)

    namespace.known_arches = known_arches
    namespace.prefix_arches = prefix_arches
    namespace.native_arches = native_arches
    namespace.arches = arches
def __call__(self, parser, namespace, values, option_string=None):
    """Validate scope names and store (disabled, enabled) scope sets."""
    disabled, enabled = self.parse_values(values)

    # validate selected scopes
    unknown_scopes = set(disabled + enabled) - set(base.scopes)
    if unknown_scopes:
        unknown = ', '.join(map(repr, unknown_scopes))
        available = ', '.join(base.scopes)
        s = pluralism(unknown_scopes)
        raise argparse.ArgumentError(
            self,
            f'unknown scope{s}: {unknown} (available scopes: {available})')

    # map scope names to their scope objects
    disabled = {base.scopes[name] for name in disabled}
    enabled = {base.scopes[name] for name in enabled}
    setattr(namespace, self.dest, (disabled, enabled))
def parse_values(self, values):
    """Parse cache type selections, returning the set of enabled types."""
    all_cache_types = {cache.type for cache in CachedAddon.caches.values()}
    disabled, enabled = [], list(all_cache_types)
    # bare boolean-ish values toggle every cache type at once
    if values is None or values in ('y', 'yes', 'true'):
        pass
    elif values in ('n', 'no', 'false'):
        disabled = list(all_cache_types)
    else:
        disabled, enabled = super().parse_values(values)

    disabled = set(disabled)
    enabled = set(enabled) if enabled else all_cache_types
    unknown = (disabled | enabled) - all_cache_types
    if unknown:
        unknowns = ', '.join(map(repr, unknown))
        choices = ', '.join(map(repr, sorted(self.default)))
        s = pluralism(unknown)
        raise argparse.ArgumentError(
            self,
            f'unknown cache type{s}: {unknowns} (choose from {choices})')
    return set(enabled).difference(disabled)
def __call__(self, parser, namespace, values, option_string=None):
    """Expand check aliases, validate selections, and store them."""
    disabled, enabled = self.parse_values(values)

    available = set(objects.CHECKS)
    network = (c for c, v in objects.CHECKS.items()
               if issubclass(v, NetworkCheck))
    alias_map = {'all': available, 'net': network}

    # expand check aliases to check lists
    def expand(name):
        return alias_map.get(name, [name])

    disabled = {check for name in disabled for check in expand(name)}
    enabled = {check for name in enabled for check in expand(name)}

    # validate selected checks
    unknown_checks = (disabled | enabled) - available
    if unknown_checks:
        unknown = ', '.join(map(repr, unknown_checks))
        s = pluralism(unknown_checks)
        raise argparse.ArgumentError(self, f'unknown check{s}: {unknown}')

    setattr(namespace, self.dest, (disabled, enabled))
def main(options, out, err):
    # Build the requested package, running the selected phases in order.
    domain = options.domain
    kwds = {}
    phase_obs = observer.phase_observer(
        observer.formatter_output(out), not options.debug)

    # 'clean' is handled separately from the other phases
    phases = [x for x in options.phase if x != 'clean']
    clean = (len(phases) != len(options.phase))

    if options.no_auto:
        kwds["ignore_deps"] = True
        # without automatic deps, fetch must run before setup
        if "setup" in phases:
            phases.insert(0, "fetch")

    # by default turn off startup cleans; we clean by ourselves if
    # told to do so via an arg
    build = domain.build_pkg(options.pkg, phase_obs, clean=False,
                             allow_fetching=True)
    if clean:
        build.cleanup(force=True)
    build._reload_state()

    # map phase names to their methods on the build object
    phase_funcs = [(p, getattr(build, p, None)) for p in phases]
    unknown_phases = [p for p, func in phase_funcs if func is None]
    if unknown_phases:
        argparser.error(
            "unknown phase%s: %s" % (
                pluralism(unknown_phases),
                ', '.join(map(repr, unknown_phases))))

    try:
        for phase, func in phase_funcs:
            out.write('executing phase %s' % (phase, ))
            func(**kwds)
    except format.errors as e:
        out.error("caught exception executing phase %s: %s" % (phase, e))
        return 1
def short_desc(self):
    """Summarize the dropped keywords."""
    dropped = ', '.join(sorted(self.arches))
    return "keyword%s dropped: %s" % (pluralism(self.arches), dropped)
def unmerge(out, err, installed_repos, targets, options, formatter, world_set=None):
    """Unmerge tokens. hackish, should be rolled back into the resolver"""
    # split real and virtual repos
    vdb = installed_repos.real.combined
    fake_vdb = installed_repos.virtual.combined

    matches = set()
    fake = set()
    unknown = set()
    for token, restriction in targets:
        # Catch restrictions matching across more than one category.
        # Multiple matches in the same category are acceptable.

        # The point is that matching across more than one category is
        # nearly always unintentional ("pmerge -C spork" without
        # realising there are sporks in more than one category), but
        # matching more than one cat/pkg is impossible without
        # explicit wildcards.
        installed = vdb.match(restriction)
        if not installed:
            # not installed for real; check the virtual vdb before
            # declaring the token unknown
            fake_pkgs = fake_vdb.match(restriction)
            if fake_pkgs:
                fake.update(fake_pkgs)
            else:
                unknown.add(token)
            continue

        categories = set(pkg.category for pkg in installed)
        if len(categories) > 1:
            raise parserestrict.ParseError(
                "%r is in multiple categories (%s)" % (
                    token, ', '.join(sorted(set(pkg.key for pkg in installed)))))
        matches.update(installed)

    # fail out if no matches are found, otherwise just output a notification
    if unknown:
        unknowns = ', '.join(map(repr, unknown))
        if matches:
            err.write(f"Skipping unknown matches: {unknowns}\n")
        else:
            raise Failure(f"no matches found: {unknowns}")

    if fake:
        # fix: pluralize off the accumulated set of virtual pkgs, not
        # fake_pkgs, which only held the last loop iteration's matches
        err.write('Skipping virtual pkg%s: %s' % (
            pluralism(fake),
            ', '.join(f'{x.versioned_atom}::{x.repo_id}' for x in fake)))

    if matches:
        out.write(out.bold, 'The following packages are to be unmerged:')
        out.prefix = [out.bold, ' * ', out.reset]
        for pkg in matches:
            out.write(pkg.cpvstr)
        out.prefix = []

        repo_obs = observer.repo_observer(
            observer.formatter_output(out), debug=options.debug)

        if options.pretend:
            return

        if (options.ask and
                not formatter.ask("Would you like to unmerge these packages?")):
            return
        return do_unmerge(options, out, err, vdb, matches, world_set, repo_obs)
def desc(self):
    """Summarize the invalid package names."""
    suffix = pluralism(self.ebuilds)
    names = ', '.join(self.ebuilds)
    return f'invalid package name{suffix}: [ {names} ]'
def desc(self):
    """Describe the disallowed characters found in the filename."""
    suffix = pluralism(self.chars)
    bad_chars = ', '.join(map(repr, self.chars))
    return f'filename {self.filename!r} character{suffix} outside allowed set: {bad_chars}'
def desc(self):
    """Summarize unknown entries, pluralizing entry -> entries."""
    suffix = pluralism(self.filenames, singular='y', plural='ies')
    listing = ', '.join(map(repr, self.filenames))
    return f'unknown entr{suffix}: {listing}'
def test_singular():
    # a single-element sequence gets the empty suffix by default
    assert pluralism([1]) == ''
    # custom suffix for the singular case
    assert pluralism([1], singular='o') == 'o'
def main(options, out, err):
    # Match a single package from the target restriction and run the
    # selected build phases against it.
    token, restriction = options.target[0]
    domain = options.domain

    try:
        pkgs = options.repo.match(restriction, pkg_filter=None)
    except MetadataException as e:
        error = e.msg(verbosity=options.verbosity)
        argparser.error(f'{e.pkg.cpvstr}::{e.pkg.repo.repo_id}: {error}')
    if not pkgs:
        argparser.error(f"no matches: {token!r}")

    # pick the highest version; ambiguous slot/repo matches are fatal
    pkg = max(pkgs)
    if len(pkgs) > 1:
        argparser.err.write(f"got multiple matches for {token!r}:")
        if len(set((p.slot, p.repo) for p in pkgs)) != 1:
            for p in pkgs:
                repo_id = getattr(p.repo, 'repo_id', 'unknown')
                argparser.err.write(
                    f"{p.cpvstr}:{p.slot}::{repo_id}", prefix=' ')
            argparser.err.write()
            argparser.error("please refine your restriction to one match")
        repo_id = getattr(pkg.repo, 'repo_id', 'unknown')
        argparser.err.write(
            f"choosing {pkg.cpvstr}:{pkg.slot}::{repo_id}", prefix=' ')
        sys.stderr.flush()

    kwds = {}
    phase_obs = observer.phase_observer(
        observer.formatter_output(out), options.debug)

    # 'clean' is handled separately from the other phases
    phases = [x for x in options.phase if x != 'clean']
    clean = (len(phases) != len(options.phase))

    if options.no_auto:
        kwds["ignore_deps"] = True
        # without automatic deps, fetch must run before setup
        if "setup" in phases:
            phases.insert(0, "fetch")

    # forcibly run test phase if selected
    force_test = 'test' in phases
    if force_test and 'test' in pkg.iuse:
        pkg.use.add('test')

    # by default turn off startup cleans; we clean by ourselves if
    # told to do so via an arg
    build = domain.build_pkg(
        pkg, failed=True, clean=False, allow_fetching=True,
        observer=phase_obs, force_test=force_test)
    if clean:
        build.cleanup(force=True)
    build._reload_state()

    # map phase names to their methods on the build object
    phase_funcs = [(p, getattr(build, p, None)) for p in phases]
    unknown_phases = [p for p, func in phase_funcs if func is None]
    if unknown_phases:
        argparser.error(
            "unknown phase%s: %s" % (
                pluralism(unknown_phases),
                ', '.join(map(repr, unknown_phases))))

    try:
        for phase, func in phase_funcs:
            out.write(f'executing phase {phase}')
            func(**kwds)
    except OperationError as e:
        raise ExitException(
            f"caught exception executing phase {phase}: {e}") from e
def test_none():
    # an empty sequence gets the plural suffix by default
    assert pluralism([]) == 's'
    # custom suffix for the empty case
    assert pluralism([], none='') == ''
def _parse_config(self):
    """Load data from the repo's metadata/layout.conf file."""
    path = pjoin(self.location, self.layout_offset)
    data = read_dict(iter_read_bash(
        readlines(path, strip_whitespace=True, swallow_missing=True)),
        source_isiter=True, strip=True, filename=path, ignore_errors=True)

    sf = object.__setattr__
    sf(self, 'repo_name', data.get('repo-name', None))

    # manifest hash settings; 'size' is always implicitly prepended
    hashes = data.get('manifest-hashes', '').lower().split()
    if hashes:
        hashes = ['size'] + hashes
        hashes = tuple(iter_stable_unique(hashes))
    else:
        hashes = self.default_hashes

    required_hashes = data.get('manifest-required-hashes', '').lower().split()
    if required_hashes:
        required_hashes = ['size'] + required_hashes
        required_hashes = tuple(iter_stable_unique(required_hashes))
    else:
        required_hashes = self.default_required_hashes

    manifest_policy = data.get('use-manifests', 'strict').lower()
    d = {
        'disabled': (manifest_policy == 'false'),
        'strict': (manifest_policy == 'strict'),
        'thin': (data.get('thin-manifests', '').lower() == 'true'),
        'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
        'hashes': hashes,
        'required_hashes': required_hashes,
    }
    sf(self, 'manifests', _immutable_attr_dict(d))

    # masters should be explicitly set; warn (don't fail) when missing
    masters = data.get('masters')
    _missing_masters = False
    if masters is None:
        if not self.is_empty:
            logger.warning(
                f"{self.repo_id} repo at {self.location!r}, doesn't "
                "specify masters in metadata/layout.conf. Please explicitly "
                "set masters (use \"masters =\" if the repo is standalone)."
            )
        _missing_masters = True
        masters = ()
    else:
        masters = tuple(iter_stable_unique(masters.split()))
    sf(self, '_missing_masters', _missing_masters)
    sf(self, 'masters', masters)

    # aliases include configured/declared names plus the repo location
    aliases = data.get('aliases', '').split() + [
        self.config_name, self.repo_name, self.pms_repo_name, self.location]
    sf(self, 'aliases', tuple(filter(None, iter_stable_unique(aliases))))
    sf(self, 'eapis_deprecated',
       tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))
    sf(self, 'eapis_banned',
       tuple(iter_stable_unique(data.get('eapis-banned', '').split())))
    sf(self, 'properties_allowed',
       tuple(iter_stable_unique(data.get('properties-allowed', '').split())))
    sf(self, 'restrict_allowed',
       tuple(iter_stable_unique(data.get('restrict-allowed', '').split())))

    # pick the most favored cache format among those the repo declares
    v = set(data.get('cache-formats', 'md5-dict').lower().split())
    if not v:
        v = [None]
    else:
        # sort into favored order
        v = [f for f in self.supported_cache_formats if f in v]
        if not v:
            logger.warning(
                f'unknown cache format: falling back to md5-dict format')
            v = ['md5-dict']
    sf(self, 'cache_format', list(v)[0])

    # profile formats default to pms; unsupported formats are dropped
    profile_formats = set(
        data.get('profile-formats', 'pms').lower().split())
    if not profile_formats:
        logger.info(
            f"{self.repo_id!r} repo at {self.location!r} has explicitly "
            "unset profile-formats, defaulting to pms")
        profile_formats = {'pms'}
    unknown = profile_formats.difference(self.supported_profile_formats)
    if unknown:
        logger.info("%r repo at %r has unsupported profile format%s: %s",
                    self.repo_id, self.location, pluralism(unknown),
                    ', '.join(sorted(unknown)))
        profile_formats.difference_update(unknown)
        profile_formats.add('pms')
    sf(self, 'profile_formats', profile_formats)
def test_plural():
    # multiple elements get the 's' suffix by default
    assert pluralism([1, 2]) == 's'
    # custom suffix for the plural case
    assert pluralism([1, 2], plural='ies') == 'ies'
def desc(self):
    """Describe the commit's missing sign-offs."""
    suffix = pluralism(self.missing_sign_offs)
    missing = ', '.join(self.missing_sign_offs)
    return f'commit {self.commit}, missing sign-off{suffix}: {missing}'
def desc(self):
    """Summarize stabled arches versus lagging keywords for the slot."""
    es = pluralism(self.stable, plural='es')
    stabled = ', '.join(self.stable)
    lagging = ', '.join(self.keywords)
    return f'slot({self.slot}), stabled arch{es}: [ {stabled} ], lagging: [ {lagging} ]'
def test_int():
    # integers are treated as counts: only a count of one is singular
    assert pluralism(0) == 's'
    assert pluralism(1) == ''
    assert pluralism(2) == 's'
def short_desc(self):
    """Summarize nonexistent deps in the given depset attribute."""
    suffix = pluralism(self.atoms)
    atom_list = ', '.join(self.atoms)
    return "depset %s: nonexistent dep%s: [ %s ]" % (
        self.attr, suffix, atom_list)
tag = mo.group('tag') missing_tags.discard(tag) value = mo.group('value').strip() blocks.append((tag, line_ind + i, [value])) else: blocks[-1][-1].append(line) # parse each tag block for tag, line_ind, block in blocks: name, required, func, _default = self.tags[tag] data[name] = func(block, tag, line_ind) # check if any required tags are missing if missing_tags: missing_tags_str = ', '.join(map(repr, missing_tags)) s = pluralism(missing_tags) logger.warning( f'{repr(lines[0])}: missing tag{s}: {missing_tags_str}') return AttrDict(data) class EclassBlock(ParseEclassDoc): """ECLASS doc block.""" tag = '@ECLASS:' def __init__(self): tags = { '@ECLASS:': ('name', True, self._tag_inline_arg, None), '@VCSURL:': ('vcsurl', False, self._tag_inline_arg, None),
def short_desc(self):
    """Report unstable keywords unchanged within the tracked period."""
    suffix = pluralism(self.keywords)
    keyword_list = ', '.join(self.keywords)
    return "no change in %i days for unstable keyword%s: [ %s ]" % (
        self.period, suffix, keyword_list)
def short_desc(self):
    """Report local USE flags declared in metadata.xml but never used."""
    suffix = pluralism(self.flags)
    flag_list = ', '.join(self.flags)
    return "metadata.xml unused local use flag%s: [ %s ]" % (suffix, flag_list)