Esempio n. 1
0
def update_use_local_desc(repo, observer):
    """Regenerate a repo's profiles/use.local.desc cache from metadata.xml data.

    Writes the file atomically; on any IO failure the partially written temp
    file is discarded and os.EX_IOERR is returned. Returns 0 on success, 1 if
    any metadata.xml failed to parse.
    """
    status = 0
    use_local_desc = pjoin(repo.location, "profiles", "use.local.desc")
    outfile = None

    def _log_xml_error(exc):
        # Substituted for pkgcore's logger.error while metadata.xml files are
        # parsed; reports the offending package and marks the run as failed.
        observer.error(f'{cat}/{pkg}: failed parsing metadata.xml: {str(exc)}')
        nonlocal status
        status = 1

    # deprecation banner written at the top of the generated file
    header = textwrap.dedent('''\
        # This file is deprecated as per GLEP 56 in favor of metadata.xml.
        # Please add your descriptions to your package's metadata.xml ONLY.
        # * generated automatically using pmaint *\n\n''')

    try:
        outfile = AtomicWriteFile(use_local_desc)
        outfile.write(header)
        with patch('pkgcore.log.logger.error', _log_xml_error):
            for cat, pkgs in sorted(repo.packages.items()):
                for pkg in sorted(pkgs):
                    metadata = repo._get_metadata_xml(cat, pkg)
                    for flag, desc in sorted(metadata.local_use.items()):
                        outfile.write(f'{cat}/{pkg}:{flag} - {desc}\n')
        outfile.close()
    except IOError as e:
        observer.error(
            f"Unable to update use.local.desc file {use_local_desc!r}: {e.strerror}"
        )
        status = os.EX_IOERR
    finally:
        # no-op after a successful close(); otherwise removes the temp file
        if outfile is not None:
            outfile.discard()

    return status
Esempio n. 2
0
 def parse_config_options(self, namespace, section='DEFAULT'):
     """Parse options from config if they exist."""
     # Turn each config key/value pair into a long-option token; a key with
     # an empty value becomes a bare flag.
     config_args = []
     for key, value in self.config.items(section):
         config_args.append(f'--{key}={value}' if value else f'--{key}')

     if not config_args:
         return namespace

     # Route parser errors triggered by bad config entries to the dedicated
     # config error handler instead of the normal CLI error path.
     with patch('snakeoil.cli.arghparse.ArgumentParser.error',
                self._config_error):
         namespace, _ = self.parse_known_optionals(config_args, namespace)
     return namespace
Esempio n. 3
0
    def finish(self):
        """Run repo-wide profile checks and yield result objects.

        Emits reports for unknown category dirs, arches lacking profiles,
        profiles.desc parse warnings/errors, profiles whose EAPI lags behind
        a parent's, and profile dirs never referenced by any profile stack.
        """
        # don't check for unknown category dirs on overlays
        if self.options.gentoo_repo:
            # on-disk category dirs (skipping hidden ones) minus the repo's
            # known false categories
            category_dirs = set(
                filterfalse(self.repo.false_categories.__contains__,
                            (x for x in listdir_dirs(self.repo.location)
                             if x[0] != '.')))
            unknown_categories = category_dirs.difference(self.repo.categories)
            if unknown_categories:
                yield UnknownCategories(sorted(unknown_categories))

        arches_without_profiles = set(self.arches) - set(
            self.repo.profiles.arches())
        if arches_without_profiles:
            yield ArchesWithoutProfiles(sorted(arches_without_profiles))

        # root-level dirs that are never part of a profile stack and thus
        # shouldn't be flagged as unused
        root_profile_dirs = {'embedded'}
        # every dir under profiles/, stored relative to the profiles dir
        available_profile_dirs = set()
        for root, _dirs, _files in os.walk(self.profiles_dir):
            d = root[len(self.profiles_dir):].lstrip('/')
            if d:
                available_profile_dirs.add(d)
        available_profile_dirs -= self.non_profile_dirs | root_profile_dirs

        # collect logged warnings/errors as report objects instead
        profile_reports = []
        report_profile_warnings = lambda x: profile_reports.append(
            ProfileWarning(x))
        report_profile_errors = lambda x: profile_reports.append(
            ProfileError(x))

        # don't check for acceptable profile statuses on overlays
        if self.options.gentoo_repo:
            known_profile_statuses = self.known_profile_statuses
        else:
            known_profile_statuses = None

        # forcibly parse profiles.desc and convert log warnings/errors into reports
        with patch('pkgcore.log.logger.error', report_profile_errors), \
                patch('pkgcore.log.logger.warning', report_profile_warnings):
            profiles = Profiles.parse(self.profiles_dir,
                                      self.repo.repo_id,
                                      known_status=known_profile_statuses,
                                      known_arch=self.arches)

        yield from profile_reports

        seen_profile_dirs = set()
        lagging_profile_eapi = defaultdict(list)
        for p in profiles:
            try:
                profile = profiles_mod.ProfileStack(
                    pjoin(self.profiles_dir, p.path))
            except profiles_mod.ProfileError:
                yield NonexistentProfilePath(p.path)
                continue
            # mark every dir in the profile's inheritance stack (plus their
            # parent dirs) as used
            for parent in profile.stack:
                seen_profile_dirs.update(
                    dir_parents(parent.path[len(self.profiles_dir):]))
                # flag lagging profile EAPIs -- assumes EAPIs are sequentially
                # numbered which should be the case for the gentoo repo
                # NOTE(review): this is a lexicographic string comparison, so
                # multi-digit EAPIs would misorder (e.g. '10' < '9') — confirm
                # acceptable for the expected EAPI range
                if (self.options.gentoo_repo
                        and str(profile.eapi) < str(parent.eapi)):
                    lagging_profile_eapi[profile].append(parent)

        # only the last recorded lagging parent per profile is reported
        for profile, parents in lagging_profile_eapi.items():
            parent = parents[-1]
            yield LaggingProfileEapi(profile.name, str(profile.eapi),
                                     parent.name, str(parent.eapi))

        unused_profile_dirs = available_profile_dirs - seen_profile_dirs
        if unused_profile_dirs:
            yield UnusedProfileDirs(sorted(unused_profile_dirs))
Esempio n. 4
0
    def finish(self):
        """Validate profile files across the profiles dir and yield reports.

        Walks every profile node, parses the files each node provides via the
        handlers in ``file_parse_map``, and reports unknown packages, USE
        flags, and keywords referenced by profile files, plus any parse
        warnings/errors captured from the logger.
        """
        # accumulated results, keyed by profile path then by filename
        unknown_pkgs = defaultdict(lambda: defaultdict(list))
        unknown_pkg_use = defaultdict(lambda: defaultdict(list))
        unknown_use = defaultdict(lambda: defaultdict(list))
        unknown_keywords = defaultdict(lambda: defaultdict(list))

        def _pkg_atoms(filename, profile, vals):
            """Record atoms that match nothing in the repo."""
            for a in iflatten_instance(vals, atom.atom):
                if not self.repo.match(a):
                    unknown_pkgs[profile.path][filename].append(a)

        def _pkg_keywords(filename, profile, vals):
            """Record (atom, keywords) entries that use unknown keywords."""
            # NOTE(review): loop var 'atom' shadows the imported atom module
            # within this scope only
            for atom, keywords in vals:
                invalid = set(keywords) - self.valid_keywords
                if invalid:
                    unknown_keywords[profile.path][filename].append(
                        (atom, invalid))

        def _pkg_use(filename, profile, vals):
            """Record per-package USE entries with unknown packages or flags."""
            # TODO: give ChunkedDataDict some dict view methods
            d = vals
            if isinstance(d, misc.ChunkedDataDict):
                d = vals.render_to_dict()

            for _pkg, entries in d.items():
                for a, disabled, enabled in entries:
                    pkgs = self.repo.match(a)
                    if not pkgs:
                        unknown_pkgs[profile.path][filename].append(a)
                    else:
                        # a flag counts as known if any matching version has it
                        available = {
                            u
                            for pkg in pkgs for u in pkg.iuse_stripped
                        }
                        unknown_disabled = set(disabled) - available
                        unknown_enabled = set(enabled) - available
                        if unknown_disabled:
                            unknown_pkg_use[profile.path][filename].append(
                                (a, ('-' + u for u in unknown_disabled)))
                        if unknown_enabled:
                            unknown_pkg_use[profile.path][filename].append(
                                (a, unknown_enabled))

        def _use(filename, profile, vals):
            """Record global USE entries referencing flags outside the repo's IUSE."""
            # TODO: give ChunkedDataDict some dict view methods
            d = vals.render_to_dict()
            for _, entries in d.items():
                for _, disabled, enabled in entries:
                    unknown_disabled = set(disabled) - self.available_iuse
                    unknown_enabled = set(enabled) - self.available_iuse
                    if unknown_disabled:
                        unknown_use[profile.path][filename].extend(
                            ('-' + u for u in unknown_disabled))
                    if unknown_enabled:
                        unknown_use[profile.path][filename].extend(
                            unknown_enabled)

        def _deprecated(filename, profile, vals):
            """Yield an error if a deprecated profile's replacement doesn't exist.

            Unlike the other handlers this is a generator; its results are
            re-yielded by the caller.
            """
            # make sure replacement profile exists
            if vals is not None:
                replacement, msg = vals
                try:
                    _ProfileNode(pjoin(self.profiles_dir, replacement))
                except profiles_mod.ProfileError as e:
                    # NOTE(review): 'e' is captured but unused
                    yield ProfileError(
                        f'nonexistent replacement {replacement!r} '
                        f'for deprecated profile: {profile.name!r}')

        # maps profile filename -> (profile attr holding parsed data, handler)
        file_parse_map = {
            'packages': ('packages', _pkg_atoms),
            'package.mask': ('masks', _pkg_atoms),
            'package.unmask': ('unmasks', _pkg_atoms),
            'package.use': ('pkg_use', _pkg_use),
            'package.use.force': ('pkg_use_force', _pkg_use),
            'package.use.stable.force': ('pkg_use_stable_force', _pkg_use),
            'package.use.mask': ('pkg_use_mask', _pkg_use),
            'package.use.stable.mask': ('pkg_use_stable_mask', _pkg_use),
            'use.force': ('use_force', _use),
            'use.stable.force': ('use_stable_force', _use),
            'use.mask': ('use_mask', _use),
            'use.stable.mask': ('use_stable_mask', _use),
            'parent': ('parents', lambda *args: None),
            'deprecated': ('deprecated', _deprecated),

            # non-PMS files
            'package.keywords': ('keywords', _pkg_keywords),
            'package.accept_keywords': ('accept_keywords', _pkg_keywords),
        }

        # collect logged warnings/errors as report objects
        profile_reports = []
        report_profile_warnings = lambda x: profile_reports.append(
            ProfileWarning(x))
        report_profile_errors = lambda x: profile_reports.append(
            ProfileError(x))

        for root, _dirs, files in os.walk(self.profiles_dir):
            if root not in self.non_profile_dirs:
                profile = _ProfileNode(root)
                for f in set(files).intersection(file_parse_map.keys()):
                    attr, func = file_parse_map[f]
                    # NOTE(review): file_path is assigned but never used
                    file_path = pjoin(root[len(self.profiles_dir) + 1:], f)
                    # convert log warnings/errors into reports
                    with patch('pkgcore.log.logger.error', report_profile_errors), \
                            patch('pkgcore.log.logger.warning', report_profile_warnings):
                        vals = getattr(profile, attr)
                    # handlers either return None (pure accumulation) or a
                    # generator of results to re-yield
                    results = func(f, profile, vals)
                    if results is not None:
                        yield from results

        yield from profile_reports

        # emit accumulated results, with file paths relative to profiles/
        for path, filenames in sorted(unknown_pkgs.items()):
            for filename, vals in filenames.items():
                pkgs = map(str, vals)
                yield UnknownProfilePackages(
                    pjoin(path[len(self.profiles_dir):].lstrip('/'), filename),
                    pkgs)

        for path, filenames in sorted(unknown_pkg_use.items()):
            for filename, vals in filenames.items():
                for pkg, flags in vals:
                    yield UnknownProfilePackageUse(
                        pjoin(path[len(self.profiles_dir):].lstrip('/'),
                              filename), str(pkg), flags)

        for path, filenames in sorted(unknown_use.items()):
            for filename, vals in filenames.items():
                yield UnknownProfileUse(
                    pjoin(path[len(self.profiles_dir):].lstrip('/'), filename),
                    vals)

        for path, filenames in sorted(unknown_keywords.items()):
            for filename, vals in filenames.items():
                for pkg, keywords in vals:
                    yield UnknownProfilePackageKeywords(
                        pjoin(path[len(self.profiles_dir):].lstrip('/'),
                              filename), str(pkg), keywords)
Esempio n. 5
0
    def finish(self):
        """Yield reports for bad, chained, or stale entries in profiles/updates."""
        collected = []

        def _bad_update(msg):
            collected.append(BadPackageUpdate(msg))

        def _old_update(msg):
            collected.append(MovedPackageUpdate(msg))

        # convert log warnings/errors into reports
        with patch('pkgcore.log.logger.error', _bad_update), \
                patch('pkgcore.log.logger.warning', _old_update):
            repo_updates = self.repo.config.updates

        yield from collected

        multi_move_updates = {}
        old_move_updates = {}
        old_slotmove_updates = {}

        for key, updates in repo_updates.items():
            moves = [u for u in updates if u[0] == 'move']
            slotmoves = [u for u in updates if u[0] == 'slotmove']

            if len(moves) > 1:
                # multi-update chain a -> b, b -> c, ...: the most recent move
                # overrides the older entries, so only one report covering the
                # whole chain should be created
                targets = [u[2] for u in moves]
                multi_move_updates[targets[-1]] = (key, targets)
            else:
                # scan updates for old entries with removed packages
                for entry in moves:
                    _, _old, new = entry
                    if not self.repo.match(new):
                        old_move_updates[new] = entry

            # scan updates for old entries with removed packages
            for entry in slotmoves:
                _, slot_pkg, newslot = entry
                if not self.repo.match(slot_pkg.unversioned_atom):
                    # reproduce updates file line data for result output
                    line = ('slotmove', str(slot_pkg)[:-(len(slot_pkg.slot) + 1)],
                            slot_pkg.slot, newslot)
                    old_slotmove_updates[slot_pkg.key] = line

        for final_pkg, (orig_pkg, targets) in multi_move_updates.items():
            trail = [str(orig_pkg)] + [str(t) for t in targets]
            if self.repo.match(final_pkg):
                yield MultiMovePackageUpdate(str(orig_pkg), trail)
            else:
                # the chain ends in a removed package
                yield OldMultiMovePackageUpdate(str(trail[-1]), trail)
                # don't generate duplicate old report
                old_move_updates.pop(final_pkg, None)

        # report remaining old updates
        for key, entry in chain(old_move_updates.items(),
                                old_slotmove_updates.items()):
            yield OldPackageUpdate(str(key), map(str, entry))