コード例 #1
0
ファイル: base.py プロジェクト: chutz/pkgcheck
 def remove_caches(cls, options):
     """Remove all or selected caches.

     With ``options.force_cache`` set, the entire cache directory is
     removed in one shot; otherwise only the cache types selected via
     ``options.cache_types`` are removed file by file, honoring
     ``options.dry_run``.

     Returns:
         int: 0 on success (shell exit-status style).

     Raises:
         UserException: if removing a cache file or directory fails.
     """
     force = getattr(options, 'force_cache', False)
     if force:
         # forced removal: blow away the whole cache dir
         try:
             shutil.rmtree(CACHE_DIR)
         except FileNotFoundError:
             # nothing cached -- already "removed"
             pass
         except IOError as e:
             raise UserException(f'failed removing cache dir: {e}')
     else:
         try:
             for cache_type, paths in cls.existing().items():
                 if cache_type in options.cache_types:
                     for path in paths:
                         if options.dry_run:
                             print(f'Would remove {path}')
                         else:
                             # assumes path is a pathlib.Path-like object
                             # (unlink()/parent used below) -- TODO confirm
                             path.unlink()
                             # remove empty cache dirs
                             try:
                                 while str(path) != CACHE_DIR:
                                     path.parent.rmdir()
                                     path = path.parent
                             except OSError as e:
                                 # stop pruning at the first non-empty dir
                                 if e.errno == errno.ENOTEMPTY:
                                     continue
                                 raise
         except IOError as e:
             # NOTE(review): if cls.existing() itself raises IOError,
             # cache_type/path are unbound here -- confirm upstream.
             raise UserException(
                 f'failed removing {cache_type} cache: {path!r}: {e}')
     return 0
コード例 #2
0
def _replay(options, out, err):
    """Replay a results file through the configured reporter.

    The file is first treated as a JSON-encoded results stream; if that
    fails before producing any results, the file is rewound and retried
    in pickle format.

    Returns:
        int: 0 on success.

    Raises:
        UserException: if the file is unreadable in either format, or
            deserialization broke partway through.
    """
    processed = 0
    exc = None
    with options.reporter(out) as reporter:
        try:
            for result in reporters.JsonStream.from_file(options.results):
                reporter.report(result)
                processed += 1
        except reporters.DeserializationError as json_error:
            if processed:
                # file started out valid then broke partway through
                exc = json_error
            else:
                # nothing usable as JSON -- rewind and retry as pickle
                options.results.seek(0)
                try:
                    for result in reporters.PickleStream.from_file(
                            options.results):
                        reporter.report(result)
                        processed += 1
                except reporters.DeserializationError as pickle_error:
                    exc = pickle_error

    if exc:
        if not processed:
            raise UserException('invalid or unsupported replay file')
        raise UserException(
            f'corrupted results file {options.results.name!r}: {exc}')

    return 0
コード例 #3
0
def _path_restrict(path, namespace):
    """Build a package restriction and scan scope for the given path.

    The initial repo entry of the path restriction is dropped because
    runs only ever target a single repo, making it redundant; it also
    breaks several custom sources involving raw pkgs (lacking a repo
    attr) or faked repos.

    Raises:
        UserException: if the path doesn't map into the target repo.
    """
    repo = namespace.target_repo
    path = os.path.realpath(path)
    try:
        # drop the leading repo restriction (see docstring)
        path_restrictions = repo.path_restrict(path)[1:]
    except ValueError as e:
        raise UserException(str(e))

    if path_restrictions:
        restrict = packages.AndRestriction(*path_restrictions)
    else:
        restrict = packages.AlwaysTrue

    # allow location specific scopes to override the path restrict scope
    location_scopes = (x for x in base.scopes.values() if x.level == 0)
    for scope in location_scopes:
        if path.startswith(pjoin(repo.location, scope.desc)):
            break
    else:
        scope = _restrict_to_scope(restrict)

    return scope, restrict
コード例 #4
0
ファイル: mangle.py プロジェクト: arthurzam/pkgdev
 def _kill_pipe(self, *args, error=None):
     """Tear down the mangling process group.

     Always raises: UserException when propagating an error from
     parallelized mangling, KeyboardInterrupt otherwise.
     """
     # kill the whole process group if the runner hasn't exited yet
     if self._runner.is_alive():
         os.killpg(self._runner.pid, signal.SIGKILL)
     if error is None:
         raise KeyboardInterrupt
     # propagate exception raised during parallelized mangling
     raise UserException(error)
コード例 #5
0
ファイル: cli.py プロジェクト: sbraz/pkgcheck
 def parse_config(self, configs):
     """Parse the given config files, caching the parser on the instance.

     Raises:
         UserException: if any config file fails to parse.
     """
     # parser is stored on the instance before any files are read
     self.config = configparser.ConfigParser()
     try:
         for config_file in configs:
             self.config.read(config_file)
     except configparser.ParsingError as e:
         raise UserException(f'parsing config file failed: {e}')
     return self.config
コード例 #6
0
ファイル: cli.py プロジェクト: lucianposton/pkgcheck
 def config(self):
     """Parsed configuration built from this parser's config files.

     Raises:
         UserException: if any config file fails to parse.
     """
     parsed = configparser.ConfigParser()
     try:
         for config_file in self.configs:
             parsed.read(config_file)
     except configparser.ParsingError as e:
         raise UserException(f'parsing config file failed: {e}')
     return parsed
コード例 #7
0
ファイル: addons.py プロジェクト: chutz/pkgcheck
 def __init__(self, *args):
     """Initialize the addon, creating a shared network session.

     Raises:
         UserException: if the optional ``requests`` dependency required
             for network checks is not installed.
     """
     super().__init__(*args)
     try:
         # imported lazily so the requests dependency stays optional
         from .net import Session
         self.session = Session(
             concurrent=self.options.tasks, timeout=self.options.timeout)
     except ImportError as e:
         if e.name == 'requests':
             raise UserException('network checks require requests to be installed')
         # some other import failure -- don't mask it
         raise
コード例 #8
0
ファイル: git.py プロジェクト: arthurzam/pkgdev
def run(*args, **kwargs):
    """Run git with the given arguments via subprocess.run().

    Execution defaults to checked, text mode. Raises UserException when
    the git binary can't be found and GitError on a nonzero exit status.
    """
    kwargs.setdefault('check', True)
    kwargs.setdefault('text', True)
    cmd = ['git', *args]

    # echo the git command that would be run to stderr for dry runs
    if '--dry-run' in args:
        visible = (x for x in cmd if x != '--dry-run')
        sys.stderr.write(f"{' '.join(visible)}\n")

    try:
        return subprocess.run(cmd, **kwargs)
    except FileNotFoundError as e:
        raise UserException(str(e))
    except subprocess.CalledProcessError as e:
        raise GitError(e.returncode)
コード例 #9
0
def _path_restrict(path, namespace):
    """Generate custom package restriction from a given path.

    This drops the repo restriction (initial entry in path restrictions)
    since runs can only be made against single repo targets so the extra
    restriction is redundant and breaks several custom sources involving
    raw pkgs (lacking a repo attr) or faked repos.

    Raises:
        UserException: if the path doesn't map into the target repo.
    """
    repo = namespace.target_repo
    try:
        # drop the leading repo restriction (see docstring); the dead
        # `restrictions = []` pre-initialization was removed -- this
        # statement either binds restrictions or raises
        restrictions = repo.path_restrict(path)[1:]
    except ValueError as e:
        raise UserException(str(e))
    if restrictions:
        return packages.AndRestriction(*restrictions)
    return packages.AlwaysTrue
コード例 #10
0
    def __init__(self, *args, arches_addon=None):
        """Initialize per-arch profile data for the target repo.

        Builds ``self.profile_filters`` (keyword -> list of ProfileData)
        and ``self.profile_evaluate_dict`` (keyword -> groups of profiles
        sharing identical masked/forced USE), optionally loading and
        saving a pickled profile cache to avoid expensive regeneration.

        Raises:
            UserException: if dumping an updated profiles cache fails.
        """
        # NOTE(review): arches_addon appears unused in this body --
        # presumably consumed by the addon framework; confirm upstream.
        super().__init__(*args)
        target_repo = self.options.target_repo

        self.official_arches = target_repo.known_arches
        self.desired_arches = getattr(self.options, 'arches', None)
        if self.desired_arches is None or self.options.selected_arches is None:
            # copy it to be safe
            self.desired_arches = set(self.official_arches)

        self.global_insoluble = set()
        profile_filters = defaultdict(list)
        chunked_data_cache = {}
        cached_profiles = defaultdict(dict)

        # try loading pickled profile data for each repo in the target tree
        if self.options.cache['profiles']:
            for repo in target_repo.trees:
                cache_file = self.cache_file(repo)
                # add profiles-base -> repo mapping to ease storage procedure
                cached_profiles[repo.config.profiles_base]['repo'] = repo
                try:
                    with open(cache_file, 'rb') as f:
                        cache = pickle.load(f)
                    if cache.version == self.cache.version:
                        cached_profiles[repo.config.profiles_base].update(
                            cache)
                    else:
                        logger.debug(
                            'forcing %s profile cache regen '
                            'due to outdated version', repo.repo_id)
                        os.remove(cache_file)
                except FileNotFoundError as e:
                    # no cache yet -- profiles regenerate below
                    pass
                except (AttributeError, EOFError, ImportError,
                        IndexError) as e:
                    # corrupt/incompatible pickle -- drop it and regenerate
                    logger.debug('forcing %s profile cache regen: %s',
                                 repo.repo_id, e)
                    os.remove(cache_file)

        # build stable/unstable keyword restrictions per desired arch
        for k in sorted(self.desired_arches):
            # skip unstable-only keys whose stable form isn't desired
            if k.lstrip("~") not in self.desired_arches:
                continue
            stable_key = k.lstrip("~")
            unstable_key = "~" + stable_key
            stable_r = packages.PackageRestriction(
                "keywords", values.ContainmentMatch2((stable_key, )))
            unstable_r = packages.PackageRestriction(
                "keywords",
                values.ContainmentMatch2((
                    stable_key,
                    unstable_key,
                )))

            # every other official arch is masked by default for this arch
            default_masked_use = tuple(
                set(x for x in self.official_arches if x != stable_key))

            for profile_obj, profile in self.options.arch_profiles.get(k, []):
                files = self.profile_data.get(profile, None)
                try:
                    # prefer cached profile data; any KeyError (including
                    # the forced one below) falls through to regeneration
                    cached_profile = cached_profiles[profile.base][
                        profile.path]
                    if files != cached_profile['files']:
                        # force refresh of outdated cache entry
                        raise KeyError

                    masks = cached_profile['masks']
                    unmasks = cached_profile['unmasks']
                    immutable_flags = cached_profile['immutable_flags']
                    stable_immutable_flags = cached_profile[
                        'stable_immutable_flags']
                    enabled_flags = cached_profile['enabled_flags']
                    stable_enabled_flags = cached_profile[
                        'stable_enabled_flags']
                    pkg_use = cached_profile['pkg_use']
                    iuse_effective = cached_profile['iuse_effective']
                    use = cached_profile['use']
                    provides_repo = cached_profile['provides_repo']
                except KeyError:
                    logger.debug('profile regen: %s', profile.path)
                    try:
                        masks = profile_obj.masks
                        unmasks = profile_obj.unmasks

                        # masked USE: profile masks plus default arch masks
                        immutable_flags = profile_obj.masked_use.clone(
                            unfreeze=True)
                        immutable_flags.add_bare_global((), default_masked_use)
                        immutable_flags.optimize(cache=chunked_data_cache)
                        immutable_flags.freeze()

                        stable_immutable_flags = profile_obj.stable_masked_use.clone(
                            unfreeze=True)
                        stable_immutable_flags.add_bare_global(
                            (), default_masked_use)
                        stable_immutable_flags.optimize(
                            cache=chunked_data_cache)
                        stable_immutable_flags.freeze()

                        # forced USE: profile forces plus the arch itself
                        enabled_flags = profile_obj.forced_use.clone(
                            unfreeze=True)
                        enabled_flags.add_bare_global((), (stable_key, ))
                        enabled_flags.optimize(cache=chunked_data_cache)
                        enabled_flags.freeze()

                        stable_enabled_flags = profile_obj.stable_forced_use.clone(
                            unfreeze=True)
                        stable_enabled_flags.add_bare_global((),
                                                             (stable_key, ))
                        stable_enabled_flags.optimize(cache=chunked_data_cache)
                        stable_enabled_flags.freeze()

                        pkg_use = profile_obj.pkg_use
                        iuse_effective = profile_obj.iuse_effective
                        provides_repo = profile_obj.provides_repo

                        # finalize enabled USE flags
                        use = set()
                        misc.incremental_expansion(use, profile_obj.use,
                                                   'while expanding USE')
                        use = frozenset(use)
                    except profiles_mod.ProfileError:
                        # unsupported EAPI or other issue, profile checks will catch this
                        continue

                    # stash regenerated data for the cache dump below
                    if self.options.cache['profiles']:
                        cached_profiles[profile.base]['update'] = True
                        cached_profiles[profile.base][profile.path] = {
                            'files': files,
                            'masks': masks,
                            'unmasks': unmasks,
                            'immutable_flags': immutable_flags,
                            'stable_immutable_flags': stable_immutable_flags,
                            'enabled_flags': enabled_flags,
                            'stable_enabled_flags': stable_enabled_flags,
                            'pkg_use': pkg_use,
                            'iuse_effective': iuse_effective,
                            'use': use,
                            'provides_repo': provides_repo,
                        }

                # used to interlink stable/unstable lookups so that if
                # unstable says it's not visible, stable doesn't try
                # if stable says something is visible, unstable doesn't try.
                stable_cache = set()
                unstable_insoluble = ProtectedSet(self.global_insoluble)

                # few notes.  for filter, ensure keywords is last, on the
                # offchance a non-metadata based restrict foregos having to
                # access the metadata.
                # note that the cache/insoluble are inversly paired;
                # stable cache is usable for unstable, but not vice versa.
                # unstable insoluble is usable for stable, but not vice versa
                vfilter = domain.generate_filter(target_repo.pkg_masks | masks,
                                                 unmasks)
                profile_filters[stable_key].append(
                    ProfileData(profile.path, stable_key, provides_repo,
                                packages.AndRestriction(vfilter, stable_r),
                                iuse_effective, use, pkg_use,
                                stable_immutable_flags, stable_enabled_flags,
                                stable_cache, ProtectedSet(unstable_insoluble),
                                profile.status, profile.deprecated))

                profile_filters[unstable_key].append(
                    ProfileData(profile.path, unstable_key, provides_repo,
                                packages.AndRestriction(vfilter, unstable_r),
                                iuse_effective, use, pkg_use,
                                immutable_flags, enabled_flags,
                                ProtectedSet(stable_cache), unstable_insoluble,
                                profile.status, profile.deprecated))

        # dump updated profile filters
        for k, v in cached_profiles.items():
            if v.pop('update', False):
                repo = v.pop('repo')
                cache_file = self.cache_file(repo)
                try:
                    os.makedirs(os.path.dirname(cache_file), exist_ok=True)
                    with open(cache_file, 'wb+') as f:
                        pickle.dump(
                            _ProfilesCache(
                                cached_profiles[repo.config.profiles_base]), f)
                except IOError as e:
                    msg = (f'failed dumping {repo.repo_id} profiles cache: '
                           f'{cache_file!r}: {e.strerror}')
                    raise UserException(msg)

        # group profiles per keyword by identical masked/forced USE so
        # evaluation can treat each group as a single unit
        profile_evaluate_dict = {}
        for key, profile_list in profile_filters.items():
            similar = profile_evaluate_dict[key] = []
            for profile in profile_list:
                for existing in similar:
                    if (existing[0].masked_use == profile.masked_use
                            and existing[0].forced_use == profile.forced_use):
                        existing.append(profile)
                        break
                else:
                    similar.append([profile])

        self.profile_evaluate_dict = profile_evaluate_dict
        self.profile_filters = profile_filters
コード例 #11
0
ファイル: git.py プロジェクト: sbraz/pkgcheck
    def update_cache(self, output_lock, force=False):
        """Update related cache and push updates to disk.

        Args:
            output_lock: lock serializing status output written to stderr
                across concurrent cache updates.
            force: if True, skip loading any existing cache and
                regenerate from scratch.

        Raises:
            UserException: if writing an updated cache to disk fails.
        """
        try:
            # running from scan subcommand
            repos = self.options.target_repo.trees
        except AttributeError:
            # running from cache subcommand
            repos = self.options.domain.ebuild_repos

        if self.options.cache['git']:
            for repo in repos:
                try:
                    commit = self.get_commit_hash(repo.location)
                except ValueError as e:
                    # presumably not a git repo -- skip it (confirm upstream)
                    continue

                # initialize cache file location
                cache_file = self.cache_file(repo)

                git_repo = None
                cache_repo = True
                if not force:
                    # try loading cached, historical repo data
                    try:
                        with open(cache_file, 'rb') as f:
                            git_repo = pickle.load(f)
                        if git_repo.version != self.cache.version:
                            logger.debug(
                                'forcing git repo cache regen due to outdated version'
                            )
                            os.remove(cache_file)
                            git_repo = None
                    except FileNotFoundError as e:
                        # no cache exists yet -- fall through to creation
                        pass
                    except (AttributeError, EOFError, ImportError,
                            IndexError) as e:
                        # corrupt/incompatible pickle -- regenerate
                        logger.debug('forcing git repo cache regen: %s', e)
                        os.remove(cache_file)
                        git_repo = None

                if (git_repo is not None and repo.location == getattr(
                        git_repo, 'location', None)):
                    if commit != git_repo.commit:
                        # cache exists but is stale -- update incrementally
                        with output_lock:
                            old, new = git_repo.commit[:13], commit[:13]
                            print(
                                f'updating {repo} git repo cache: {old} -> {new}',
                                file=sys.stderr,
                            )
                        git_repo.update(commit, debug=self.options.debug)
                    else:
                        # cache already matches HEAD -- no need to rewrite it
                        cache_repo = False
                else:
                    # no usable cache -- parse the git repo from scratch
                    with output_lock:
                        print(
                            f'creating {repo} git repo cache: {commit[:13]}',
                            file=sys.stderr,
                        )
                    git_repo = ParsedGitRepo(repo,
                                             commit,
                                             debug=self.options.debug)

                if git_repo:
                    self._cached_repos[repo.location] = git_repo
                    # push repo to disk if it was created or updated
                    if cache_repo:
                        try:
                            os.makedirs(os.path.dirname(cache_file),
                                        exist_ok=True)
                            with open(cache_file, 'wb+') as f:
                                pickle.dump(git_repo, f)
                        except IOError as e:
                            msg = f'failed dumping git pkg repo: {cache_file!r}: {e.strerror}'
                            raise UserException(msg)
コード例 #12
0
ファイル: addons.py プロジェクト: gyakovlev/pkgcheck
    def check_args(parser, namespace):
        """Validate profile-related arguments and expand profile selections.

        Resolves enabled/disabled profile sets (expanding status keywords
        such as 'stable' or 'dev'), configures the profiles cache
        settings, and populates ``namespace.arch_profiles`` with
        (path, profile) pairs keyed by arch. Invalid input is reported
        via ``parser.error``.

        Raises:
            UserException: if the profiles cache dir can't be created.
        """
        profiles_dir = getattr(namespace, "profiles_dir", None)
        if profiles_dir is not None:
            profiles_dir = abspath(profiles_dir)
            if not os.path.isdir(profiles_dir):
                parser.error(f"invalid profiles base: {profiles_dir!r}")

        selected_profiles = namespace.profiles
        if selected_profiles is None:
            # no explicit selection: empty disabled/enabled sets
            selected_profiles = ((), ())

        if profiles_dir:
            profiles_obj = repo_objs.BundledProfiles(profiles_dir)
        else:
            profiles_obj = namespace.target_repo.config.profiles

        def norm_name(s):
            """Expand status keywords and format paths."""
            if s in ('dev', 'exp', 'stable', 'deprecated'):
                for x in profiles_obj.paths(s):
                    yield x
            else:
                # strip empty path segments, e.g. 'a//b' -> 'a/b'
                yield '/'.join([_f for _f in s.split('/') if _f])

        disabled, enabled = selected_profiles
        disabled = set(disabled)
        enabled = set(enabled)

        # remove profiles that are both enabled and disabled
        toggled = enabled.intersection(disabled)
        enabled = enabled.difference(toggled)
        disabled = disabled.difference(toggled)
        # deprecated profiles are skipped unless explicitly requested
        ignore_deprecated = 'deprecated' not in enabled

        # expand status keywords, e.g. 'stable' -> set of stable profiles
        disabled = set(chain.from_iterable(map(norm_name, disabled)))
        enabled = set(chain.from_iterable(map(norm_name, enabled)))

        # If no profiles are enabled, then all that are defined in
        # profiles.desc are scanned except ones that are explicitly disabled.
        if not enabled:
            enabled = {
                profile
                for profile, status in chain.from_iterable(
                    profiles_obj.arch_profiles.values())
            }

        profile_paths = enabled.difference(disabled)

        # only default to using cache when run without target args within a repo
        if namespace.cache is None and namespace.default_target is None:
            namespace.cache = False

        # initialize cache dir
        cache_dir = pjoin(const.USER_CACHE_PATH, 'pkgcheck')
        namespace.cache_file = pjoin(cache_dir, 'profiles.pickle')
        if ((namespace.cache is None or namespace.cache)
                and not os.path.exists(cache_dir)):
            try:
                os.makedirs(cache_dir)
            except IOError as e:
                raise UserException(
                    f'failed creating profiles cache: {cache_dir!r}: {e.strerror}'
                )
        namespace.forced_cache = bool(namespace.cache)

        # We hold onto the profiles as we're going, due to the fact that
        # profile nodes are weakly cached; hold onto all for this loop, avoids
        # a lot of reparsing at the expense of slightly more memory usage
        # temporarily.
        cached_profiles = []

        arch_profiles = defaultdict(list)
        for profile_path in profile_paths:
            try:
                p = profiles_obj.create_profile(profile_path)
            except profiles.ProfileError as e:
                # Only throw errors if the profile was selected by the user, bad
                # repo profiles will be caught during repo metadata scans.
                if namespace.profiles is not None:
                    parser.error(f'invalid profile: {e.path!r}: {e.error}')
                continue
            if ignore_deprecated and p.deprecated:
                continue
            cached_profiles.append(p)
            if p.arch is None:
                parser.error(
                    f"profile {p.path!r} lacks arch settings, unable to use it"
                )
            arch_profiles[p.arch].append((profile_path, p))

        namespace.arch_profiles = arch_profiles