Пример #1
0
    def __init__(self, *args, stable_arches_addon=None):
        """Build keyword restrictions for every configured stable arch."""
        super().__init__(*args)
        stable = {entry.strip().lstrip("~") for entry in self.options.stable_arches}

        # For each arch, pair the stable-keyword restrict with its ~arch twin
        # (stable first, then unstable).
        self.arch_restricts = {
            arch: [
                packages.PackageRestriction(
                    "keywords", values.ContainmentMatch2((arch,))),
                packages.PackageRestriction(
                    "keywords", values.ContainmentMatch2((f"~{arch}",))),
            ]
            for arch in stable
        }
Пример #2
0
    def generate_intersects_from_pkg_node(self, pkg_node, tag=None):
        """Build a restriction matching packages affected by an XML pkg node.

        :param pkg_node: XML element with ``vulnerable``/``unaffected`` range
            children and an optional ``arch`` attribute
        :param tag: optional tag attached to the resulting restriction
        :return: a ``KeyedAndRestriction``, or None when no vulnerable
            ranges are present
        """
        arch = pkg_node.get("arch")
        if arch is not None:
            arch = tuple(str(arch.strip()).split())
            # Empty or wildcard arch lists mean "applies to all arches".
            if not arch or "*" in arch:
                arch = None

        vuln = list(pkg_node.findall("vulnerable"))
        if not vuln:
            return None
        vuln_list = [self.generate_restrict_from_range(x) for x in vuln]
        if len(vuln_list) > 1:
            vuln = packages.OrRestriction(*vuln_list)
        else:
            vuln = vuln_list[0]
        if arch is not None:
            # Constrain the vulnerability to the listed keywords (any match).
            vuln = packages.AndRestriction(
                vuln,
                packages.PackageRestriction(
                    "keywords", values.ContainmentMatch2(arch,
                                                         match_all=False)))
        invuln = pkg_node.findall("unaffected")
        if not invuln:
            # wrap it.
            return packages.KeyedAndRestriction(vuln, tag=tag)
        invuln_list = [
            self.generate_restrict_from_range(x, negate=True) for x in invuln
        ]
        # Drop unaffected ranges that are mere negations of vulnerable ones.
        invuln = [x for x in invuln_list if x not in vuln_list]
        if not invuln:
            # Fix: the original branched on `tag is None` here, but both
            # branches were identical; a single return is equivalent.
            return packages.KeyedAndRestriction(vuln, tag=tag)
        return packages.KeyedAndRestriction(vuln, tag=tag, *invuln)
Пример #3
0
    def _make_keywords_filter(self, default_keys, accept_keywords, incremental=False):
        """Generates a restrict that matches iff the keywords are allowed."""
        # Fast path: no per-package keyword overrides at all, so a plain
        # containment check against the default keys is sufficient.
        if not accept_keywords and not self.profile.keywords:
            return packages.PackageRestriction(
                "keywords", values.ContainmentMatch2(frozenset(default_keys)))

        if self.unstable_arch not in default_keys:
            # stable; thus empty entries == ~arch
            def f(r, v):
                # Empty keyword values are rewritten to the unstable arch.
                if not v:
                    return r, self.unstable_arch
                return r, v
            data = collapsed_restrict_to_data(
                ((packages.AlwaysTrue, default_keys),),
                (f(*i) for i in accept_keywords))
        else:
            if incremental:
                f = collapsed_restrict_to_data
            else:
                f = non_incremental_collapsed_restrict_to_data
            data = f(((packages.AlwaysTrue, default_keys),), accept_keywords)

        if incremental:
            # Incremental keyword application is not implemented yet; the
            # intended hook is kept commented out below for reference.
            raise NotImplementedError(self._incremental_apply_keywords_filter)
            #f = self._incremental_apply_keywords_filter
        else:
            f = self._apply_keywords_filter
        # delegate() defers the actual filtering to f with the collapsed data
        # pre-bound.
        return delegate(partial(f, data))
Пример #4
0
    def __init__(self, false_use, true_use):
        """Build a 'use' restriction: all true_use set and all false_use unset."""
        restricts = []
        if false_use:
            restricts.append(
                values.ContainmentMatch2(false_use, negate=True, match_all=True))
        if true_use:
            restricts.append(values.ContainmentMatch2(true_use, match_all=True))

        # Collapse to the simplest equivalent restriction.
        if not restricts:
            combined = values.AlwaysTrue
        elif len(restricts) == 1:
            combined = restricts[0]
        else:
            combined = values.AndRestriction(*restricts)

        super().__init__('use', combined)
Пример #5
0
class PerlCheck(Check):
    """Perl ebuild related checks."""

    # Only scan ebuilds that inherit perl-module.
    _restricted_source = (sources.RestrictionRepoSource,
                          (packages.PackageRestriction(
                              'inherited',
                              values.ContainmentMatch2('perl-module')), ))
    _source = (sources.EbuildFileRepoSource, (), (('source',
                                                   _restricted_source), ))
    known_results = frozenset([MismatchedPerlVersion])

    def __init__(self, *args):
        super().__init__(*args)
        # Fix: raw string literal — '\d'/'\s' in a non-raw string are invalid
        # escape sequences (DeprecationWarning, slated to become an error).
        self.dist_version_re = re.compile(
            r'DIST_VERSION=(?P<dist_version>\d+(\.\d+)*)\s*\n')
        # Initialize connection with perl script. This is done during
        # __init__() since only one running version of the script is shared
        # between however many scanning processes will be run. Also, it makes
        # it easier to disable this check if required perl deps are missing.
        self.perl = _PerlConnection(self.options)

    def feed(self, pkg):
        """Yield a result when DIST_VERSION disagrees with the ebuild version."""
        match = self.dist_version_re.search(''.join(pkg.lines))
        if match is not None:
            dist_version = match.group('dist_version')
            normalized = self.perl.normalize(dist_version)
            if normalized != pkg.version:
                yield MismatchedPerlVersion(dist_version, normalized, pkg=pkg)
Пример #6
0
    def __init__(self, options, stable_arches=None):
        """Derive target (~arch) and source arch sets from options."""
        super().__init__(options)
        stable = frozenset(a.strip().lstrip("~") for a in options.stable_arches)
        # Targets are the unstable (~arch) counterparts of the stable arches.
        self.target_arches = frozenset(
            f"~{a.strip().lstrip('~')}" for a in stable)

        src = options.source_arches
        if src is None:
            src = options.stable_arches
        self.source_arches = frozenset(a.lstrip("~") for a in src)
        self.source_filter = packages.PackageRestriction(
            "keywords", values.ContainmentMatch2(self.source_arches))
Пример #7
0
    def __init__(self, *args, stable_arches_addon=None):
        """Compute arch sets and the keyword-based source filter."""
        super().__init__(*args)
        opts = self.options
        self.all_arches = frozenset(opts.arches)
        self.stable_arches = frozenset(
            a.strip().lstrip("~") for a in opts.stable_arches)
        # Targets are the ~arch counterparts of the stable arches.
        self.target_arches = frozenset(
            "~%s" % a for a in self.stable_arches)

        src = opts.source_arches
        if src is None:
            src = opts.stable_arches
        self.source_arches = frozenset(a.lstrip("~") for a in src)
        self.source_filter = packages.PackageRestriction(
            "keywords", values.ContainmentMatch2(self.source_arches))
Пример #8
0
 def _parse(value):
     """Split a comma-separated string into tokens and wrap them in a
     keyword containment restriction on `attr`."""
     tokens = (token_kls(chunk.strip()) for chunk in value.split(','))
     return packages.PackageRestriction(
         attr, values.ContainmentMatch2(values_kls(tokens)))
Пример #9
0
def _mk_required_use_node(data):
    """Translate a REQUIRED_USE token ('flag' or '!flag') into a
    containment restriction, negated for the '!' form."""
    # Deliberately index rather than startswith: an empty token is a bug
    # upstream and should raise, matching the original behavior.
    if data[0] != '!':
        return values.ContainmentMatch2(data)
    return values.ContainmentMatch2(data[1:], negate=True)
Пример #10
0
    def __init__(self, *args, arches_addon=None):
        """Build per-arch profile filter data, optionally backed by a cache.

        For every desired arch this constructs a stable and an unstable
        keyword restriction, loads (or regenerates) each matching profile's
        mask/USE data, and records the results as ProfileData entries in
        ``self.profile_filters`` / ``self.profile_evaluate_dict``. Expensive
        profile computations are persisted per-repo via pickle when the
        'profiles' cache is enabled.
        """
        super().__init__(*args)
        target_repo = self.options.target_repo

        self.official_arches = target_repo.known_arches
        self.desired_arches = getattr(self.options, 'arches', None)
        # NOTE(review): the `or` looks suspicious — selected_arches being
        # None discards an explicit `arches` setting; confirm intended.
        if self.desired_arches is None or self.options.selected_arches is None:
            # copy it to be safe
            self.desired_arches = set(self.official_arches)

        self.global_insoluble = set()
        profile_filters = defaultdict(list)
        chunked_data_cache = {}
        cached_profiles = defaultdict(dict)

        if self.options.cache['profiles']:
            # Load any previously pickled per-repo profile cache; discard it
            # on version mismatch or unpickling failure.
            for repo in target_repo.trees:
                cache_file = self.cache_file(repo)
                # add profiles-base -> repo mapping to ease storage procedure
                cached_profiles[repo.config.profiles_base]['repo'] = repo
                try:
                    with open(cache_file, 'rb') as f:
                        cache = pickle.load(f)
                    if cache.version == self.cache.version:
                        cached_profiles[repo.config.profiles_base].update(
                            cache)
                    else:
                        logger.debug(
                            'forcing %s profile cache regen '
                            'due to outdated version', repo.repo_id)
                        os.remove(cache_file)
                except FileNotFoundError as e:
                    # no cache yet -- nothing to load
                    pass
                except (AttributeError, EOFError, ImportError,
                        IndexError) as e:
                    logger.debug('forcing %s profile cache regen: %s',
                                 repo.repo_id, e)
                    os.remove(cache_file)

        for k in sorted(self.desired_arches):
            # Skip ~arch entries whose stable arch isn't also desired.
            if k.lstrip("~") not in self.desired_arches:
                continue
            stable_key = k.lstrip("~")
            unstable_key = "~" + stable_key
            # Stable restrict matches only the stable keyword; the unstable
            # restrict accepts either stable or ~arch.
            stable_r = packages.PackageRestriction(
                "keywords", values.ContainmentMatch2((stable_key, )))
            unstable_r = packages.PackageRestriction(
                "keywords",
                values.ContainmentMatch2((
                    stable_key,
                    unstable_key,
                )))

            # Every other official arch's USE_EXPAND flag gets masked.
            default_masked_use = tuple(
                set(x for x in self.official_arches if x != stable_key))

            for profile_obj, profile in self.options.arch_profiles.get(k, []):
                files = self.profile_data.get(profile, None)
                try:
                    # Cache hit path: reuse precomputed profile data.
                    cached_profile = cached_profiles[profile.base][
                        profile.path]
                    if files != cached_profile['files']:
                        # force refresh of outdated cache entry
                        raise KeyError

                    masks = cached_profile['masks']
                    unmasks = cached_profile['unmasks']
                    immutable_flags = cached_profile['immutable_flags']
                    stable_immutable_flags = cached_profile[
                        'stable_immutable_flags']
                    enabled_flags = cached_profile['enabled_flags']
                    stable_enabled_flags = cached_profile[
                        'stable_enabled_flags']
                    pkg_use = cached_profile['pkg_use']
                    iuse_effective = cached_profile['iuse_effective']
                    use = cached_profile['use']
                    provides_repo = cached_profile['provides_repo']
                except KeyError:
                    # Cache miss (or stale entry): recompute from the
                    # profile object.
                    logger.debug('profile regen: %s', profile.path)
                    try:
                        masks = profile_obj.masks
                        unmasks = profile_obj.unmasks

                        immutable_flags = profile_obj.masked_use.clone(
                            unfreeze=True)
                        immutable_flags.add_bare_global((), default_masked_use)
                        immutable_flags.optimize(cache=chunked_data_cache)
                        immutable_flags.freeze()

                        stable_immutable_flags = profile_obj.stable_masked_use.clone(
                            unfreeze=True)
                        stable_immutable_flags.add_bare_global(
                            (), default_masked_use)
                        stable_immutable_flags.optimize(
                            cache=chunked_data_cache)
                        stable_immutable_flags.freeze()

                        enabled_flags = profile_obj.forced_use.clone(
                            unfreeze=True)
                        enabled_flags.add_bare_global((), (stable_key, ))
                        enabled_flags.optimize(cache=chunked_data_cache)
                        enabled_flags.freeze()

                        stable_enabled_flags = profile_obj.stable_forced_use.clone(
                            unfreeze=True)
                        stable_enabled_flags.add_bare_global((),
                                                             (stable_key, ))
                        stable_enabled_flags.optimize(cache=chunked_data_cache)
                        stable_enabled_flags.freeze()

                        pkg_use = profile_obj.pkg_use
                        iuse_effective = profile_obj.iuse_effective
                        provides_repo = profile_obj.provides_repo

                        # finalize enabled USE flags
                        use = set()
                        misc.incremental_expansion(use, profile_obj.use,
                                                   'while expanding USE')
                        use = frozenset(use)
                    except profiles_mod.ProfileError:
                        # unsupported EAPI or other issue, profile checks will catch this
                        continue

                    if self.options.cache['profiles']:
                        # Record the freshly computed data and flag the repo
                        # cache for dumping below.
                        cached_profiles[profile.base]['update'] = True
                        cached_profiles[profile.base][profile.path] = {
                            'files': files,
                            'masks': masks,
                            'unmasks': unmasks,
                            'immutable_flags': immutable_flags,
                            'stable_immutable_flags': stable_immutable_flags,
                            'enabled_flags': enabled_flags,
                            'stable_enabled_flags': stable_enabled_flags,
                            'pkg_use': pkg_use,
                            'iuse_effective': iuse_effective,
                            'use': use,
                            'provides_repo': provides_repo,
                        }

                # used to interlink stable/unstable lookups so that if
                # unstable says it's not visible, stable doesn't try
                # if stable says something is visible, unstable doesn't try.
                stable_cache = set()
                unstable_insoluble = ProtectedSet(self.global_insoluble)

                # few notes.  for filter, ensure keywords is last, on the
                # offchance a non-metadata based restrict foregos having to
                # access the metadata.
                # note that the cache/insoluble are inversly paired;
                # stable cache is usable for unstable, but not vice versa.
                # unstable insoluble is usable for stable, but not vice versa
                vfilter = domain.generate_filter(target_repo.pkg_masks | masks,
                                                 unmasks)
                profile_filters[stable_key].append(
                    ProfileData(profile.path, stable_key, provides_repo,
                                packages.AndRestriction(vfilter, stable_r),
                                iuse_effective, use, pkg_use,
                                stable_immutable_flags, stable_enabled_flags,
                                stable_cache, ProtectedSet(unstable_insoluble),
                                profile.status, profile.deprecated))

                profile_filters[unstable_key].append(
                    ProfileData(profile.path, unstable_key, provides_repo,
                                packages.AndRestriction(vfilter, unstable_r),
                                iuse_effective, use, pkg_use,
                                immutable_flags, enabled_flags,
                                ProtectedSet(stable_cache), unstable_insoluble,
                                profile.status, profile.deprecated))

        # dump updated profile filters
        for k, v in cached_profiles.items():
            if v.pop('update', False):
                repo = v.pop('repo')
                cache_file = self.cache_file(repo)
                try:
                    os.makedirs(os.path.dirname(cache_file), exist_ok=True)
                    with open(cache_file, 'wb+') as f:
                        pickle.dump(
                            _ProfilesCache(
                                cached_profiles[repo.config.profiles_base]), f)
                except IOError as e:
                    msg = (f'failed dumping {repo.repo_id} profiles cache: '
                           f'{cache_file!r}: {e.strerror}')
                    raise UserException(msg)

        # Group profiles that share identical masked/forced USE so later
        # evaluation can process each group once.
        profile_evaluate_dict = {}
        for key, profile_list in profile_filters.items():
            similar = profile_evaluate_dict[key] = []
            for profile in profile_list:
                for existing in similar:
                    if (existing[0].masked_use == profile.masked_use
                            and existing[0].forced_use == profile.forced_use):
                        existing.append(profile)
                        break
                else:
                    similar.append([profile])

        self.profile_evaluate_dict = profile_evaluate_dict
        self.profile_filters = profile_filters
Пример #11
0
    def _fast_identify_candidates(self, restrict, sorter):
        """Yield (category, package) candidates that may match `restrict`.

        Exact, non-negated string matches on category/package are pulled out
        into fast-path sets so the common "=cat/pkg" lookup avoids scanning
        the whole repository.

        :param restrict: package restriction to pre-filter against
        :param sorter: `iter` for unordered results, or a sorting callable
        :return: iterable of (category, package) pairs
        """
        pkg_restrict = set()
        cat_restrict = set()
        cat_exact = set()
        pkg_exact = set()

        for x in collect_package_restrictions(restrict, (
                "category",
                "package",
        )):
            if x.attr == "category":
                cat_restrict.add(x.restriction)
            elif x.attr == "package":
                pkg_restrict.add(x.restriction)

        # Move exact, non-negated string matches into the *_exact sets.
        for exact, restricts in ((pkg_exact, pkg_restrict),
                                 (cat_exact, cat_restrict)):
            exact_matches = [
                x for x in restricts
                if isinstance(x, values.StrExactMatch) and not x.negate
            ]
            restricts.difference_update(exact_matches)
            exact.update(x.exact for x in exact_matches)

        # Negated restricts can't use the exact fast paths.
        if restrict.negate:
            cat_exact = pkg_exact = ()

        if cat_exact:
            if not cat_restrict and len(cat_exact) == 1:
                # Cannot use pop here, cat_exact is reused below.
                c = next(iter(cat_exact))
                if not pkg_restrict and len(pkg_exact) == 1:
                    # Single exact cat/pkg: O(1) membership check.
                    cp = (c, pkg_exact.pop())
                    if cp in self.versions:
                        return [cp]
                    return []
                cats_iter = [c]
            else:
                cat_restrict.add(values.ContainmentMatch2(
                    frozenset(cat_exact)))
                cats_iter = sorter(self._cat_filter(cat_restrict))
        elif cat_restrict:
            cats_iter = self._cat_filter(cat_restrict, negate=restrict.negate)
        else:
            cats_iter = sorter(self.categories)

        if pkg_exact:
            if not pkg_restrict:
                if sorter is iter:
                    pkg_exact = tuple(pkg_exact)
                else:
                    pkg_exact = sorter(pkg_exact)
                return ((c, p) for c in cats_iter for p in pkg_exact)
            else:
                pkg_restrict.add(values.ContainmentMatch2(
                    frozenset(pkg_exact)))

        if pkg_restrict:
            return self._package_filter(cats_iter,
                                        pkg_restrict,
                                        negate=restrict.negate)
        # Fix: the original `elif not cat_restrict:` branch fell through to a
        # final return identical to its own else-branch; collapsed here.
        if not cat_restrict and sorter is iter and not cat_exact:
            return self.versions
        return ((c, p) for c in cats_iter
                for p in sorter(self.packages.get(c, ())))
Пример #12
0
    def parse(cls, dep_str, element_class,
              operators=None, attr=None,
              element_func=None, transitive_use_atoms=False,
              allow_src_uri_file_renames=False):
        """
        :param dep_str: string abiding by DepSet syntax
        :param operators: mapping of node -> callable for special operators
            in DepSet syntax
        :param element_func: if None, element_class is used for generating
            elements, else it's used to generate elements.
            Mainly useful for when you need to curry a few args for instance
            generation, since element_class _must_ be a class
        :param element_class: class of generated elements
        :param attr: name of the DepSet attribute being parsed
        :param transitive_use_atoms: upgrade element_class to a transitive
            USE atom form when conditionals are detected
        :param allow_src_uri_file_renames: accept SRC_URI-style
            ``uri -> filename`` rename pairs
        :raises DepsetParseError: on malformed dep_str
        """
        if element_func is None:
            element_func = element_class

        # Fast path: hand off to the native/extension parser when available
        # (it doesn't understand SRC_URI renames, hence the guard).
        if cls.parse_depset is not None and not (allow_src_uri_file_renames):
            restrictions = None
            if operators is None:
                has_conditionals, restrictions = cls.parse_depset(
                    dep_str, element_func,
                    boolean.AndRestriction, boolean.OrRestriction)
            else:
                # Only usable if the custom operators are the standard pair.
                for x in operators:
                    if x not in ("", "||"):
                        break
                else:
                    has_conditionals, restrictions = cls.parse_depset(
                        dep_str, element_func,
                        operators.get(""), operators.get("||"))

            if restrictions is not None:
                if not has_conditionals and transitive_use_atoms:
                    has_conditionals = cls._has_transitive_use_atoms(restrictions)
                return cls(restrictions, element_class, has_conditionals)

        # Slow path: hand-rolled recursive-descent-via-stack parser.
        # `depsets` is a stack of frames, one per open parenthesis;
        # `raw_conditionals` records the token that opened each frame.
        restrictions = []
        if operators is None:
            operators = {"||": boolean.OrRestriction, "": boolean.AndRestriction}

        raw_conditionals = []
        depsets = [restrictions]

        node_conds = False
        words = iter(dep_str.split())
        # we specifically do it this way since expandable_chain has a bit of nasty
        # overhead to the tune of 33% slower
        if allow_src_uri_file_renames:
            words = expandable_chain(words)
        k = None
        try:
            for k in words:
                if ")" == k:
                    # no elements == error. if closures don't map up,
                    # indexerror would be chucked from trying to pop
                    # the frame so that is addressed.
                    if not depsets[-1] or not raw_conditionals:
                        raise DepsetParseError(dep_str, attr=attr)
                    elif raw_conditionals[-1] in operators:
                        # Operator frame: collapse into the parent frame,
                        # unwrapping single-element groups.
                        if len(depsets[-1]) == 1:
                            depsets[-2].append(depsets[-1][0])
                        else:
                            depsets[-2].append(
                                operators[raw_conditionals[-1]](*depsets[-1]))
                    else:
                        # USE-conditional frame: 'flag?' / '!flag?' -- strip
                        # the trailing '?' (and leading '!' when negated).
                        node_conds = True
                        c = raw_conditionals[-1]
                        if c[0] == "!":
                            c = values.ContainmentMatch2(c[1:-1], negate=True)
                        else:
                            c = values.ContainmentMatch2(c[:-1])

                        depsets[-2].append(
                            packages.Conditional("use", c, tuple(depsets[-1])))

                    raw_conditionals.pop()
                    depsets.pop()

                elif "(" == k:
                    # Bare group: open an implicit AND ('' operator) frame.
                    k = ''
                    # push another frame on
                    depsets.append([])
                    raw_conditionals.append(k)

                elif k[-1] == '?' or k in operators:
                    # use conditional or custom op.
                    # no tokens left == bad dep_str.
                    k2 = next(words)

                    if k2 != "(":
                        raise DepsetParseError(dep_str, k2, attr=attr)

                    # push another frame on
                    depsets.append([])
                    raw_conditionals.append(k)

                elif "|" in k:
                    raise DepsetParseError(dep_str, k, attr=attr)
                elif allow_src_uri_file_renames:
                    # Peek one token ahead for a '->' rename marker.
                    try:
                        k2 = next(words)
                    except StopIteration:
                        depsets[-1].append(element_func(k))
                    else:
                        if k2 != '->':
                            depsets[-1].append(element_func(k))
                            # Not a rename; push the lookahead token back.
                            words.appendleft((k2,))
                        else:
                            k3 = next(words)
                            # file rename
                            depsets[-1].append(element_func(k, k3))
                else:
                    # node/element
                    depsets[-1].append(element_func(k))

        except IGNORED_EXCEPTIONS:
            raise
        except DepsetParseError:
            # [][-1] for a frame access, which means it was a parse error.
            raise
        except StopIteration:
            # Ran out of tokens mid-construct; k is the last token seen.
            if k is None:
                raise
            raise DepsetParseError(dep_str, k, attr=attr)
        except Exception as e:
            raise DepsetParseError(dep_str, e, attr=attr) from e

        # check if any closures required
        if len(depsets) != 1:
            raise DepsetParseError(dep_str, attr=attr)

        if transitive_use_atoms and not node_conds:
            # localize to this scope for speed.
            element_class = transitive_use_atom
            # we can't rely on iter(self) here since it doesn't
            # descend through boolean restricts.
            node_conds = cls._has_transitive_use_atoms(restrictions)

        return cls(tuple(restrictions), element_class, node_conds)