Example #1
    def __init__(self, options, arches, *args):
        super(UnstableOnlyReport, self).__init__(options)
        arches = set(x.strip().lstrip("~") for x in options.arches)

        # stable, then unstable, then file
        self.arch_restricts = {}
        for arch in arches:
            self.arch_restricts[arch] = [
                packages.PackageRestriction("keywords",
                                            values.ContainmentMatch(arch)),
                packages.PackageRestriction(
                    "keywords", values.ContainmentMatch("~%s" % arch))
            ]
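
As a rough illustration of what these per-arch restrictions do, here is a minimal, hedged sketch: PackageRestriction looks up the named attribute on whatever package object it is given, so a simple stand-in object with a `keywords` attribute is enough to exercise the match (the stand-in class and the expected output are assumptions, not part of the example above):

    from collections import namedtuple
    from pkgcore.restrictions import packages, values

    # stand-in package object; only the "keywords" attribute is consulted
    FakePkg = namedtuple("FakePkg", "keywords")

    stable = packages.PackageRestriction(
        "keywords", values.ContainmentMatch("amd64"))
    unstable = packages.PackageRestriction(
        "keywords", values.ContainmentMatch("~amd64"))

    pkg = FakePkg(keywords=("~amd64",))
    print(stable.match(pkg))    # expected: False (no stable keyword present)
    print(unstable.match(pkg))  # expected: True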
Example #2
    def generate_intersects_from_pkg_node(self, pkg_node, tag=None):
        arch = pkg_node.get("arch")
        if arch is not None:
            arch = str(arch.strip()).split()
            if not arch or "*" in arch:
                arch = None

        vuln = list(pkg_node.findall("vulnerable"))
        if not vuln:
            return None
        elif len(vuln) > 1:
            vuln_list = [self.generate_restrict_from_range(x) for x in vuln]
            vuln = packages.OrRestriction(*vuln_list)
        else:
            vuln_list = [self.generate_restrict_from_range(vuln[0])]
            vuln = vuln_list[0]
        if arch is not None:
            vuln = packages.AndRestriction(
                vuln,
                packages.PackageRestriction(
                    "keywords", values.ContainmentMatch(all=False, *arch)))
        invuln = pkg_node.findall("unaffected")
        if not invuln:
            # wrap it.
            return packages.KeyedAndRestriction(vuln, tag=tag)
        invuln_list = [
            self.generate_restrict_from_range(x, negate=True) for x in invuln
        ]
        invuln = [x for x in invuln_list if x not in vuln_list]
        if not invuln:
            return packages.KeyedAndRestriction(vuln, tag=tag)
        return packages.KeyedAndRestriction(vuln, tag=tag, *invuln)
Example #3
    def __init__(self, false_use, true_use):
        v = []
        if false_use:
            v.append(values.ContainmentMatch(negate=True, all=True,
                                             *false_use))
        if true_use:
            v.append(values.ContainmentMatch(all=True, *true_use))

        l = len(v)
        if l == 2:
            v = values.AndRestriction(*v)
        elif l == 1:
            v = v[0]
        else:
            v = values.AlwaysTrue

        packages.PackageRestriction.__init__(self, 'use', v)
Example #4
    def __init__(self, options, arches):
        super(ImlateReport, self).__init__(options)
        arches = frozenset(arch.strip().lstrip("~") for arch in options.arches)
        self.target_arches = frozenset("~%s" % arch.strip().lstrip("~")
                                       for arch in arches)

        source_arches = options.source_arches
        if source_arches is None:
            source_arches = options.arches
        self.source_arches = frozenset(
            arch.lstrip("~") for arch in source_arches)
        self.source_filter = packages.PackageRestriction(
            "keywords", values.ContainmentMatch(*self.source_arches))
Example #5
    def make_keywords_filter(self,
                             arch,
                             default_keys,
                             accept_keywords,
                             profile_keywords,
                             incremental=False):
        """Generates a restrict that matches iff the keywords are allowed."""
        if not accept_keywords and not profile_keywords:
            return packages.PackageRestriction(
                "keywords", values.ContainmentMatch(*default_keys))

        if "~" + arch.lstrip("~") not in default_keys:
            # stable; thus empty entries == ~arch
            unstable = "~" + arch

            def f(r, v):
                if not v:
                    return r, unstable
                return r, v

            data = collapsed_restrict_to_data(
                ((packages.AlwaysTrue, default_keys), ),
                (f(*i) for i in accept_keywords))
        else:
            if incremental:
                f = collapsed_restrict_to_data
            else:
                f = non_incremental_collapsed_restrict_to_data
            data = f(((packages.AlwaysTrue, default_keys), ), accept_keywords)

        if incremental:
            raise NotImplementedError(self.incremental_apply_keywords_filter)
            #f = self.incremental_apply_keywords_filter
        else:
            f = self.apply_keywords_filter
        return delegate(partial(f, data, profile_keywords))
Example #6
def _mk_required_use_node(data):
    if data[0] == '!':
        return values.ContainmentMatch(data[1:], negate=True)
    return values.ContainmentMatch(data)
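
The helper above maps a REQUIRED_USE token onto a value restriction; a leading "!" flips the match. A small hedged sketch of the resulting behaviour, assuming pkgcore's ContainmentMatch is handed an iterable of enabled USE flags:

    from pkgcore.restrictions import values

    plain = values.ContainmentMatch("doc")                 # matches if "doc" is enabled
    negated = values.ContainmentMatch("doc", negate=True)  # matches if "doc" is NOT enabled

    flags = ["doc", "gtk"]
    print(plain.match(flags))    # expected: True
    print(negated.match(flags))  # expected: False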
Example #7
    def _fast_identify_candidates(self, restrict, sorter):
        pkg_restrict = set()
        cat_restrict = set()
        cat_exact = set()
        pkg_exact = set()

        for x in collect_package_restrictions(restrict,
                                              ("category", "package",)):
            if x.attr == "category":
                cat_restrict.add(x.restriction)
            elif x.attr == "package":
                pkg_restrict.add(x.restriction)

        for e, s in ((pkg_exact, pkg_restrict), (cat_exact, cat_restrict)):
            l = [x for x in s
                 if isinstance(x, values.StrExactMatch) and not x.negate]
            s.difference_update(l)
            e.update(x.exact for x in l)
        del l

        if restrict.negate:
            cat_exact = pkg_exact = ()

        if cat_exact:
            if not cat_restrict and len(cat_exact) == 1:
                # Cannot use pop here, cat_exact is reused below.
                c = iter(cat_exact).next()
                if not pkg_restrict and len(pkg_exact) == 1:
                    cp = (c, pkg_exact.pop())
                    if cp in self.versions:
                        return [cp]
                    return []
                cats_iter = [c]
            else:
                cat_restrict.add(values.ContainmentMatch(*cat_exact))
                cats_iter = sorter(self._cat_filter(cat_restrict))
        elif cat_restrict:
            cats_iter = self._cat_filter(
                cat_restrict, negate=restrict.negate)
        else:
            cats_iter = sorter(self.categories)

        if pkg_exact:
            if not pkg_restrict:
                if sorter is iter:
                    pkg_exact = tuple(pkg_exact)
                else:
                    pkg_exact = sorter(pkg_exact)
                return (
                    (c, p)
                    for c in cats_iter for p in pkg_exact)
            else:
                pkg_restrict.add(values.ContainmentMatch(*pkg_exact))

        if pkg_restrict:
            return self._package_filter(
                cats_iter, pkg_restrict, negate=restrict.negate)
        elif not cat_restrict:
            if sorter is iter and not cat_exact:
                return self.versions
            else:
                return (
                    (c, p) for c in
                    cats_iter for p in sorter(self.packages.get(c, ())))
        return (
            (c, p)
            for c in cats_iter for p in sorter(self.packages.get(c, ())))
Example #8
    def parse(cls,
              dep_str,
              element_class,
              operators=None,
              element_func=None,
              transitive_use_atoms=False,
              allow_src_uri_file_renames=False):
        """
        :param dep_str: string abiding by DepSet syntax
        :param operators: mapping of node -> callable for special operators
            in DepSet syntax
        :param element_func: callable used to generate elements; if None,
            element_class is used instead.
            Mainly useful when you need to curry a few args for instance
            generation, since element_class _must_ be a class.
        :param element_class: class of generated elements
        """

        if not isinstance(element_class, type):
            # yes, this blocks non-new-style classes.  tough cookies.
            raise ValueError("element_class must be a new style class")

        if element_func is None:
            element_func = element_class

        if cls.parse_depset is not None and not (allow_src_uri_file_renames):
            restrictions = None
            if operators is None:
                has_conditionals, restrictions = cls.parse_depset(
                    dep_str, element_func, boolean.AndRestriction,
                    boolean.OrRestriction)
            else:
                for x in operators:
                    if x not in ("", "||"):
                        break
                else:
                    has_conditionals, restrictions = cls.parse_depset(
                        dep_str, element_func, operators.get(""),
                        operators.get("||"))

            if restrictions is not None:
                if not has_conditionals and transitive_use_atoms:
                    has_conditionals = cls._has_transitive_use_atoms(
                        restrictions)
                return cls(restrictions, element_class, has_conditionals)

        restrictions = []
        if operators is None:
            operators = {
                "||": boolean.OrRestriction,
                "": boolean.AndRestriction
            }

        raw_conditionals = []
        depsets = [restrictions]

        node_conds = False
        words = iter(dep_str.split())
        # we specifically do it this way since expandable_chain carries
        # noticeable overhead (roughly 33% slower)
        if allow_src_uri_file_renames:
            words = expandable_chain(words)
        k = None
        try:
            for k in words:
                if ")" == k:
                    # no elements == error. if closures don't match up, an
                    # IndexError would be raised when popping the frame, so
                    # that case is addressed here.
                    if not depsets[-1] or not raw_conditionals:
                        raise ParseError(dep_str)
                    elif raw_conditionals[-1] in operators:
                        if len(depsets[-1]) == 1:
                            depsets[-2].append(depsets[-1][0])
                        elif raw_conditionals[-1] == '' and (
                                len(raw_conditionals) == 1 or
                                raw_conditionals[-2] == ''):
                            # if the frame is an and and the parent is an and, collapse it in.
                            depsets[-2].extend(depsets[-1])
                        else:
                            depsets[-2].append(
                                operators[raw_conditionals[-1]](*depsets[-1]))
                    else:
                        node_conds = True
                        c = raw_conditionals[-1]
                        if c[0] == "!":
                            c = values.ContainmentMatch(c[1:-1], negate=True)
                        else:
                            c = values.ContainmentMatch(c[:-1])

                        depsets[-2].append(
                            packages.Conditional("use", c, tuple(depsets[-1])))

                    raw_conditionals.pop()
                    depsets.pop()

                elif "(" == k:
                    k = ''
                    # push another frame on
                    depsets.append([])
                    raw_conditionals.append(k)

                elif k[-1] == '?' or k in operators:
                    # use conditional or custom op.
                    # no tokens left == bad dep_str.
                    k2 = words.next()

                    if k2 != "(":
                        raise ParseError(dep_str, k2)

                    # push another frame on
                    depsets.append([])
                    raw_conditionals.append(k)

                elif "|" in k:
                    raise ParseError(dep_str, k)
                elif allow_src_uri_file_renames:
                    try:
                        k2 = words.next()
                    except StopIteration:
                        depsets[-1].append(element_func(k))
                    else:
                        if k2 != '->':
                            depsets[-1].append(element_func(k))
                            words.appendleft((k2, ))
                        else:
                            k3 = words.next()
                            # file rename.
                            depsets[-1].append(element_func(k, k3))
                else:
                    # node/element.
                    depsets[-1].append(element_func(k))

        except IGNORED_EXCEPTIONS:
            raise
        except IndexError:
            # [][-1] for a frame access, which means it was a parse error.
            raise
        except StopIteration:
            if k is None:
                raise
            raise ParseError(dep_str, k)
        except Exception as e:
            raise_from(ParseError(dep_str, e))

        # check if any closures required
        if len(depsets) != 1:
            raise ParseError(dep_str)

        if transitive_use_atoms and not node_conds:
            # localize to this scope for speed.
            element_class = transitive_use_atom
            # we can't rely on iter(self) here since it doesn't
            # descend through boolean restricts.
            node_conds = cls._has_transitive_use_atoms(restrictions)

        return cls(tuple(restrictions), element_class, node_conds)
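
For context, this classmethod is typically invoked with a raw dependency string and an element class. The sketch below is a hedged, minimal call, assuming the method lives on pkgcore's DepSet (pkgcore.ebuild.conditionals) and that pkgcore's atom class is an acceptable element_class; both import paths are assumptions rather than something stated in the snippet above:

    from pkgcore.ebuild.atom import atom
    from pkgcore.ebuild.conditionals import DepSet

    # a USE-conditional group plus a plain atom
    deps = DepSet.parse("x11? ( x11-libs/libX11 ) dev-libs/glib", atom)
    for restrict in deps:
        print(restrict)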