Example #1
File: tester.py Project: Debian/britney2
    def _check_loop(self, universe, suite_contents, stats, musts, never,
                    cbroken, choices, check, len=len,
                    frozenset=frozenset):
        """Finds all guaranteed dependencies via "check".

        If it returns False, t is not installable.  If it returns True
        then "check" is exhausted.  If "choices" are empty and this
        returns True, then t is installable.
        """
        # Local variables for faster access...
        not_satisfied = partial(filter, musts.isdisjoint)

        # While we have guaranteed dependencies (in check), examine all
        # of them.
        for cur in iter_except(check.pop, IndexError):
            relations = universe.relations_of(cur)

            if relations.negative_dependencies:
                # Conflicts?
                if cur in never:
                    # cur adds a (reverse) conflict, so check if cur
                    # is in never.
                    #
                    # - there is a window where two conflicting
                    #   packages can be in check.  Example "A" depends
                    #   on "B" and "C".  If "B" conflicts with "C",
                    #   then both "B" and "C" could end in "check".
                    return False
                # We must install cur for the package to be installable,
                # so "obviously" we can never choose any of its conflicts
                never.update(relations.negative_dependencies & suite_contents)

            # depgroup can be satisfied by picking something that is
            # already in musts - let's pick that (again).  :)
            for depgroup in not_satisfied(relations.dependencies):

                # Of all the packages listed in the relation remove those that
                # are either:
                #  - not in the suite
                #  - known to be broken (by cache)
                #  - in never
                candidates = (depgroup & suite_contents) - never

                if not candidates:
                    # We got no candidates to satisfy it - this
                    # package cannot be installed with the current
                    # (version of the) suite
                    if cur not in cbroken and depgroup.isdisjoint(never):
                        # cur's dependency cannot be satisfied even if never was empty.
                        # This means that cur itself is broken (as well).
                        cbroken.add(cur)
                        suite_contents.remove(cur)
                    return False
                if len(candidates) == 1:
                    # only one possible solution to this choice and we
                    # haven't seen it before
                    check.extend(candidates)
                    musts.update(candidates)
                else:
                    possible_eqv = set(x for x in candidates if x in universe.equivalent_packages)
                    if len(possible_eqv) > 1:
                        # Exploit equivalency to reduce the number of
                        # candidates if possible.  Basically, this
                        # code maps "similar" candidates into a single
                        # candidate that will give an identical result
                        # to any other candidate it eliminates.
                        #
                        # See InstallabilityTesterBuilder's
                        # _build_eqv_packages_table method for more
                        # information on how this works.
                        new_cand = set(x for x in candidates if x not in possible_eqv)
                        stats.eqv_table_times_used += 1

                        for chosen in iter_except(possible_eqv.pop, KeyError):
                            new_cand.add(chosen)
                            possible_eqv -= universe.packages_equivalent_to(chosen)
                        stats.eqv_table_total_number_of_alternatives_eliminated += len(candidates) - len(new_cand)
                        if len(new_cand) == 1:
                            check.extend(new_cand)
                            musts.update(new_cand)
                            stats.eqv_table_reduced_to_one += 1
                            continue
                        elif len(candidates) == len(new_cand):
                            stats.eqv_table_reduced_by_zero += 1

                        candidates = frozenset(new_cand)
                    else:
                        # Candidates have to be a frozenset to be added to choices
                        candidates = frozenset(candidates)
                    # defer this choice till later
                    choices.add(candidates)
        return True
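A note on the driving pattern above: "for cur in iter_except(check.pop, IndexError)" drains the "check" worklist until pop() raises. The sketch below shows the standard itertools-recipe implementation of iter_except (more_itertools ships an equivalent) together with a toy drain loop; the package names are made up for illustration.

# Minimal sketch of the iter_except recipe used throughout these examples
# (equivalent to the recipe in the itertools docs / more_itertools.iter_except).
def iter_except(func, exception):
    """Call func() repeatedly, yielding each result, until `exception` is raised."""
    try:
        while True:
            yield func()
    except exception:
        pass

# Toy worklist drain, mirroring "for cur in iter_except(check.pop, IndexError)":
check = ['pkg-a', 'pkg-b']
for cur in iter_except(check.pop, IndexError):
    print("processing", cur)
    if cur == 'pkg-b':
        check.append('pkg-c')  # work discovered mid-loop is picked up too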
Example #2
    def _check_loop(self,
                    universe,
                    suite_contents,
                    stats,
                    musts,
                    never,
                    cbroken,
                    choices,
                    check,
                    len=len,
                    frozenset=frozenset):
        """Finds all guaranteed dependencies via "check".

        If it returns False, t is not installable.  If it returns True
        then "check" is exhausted.  If "choices" are empty and this
        returns True, then t is installable.
        """
        # Local variables for faster access...
        not_satisfied = partial(filter, musts.isdisjoint)

        # While we have guaranteed dependencies (in check), examine all
        # of them.
        for cur in iter_except(check.pop, IndexError):
            relations = universe.relations_of(cur)

            if relations.negative_dependencies:
                # Conflicts?
                if cur in never:
                    # cur adds a (reverse) conflict, so check if cur
                    # is in never.
                    #
                    # - there is a window where two conflicting
                    #   packages can be in check.  Example "A" depends
                    #   on "B" and "C".  If "B" conflicts with "C",
                    #   then both "B" and "C" could end in "check".
                    return False
                # We must install cur for the package to be installable,
                # so "obviously" we can never choose any of its conflicts
                never.update(relations.negative_dependencies & suite_contents)

            # depgroup can be satisfied by picking something that is
            # already in musts - let's pick that (again).  :)
            for depgroup in not_satisfied(relations.dependencies):

                # Of all the packages listed in the relation remove those that
                # are either:
                #  - not in the suite
                #  - known to be broken (by cache)
                #  - in never
                candidates = (depgroup & suite_contents) - never

                if not candidates:
                    # We got no candidates to satisfy it - this
                    # package cannot be installed with the current
                    # (version of the) suite
                    if cur not in cbroken and depgroup.isdisjoint(never):
                        # cur's dependency cannot be satisfied even if never was empty.
                        # This means that cur itself is broken (as well).
                        cbroken.add(cur)
                        suite_contents.remove(cur)
                    return False
                if len(candidates) == 1:
                    # only one possible solution to this choice and we
                    # haven't seen it before
                    check.extend(candidates)
                    musts.update(candidates)
                else:
                    possible_eqv = set(x for x in candidates
                                       if x in universe.equivalent_packages)
                    if len(possible_eqv) > 1:
                        # Exploit equivalency to reduce the number of
                        # candidates if possible.  Basically, this
                        # code maps "similar" candidates into a single
                        # candidate that will give an identical result
                        # to any other candidate it eliminates.
                        #
                        # See InstallabilityTesterBuilder's
                        # _build_eqv_packages_table method for more
                        # information on how this works.
                        new_cand = set(x for x in candidates
                                       if x not in possible_eqv)
                        stats.eqv_table_times_used += 1

                        for chosen in iter_except(possible_eqv.pop, KeyError):
                            new_cand.add(chosen)
                            possible_eqv -= universe.packages_equivalent_to(
                                chosen)
                        stats.eqv_table_total_number_of_alternatives_eliminated += len(
                            candidates) - len(new_cand)
                        if len(new_cand) == 1:
                            check.extend(new_cand)
                            musts.update(new_cand)
                            stats.eqv_table_reduced_to_one += 1
                            continue
                        elif len(candidates) == len(new_cand):
                            stats.eqv_table_reduced_by_zero += 1

                        candidates = frozenset(new_cand)
                    else:
                        # Candidates have to be a frozenset to be added to choices
                        candidates = frozenset(candidates)
                    # defer this choice till later
                    choices.add(candidates)
        return True
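The "not_satisfied = partial(filter, musts.isdisjoint)" trick above yields only the dependency groups that share no member with "musts", i.e. the clauses that are not already satisfied by a package we are committed to installing. A tiny self-contained illustration (the package names are invented):

from functools import partial

musts = {'libc6'}                     # packages we are already committed to
dependencies = [
    frozenset({'libc6'}),             # overlaps with musts -> already satisfied, skipped
    frozenset({'python3', 'pypy3'}),  # disjoint from musts -> still needs a choice
]

not_satisfied = partial(filter, musts.isdisjoint)
print([sorted(group) for group in not_satisfied(dependencies)])
# -> [['pypy3', 'python3']]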
Example #3
    def solve_groups(self, groups):
        sat_in_testing = self._testing.isdisjoint
        universe = self._universe
        revuniverse = self._revuniverse
        result = []
        emitted = set()
        queue = deque()
        order = {}
        ptable = {}
        key2item = {}
        going_out = set()
        going_in = set()
        debug_solver = 0

        try:  # pragma: no cover
            debug_solver = int(os.environ.get('BRITNEY_DEBUG', '0'))
        except:  # pragma: no cover
            pass

        # Build the tables
        for (item, adds, rms) in groups:
            key = str(item)
            key2item[key] = item
            order[key] = {'before': set(), 'after': set()}
            going_in.update(adds)
            going_out.update(rms)
            for a in adds:
                ptable[a] = key
            for r in rms:
                ptable[r] = key

        if debug_solver > 1:  # pragma: no cover
            self._dump_groups(groups)

        # This large loop will add ordering constraints on each "item"
        # that migrates based on various rules.
        for (item, adds, rms) in groups:
            key = str(item)
            oldcons = set()
            newcons = set()
            for r in rms:
                oldcons.update(universe[r][1])
            for a in adds:
                newcons.update(universe[a][1])
            current = newcons & oldcons
            oldcons -= current
            newcons -= current
            if oldcons:
                # Some of the old binaries have "conflicts" that will
                # be removed.
                for o in ifilter_only(ptable, oldcons):
                    # "key" removes a conflict with one of
                    # "other"'s binaries, so it is probably a good
                    # idea to migrate "key" before "other"
                    other = ptable[o]
                    if other == key:
                        # "Self-conflicts" => ignore
                        continue
                    if debug_solver and other not in order[key]['before']:  # pragma: no cover
                        print("N: Conflict induced order: %s before %s" % (key, other))
                    order[key]['before'].add(other)
                    order[other]['after'].add(key)

            for r in ifilter_only(revuniverse, rms):
                # The binaries have reverse dependencies in testing;
                # check if we can/should migrate them first.
                for rdep in revuniverse[r][0]:
                    for depgroup in universe[rdep][0]:
                        rigid = depgroup - going_out
                        if not sat_in_testing(rigid):
                            # (partly) satisfied by testing, assume it is okay
                            continue
                        if rdep in ptable:
                            other = ptable[rdep]
                            if other == key:
                                # "Self-dependency" => ignore
                                continue
                            if debug_solver and other not in order[key]['after']:  # pragma: no cover
                                print("N: Removal induced order: %s before %s" % (key, other))
                            order[key]['after'].add(other)
                            order[other]['before'].add(key)

            for a in adds:
                # Check if this item should migrate before others
                # (e.g. because they depend on a new [version of a]
                # binary provided by this item).
                for depgroup in universe[a][0]:
                    rigid = depgroup - going_out
                    if not sat_in_testing(rigid):
                        # (partly) satisfied by testing, assume it is okay
                        continue
                    # okay - we got three cases now.
                    # - "swap" (replace existing binary with a newer version)
                    # - "addition" (add new binary without removing any)
                    # - "removal" (remove binary without providing a new)
                    #
                    # The problem is that only the latter two require
                    # an ordering.  A "swap" (in itself) should not
                    # affect us.
                    other_adds = set()
                    other_rms = set()
                    for d in ifilter_only(ptable, depgroup):
                        if d in going_in:
                            # "other" provides something "key" needs,
                            # schedule accordingly.
                            other = ptable[d]
                            other_adds.add(other)
                        else:
                            # "other" removes something "key" needs,
                            # schedule accordingly.
                            other = ptable[d]
                            other_rms.add(other)

                    for other in (other_adds - other_rms):
                        if debug_solver and other != key and other not in order[key]['after']:  # pragma: no cover
                            print("N: Dependency induced order (add): %s before %s" % (key, other))
                        order[key]['after'].add(other)
                        order[other]['before'].add(key)

                    for other in (other_rms - other_adds):
                        if debug_solver and other != key and other not in order[key]['before']:  # pragma: no cover
                            print("N: Dependency induced order (remove): %s before %s" % (key, other))
                        order[key]['before'].add(other)
                        order[other]['after'].add(key)

        ### MILESTONE: Partial-order constraints computed ###

        # At this point, we have computed all the partial-order
        # constraints needed.  Some of these may have created strongly
        # connected components (SCCs) [of size 2 or greater], each of
        # which represents a group of items that (we believe) must
        # migrate together.
        #
        # Each one of those components will become an "easy" hint.

        comps = self._compute_scc(order, ptable)
        merged = {}
        scc = {}
        # Now that we have the SCCs (in comps), we select one item from
        # each SCC to represent the group and become an ID for that
        # SCC.
        #  * scc[scc_id] => All the items in that SCC
        #  * merged[item] => The ID of the SCC to which the item belongs.
        #
        # We also "repair" the ordering, so we know in which order the
        # hints should be emitted.
        for com in comps:
            scc_id = com[0]
            scc[scc_id] = com
            merged[scc_id] = scc_id
            if len(com) > 1:
                so_before = order[scc_id]['before']
                so_after = order[scc_id]['after']
                for n in com:
                    if n == scc_id:
                        continue
                    so_before.update(order[n]['before'])
                    so_after.update(order[n]['after'])
                    merged[n] = scc_id
                    del order[n]
                if debug_solver:  # pragma: no cover
                    print("N: SCC: %s -- %s" % (scc_id, str(sorted(com))))

        for com in comps:
            node = com[0]
            nbefore = set(merged[b] for b in order[node]['before'])
            nafter = set(merged[b] for b in order[node]['after'])

            # Drop self-relations (usually caused by the merging)
            nbefore.discard(node)
            nafter.discard(node)
            order[node]['before'] = nbefore
            order[node]['after'] = nafter

        if debug_solver:  # pragma: no cover
            print("N: -- PARTIAL ORDER --")

        initial_round = []
        for com in sorted(order):
            if debug_solver and order[com]['before']:  # pragma: no cover
                print("N: %s <= %s" % (com, str(sorted(order[com]['before']))))
            if not order[com]['after']:
                # This component can be scheduled immediately, add it
                # to the queue
                initial_round.append(com)
            elif debug_solver:  # pragma: no cover
                print("N: %s >= %s" % (com, str(sorted(order[com]['after']))))

        queue.extend(sorted(initial_round, key=len))
        del initial_round

        if debug_solver:  # pragma: no cover
            print("N: -- END PARTIAL ORDER --")
            print("N: -- LINEARIZED ORDER --")

        for cur in iter_except(queue.popleft, IndexError):
            if order[cur]['after'] <= emitted and cur not in emitted:
                # This item is ready to be emitted right now
                if debug_solver:  # pragma: no cover
                    print("N: %s -- %s" % (cur, sorted(scc[cur])))
                emitted.add(cur)
                result.append([key2item[x] for x in scc[cur]])
                if order[cur]['before']:
                    # There are components that come after this one.
                    # Add it to queue:
                    # - if it is ready, it will be emitted.
                    # - else, it will be dropped and re-added later.
                    queue.extend(
                        sorted(order[cur]['before'] - emitted, key=len))

        if debug_solver:  # pragma: no cover
            print("N: -- END LINEARIZED ORDER --")

        return result
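The final loop of solve_groups is essentially a worklist topological emit over the partial order: a component is emitted once everything in its "after" set has already been emitted, and its "before" successors are then (re)queued. A stand-alone sketch of just that scheduling step, using the same dict-of-dicts shape as this example (toy keys instead of real migration items):

from collections import deque

order = {
    'a': {'before': {'b', 'c'}, 'after': set()},
    'b': {'before': {'c'}, 'after': {'a'}},
    'c': {'before': set(), 'after': {'a', 'b'}},
}

emitted = set()
result = []
# start with the components that have nothing scheduled before them
queue = deque(sorted(k for k in order if not order[k]['after']))

while queue:
    cur = queue.popleft()
    if order[cur]['after'] <= emitted and cur not in emitted:
        emitted.add(cur)
        result.append(cur)
        # requeue successors; any that are not ready yet are simply
        # dropped on their turn and re-added by a later predecessor
        queue.extend(sorted(order[cur]['before'] - emitted))

print(result)  # -> ['a', 'b', 'c']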
Example #4
    def solve_groups(self, groups):
        result = []
        emitted = set()
        queue = deque()
        key2item = {}
        debug_solver = self.logger.isEnabledFor(logging.DEBUG)

        order = self._compute_group_order(groups, key2item)

        # === MILESTONE: Partial-order constraints computed ===

        # At this point, we have computed all the partial-order
        # constraints needed.  Some of these may have created strongly
        # connected components (SCCs) [of size 2 or greater], each of
        # which represents a group of items that (we believe) must
        # migrate together.
        #
        # Each one of those components will become an "easy" hint.

        comps = compute_scc(order)
        # Now that we have the SCCs (in comps), we select one item from
        # each SCC to represent the group and become an ID for that
        # SCC.
        #  * scc_keys[scc_id] => All the item-keys in that SCC
        #
        # We also "repair" the ordering, so we know in which order the
        # hints should be emitted.
        scc_keys = self._merge_items_into_components(comps, order)

        if debug_solver:  # pragma: no cover
            self.logger.debug("-- PARTIAL ORDER --")

        initial_round = []
        for com in sorted(order):
            if debug_solver and order[com].before:  # pragma: no cover
                self.logger.debug("N: %s <= %s", com,
                                  str(sorted(order[com].before)))
            if not order[com].after:
                # This component can be scheduled immediately, add it
                # to the queue
                initial_round.append(com)
            elif debug_solver:  # pragma: no cover
                self.logger.debug("N: %s >= %s", com,
                                  str(sorted(order[com].after)))

        queue.extend(sorted(initial_round, key=len))
        del initial_round

        if debug_solver:  # pragma: no cover
            self.logger.debug("-- END PARTIAL ORDER --")
            self.logger.debug("-- LINEARIZED ORDER --")

        for cur in iter_except(queue.popleft, IndexError):
            if order[cur].after <= emitted and cur not in emitted:
                # This item is ready to be emitted right now
                if debug_solver:  # pragma: no cover
                    self.logger.debug("%s -- %s", cur, sorted(scc_keys[cur]))
                emitted.add(cur)
                result.append([key2item[x] for x in scc_keys[cur]])
                if order[cur].before:
                    # There are components that come after this one.
                    # Add it to queue:
                    # - if it is ready, it will be emitted.
                    # - else, it will be dropped and re-added later.
                    queue.extend(sorted(order[cur].before - emitted, key=len))

        if debug_solver:  # pragma: no cover
            self.logger.debug("-- END LINEARIZED ORDER --")

        return result
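Compared with example #3, this refactor stores the ordering constraints behind attribute access ("order[com].before" / "order[com].after") rather than nested dicts. The exact node type lives in britney2; a hypothetical minimal stand-in with the same two fields could look like this:

from dataclasses import dataclass, field

@dataclass
class OrderNode:  # hypothetical stand-in; britney2 defines its own node type
    before: set = field(default_factory=set)  # keys that must migrate after this one
    after: set = field(default_factory=set)   # keys that must migrate before this one

order = {'a': OrderNode(), 'b': OrderNode()}
order['a'].before.add('b')  # 'a' migrates before 'b'
order['b'].after.add('a')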
Example #5
    def request_tests_for_source(self, item, arch, source_data_srcdist, pkg_arch_result):
        pkg_universe = self.britney.pkg_universe
        target_suite = self.suite_info.target_suite
        sources_s = item.suite.sources
        packages_s_a = item.suite.binaries[arch]
        source_name = item.package
        source_version = source_data_srcdist.version
        # request tests (unless they were already requested earlier or have a result)
        tests = self.tests_for_source(source_name, source_version, arch)
        is_huge = False
        try:
            is_huge = len(tests) > int(self.options.adt_huge)
        except AttributeError:
            pass

        # Here we figure out what is required from the source suite
        # for the test to install successfully.
        #
        # Loop over all binary packages from trigger and
        # recursively look up which *versioned* dependencies are
        # only satisfied in the source suite.
        #
        # For all binaries found, look up which packages they
        # break/conflict with in the target suite, but not in the
        # source suite. The main reason to do this is to cover test
        # dependencies, so we will check Testsuite-Triggers as
        # well.
        #
        # OI: do we need to do the first check in a smart way
        # (i.e. only for the packages that are actually going to be
        # installed) for the breaks/conflicts set as well, i.e. do
        # we need to check if any of the packages that we now
        # enforce being from the source suite, actually have new
        # versioned depends and new breaks/conflicts.
        #
        # For all binaries found, add the set of unique source
        # packages to the list of triggers.

        bin_triggers = set()
        bin_new = set(source_data_srcdist.binaries)
        for binary in iter_except(bin_new.pop, KeyError):
            if binary in bin_triggers:
                continue
            bin_triggers.add(binary)

            # Check if there is a dependency that is not
            # available in the target suite.
            # We add slightly too much here, because new binaries
            # will also show up, but they are already properly
            # installed. Nevermind.
            depends = pkg_universe.dependencies_of(binary)
            # depends is a frozenset{frozenset{BinaryPackageId, ..}}
            for deps_of_bin in depends:
                # We'll figure out which version later
                bin_new.update(added_pkgs_compared_to_target_suite(deps_of_bin, target_suite))

        # Check if the package breaks/conflicts anything. We might
        # be adding slightly too many source packages due to the
        # check here as a binary package that is broken may be
        # coming from a different source package in the source
        # suite. Nevermind.
        bin_broken = set()
        for binary in bin_triggers:
            # broken is a frozenset{BinaryPackageId, ..}
            broken = pkg_universe.negative_dependencies_of(binary)
            # We'll figure out which version later
            bin_broken.update(added_pkgs_compared_to_target_suite(broken, target_suite, invert=True))
        bin_triggers.update(bin_broken)

        triggers = set()
        for binary in bin_triggers:
            if binary.architecture == arch:
                try:
                    source_of_bin = packages_s_a[binary.package_name].source
                    triggers.add(
                        source_of_bin + '/' +
                        sources_s[source_of_bin].version)
                except KeyError:
                    # Apparently the package was removed from
                    # unstable e.g. if packages are replaced
                    # (e.g. -dbg to -dbgsym)
                    pass
                if binary not in source_data_srcdist.binaries:
                    for tdep_src in self.testsuite_triggers.get(binary.package_name, set()):
                        try:
                            triggers.add(
                                tdep_src + '/' +
                                sources_s[tdep_src].version)
                        except KeyError:
                            # Apparently the source was removed from
                            # unstable (testsuite_triggers are unified
                            # over all suites)
                            pass
        trigger = source_name + '/' + source_version
        triggers.discard(trigger)
        trigger_str = trigger
        if triggers:
            # Make the order (minus the "real" trigger) deterministic
            trigger_str += ' ' + ' '.join(sorted(list(triggers)))

        for (testsrc, testver) in tests:
            self.pkg_test_request(testsrc, arch, trigger_str, huge=is_huge)
            (result, real_ver, run_id, url) = self.pkg_test_result(testsrc, testver, arch, trigger)
            pkg_arch_result[(testsrc, real_ver)][arch] = (result, run_id, url)
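The bin_new/bin_triggers loop above is a breadth-first closure: start from the trigger's own binaries and keep pulling in the dependencies that can only be satisfied from the source suite. Below is a self-contained toy of that closure; the dependency mapping and the only_in_source() helper are made up for this sketch and merely stand in for pkg_universe.dependencies_of() and added_pkgs_compared_to_target_suite().

def iter_except(func, exception):
    try:
        while True:
            yield func()
    except exception:
        pass

# deps_of maps a binary to its dependency groups (toy data, not the britney2 API).
deps_of = {
    'app': [frozenset({'libfoo2', 'libfoo1'})],
    'libfoo2': [frozenset({'libbar0'})],
    'libfoo1': [],
    'libbar0': [],
}
target_suite = {'libfoo1'}  # binaries the target suite already provides

def only_in_source(depgroup):
    # stand-in for added_pkgs_compared_to_target_suite()
    return {d for d in depgroup if d not in target_suite}

bin_triggers, bin_new = set(), {'app'}
for binary in iter_except(bin_new.pop, KeyError):
    if binary in bin_triggers:
        continue
    bin_triggers.add(binary)
    for depgroup in deps_of[binary]:
        bin_new.update(only_in_source(depgroup))

print(sorted(bin_triggers))  # -> ['app', 'libbar0', 'libfoo2']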
Example #6
    def build(self):
        """Compile the installability tester

        This method will compile an installability tester from the
        information given and (where possible) try to optimise a
        few things.
        """
        package_table = self._package_table
        reverse_package_table = self._reverse_package_table
        intern_set = self._intern_set
        safe_set = set()
        broken = self._broken
        not_broken = ifilter_except(broken)
        check = set(broken)

        def safe_set_satisfies(t):
            """Check if t's dependencies can be satisfied by the safe set"""
            if not package_table[t][0]:
                # If it has no dependencies at all, then it is safe.  :)
                return True
            for depgroup in package_table[t][0]:
                if not any(dep for dep in depgroup if dep in safe_set):
                    return False
            return True

        # Merge reverse conflicts with conflicts - this saves some
        # operations in _check_loop since we only have to check one
        # set (instead of two) and we remove a few duplicates here
        # and there.
        #
        # At the same time, intern the rdep sets
        for pkg in reverse_package_table:
            if pkg not in package_table:  # pragma: no cover
                raise RuntimeError("%s/%s/%s referenced but not added!" % pkg)
            deps, con = package_table[pkg]
            rdeps, rcon, rdep_relations = reverse_package_table[pkg]
            if rcon:
                if not con:
                    con = intern_set(rcon)
                else:
                    con = intern_set(con | rcon)
                package_table[pkg] = (deps, con)
            reverse_package_table[pkg] = (intern_set(rdeps), con,
                                          intern_set(rdep_relations))

        # Check if we can expand broken.
        for t in not_broken(iter_except(check.pop, KeyError)):
            # This package is not known to be broken... but it might be now
            isb = False
            for depgroup in package_table[t][0]:
                if not any(not_broken(depgroup)):
                    # A single clause is unsatisfiable, the
                    # package can never be installed - add it to
                    # broken.
                    isb = True
                    break

            if not isb:
                continue

            broken.add(t)

            if t not in reverse_package_table:
                continue
            check.update(reverse_package_table[t][0] - broken)

        if broken:
            # Since a broken package will never be installable, nothing that depends on it
            # will ever be installable.  Thus, there is no point in keeping relations on
            # the broken package.
            seen = set()
            empty_set = frozenset()
            null_data = (frozenset([empty_set]), empty_set)
            for b in (x for x in broken if x in reverse_package_table):
                for rdep in (r for r in not_broken(reverse_package_table[b][0])
                             if r not in seen):
                    ndep = intern_set(
                        (x - broken) for x in package_table[rdep][0])
                    package_table[rdep] = (ndep,
                                           package_table[rdep][1] - broken)
                    seen.add(rdep)

            # Since they won't affect the installability of any other package, we might
            # as well null their data.  This saves a bit of memory for these packages,
            # though likely there will only be a handful of them "at best" (fsvo of "best").
            for b in broken:
                package_table[b] = null_data
                if b in reverse_package_table:
                    del reverse_package_table[b]

        # Now find an initial safe set (if any)
        check = set()
        for pkg in package_table:

            if package_table[pkg][1]:
                # has (reverse) conflicts - not safe
                continue
            if not safe_set_satisfies(pkg):
                continue
            safe_set.add(pkg)
            if pkg in reverse_package_table:
                # add all rdeps (except those already in the safe_set)
                check.update(reverse_package_table[pkg][0] - safe_set)

        # Check if we can expand the initial safe set
        for pkg in iter_except(check.pop, KeyError):
            if package_table[pkg][1]:
                # has (reverse) conflicts - not safe
                continue
            if safe_set_satisfies(pkg):
                safe_set.add(pkg)
                if pkg in reverse_package_table:
                    # add all rdeps (except those already in the safe_set)
                    check.update(reverse_package_table[pkg][0] - safe_set)

        eqv_table = self._build_eqv_packages_table(package_table,
                                                   reverse_package_table)

        return InstallabilitySolver(package_table, reverse_package_table,
                                    self._testing, self._broken,
                                    self._essentials, safe_set, eqv_table)
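The safe-set computation at the end of build() is a fixpoint: a package with no (reverse) conflicts whose every dependency clause can already be met from the safe set is itself safe, which may in turn make its reverse dependencies safe. A toy sketch of that expansion, using a plain dict as a stand-in for package_table (each entry is a pair of dependency clauses and conflicts) and a naive restart loop instead of britney2's rdep worklist:

# package_table: pkg -> (frozenset of dependency clauses, frozenset of conflicts)
package_table = {
    'libc': (frozenset(), frozenset()),
    'tool': (frozenset({frozenset({'libc'})}), frozenset()),
    'gui':  (frozenset({frozenset({'tool'})}), frozenset({'other-gui'})),  # has a conflict
}

def safe_set_satisfies(pkg, safe_set):
    deps, _ = package_table[pkg]
    # no clauses at all counts as satisfiable, as in the method above
    return all(any(dep in safe_set for dep in clause) for clause in deps)

safe_set = set()
changed = True
while changed:
    changed = False
    for pkg, (deps, conflicts) in package_table.items():
        if pkg in safe_set or conflicts:
            continue  # packages with (reverse) conflicts are never "safe"
        if safe_set_satisfies(pkg, safe_set):
            safe_set.add(pkg)
            changed = True

print(sorted(safe_set))  # -> ['libc', 'tool']; 'gui' is excluded by its conflict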
Example #7
    def build(self):
        """Compile the installability tester

        This method will compile an installability tester from the
        information given and (where possible) try to optimise a
        few things.
        """
        package_table = self._package_table
        reverse_package_table = self._reverse_package_table
        intern_set = self._intern_set
        broken = self._broken
        not_broken = ifilter_except(broken)
        check = set(broken)

        # Merge reverse conflicts with conflicts - this saves some
        # operations in _check_loop since we only have to check one
        # set (instead of two) and we remove a few duplicates here
        # and there.
        #
        # At the same time, intern the rdep sets
        for pkg in reverse_package_table:
            if pkg not in package_table:  # pragma: no cover
                raise AssertionError("%s referenced but not added!" % str(pkg))
            deps, con = package_table[pkg]
            rdeps, rcon, rdep_relations = reverse_package_table[pkg]
            if rcon:
                if not con:
                    con = intern_set(rcon)
                else:
                    con = intern_set(con | rcon)
                package_table[pkg] = (deps, con)
            reverse_package_table[pkg] = (intern_set(rdeps), con,
                                          intern_set(rdep_relations))

        # Check if we can expand broken.
        for t in not_broken(iter_except(check.pop, KeyError)):
            # This package is not known to be broken... but it might be now
            isb = False
            for depgroup in package_table[t][0]:
                if not any(not_broken(depgroup)):
                    # A single clause is unsatisfiable, the
                    # package can never be installed - add it to
                    # broken.
                    isb = True
                    break

            if not isb:
                continue

            broken.add(t)

            if t not in reverse_package_table:
                continue
            check.update(reverse_package_table[t][0] - broken)

        if broken:
            # Since a broken package will never be installable, nothing that depends on it
            # will ever be installable.  Thus, there is no point in keeping relations on
            # the broken package.
            seen = set()
            empty_set = frozenset()
            null_data = (frozenset([empty_set]), empty_set)
            for b in (x for x in broken if x in reverse_package_table):
                for rdep in (r for r in not_broken(reverse_package_table[b][0])
                             if r not in seen):
                    ndep = intern_set((x - broken) for x in package_table[rdep][0])
                    package_table[rdep] = (ndep, package_table[rdep][1] - broken)
                    seen.add(rdep)

            # Since they won't affect the installability of any other package, we might
            # as well null their data.  This saves a bit of memory for these packages,
            # though likely there will only be a handful of them "at best" (fsvo of "best").
            for b in broken:
                package_table[b] = null_data
                if b in reverse_package_table:
                    del reverse_package_table[b]

        relations, eqv_set = self._build_relations_and_eqv_packages_set(package_table, reverse_package_table)

        universe = BinaryPackageUniverse(relations,
                                         intern_set(self._essentials),
                                         intern_set(broken),
                                         intern_set(eqv_set))

        solver = InstallabilityTester(universe, self._testing)

        return universe, solver