def test_rewrite_remove_order(data):
    """Rewritings that only remove hyperedges and add nodes may be applied in any order if
    we ignore already removed."""
    for congruence in [True, False]:
        h1 = data.draw(PE.gen_hypergraph(congruence=congruence))
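        # h2 and h3 start as copies of h1; mapping12 translates h1's elements into h2,
        # and mapping23 translates h2's elements into h3.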
        h2 = Hypergraph(congruence=congruence)
        h3 = Hypergraph(congruence=congruence)
        mapping12 = h2.add_from(h1)
        mapping23 = h3.add_from(h2)

        rwa = data.draw(PE.gen_rewrite(h1, num_add_hyperedges=0, num_merge=0))
        rwb = data.draw(PE.gen_rewrite(h1, num_add_hyperedges=0, num_merge=0))

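        # h1 gets rwa then rwb, h2 gets them in the opposite order; hyperedges that were
        # already removed by the earlier rewrite are ignored.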
        h1.rewrite(**rwa)
        h1.rewrite(**rwb, ignore_already_removed=True)

        rwa2 = PE.map_rewrite(rwa, mapping12)
        rwb2 = PE.map_rewrite(rwb, mapping12)

        h2.rewrite(**rwb2)
        h2.rewrite(**rwa2, ignore_already_removed=True)

        rwa3 = PE.map_rewrite(rwa2, mapping23)
        rwb3 = PE.map_rewrite(rwb2, mapping23)

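        # h3 gets both rewrites applied at once as a single combined rewrite.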
        h3.rewrite(add=(rwa3['add'] + rwb3['add']), remove=(rwa3['remove'] + rwb3['remove']))

        assert h1.isomorphic(h2)
        assert h1.isomorphic(h3)


@hypothesis.given(strategies.data())
def test_rewrite_noremove_order(data):
    """Rewritings that don't remove anything may be applied in any order."""
    for congruence in [True, False]:
        h1 = data.draw(PE.gen_hypergraph(congruence=congruence))
        h2 = Hypergraph(congruence=congruence)
        h3 = Hypergraph(congruence=congruence)
        mapping12 = h2.add_from(h1)
        mapping23 = h3.add_from(h2)

        rwa = data.draw(PE.gen_rewrite(h1, num_remove=0))
        rwb = data.draw(PE.gen_rewrite(h1, num_remove=0))

        h1.rewrite(**rwa)
        h1.rewrite(**rwb)

        rwa2 = PE.map_rewrite(rwa, mapping12)
        rwb2 = PE.map_rewrite(rwb, mapping12)

        h2.rewrite(**rwb2)
        h2.rewrite(**rwa2)

        rwa3 = PE.map_rewrite(rwa2, mapping23)
        rwb3 = PE.map_rewrite(rwb2, mapping23)

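        # h3 applies both rewrites at once; since nothing is removed,
        # their additions and merges can simply be concatenated.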
        h3.rewrite(add=(rwa3['add'] + rwb3['add']), merge=(rwa3['merge'] + rwb3['merge']))

        assert h1.isomorphic(h2)
        assert h1.isomorphic(h3)


@hypothesis.given(strategies.data())
def test_stat(data):
    """This isn't really a test: it just collects statistics about creating a hypergraph and
    transforming it."""
    PE.GloballyIndexed.reset_global_index()

    h = Hypergraph()

    nodes_stat = []
    hyperedges_stat = []

    for i in range(5):
        rw = data.draw(PE.gen_rewrite(h))
        h.rewrite(**rw)
        nodes_stat.append(len(h.nodes()))
        hyperedges_stat.append(len(h.hyperedges()))

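    # Bucket the values into coarse ranges so that hypothesis.event reports only a few
    # distinct labels.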
    def _str(val):
        if val > 10:
            return "> 10"
        elif val >= 5:
            return "5-10"
        else:
            return str(val)

    hypothesis.event("Max nodes: " + _str(max(nodes_stat)))
    hypothesis.event("Max hyperedges: " + _str(max(hyperedges_stat)))


@hypothesis.given(strategies.data())
def test_rewriting(data):
    """Rewriting leaves the graph in a consistent state. Also, adding really adds and merging
    really merges; removing is tested separately
    (because addition has higher priority than removal)."""
    for congruence in [True, False]:
        h = data.draw(PE.gen_hypergraph(congruence=congruence))
        for i in range(2):
            rw = data.draw(PE.gen_rewrite(h))
            added = h.rewrite(**rw)
            h.check_integrity()

            nodes = h.nodes()
            hyperedges = h.hyperedges()

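            # Every merged pair must now resolve to the same node, and everything reported
            # as added must actually be present in the graph.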
            for n1, n2 in rw['merge']:
                n1 = n1.follow()
                n2 = n2.follow()
                assert n1 == n2
                assert n1 in nodes

            for e in added:
                if isinstance(e, Node):
                    assert e in nodes
                else:
                    assert e in hyperedges


@hypothesis.given(strategies.data())
def test_remove(data):
    """Removing indeed removes."""
    for congruence in [True, False]:
        h = data.draw(PE.gen_hypergraph(congruence=congruence))
        rw = data.draw(PE.gen_rewrite(h, num_add_hyperedges=0, num_merge=0))
        h.rewrite(**rw)
        hyperedges = h.hyperedges()
        for e in rw['remove']:
            assert e not in hyperedges


@hypothesis.given(strategies.data())
def test_smallest_hyperedge_tracker(data):
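    """SmallestHyperedgeTracker should keep, for every node, the value of the smallest term
    (by size for tracker1 and by depth for tracker2) and the corresponding smallest terms;
    we check this against brute-force enumeration of all finite terms."""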
    for congruence in [True, False]:
        PE.GloballyIndexed.reset_global_index()
        h1 = PE.Hypergraph(congruence=congruence)
        tracker1 = PE.SmallestHyperedgeTracker(measure=PE.SmallestHyperedgeTracker.size)
        tracker2 = PE.SmallestHyperedgeTracker(measure=PE.SmallestHyperedgeTracker.depth)
        h1.listeners.add(tracker1)
        h1.listeners.add(tracker2)

        max_number_of_smallest = 0
        there_was_by_size_ineq_by_depth = False

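        # Repeatedly rewrite the graph (occasionally removing a node) and re-check the
        # trackers against brute force after every step.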
        for i in range(data.draw(strategies.integers(2, 5))):
            rw = data.draw(PE.gen_rewrite(h1))
            h1.rewrite(**rw)
            if data.draw(strategies.booleans()):
                h1.remove_nodes(data.draw(strategies.sampled_from(list(h1.nodes()))))

            # TODO: Sometimes there are just too many terms. In that case we give up via
            # hypothesis.assume(False), which isn't very elegant. A better approach would be
            # to find the smallest terms directly instead of enumerating all of them.
            terms_enumerated = 0
            for n in h1.nodes():
                terms = []
                for t in PE.finite_terms(n):
                    terms_enumerated += 1
                    if terms_enumerated > 1000:
                        print("Oops, too many terms")
                        hypothesis.assume(False)
                    terms.append((PE.measure_term(t, tracker1.measure),
                                  PE.measure_term(t, tracker2.measure),
                                  t))

                if terms:
                    (min_val1, _, min_term1) = min(terms, key=lambda x: x[0])
                    (_, min_val2, min_term2) = min(terms, key=lambda x: x[1])

                    assert min_val1 == tracker1.smallest[n][0]
                    assert min_val2 == tracker2.smallest[n][0]

                    smallest1 = set(t for v, _, t in terms if v == min_val1)
                    smallest2 = set(t for _, v, t in terms if v == min_val2)
                    assert set(tracker1.smallest_terms(n)) == smallest1
                    # For depth, the tracker does not return the full set of shallowest terms
                    assert set(tracker2.smallest_terms(n)).issubset(smallest2)

                    max_number_of_smallest = max(max_number_of_smallest, len(smallest1))
                    max_number_of_smallest = max(max_number_of_smallest, len(smallest2))
                    if smallest1 != smallest2:
                        there_was_by_size_ineq_by_depth = True
                else:
                    assert tracker1.smallest[n][0] == tracker1.worst_value
                    assert tracker2.smallest[n][0] == tracker2.worst_value

        hypothesis.event("Max number of smallest: " + str(max_number_of_smallest))
        hypothesis.event("There was a node where the num of smallest by size != by depth: " +
                         str(there_was_by_size_ineq_by_depth))


@hypothesis.given(strategies.data())
def test_listener(data):
    """This tests listener events. Note that the integrity checks are non-strict."""
    for congruence in [True, False]:
        h1 = data.draw(PE.gen_hypergraph(congruence=congruence))

        class _L:
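            """Tracks in self.to_add the set of elements that should currently be present
            in the hypergraph, updating it from on_add, on_merge and on_remove events."""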
            def __init__(self, to_add):
                self.to_add = to_add

            def on_add(self, hypergraph, elements):
                hypergraph.check_integrity(False)
                for e in elements:
                    assert e in hypergraph
                    assert e not in self.to_add

                self.to_add |= set(elements)

            def on_merge(self, hypergraph, node, removed, added, reason):
                hypergraph.check_integrity(False)
                assert node not in hypergraph
                assert node.merged in hypergraph
                assert node in self.to_add
                assert node.merged in self.to_add
                for h in removed:
                    assert h not in hypergraph
                    assert h.merged in hypergraph
                    assert h in self.to_add
                for h in added:
                    assert h in hypergraph
                    assert h not in self.to_add

                self.to_add -= set(removed)
                self.to_add -= set([node])
                self.to_add |= set(added)

            def on_remove(self, hypergraph, elements):
                hypergraph.check_integrity(False)
                for e in elements:
                    assert e not in hypergraph
                    assert e in self.to_add

                self.to_add -= set(elements)

        lis = _L(set(h1.nodes()) | set(h1.hyperedges()))
        h1.listeners.add(lis)

        rw = data.draw(PE.gen_rewrite(h1))
        h1.rewrite(**rw)

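        # Rebuilding a graph from the elements tracked by the listener must give a graph
        # isomorphic to h1.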
        h2 = Hypergraph(congruence=congruence)
        h2.rewrite(add=lis.to_add)
        assert h1.isomorphic(h2)


@hypothesis.given(strategies.data())
def test_add_from(data):
    """add_from results in an isomorphic graph, and its mapping can be used to apply the
    same transformations to each copy. Moreover, the order of elements in the rewrite may
    differ between the copies."""
    for congruence in [True, False]:
        h1 = data.draw(PE.gen_hypergraph(congruence=congruence))
        h2 = Hypergraph(congruence=congruence)
        mapping = h2.add_from(h1)
        assert h1.isomorphic(h2)

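        # rw2 is the same rewrite with its elements permuted and translated into h2 via the
        # mapping returned by add_from.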
        rw1 = data.draw(PE.gen_rewrite(h1))
        rw2 = PE.map_rewrite(data.draw(PE.gen_permuted_rewrite(rw1)), mapping)

        h1.rewrite(**rw1)
        h2.rewrite(**rw2)

        assert h1.isomorphic(h2)


@hypothesis.given(strategies.data())
def test_add_removed(data):
    """Removing hyperedges and then adding the same hyperedges back is a no-op."""
    for congruence in [True, False]:
        h1 = data.draw(PE.gen_hypergraph(congruence=congruence))
        h2 = Hypergraph(congruence=congruence)
        mapping = h2.add_from(h1)

        rw1 = data.draw(PE.gen_rewrite(h1))
        rw2 = PE.map_rewrite(rw1, mapping)
        rw2['remove'] = []

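        # h1: apply the full rewrite and then re-add the removed hyperedges;
        # h2: apply the same rewrite but without the removals. The results must coincide.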
        h1.rewrite(**rw1)
        if congruence:
            h1.rewrite(add=rw1['remove'])
        else:
            # In the non-congruent case we have to be careful not to add duplicates
            h1.rewrite(add=set(rw1['remove']))

        h2.rewrite(**rw2)

        assert h1.isomorphic(h2)