    def test_expected_growth(self):
        """Test if the function performs a growth and swap phase, followed by another growth and
        attempted swap phase. This is carried out by starting with a 4+1 node lollipop graph,
        adding a connection from node 4 to node 2, and starting from the [3, 4] clique. The local
        search algorithm should first grow to [2, 3, 4] and then swap to either [0, 2, 3] or
        [1, 2, 3]. It should then grow again to [0, 1, 2, 3], be unable to swap any further, and
        hence return [0, 1, 2, 3]."""

        graph = nx.lollipop_graph(4, 1)
        graph.add_edge(4, 2)

        c = [3, 4]
        result = clique.search(c, graph, iterations=100)
        assert result == [0, 1, 2, 3]
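
# A minimal standalone sketch (separate from the test class above) of the graph used in
# ``test_expected_growth``: ``nx.lollipop_graph(4, 1)`` is the complete graph on nodes 0-3 with
# node 4 attached to node 3, so adding the edge (4, 2) turns [2, 3, 4] into a triangle while
# [0, 1, 2, 3] remains the unique maximum clique that local search should reach.
import networkx as nx

g = nx.lollipop_graph(4, 1)
g.add_edge(4, 2)
print(sorted(g.edges()))  # [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3), (2, 4), (3, 4)]
print(sorted(max(nx.find_cliques(g), key=len)))  # [0, 1, 2, 3]
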
    def test_dead_end(self, monkeypatch):
        """Test if function stops when in a dead end (i.e., ``grow == swap``) such that no
        swapping is possible. This is achieved by monkeypatching ``grow`` and
        ``swap`` so that they simply return the same clique as fed in, which should cause
        the local search algorithm to conclude that it is already in a dead end and return the
        same clique."""

        graph = nx.complete_graph(5)
        c = [0, 1, 2]

        with monkeypatch.context() as m:
            m.setattr(clique, "grow", patch_resize)
            m.setattr(clique, "swap", patch_resize)
            result = clique.search(c, graph, iterations=100)

        assert result == c
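
# The helper ``patch_resize`` used above is not shown in this snippet. A minimal sketch of the
# behaviour it is assumed to have (return the input clique unchanged, ignoring any further
# ``grow``/``swap`` arguments) could look like this:
def patch_resize(c, *args, **kwargs):
    """Stand-in for ``clique.grow``/``clique.swap`` that leaves the clique unchanged."""
    return c
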
    def test_max_iterations(self, monkeypatch):
        """Test if function stops after 5 iterations despite not being in a dead end (i.e., when
        ``grow != swap``). This is achieved by monkeypatching the ``np.random.choice`` call in
        ``grow`` and ``swap`` so that for a 5-node wheel graph starting as a [0,
        1] node clique, the algorithm simply oscillates between [0, 1, 2] and [0, 2, 3] for each
        iteration of growth & swap. For odd iterations, we get [0, 2, 3]."""

        graph = nx.wheel_graph(5)
        c = [0, 1]

        with monkeypatch.context() as m:
            p = functools.partial(patch_random_choice, element=0)
            m.setattr(np.random, "choice", p)
            result = clique.search(c, graph, iterations=5)

        assert result == [0, 2, 3]
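
# Similarly, ``patch_random_choice`` is not shown in this snippet. A plausible sketch, assuming
# it is meant to replace ``np.random.choice`` with a deterministic pick of a fixed element, is:
def patch_random_choice(x, element=0, *args, **kwargs):
    """Stand-in for ``np.random.choice`` that always returns ``x[element]``."""
    return x[element]
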
# What is the average clique size? How about the largest and smallest clique size?

clique_sizes = [len(s) for s in shrunk]
print("First ten clique sizes = ", clique_sizes[:10])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))
print("Maximum clique size = ", np.max(clique_sizes))
print("Minimum clique size = ", np.min(clique_sizes))

##############################################################################
# Even in the first few samples, we've already identified larger cliques than the 4-node clique
# we studied before. Awesome! Indeed, this simple shrinking strategy gives cliques with an
# average size of roughly five. We can enlarge these cliques by searching for larger cliques in
# their vicinity. We'll do this by running ten iterations of local search and studying the
# results.
# Note: this may take a few seconds.

searched = [clique.search(s, TA_graph, 10) for s in shrunk]
clique_sizes = [len(s) for s in searched]
print("First two cliques = ", searched[:2])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))

##############################################################################
# Wow! Local search is very helpful: we've found cliques with the maximum size of eight for
# essentially all samples 🤩. Let's take a look at the first clique we found.

clique_fig = plot.graph(TA_graph, searched[0])
plotly.offline.plot(clique_fig, filename="maximum_clique.html")

##############################################################################
# .. raw:: html
#     :file: ../../examples_gbs/maximum_clique.html
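
##############################################################################
# As a quick sanity check, independent of the functions used above, we can use plain
# ``networkx`` to confirm that the nodes returned by local search really do form a clique,
# i.e., that every pair of them is connected in ``TA_graph``.

import itertools

found = searched[0]
print(all(TA_graph.has_edge(u, v) for u, v in itertools.combinations(found, 2)))  # expect True
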
    def test_bad_iterations(self):
        """Test if the function raises a ``ValueError`` when a non-positive number of iterations
        is specified"""
        with pytest.raises(ValueError, match="Number of iterations must be a positive int"):
            clique.search(clique=[0, 1, 2, 3], graph=nx.complete_graph(5), iterations=-1)