def test_bad_iterations(self):
    """Test that ``clique.search`` raises a ``ValueError`` when the number of
    iterations is not a positive integer."""
    expected_msg = "Number of iterations must be a positive int"
    with pytest.raises(ValueError, match=expected_msg):
        clique.search(
            clique=[0, 1, 2, 3],
            graph=nx.complete_graph(5),
            iterations=-1,
        )
    def test_expected_growth(self):
        """Test that the function performs alternating growth and swap phases.

        Starting from a 4+1 node lollipop graph with an extra edge between
        nodes 4 and 2 and the initial clique ``[3, 4]``, local search should
        grow to ``[2, 3, 4]``, swap to either ``[0, 2, 3]`` or ``[1, 2, 3]``,
        grow again to ``[0, 1, 2, 3]``, and — being unable to swap — return
        ``[0, 1, 2, 3]``."""
        lollipop = nx.lollipop_graph(4, 1)
        lollipop.add_edge(4, 2)

        found = clique.search([3, 4], lollipop, iterations=100)

        assert found == [0, 1, 2, 3]
    def test_dead_end(self, monkeypatch):
        """Test that the function stops when in a dead end (``grow == swap``)
        so that no swapping is possible.

        Both ``grow`` and ``swap`` are monkeypatched to hand back the clique
        they were fed, so the local search should conclude it is already in a
        dead end and return the starting clique unchanged."""
        full_graph = nx.complete_graph(5)
        start = [0, 1, 2]

        with monkeypatch.context() as m:
            m.setattr(clique, "grow", patch_resize)
            m.setattr(clique, "swap", patch_resize)
            found = clique.search(start, full_graph, iterations=100)

        assert found == start
    def test_max_iterations(self, monkeypatch):
        """Test that the function stops after 5 iterations despite not being
        in a dead end (i.e., ``grow != swap``).

        The ``np.random.choice`` call used inside ``grow`` and ``swap`` is
        monkeypatched so that, on a 5-node wheel graph starting from the
        clique ``[0, 1]``, the algorithm oscillates between ``[0, 1, 2]`` and
        ``[0, 2, 3]`` on every growth & swap iteration; an odd iteration
        count therefore ends on ``[0, 2, 3]``."""
        wheel = nx.wheel_graph(5)
        start = [0, 1]

        with monkeypatch.context() as m:
            pick_first = functools.partial(patch_random_choice, element=0)
            m.setattr(np.random, "choice", pick_first)
            found = clique.search(start, wheel, iterations=5)

        assert found == [0, 2, 3]
# Summarize the cliques obtained from shrinking: what is the average clique
# size, and what are the largest and smallest clique sizes?
# NOTE(review): `shrunk` is defined in an earlier part of the tutorial —
# presumably a list of cliques (node lists) produced by `clique.shrink`.

clique_sizes = [len(s) for s in shrunk]
print("First ten clique sizes = ", clique_sizes[:10])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))
print("Maximum clique size = ", np.max(clique_sizes))
print("Minimum clique size = ", np.min(clique_sizes))

##############################################################################
# Even in the first few samples, we've already identified larger cliques than the 4-node clique
# we studied before. Awesome! Indeed, this simple shrinking strategy gives cliques with average
# size of roughly five. We can enlarge these cliques by searching for larger cliques in their
# vicinity. We'll do this by taking ten iterations of local search and studying the results.
# Note: this may take a few seconds.

# Run 10 iterations of local search on each shrunk clique to enlarge it.
searched = [clique.search(s, TA_graph, 10) for s in shrunk]
clique_sizes = [len(s) for s in searched]
print("First two cliques = ", searched[:2])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))

##############################################################################
# Wow! Local search is very helpful, we've found cliques with the maximum size of eight for
# essentially all samples 🤩.  Let's take a look at the first clique we found

# Render the first searched clique on the graph and export an interactive plot.
clique_fig = plot.graph(TA_graph, searched[0])
plotly.offline.plot(clique_fig, filename="maximum_clique.html")

##############################################################################
# .. raw:: html
#     :file: ../../examples_apps/maximum_clique.html
    GBS_dens.append(nx.density(PH_graph.subgraph(s)))
    u_dens.append(nx.density(PH_graph.subgraph(uniform)))

# Report the mean subgraph density of GBS samples versus uniform samples;
# the figures below show GBS landing on noticeably denser subgraphs.
print("GBS mean density = {:.4f}".format(np.mean(GBS_dens)))
print("Uniform mean density = {:.4f}".format(np.mean(u_dens)))
# GBS mean density = 0.3714
# Uniform mean density = 0.1673

---------------------------------------------------------------------

# Perform greedy shrinking on each sample until the subgraph is a clique,
# then sanity-check the first result with `clique.is_clique`.
shrunk = [clique.shrink(s, PH_graph) for s in samples]
print(clique.is_clique(PH_graph.subgraph(shrunk[0])))

# Enlarge each shrunk clique with 10 iterations of local search and find the
# average clique size.
searched = [clique.search(s, PH_graph, 10) for s in shrunk]
clique_sizes = [len(s) for s in searched]
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))
# Average clique size = 6.664

---------------------------------------------------------------------

# Display one of the searched cliques of average size
clique_fig = plot.graph(PH_graph, searched[0])
clique_fig.show()

# Identify the largest clique found (aka the maxclique) and report it; ties
# resolve to the first such clique, matching an argmax over the sizes.
largest_clique = max(searched, key=len)
print("Largest clique subgraph is = ", largest_clique)
# Largest clique subgraph is =  [48, 90, 104, 109, 159, 196, 263, 295]
# NOTE(review): this entire section is a byte-for-byte duplicate of the
# clique-size summary / local-search / plotting passage earlier in this file —
# it looks like a concatenation artifact. Confirm whether it should be removed
# from the source document; code is left untouched here.

clique_sizes = [len(s) for s in shrunk]
print("First ten clique sizes = ", clique_sizes[:10])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))
print("Maximum clique size = ", np.max(clique_sizes))
print("Minimum clique size = ", np.min(clique_sizes))

##############################################################################
# Even in the first few samples, we've already identified larger cliques than the 4-node clique
# we studied before. Awesome! Indeed, this simple shrinking strategy gives cliques with average
# size of roughly five. We can enlarge these cliques by searching for larger cliques in their
# vicinity. We'll do this by taking ten iterations of local search and studying the results.
# Note: this may take a few seconds.

searched = [clique.search(s, TA_graph, 10) for s in shrunk]
clique_sizes = [len(s) for s in searched]
print("First two cliques = ", searched[:2])
print("Average clique size = {:.3f}".format(np.mean(clique_sizes)))

##############################################################################
# Wow! Local search is very helpful, we've found cliques with the maximum size of eight for
# essentially all samples 🤩.  Let's take a look at the first clique we found

clique_fig = plot.graph(TA_graph, searched[0])
plotly.offline.plot(clique_fig, filename="maximum_clique.html")

##############################################################################
# .. raw:: html
#     :file: ../../examples_apps/maximum_clique.html