def dmeasure(G1, G2, w=WEIGHTS_DEFAULT):
    """Calculates the D-measure between two graphs.

    Args:
        G1 (nx.Graph)
        G2 (nx.Graph)
        w (list of floats): weights w1, w2 and w3 from equation 2 of the
            original paper.

    Returns:
        D (float): D-measure between G1 and G2.
    """
    assert len(
        w) == 3, 'three weights have to be specified. Check argument `w`.'
    w1, w2, w3 = w

    # First term
    Pij_1 = distance_distribution(G1)
    Pij_2 = distance_distribution(G2)

    nnd_1, mu_1 = NND(Pij_1)
    nnd_2, mu_2 = NND(Pij_2)

    mu_1, mu_2 = zero_pad(mu_1, mu_2, end=True)

    mu_mean = (mu_1 + mu_2) / 2
    first = np.sqrt(
        np.maximum((entropy(mu_mean) -
                    (entropy(mu_1) + entropy(mu_2)) / 2), 0) / np.log(2))

    # Second term
    second = np.abs(np.sqrt(nnd_1) - np.sqrt(nnd_2))

    # Third term
    alphaG_1 = alpha_centrality(G1)
    alphaG_2 = alpha_centrality(G2)
    alphaG_1, alphaG_2 = zero_pad(alphaG_1, alphaG_2, end=False)

    alphaG_mean = (alphaG_1 + alphaG_2) / 2
    third_1 = np.sqrt(
        np.maximum(
            (entropy(alphaG_mean) -
             (entropy(alphaG_1) + entropy(alphaG_2)) / 2), 0) / np.log(2))

    # Complement
    alphaGcomp_1 = alpha_centrality(nx.complement(G1))
    alphaGcomp_2 = alpha_centrality(nx.complement(G2))
    alphaGcomp_1, alphaGcomp_2 = zero_pad(alphaGcomp_1,
                                          alphaGcomp_2,
                                          end=False)

    alphaGcomp_mean = (alphaGcomp_1 + alphaGcomp_2) / 2
    third_2 = np.sqrt(
        np.maximum((entropy(alphaGcomp_mean) -
                    (entropy(alphaGcomp_1) + entropy(alphaGcomp_2)) / 2), 0) /
        np.log(2))

    third = third_1 + third_2

    return w1 * first + w2 * second + w3 / 2 * third
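The `first` and `third` terms above are square roots of Jensen-Shannon divergences normalised by log 2. A minimal self-contained sketch of that quantity, assuming `entropy` in the code above is the plain Shannon entropy of a probability vector (an assumption, since the helper is not shown here):

import numpy as np

def shannon_entropy(p):
    # Shannon entropy in nats, ignoring zero entries.
    p = np.asarray(p, dtype=float)
    p = p[p > 0]
    return -np.sum(p * np.log(p))

def js_term(p, q):
    # Square root of the Jensen-Shannon divergence normalised by log(2),
    # mirroring the structure of the `first` and `third` terms above.
    m = (np.asarray(p, dtype=float) + np.asarray(q, dtype=float)) / 2
    jsd = shannon_entropy(m) - (shannon_entropy(p) + shannon_entropy(q)) / 2
    return np.sqrt(max(jsd, 0.0) / np.log(2))

print(js_term([0.5, 0.5], [0.5, 0.5]))  # identical distributions -> 0.0
print(js_term([1.0, 0.0], [0.0, 1.0]))  # disjoint distributions  -> 1.0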
Example #2
 def setup_class(cls):
     cls.Gnp = nx.gnp_random_graph(20, 0.8)
     cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
     cls.Gd = nx.davis_southern_women_graph()
     cls.Ad = _AntiGraph(nx.complement(cls.Gd))
     cls.Gk = nx.karate_club_graph()
     cls.Ak = _AntiGraph(nx.complement(cls.Gk))
     cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
Example #3
 def setUp(self):
     self.Gnp = nx.gnp_random_graph(20, 0.8)
     self.Anp = _AntiGraph(nx.complement(self.Gnp))
     self.Gd = nx.davis_southern_women_graph()
     self.Ad = _AntiGraph(nx.complement(self.Gd))
     self.Gk = nx.karate_club_graph()
     self.Ak = _AntiGraph(nx.complement(self.Gk))
     self.GA = [(self.Gnp, self.Anp), (self.Gd, self.Ad),
                (self.Gk, self.Ak)]
def test_complement(testgraph):
    """
    Test the Complement of the graph are same
    """

    a = nx.complement(testgraph[0])
    b = sg.graph_operations.complement(testgraph[2])
    c = nx.complement(testgraph[1])
    d = sg.graph_operations.complement(testgraph[3])
    graph_equals(a, b)
    graph_equals(c, d)
def test_complement(testgraph):
    """
    Test the Complement of the graph are same
    """

    a = nx.complement(testgraph[0])
    b = sg.digraph_operations.complement(testgraph[2])
    c = nx.complement(testgraph[1])
    d = sg.digraph_operations.complement(testgraph[3])
    digraph_equals(a, b)
    digraph_equals(c, d)
def test_complement():
    null = nx.null_graph()
    empty1 = nx.empty_graph(1)
    empty10 = nx.empty_graph(10)
    K3 = nx.complete_graph(3)
    K5 = nx.complete_graph(5)
    K10 = nx.complete_graph(10)
    P2 = nx.path_graph(2)
    P3 = nx.path_graph(3)
    P5 = nx.path_graph(5)
    P10 = nx.path_graph(10)
    # complement of the complete graph is empty

    G = nx.complement(K3)
    assert nx.is_isomorphic(G, nx.empty_graph(3))
    G = nx.complement(K5)
    assert nx.is_isomorphic(G, nx.empty_graph(5))
    # for any G, G=complement(complement(G))
    P3cc = nx.complement(nx.complement(P3))
    assert nx.is_isomorphic(P3, P3cc)
    nullcc = nx.complement(nx.complement(null))
    assert nx.is_isomorphic(null, nullcc)
    b = nx.bull_graph()
    bcc = nx.complement(nx.complement(b))
    assert nx.is_isomorphic(b, bcc)
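Two identities behind these assertions can be checked directly: a graph and its complement split the n(n-1)/2 edges of the complete graph between them, and complementing twice gives back the original graph (up to isomorphism). A small self-contained check:

import networkx as nx

G = nx.gnp_random_graph(10, 0.3, seed=42)
Gc = nx.complement(G)
n = G.number_of_nodes()

# G and its complement partition the complete graph's edge set.
assert G.number_of_edges() + Gc.number_of_edges() == n * (n - 1) // 2

# Complementing twice returns a graph isomorphic to the original.
assert nx.is_isomorphic(G, nx.complement(Gc))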
Example #8
 def k_color(self):
     '''
     _claw_and_co_free
     finds whether graph G is k-critical for some k
     requires G to be claw-free and co-claw-free
     Parameters:
         None
     Returns:
         int: the k for which the graph is k-critical
         None: if the graph is not k-critical
     '''
     clique = self.clique_number()
     print("Clique number:", clique)
     print(self._g.nodes())
     k = None
     if clique is None:
         # is not a clique
         cycle = self.cycle_nodes()
         if len(cycle) > 3:
             cycle.pop() # don't need the full cycle path just the vertices
         if len(cycle) == 0 or len(cycle) % 2 == 0:
             # no cycle or even hole so done
             k = None
         elif len(cycle) > 5:
             # odd-hole
             if len(cycle) == len(self._g.nodes()):
                 # just an odd-hole
                 k = 3
         if k is None:
             # check for anti-hole
             co_g = DalGraph(nx.complement(nx.Graph.copy(self._g)))
             cycle = co_g.cycle_nodes()
             if len(cycle) > 3:
                 cycle.pop() # don't need the full cycle path just the vertices
             if len(cycle) == 0 or len(cycle) % 2 == 0:
                 # even hole or no hole
                 k = None
             else:
                 co_g._g = nx.complement(co_g._g)
                 co_g.remove_vertices(cycle)
                 k2 = co_g.k_color()
                 if k2 is not None:
                     k = math.ceil(len(cycle) / 2) + k2
                 else:
                     k = None
     else:
         k = clique
     return k
def heuristic_independent_set_finder(Graph,Weight_vector,first_node,second_node_index):

    first_node_neighborhood=Graph[first_node].keys()

    second_node=first_node_neighborhood[second_node_index]
    second_node_neighborhood=Graph[second_node].keys()

    #print 'second node',second_node
    #print 'first node_neighborhood', first_node_neighborhood
    #print 'second node_neighborhood', second_node_neighborhood

    first_node_neighborhood_set=set(first_node_neighborhood)
    second_node_neighborhood_set=set(second_node_neighborhood)

    common_neighbors=first_node_neighborhood_set.intersection(second_node_neighborhood_set)

    #print 'common neighbors:', common_neighbors

    induced_subgraph=nx.subgraph(Graph,common_neighbors)
    complement_induced_subgraph=nx.complement(induced_subgraph)

    #print 'induced_subgraph.nodes()', induced_subgraph.nodes()
    #print 'induced_subgraph.edges()', induced_subgraph.edges()
    #print 'complement_induced_subgraph.nodes()', complement_induced_subgraph.nodes()
    #print 'complement_induced_subgraph.edges()', complement_induced_subgraph.edges()

    current_complement_induced_subgraph=nx.complement(induced_subgraph)
    current_independent_set=[]

    print current_complement_induced_subgraph.nodes()
    print current_complement_induced_subgraph.edges()

    while (len(current_complement_induced_subgraph.edges())>0):
	current_minimum_degree_node=sort_by_degree(current_complement_induced_subgraph)[0][0]
	current_minimum_degree_node_neighborhood=current_complement_induced_subgraph[current_minimum_degree_node].keys()
	current_independent_set.append(current_minimum_degree_node)
	current_complement_induced_subgraph.remove_nodes_from(current_minimum_degree_node_neighborhood)
	current_complement_induced_subgraph.remove_node(current_minimum_degree_node)

	print current_minimum_degree_node
	print current_minimum_degree_node_neighborhood
	print current_independent_set
	print current_complement_induced_subgraph.nodes()

    current_independent_set.extend(current_complement_induced_subgraph.nodes())
    current_independent_set.extend([first_node,second_node])
    maximum_clique=current_independent_set
    return maximum_clique
def SingleEdgeRewiring(Input_Graph):
	
	max_count=1000
	N=Input_Graph.order()
	m=Input_Graph.size()
	G=Input_Graph.copy()
	
	#############################################################################
	#############################DOUBLE REWIRINGS################################
	#############################################################################
	
	EdgeList=G.edges()
	K=nx.complement(G)
	NonEdgeList=K.edges()
	
	ConnectedGraph=0
	trial_count=0
	while ConnectedGraph==0:
		H=G.copy()
		trial_count+=1
		OldEdge=random.choice(EdgeList)
		NewEdge=random.choice(NonEdgeList)		
		H.remove_edges_from([OldEdge])
		H.add_edges_from([NewEdge])
		
		if nx.is_connected(H):ConnectedGraph=1
		if trial_count>max_count:
			print 'A connected graph could not be found.'
			return -1
			break
			
	return H
Example #11
def create_graph_from_input(adj_matrix):
    """
    Creates the networkx graph to be used for coloring from input.

    Iterates over the contents of the admissibility matrix in order to
    create the corresponding networkx graph, adds information about the
    vertex weight to each vertex and changes the input graph to the graph
    whose coloring would fetch the sharability network.

    Args:
        adj_matrix: The adjacency matrix of the graph created
            from the input data.

    Returns:
        A networkx graph which is the complement of the graph given by the
        admissibility matrix, with each vertex containing extra information
        about the vertex weight.
    """
    data = nx.Graph()
    for i in range(len(adj_matrix)):
        data.add_node(i)

    for i, source in enumerate(adj_matrix):
        for j, value in enumerate(source):
            if i != j and int(value) == 1:
                data.add_edge(i, j)

    nx.set_node_attributes(data, 1, 'weight')

    data = nx.complement(data)
    return data
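A usage sketch with a hypothetical 3x3 admissibility matrix (assuming the function above and networkx are in scope): vertices 0 and 1 are mutually admissible, so after complementing they are not adjacent and a proper coloring may give them the same color.

adj_matrix = [[0, 1, 0],
              [1, 0, 0],
              [0, 0, 0]]
g = create_graph_from_input(adj_matrix)
print(sorted(g.edges()))  # [(0, 2), (1, 2)]: only the non-admissible pairs remain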
Example #12
def fully_connected_padding(G):
    '''Given a graph G with edge data, make it fully connected
    with zeros on the new edges, and an extra bit to distinguish
    new edges from old.
    '''
    G_ = copy.deepcopy(G)

    for u, v in G.edges():
        if G.graph['edge_dim'] != 0:
            temp = G.edge[u][v]['data']
            G_.edge[u][v]['data'] = np.concatenate(
                (temp, np.zeros((G.graph['batch_size'], 1))), 1)
        else:
            G_.edge[u][v]['data'] = np.zeros((G.graph['batch_size'], 1))

    for u, v in nx.complement(G).edges():
        G_.add_edge(u, v)
        if G.graph['edge_dim'] != 0:
            temp = np.zeros((G.graph['batch_size'], G.graph['edge_dim'] + 1))
            temp[:, -1] = 1
            G_.edge[u][v]['data'] = temp
        else:
            G_.edge[u][v]['data'] = np.ones((G.graph['batch_size'], 1))

    G_.graph['edge_dim'] += 1

    return G_
Example #13
def makeCircuit6(graph, n, edges, cedges, gammasbetas, p):
    cgraph = nx.complement(graph)
    ans = ClassicalRegister(n)
    sol = QuantumRegister(n)
    QAOA = QuantumCircuit(sol, ans)
    r = 2

    for j in range(p):

        for i in range(n):
            QAOA.u1(-gammasbetas[j], i)

        for l in range(r):
            for i in range(n):
                nbrs = 0
                nbs = []
                for nbr in cgraph[i]:
                    QAOA.x(nbr)
                    nbrs += 1
                    nbs.append(nbr)

                nbs.append(i)
                if (nbrs != 0):
                    #gate = MCMT(RXGate(gammasbetas[p+j]/r),nbrs ,1)
                    gate = MCMT(RXGate(2 * gammasbetas[l + j] / r), nbrs, 1)
                    QAOA.append(gate, nbs)
                else:
                    #QAOA.rx(gammasbetas[p+j]/r,i)
                    QAOA.rx(2 * gammasbetas[l + j] / r, i)
                for nbr in cgraph[i]:
                    QAOA.x(nbr)

    return QAOA
Example #14
def search(graph, b, p, k):
	#gets the initial independent set
	indepSet=greedyAlg(graph.copy())
	indepSetBest=indepSet
	condition=True
	count=0
	while condition:
		#gets a random vertex from the independent set and removes it and any vertices too close to it in the independent set
		i = indepSet[random.randint(0,len(indepSet)-1)]
		indepSetLoop=distanceVertices(indepSet, i, b, p)
		
		#gets the subgraph of vertices which could be added to the reduced independent set
		subGraph=graphWithNeighborsRemoved(graph,indepSetLoop)
		subGraph.remove_node(i)
		print(len(subGraph.nodes()))
		
		#gets the maximum independent set in the subgraph and adds it to the independent set
		maxInd=cliquer(nx.complement(subGraph))
		indepSet= unionList(indepSetLoop,maxInd)
		
		#if the new independent set is better we store it
		if(len(indepSet)>len(indepSetBest)):
			indepSetBest=indepSet
		
		#increases count
		count+=1
		print(count)
		#if the termination condition is met then we stop looping
		iters=100
		if(count>iters):
			condition=False
	#returns the best independent set
	return indepSetBest
def modela(G):
    H = nx.complement(G)
    # Complement graph of G
    EAux = copy.deepcopy(list(H.edges()))
    # Deep copy of the edges of H
    A = list(itertools.combinations(EAux, 1))
    # List of {e}, where e is an edge of H
    Tur = torre(G, A)
    # Uses A to compute all graphs obtained from G by adding a single edge of H
    Matrix = np.array([graph_to_list_differences(Gr, H) for Gr in Tur])
    # Each row is the binary representation of the graphs in Tur
    # The binary representation has a 1 if the edge of H is present and 0 otherwise
    vector = np.array([sum(wiener_impact_v_removal(Gr)) for Gr in Tur])
    # Vector of Wiener impacts of the graphs in Tur
    clf = Ridge(alpha=0.2)
    clf.fit(Matrix, np.array(vector))
    w1 = clf.predict(Matrix)
    print(w1)
    Modelo = clf.coef_
    index_modelo = np.argmin(w1)
    if type(index_modelo) != list:
        index_modelo = [index_modelo]
    edge_list = list(H.edges())
    edge_sol = [edge_list[i] for i in index_modelo]
    #print([edge_list[i] for i in edge_sol])
    print('')
    print('modelo')
    print(Modelo)
    print('aresta')
    print(edge_sol)
    return (edge_sol)
def calculates_two_additions(g: nx.Graph) -> List[dict]:
    """
    Calcula os impactos gerados, de fato, pela adição de duas arestas e retorna
    a lista dos valores calculados.
    """
    # Complementar de g
    h = nx.complement(g)
    # Cópia das arestas de h
    aux_edge_list = copy.deepcopy(list(h.edges()))
    # Lista de pares de arestas de h
    double_edge_lists = list(itertools.combinations(aux_edge_list, 2))
    # Converte para as formas binárias de cada possível adição de par de arestas
    double_edge_addition_graphs = torre(g, double_edge_lists)
    impacts = np.array([
        sum(wiener_impact_v_removal(graph))
        for graph in double_edge_addition_graphs
    ])
    actuals = []
    abs_index = 0
    for k in range(len(h.edges())):
        for j in range(k):
            pred_dict = {}
            pred_dict["edge"] = [k, j]
            pred_dict["actual"] = impacts[abs_index]
            actuals.append(pred_dict)
            abs_index += 1

    return actuals
def evolve(g, p_c, p):
    """ 

   Do one markovian evolution where existing edge disappears with prob
p (stays with prob 1-p) and a non-existing one appears with prob q
(and stays off with prob 1-q). Arbitrary values of p and q however
change the density of edges in the cluster. It turns out however if we
set q= p*p_c/(1-p_c) then the density of the cluster stays the same on
average.

    """
    g_new = nx.Graph()
    for e in g.edges_iter(data=True):
        if random.random() <= 1-p:  # edge remains on with prob 1-p
            g_new.add_edge(e[0], e[1], weight=1.0)
        

    # q is chosen so as to keep density invariant
    # set fraction of edges turning on equal to those turning off
    # Solve p *p_c = (1-p_c) q
    # Check this logic!!

    # note p_c <= 1/(1+p) for q <= 1
    q = (p_c*p)/(1-p_c)

    g_complement = nx.complement(g)  
    for e in g_complement.edges_iter():
        if random.random() <= q:  # add edge from g_complement with prob q
            g_new.add_edge(e[0], e[1], weight=1.0)

    return g_new
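The "Check this logic!!" comment above can be settled with a quick numeric check of the balance condition p*p_c = (1-p_c)*q: with M possible edges and density p_c, the expected number of edges switched off equals the expected number switched on, so the density is preserved on average. (A sketch with made-up numbers, not part of the original code.)

M = 1000                      # number of possible edges, n*(n-1)/2
p_c = 0.3                     # current edge density
p = 0.1                       # probability that an existing edge disappears
q = p * p_c / (1 - p_c)       # the choice made above

expected_off = p * (p_c * M)          # existing edges expected to vanish
expected_on = q * ((1 - p_c) * M)     # non-edges expected to appear
print(expected_off, expected_on)      # both 30.0, so density is preserved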
Example #18
def test_config_T3(pool : Pool):

    from configurations import find_config_T3

    graph = nx.Graph()

    graph.add_edges_from((
        (0, 1), (2, 3), (0, 3), (1, 2), (2, 4), (3, 5) 
    ))

    graph.add_edges_from((
        (6, 0), (6, 1), (6, 4),
        (7, 0), (7, 1), (7, 4),
    ))

    graph.add_edges_from((
        (4, 8), (8, 9), (9, 5),
    ))

    x = find_config_T3(graph, pool)

    assert(x is not None)
    assert(x[:6] == (0, 1, 2, 3, 4, 5))
    assert(set(x[6]) == {6, 7})
    assert(set(x[7]) == {4, 8, 9, 5})

    for i in range(10):
        graph = random_bipartite(n1=12, n2=12, p=.5)
        assert(find_config_T3(graph, pool) is None)
        assert(find_config_T3(nx.complement(graph), pool) is None)
Example #19
def makeCircuit9(graph, n, edges, cedges, gammasbetas, p, orderings):
    cgraph = nx.complement(graph)
    ans = ClassicalRegister(n)
    sol = QuantumRegister(n)
    QAOA = QuantumCircuit(sol, ans)
    for j in range(p):
        if (j != 0):

            for i in range(n):

                QAOA.u1(-gammasbetas[j - 1], i)
                QAOA.barrier()

        for i in orderings[j]:

            nbrs = 0
            nbs = []
            for nbr in cgraph[i]:
                QAOA.x(nbr)
                nbrs += 1
                nbs.append(nbr)

            nbs.append(i)
            if (nbrs != 0):
                gate = MCMT(RXGate(2 * gammasbetas[p - 1 + j]), nbrs, 1)
                QAOA.append(gate, nbs)
            else:
                QAOA.rx(2 * gammasbetas[p - 1 + j], i)
            for nbr in cgraph[i]:
                QAOA.x(nbr)
            QAOA.barrier()
    return QAOA
Example #20
def inter_community_non_edges(G, partition):
    """Returns the number of inter-community non-edges according to the
    given partition of the nodes of `G`.

    `G` must be a NetworkX graph.

    `partition` must be a partition of the nodes of `G`.

    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
    that are not adjacent in `G`. The *inter-community non-edges* are
    those non-edges on a pair of nodes in different blocks of the
    partition.

    Implementation note: this function creates two intermediate graphs,
    which may require up to twice the amount of memory as required to
    store `G`.

    """
    # Alternate implementation that does not require constructing two
    # new graph objects (but does require constructing an affiliation
    # dictionary):
    #
    #     aff = dict(chain.from_iterable(((v, block) for v in block)
    #                                    for block in partition))
    #     return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
    #
    return inter_community_edges(nx.complement(G), partition)
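The alternate implementation sketched in the comment can be spelled out as follows (a sketch, using block indices rather than the block objects themselves; it avoids building the two intermediate graphs):

from itertools import chain

import networkx as nx

def inter_community_non_edges_alt(G, partition):
    # Map each node to the index of its block, then count the non-edges
    # whose endpoints live in different blocks.
    aff = dict(chain.from_iterable(((v, i) for v in block)
                                   for i, block in enumerate(partition)))
    return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])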
Example #21
def generateTrainData2(G, nodes01, nodes02, edges02, path):
    period1_not_in_2 = nodes01.append(nodes02).drop_duplicates(keep=False)
    period1_node_shuffle = random.Random(23).sample(list(period1_not_in_2),
                                                    650)
    sub_graph = G.subgraph(period1_node_shuffle)
    sub_graph_complement = nx.complement(sub_graph)
    # pos = nx.spring_layout(sub_graph)  # graph layout
    # nx.draw(sub_graph_complement, pos=pos, node_size=40, vim=0.0, vmax=1.0, node_color="red")

    # tag label
    train_label = []
    # note the order
    for edge in edges02:
        train_label.append(1)
    for edge in list(sub_graph_complement.edges()):
        train_label.append(0)

    train_data_edge = edges02 + list(sub_graph_complement.edges())
    train_data = pd.DataFrame(data={
        'edges': train_data_edge,
        'label': train_label
    })
    train_data = shuffle(train_data, random_state=32).reset_index(drop=True)
    train_data.head()
    train_data.to_csv(path, encoding='utf-8', index=False)

    return train_data
Example #22
def generate_rb_maxclique(path, seed, temp_dir, n, a, p, r):
    """
    num_vertices = n * n^a (n cliques with n^a vertices = n variables with domain sizes n^a)
    num_edges = p * n^{2a} * r * n * ln(n) (r n ln n constraints for a variable pair)
    Run generator for MIS/MVC and compute the complement graph.
    """
    rb_generator_output_path = os.path.join(temp_dir, "vc.clq")
    with open(rb_generator_output_path, "w") as vc_file:
        subprocess.check_call([
            "python2", RB_GENERATOR_PATH, "-e", "VC", "-s",
            str(seed),
            str(n),
            str(a),
            str(p),
            str(r)
        ],
                              stdout=vc_file)

    vc_in_file_path = os.path.join(temp_dir, "vc.in")
    convert_dimacs_to_in(rb_generator_output_path, vc_in_file_path)

    graph = load_networkx_graph_from_in(vc_in_file_path)
    graph = nx.complement(graph)
    save_networkx_graph_to_in(graph, path)
    store_parameters(path,
                     method=generate_rb_maxclique.__name__,
                     seed=seed,
                     n=n,
                     a=a,
                     p=p,
                     r=r)
Example #23
 def ensure_full(infr):
     """
     Explicitly places all edges, but does not make any feedback items
     """
     infr.print('ensure_full with %d nodes' % (len(infr.graph)), 2)
     new_edges = list(nx.complement(infr.graph).edges())
     infr.ensure_edges_from(new_edges)
def reduction_to_vc(G,k,permanent):
	#print("REductionto VC",G.nodes())
	solution=set()
	neighbors=set()
	'''removes all vertex from G that are not adjacent to any permanent vertex and are adjacent to more than one permanent vertex'''
	for p_vertex in permanent:
		neighbors.add(p_vertex)
		for neighbor in G.neighbors(p_vertex):
			if neighbor  not in neighbors and neighbor not in permanent:
				neighbors.add(neighbor)
			elif neighbor not in solution and neighbor not in permanent:
				neighbors.remove(neighbor)
				solution.add(neighbor)
	
	nodes=set(G.nodes())
	solution=(nodes-neighbors)	
	k-=len(solution)
	G.remove_nodes_from(list(solution))
	'''convert G to instance of vertex cover by removing adjacent vertices of non permanent vertices or insert otherwise '''	
	delete_edges=[]
	add_edges=[]
	for p_vertex in permanent:
		neighbors=[]
		neighbors+=G.neighbors(p_vertex)
		neighbors.append(p_vertex)
		G2=nx.subgraph(G,neighbors)
		delete_edges+=G2.edges()
		add_edges+=(nx.complement(G2)).edges()
	G.remove_edges_from(delete_edges)
	G.add_edges_from(add_edges)
	return G,k,list(solution)		
Example #25
    def find_connecting_edges(infr):
        """
        Searches for a small set of edges, which if reviewed as positive would
        ensure that each PCC is k-connected.  Note that in somes cases this is
        not possible
        """
        label = 'name_label'
        node_to_label = infr.get_node_attrs(label)
        label_to_nodes = ut.group_items(node_to_label.keys(), node_to_label.values())

        # k = infr.params['redun.pos']
        k = 1
        new_edges = []
        prog = ut.ProgIter(
            list(label_to_nodes.keys()),
            label='finding connecting edges',
            enabled=infr.verbose > 0,
        )
        for nid in prog:
            nodes = set(label_to_nodes[nid])
            G = infr.pos_graph.subgraph(nodes, dynamic=False)
            impossible = nxu.edges_inside(infr.neg_graph, nodes)
            impossible |= nxu.edges_inside(infr.incomp_graph, nodes)

            candidates = set(nx.complement(G).edges())
            candidates.difference_update(impossible)

            aug_edges = nxu.k_edge_augmentation(G, k=k, avail=candidates)
            new_edges += aug_edges
        prog.ensure_newline()
        return new_edges
Example #27
def SingleEdgeRewiring(Input_Graph):

    max_count = 1000
    N = Input_Graph.order()
    m = Input_Graph.size()
    G = Input_Graph.copy()

    #############################################################################
    #############################DOUBLE REWIRINGS################################
    #############################################################################

    EdgeList = G.edges()
    K = nx.complement(G)
    NonEdgeList = K.edges()

    ConnectedGraph = 0
    trial_count = 0
    while ConnectedGraph == 0:
        H = G.copy()
        trial_count += 1
        OldEdge = random.choice(EdgeList)
        NewEdge = random.choice(NonEdgeList)
        H.remove_edges_from([OldEdge])
        H.add_edges_from([NewEdge])

        if nx.is_connected(H): ConnectedGraph = 1
        if trial_count > max_count:
            return Input_Graph
            break

    return H
Example #28
def AUC (n, preds, graph, removed, nodes):
    # complement of the original graph
    nodes = [int(i) for i in nodes]
    compl = nx.complement(graph)
    missing = list(compl.edges())
    # non-existing edges
    random.shuffle(missing)
    count = 0
    # following the AUC definition from the given paper: sample and compare probabilities
    for i in range(0, n):
        x = random.randint(0, len(missing) - 1)
        fst = int(missing[x][0])
        snd = int(missing[x][1])
        # similarity of the given edge
        prob = preds[nodes.index(fst), nodes.index(snd)]
        # removed contains edges that actually exist in the graph
        # but were removed for testing purposes
        x = random.randint(0, len(removed) - 1)
        fst = int(removed[x][0])
        snd = int(removed[x][1])
        chance = preds[nodes.index(fst), nodes.index(snd)]
        if (chance - prob > 0):
            count = count + 1
        elif(chance - prob == 0):
            count = count + 0.5
    return count / n
Example #29
 def generate_independent_sets(self):
     # Construct generator containing independent sets
     # Don't generate anything that depends on the entire Hilbert space as to save space
     # Generate complement graph
     complement = nx.complement(self.graph)
     # These are your independent sets of the original graphs, ordered by node and size
     independent_sets, backup = tee(
         nx.algorithms.clique.enumerate_all_cliques(complement))
     self.num_independent_sets = sum(
         1 for _ in backup) + 1  # We add one to include the empty set
     # Generate a list of integers corresponding to the independent sets in binary
     indices = np.zeros(self.num_independent_sets, dtype=int)
     indices[-1] = 2**self.n - 1
     k = self.num_independent_sets - 2
     self.mis_size = 0
     IS = dict.fromkeys(np.arange(self.num_independent_sets))
     # All spins down should be at the end
     IS[self.num_independent_sets - 1] = (2**self.n - 1, 0,
                                          np.ones(self.n, dtype=int))
     for i in independent_sets:
         indices[k] = 2**self.n - sum(2**j for j in i) - 1
         IS[k] = (indices[k], len(i),
                  tools.int_to_nary(indices[k], size=self.n))
         if len(i) > self.mis_size:
             self.mis_size = len(i)
         k -= 1
     binary_to_index = dict.fromkeys(indices)
     for j in range(self.num_independent_sets):
         binary_to_index[indices[j]] = j
     self.binary_to_index = binary_to_index
     self.independent_sets = IS
     return IS, binary_to_index, self.num_independent_sets
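The core trick above, in isolation: every independent set of a graph is a clique of its complement, so enumerate_all_cliques on the complement lists all non-empty independent sets. A self-contained check on the 5-cycle:

import networkx as nx

G = nx.cycle_graph(5)
independent_sets = list(nx.enumerate_all_cliques(nx.complement(G)))
# 5 singletons + 5 non-adjacent pairs; adding one for the empty set, as in the
# code above, gives 11.
print(len(independent_sets) + 1)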
def analyze_connected_components(graph, data, filename, clusters=None):
    print('Nodes:', graph.number_of_nodes())
    print("Components:", nx.number_weakly_connected_components(graph), '\n')

    print(max([len(nodes) for nodes in nx.weakly_connected_components(graph)]))
    for i, nodes in enumerate(nx.weakly_connected_components(graph)):
        if clusters is not None and i not in clusters:
            continue
        # skip if the cluster is small or complete (for now)
        if len(nodes) < 3 or not nx.complement(nx.Graph(
                graph.subgraph(nodes))).number_of_edges():
            continue
        print('meta-cluster id:', i, 'quality:',
              cluster_quality(graph.subgraph(nodes), data))
        print(r'\begin{center}')
        print(r'\includegraphics[width=4in]{./plots/analyze_streaming_alg/' +
              filename + '/' + str(i) + '.png}')
        print(r'\end{center}')
        for node in nodes:
            comp = graph.nodes[node]['contains']
            ad_texts = data.data.loc[data.data['ad_id'].isin(
                comp)]['u_Description']
            text = remove_punctuation(ad_texts.iloc[0])
            print(node, text, '\n')
        print(r'\newpage ')
Example #31
def double_star_cutset_combinations(graph : nx.Graph):
    Gc = nx.complement(graph)

    for u, v in graph.edges:
        for comp in nx.connected_components(graph):
            for x, y in Gc.subgraph(comp).edges():
                yield (u, v, x, y)
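A usage sketch (assuming the generator above is in scope): on a 6-cycle there is a single connected component, so every edge (u, v) is paired with every non-adjacent pair (x, y) taken from the complement.

import networkx as nx

G = nx.cycle_graph(6)
quads = list(double_star_cutset_combinations(G))
# 6 edges in the cycle times 9 edges in its complement = 54 candidate quadruples.
print(len(quads))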
Example #32
def add_edge(g,m):
    compl=nx.complement(g)
    if len(compl.edges())>=m:
        aedge=random.sample(compl.edges(),m)
        return aedge
    else:
        print "not enough nodes to add m edges, please check m"
Example #33
def solve_mcq_approximation(G):
    """
    最大クリーク問題を既存の近似解法で解く
    (Reference: Approximating Maximum Independent Sets by Excluding Subgraphs)
    入力
        G: 無向グラフ
    出力
        なし
        

    # ><><><><><><><><><><><><><><><><><><><><><
    # ><><><><><><><><><><><><><><><><><><><><><
    # ><><><><><><><><><><><><><><><><><><><><><

    """
    

    if G is None:
        raise ValueError("Expected NetworkX graph!")

    st1 = time.time()
    cgraph = nx.complement(G)
    et1 = time.time()

    print('elapsed_time (complement) :', et1-st1)

    st2 = time.time()
    iset, _ = clique_removal(cgraph)
    et2 = time.time()

    print('elapsed_time (clique) :', et2-st2)
    print(f'maximum clique number: {len(iset)}')
    print(f'maximum clique: {iset}')
Example #34
def connected_components():
    # g = nx.read_edgelist("data/as-caida20040105.txt")
    g = nx.read_edgelist("data/random5000by6.txt")
    # g = nx.complete_graph(100)
    cc = nx.connected_components(g)
    cc_list = [c for c in sorted(cc, key=len, reverse=True)]
    for c in cc_list:
        print c
        print "size: {}".format(len(c))
        break

    # draw(g, "g")

    print "G: number of nodes = {}. edges = {}".format(g.number_of_nodes(),
                                                       g.number_of_edges())

    gc = nx.complement(g, "gc")
    print "GC: number of nodes = {}. edges = {}".format(
        gc.number_of_nodes(), gc.number_of_edges())

    cc = nx.connected_components(gc)
    cc_list = [c for c in sorted(cc, key=len, reverse=True)]
    for c in cc_list:
        print c
        print "size: {}".format(len(c))
        # break

    draw(gc, "gc")
def make_seating(agreement_graph, chaotic=True):

    num_people = len(agreement_graph.nodes)
    if not chaotic:
        agreement_graph = nx.complement(agreement_graph)
    # Make the agreement graph from people and edges (agreements)
    # Make the table graph with adjacency for neighboring seats
    # Also generate the positions of seats for plotting
    table, seat_positions = make_longtable_graph(num_people)
    # For every person, list their cliques
    clique_dict = {}
    #clique_list = list(nx.find_cliques(agreement_graph))
    # List every clique for every person
    clique_list = (list(nx.enumerate_all_cliques(agreement_graph)))
    for person in agreement_graph.nodes():
        clique_dict[person] = []
        for clique in [
                c for c in clique_list if str(person) in c and len(c) > 1
        ]:
            clique_dict[person].append(clique)
    # print(clique_dict)
    mapping = {}  # Dictionary of seat number to person sitting in it
    # For every seat
    for seat in table.nodes():
        # Give people scores
        scores = {}
        # For every neighbor
        for person in [
                p for p in agreement_graph.nodes()
                if p not in mapping.values()
        ]:
            for neighbor in nx.neighbors(table, seat):
                for clique in clique_dict[person]:
                    # "Punish" candidates who are in cliques with neighbors
                    if mapping.get(neighbor, None) in clique:
                        scores[person] = scores.get(person, 0) - 1
                    scores[person] = scores.get(person, 0)
        # Seat unseated person with best score here
        print("{}: {}\n".format(
            seat, sorted(scores.items(), key=lambda x: x[1], reverse=True)))
        for person in sorted(scores.items(), key=lambda x: x[1], reverse=True):
            if person[0] not in mapping.values():
                mapping[seat] = person[0]
                break

    print(mapping)
    # Apply the calculated seating arrangement
    T = nx.relabel_nodes(table, mapping)
    pos = {mapping[k]: v for k, v in seat_positions.items()}
    # Plot the seating arrangement
    fig = Figure()
    output = io.BytesIO()
    axis = fig.add_subplot(1, 1, 1)
    axis.set_xlim(-0.1, num_people / 2 + 1)
    axis.set_ylim(-0.1, 0.6)
    nx.draw_networkx(T, pos=pos, with_labels=True, ax=axis)

    # Return an image to the flask application
    FigureCanvas(fig).print_png(output)
    return b64encode(output.getvalue()).decode("utf-8")
def randomEdges(Graph):
    G = Graph.copy()
    for (u, v) in copy.deepcopy(G.edges()):
        G.remove_edge(u, v)
    nodes = len(G.nodes())
    comp = list(complement(G).edges())
    e = list(Graph.edges())
    for i in range(randint(3, 6)):
        for j in range(randint(2, 5)):
            shuffle(comp)

    for (u, v) in comp:
        for j in range(randint(2, 7)):
            for k in range(randint(2, 5)):
                num = randint(1, nodes)
        if num == 1:
            G.add_edge(u, v)
            if is_directed_acyclic_graph(G) == False:
                G.remove_edge(u, v)
    if len(G.edges()) == 0:
        G = randomEdges(G)

    if len(e) != len(G.edges()):
        return G
    for (u, v) in list(G.edges()):
        if (u, v) not in e:
            return G
    return randomEdges(G)
Example #38
def modularDecomposition(G):
    """ Computes the cotree of a cograph.
    This is done by modular decomposition - http://en.wikipedia.org/wiki/Modular_decomposition
    As the algorithm only works on connected graphs, for non-connected graphs the algorithm is applied to the complement graph.

    Parameters
    ----------
    G : graph
        A networkx graph
        As cotrees can only be computed for cographs an error is raised if the input graph is not a cograph

    Returns
    -------
    out : graph
        The resulting cotree
    """
    if hasattr(G, 'graph') and isinstance(G.graph, dict):
        Gres = nx.DiGraph()
        if nx.is_connected(G):
            decomp(G, Gres, 1)
        else:
            # The cotree T' of G', is exactly T with 0 and 1 nodes interchanged.
            # http://www.lirmm.fr/~paul/Biblio/Postscript/wg03.pdf Section 1.2 observation 2
            decomp(nx.complement(G), Gres, 0)
        return Gres
    else:
        raise nx.NetworkXError("Input is not a correct NetworkX graph.")
def make_friends(groups):
    # groups is a list of lists of names
    # At the end of this method, it should be guaranteed
    # that groups is an optimal partition on the graph G

    G = nx.Graph()

    # Add nodes to graph
    for g in groups:
        G.add_nodes_from(g)

    for i in range(len(groups)):
        g = groups[i]
        for j in range(len(groups)):
            if i == j:
                continue
            h = groups[j]
            # pick something in each other group            
            for node in g:
                randomly_connect(G, node, h, n_connections=int(len(g) * 0.5))

    # At this point, G should have the property that every node g in a
    # group G1 is connected by at least one edge to every other
    # group. That is, every node is connected to every group except
    # its own.

    # Turn friends into non-friends, and vice versa
    return nx.complement(G)
Example #40
def independence_number(graph):
    """
    Compute the independence number of 'graph'.
    """
    if graph.number_of_nodes() == 0:
        return 0
    else:
        return graph_clique_number(complement(graph))
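For a concrete check of the complement trick with plain networkx calls (a sketch; graph_clique_number is spelled out via find_cliques here): the independence number of the 5-cycle is 2.

import networkx as nx

C5 = nx.cycle_graph(5)
# Clique number of the complement = independence number of the original graph.
alpha = max(len(c) for c in nx.find_cliques(nx.complement(C5)))
print(alpha)  # 2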
Example #41
 def solve(self, *args, **kw):
     import networkx as nx
     graph = nx.complement(self.graph)
     from openopt import STAB
     KW = self.__init_kwargs
     KW.update(kw)
     P = STAB(graph, **KW)
     r = P.solve(*args)
     return r
Example #42
def make_co_cycle(n):
    '''
    a function that creates the complement of a cycle of size n
    Parameters:
        n: the size of the anti cycle
    Returns:
        co_cycle: a networkx graph (networkx)
    '''
    return nx.complement(make_cycle(n))
Example #43
def make_2K2():
    '''
    a function which assembles a 2K2
    Parameters:
        None
    Returns:
        g: 2K2 graph (networkx)
    '''
    return nx.complement(make_cycle(4))
Example #44
def make_co_diamond():
    '''
    make_co_diamond
    assembles a co-diamond
    Parameters:
        None
    Returns:
        co_diamond: the co-diamond graph (networkx)
    '''
    return nx.complement(make_diamond())
Example #45
def make_co_claw():
    '''
    make_co_claw
    assembles a co-claw
    Parameters:
        None
    Returns:
        co_claw: the co_claw (Graph)
    '''
    return nx.complement(make_claw())
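A self-contained version of the same construction, without the make_claw helper (which is not shown here): the claw is the star K_{1,3}, and its complement is a triangle plus an isolated vertex.

import networkx as nx

claw = nx.star_graph(3)          # K_{1,3}: center 0, leaves 1, 2, 3
co_claw = nx.complement(claw)    # triangle on the leaves, center left isolated
print(sorted(co_claw.edges()))   # [(1, 2), (1, 3), (2, 3)]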
Example #46
def isCographAux(G):
    modules = nx.connected_component_subgraphs(nx.complement(G))
    if len(modules) == 1:
        if len(modules[0]) == 1:
            # return leaf node
            return True
        else:
            return False
    else:
        return all(isCographAux(module) for module in modules)
Example #47
def even_hole_free(G):
    i_set = graph_clique_number(complement(G))
    free = True
    i = 4
    while i <= i_set * 2 and free:
        g = make_cycle(i)
        if induced_subgraph(G, g):
            free = False
        i += 2
    return free
Example #48
def even_hole_free(G):
    i_set = graph_clique_number(complement(G))
    free = None
    i = 4
    while i <= i_set * 2 and free is None:
        g = make_cycle(i)
        induced = induced_subgraph(G, g) 
        if induced is not None:
            free = induced
        i += 2
    return free
Example #49
def balance_triade_in_graph(G, triade, p):
    edges = triade.edges().copy()
    nodes = triade.nodes().copy()
    if len(edges) == 0: 
        G.add_edge(*np.random.choice(nodes, size=2, replace=False))
    else:
        if np.random.random() < p:
            comp = nx.complement(triade)
            G.add_edge(*comp.edges()[0])
        else:
            G.remove_edge(*rd.choice(edges))
Example #50
 def alpha_number(self):
     '''
     returns the stable set number of the graph
         Parameters:
             None
         Returns:
             stable: the max size of the stable set (int)
                     None if no stable set
     '''
     complement = nx.complement(self._g)
     return max((len(c) for c in nx.find_cliques(complement)), default=None)
Example #51
def complement(g):
    '''
    complement
    a function which takes the complement of g
    Parameters:
        g: the graph (networkx)
    Returns:
        co_g: the complement graph (networkx)
    Note:
        does not have a unittest since not needed (written by someone else)
    '''
    return nx.complement(g)
    def modify_moral_graph(self, modification):
        L = []

        for case in switch(modification.type):
            if case(Modification.ADD_NODE):
                self._graph_m.add_node(modification.data)
                break
            if case(Modification.REMOVE_NODE):
                self._graph_m.remove_node(modification.data)
                break
            if case(Modification.ADD_LINK):
                pair = set(modification.data)
                parents = set(self._old_network.predecessors(modification.data[1]))
                nodes = pair.union(parents)
                subgraph = self._graph_m.subgraph(nodes)
                complement = nx.complement(subgraph)
                for edge in complement.edges_iter():
                    L.append(edge)
                break
            if case(Modification.REMOVE_LINK):
                head = modification.data[1]
                tail = modification.data[0]

                children_head = set(self._old_network.successors(head))
                children_tail = set(self._old_network.successors(tail))

                if len(children_tail.intersection(children_head)) <= 0:
                    self._graph_m.remove_edge(modification.data)
                    L.append(modification.data)

                for parent in self._old_network.predecessors_iter(head):
                    if parent == tail:
                        continue
                    children_z_i = set(self._old_network.successors(parent)).intersection(children_tail)

                    if not len(children_z_i) == 1:
                        continue
                    if head not in children_z_i:
                        continue
                    if not self._old_network.has_edge(parent, tail):
                        continue
                    if self._old_network.has_edge(tail, parent):
                        continue

                    self._graph_m.remove_edge(tail, parent)
                    L.append((tail, parent))
                break
            if case():
                raise Exception("Not a defined modification")

        return L
Example #53
 def find_claw(self):
     '''
     a method that finds a claw in G
     Parameters:
         None
     Returns:
         claw: the list of nodes forming the claw
              None if there is no claw
     '''
     temp = self._g.copy()
     self._g = nx.complement(self._g)
     claw = self.find_co_claw()
     self._g = temp.copy()
     return claw
Example #54
    def _get_unique_sets(self, C, n=6):
        G = nx.Graph()
        edge_list = []
        for i in range(len(C)):
            G.add_node(i)
            for j in range(i+1, len(C)):
                if np.any(C[i] - C[j] == 0):
                    edge_list.append((i,j))
        G.add_edges_from(edge_list)
        H = nx.complement(G)
        cliques = nx.find_cliques(H)
#        cliques = list(cliques)
#        if len(cliques) > 0:
#            max_c = max([len(l) for l in cliques])
#            print('clique:', max_c)
#        return [C[l] for l in cliques if len(l) == n-1]
        return [C[l] for l in cliques if len(l) == n]
Example #55
def decomp(G, Gres, rootLabel):
    # list of modules from complement graph
    modules = nx.connected_component_subgraphs(nx.complement(G))
    if len(modules) == 1:
        if len(modules[0]) == 1:
            # return leaf node
            return modules[0].nodes()[0]
        else:
            raise nx.NetworkXUnfeasible(
                "input graph is not a valid cograph and corresponding cotree cannot be computed")
    else:
        # add internal node and connect all trees above as children
        root = nx.utils.generate_unique_node()
        Gres.add_node(root, label=rootLabel)
        Gres.add_edges_from([(root, decomp(module, Gres, 1 - rootLabel)) for module in modules])
        # return new internal root node
        return root
Example #56
def max_clique(graph):
    r"""Find the Maximum Clique

    Finds the `O(|V|/(log|V|)^2)` apx of maximum clique/independent set
    in the worst case.

    Parameters
    ----------
    nxgraph : NetworkX nxgraph
        Undirected nxgraph

    Returns
    -------
    clique : set
        The apx-maximum clique of the nxgraph

    Notes
    ------
    A clique in an undirected nxgraph G = (V, E) is a subset of the vertex set
    `C \subseteq V`, such that for every two vertices in C, there exists an edge
    connecting the two. This is equivalent to saying that the subgraph
    induced by C is complete (in some cases, the term clique may also refer
    to the subgraph).

    A maximum clique is a clique of the largest possible size in a given nxgraph.
    The clique number `\omega(G)` of a nxgraph G is the number of
    vertices in a maximum clique in G. The intersection number of
    G is the smallest number of cliques that together cover all edges of G.

    http://en.wikipedia.org/wiki/Maximum_clique

    References
    ----------
    .. [1] Boppana, R., & Halldórsson, M. M. (1992).
        Approximating maximum independent sets by excluding subgraphs.
        BIT Numerical Mathematics, 32(2), 180–196. Springer.
        doi:10.1007/BF01994876
    """
    if graph is None:
        raise ValueError("Expected NetworkX nxgraph!")

    # finding the maximum clique in a nxgraph is equivalent to finding
    # the independent set in the complementary nxgraph
    cgraph = nx.complement(graph)
    iset, _ = clique_removal(cgraph)
    return iset
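A usage sketch (plain networkx; clique_removal lives in networkx.algorithms.approximation): the approximate maximum clique of G is whatever independent set clique_removal finds in the complement of G, so it is not guaranteed to be optimal.

import networkx as nx
from networkx.algorithms.approximation import clique_removal

G = nx.complete_graph(5)
G.add_edge(5, 0)  # a K5 with one extra pendant vertex
iset, _ = clique_removal(nx.complement(G))
print(iset)  # an (approximate) maximum clique of G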
 def generate_rand_graph(self):
     """method for generating weighted undirect graph with V vertices and E edges."""
     # self.G = nx.dense_gnm_random_graph(self.V,self.E)
     
     self.G.add_edge(0,1)
     for v in range(2,self.V):
         # pick random vertex
         u = random.choice(self.G.nodes())
         self.G.add_edge(u,v)
     
     print "cvorovi: ", self.G.nodes()
     
     if self.E-self.G.number_of_edges()>0:
         cG = nx.complement(self.G)        
         self.G.add_edges_from(random.sample(cG.edges(), self.E-self.G.number_of_edges()))
         
     
     if nx.is_connected(self.G) : print("Generated graph is connected.")
Example #58
def mis(graph):
    """
    Dado um grafo calcula seu conjunto independente maximo, retornando os
    elementos esperados.

    A ideia aqui eh procurar o clique maximo no grafo complementar ao grafo
    informado.

    XXX NOTE: nao estamos preocupados com performance aqui ;)
    """
    cgraph = networkx.complement(graph)
    cliques = [ len(c) for c in networkx.find_cliques(cgraph) ]

    if len(cliques) == 0:
        resp = 1
    if len(cliques) > 0:
        resp = max(cliques)

    #import pdb;pdb.set_trace()
    return resp
Example #59
def isCograph(G):
    """ Determines whether G is a valid cograph
    Parameters
    ----------
    G : graph
        A networkx graph

    Returns
    -------
    out : bool
        Boolean stating whether input graph is a valid cograph
    """
    if hasattr(G, 'graph') and isinstance(G.graph, dict):
        if len(G) <= 3:
            # every graph with at most 3 nodes is a cograph
            return True
        if nx.is_connected(G):
            return isCographAux(G)
        else:
            return isCographAux(nx.complement(G))
    else:
        raise nx.NetworkXError("Input is not a correct NetworkX graph.")
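connected_component_subgraphs was removed from networkx in 2.4, so the pair isCograph/isCographAux no longer runs as written on current versions. A compact equivalent of the same recursive complement decomposition against the current API (a sketch, not the original code):

import networkx as nx

def is_cograph(G):
    # A graph with at most one vertex is trivially a cograph.
    if len(G) <= 1:
        return True
    # If G is disconnected, it is a cograph iff every component is.
    if not nx.is_connected(G):
        return all(is_cograph(G.subgraph(c).copy())
                   for c in nx.connected_components(G))
    # G is connected: its complement must be disconnected, and G is a cograph
    # iff the subgraph induced on each co-component is.
    Gc = nx.complement(G)
    if nx.is_connected(Gc):
        return False
    return all(is_cograph(G.subgraph(c).copy())
               for c in nx.connected_components(Gc))

print(is_cograph(nx.complete_bipartite_graph(2, 3)))  # True
print(is_cograph(nx.path_graph(4)))                   # False: P4 is not a cograph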
Example #60
        -------
        adj_iter : iterator
           An iterator of (node, adjacency set) for all nodes in
           the graph.

        """
        for n in self.adj:
            yield (n, set(self.adj) - set(self.adj[n]) - set([n]))


if __name__ == '__main__':
    # Build several pairs of graphs, a regular graph
    # and the AntiGraph of its complement, which behaves
    # as if it were the original graph.
    Gnp = nx.gnp_random_graph(20,0.8)
    Anp = AntiGraph(nx.complement(Gnp))
    Gd = nx.davis_southern_women_graph()
    Ad = AntiGraph(nx.complement(Gd))
    Gk = nx.karate_club_graph()
    Ak = AntiGraph(nx.complement(Gk))
    pairs = [(Gnp, Anp), (Gd, Ad), (Gk, Ak)]
    # test connected components
    for G, A in pairs:
        gc = [set(c) for c in nx.connected_components(G)]
        ac = [set(c) for c in nx.connected_components(A)]
        for comp in ac:
            assert comp in gc
    # test biconnected components
    for G, A in pairs:
        gc = [set(c) for c in nx.biconnected_components(G)]
        ac = [set(c) for c in nx.biconnected_components(A)]