Example #1
import networkx
from networkx.algorithms.community import greedy_modularity_communities


def cluster_nodes(g):
    """
    Use Clauset-Newman-Moore greedy modularity maximization to cluster nodes.
    """
    undirected_g = networkx.Graph(g)
    for i, comm in enumerate(greedy_modularity_communities(undirected_g)):
        for node in comm:
            g.nodes[node]['modularity'] = i
    return g
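A minimal usage sketch for this example (the small graph below is an illustrative assumption, not part of the original): the function accepts any graph, clusters an undirected copy of it, and writes each node's community index back as a 'modularity' attribute.

# Sketch: build a small directed graph and annotate its nodes with community labels.
# The edge list is made up purely for illustration.
import networkx

g = networkx.DiGraph([(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4), (3, 4)])
g = cluster_nodes(g)
print(networkx.get_node_attributes(g, 'modularity'))  # maps each node to its community index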
Example #2
def calc_graph_measures(data_matrix, thresh=0):
    import numpy as np
    import pandas as pd
    import networkx
    from networkx import eccentricity
    from networkx.algorithms.efficiency import global_efficiency
    from networkx.algorithms.shortest_paths.generic import average_shortest_path_length
    from networkx.algorithms.centrality import betweenness_centrality
    from networkx.algorithms.cluster import average_clustering
    from networkx.algorithms.community.modularity_max import greedy_modularity_communities
    from networkx.algorithms.community.quality import performance

    def _avg_values(results):
        values = []
        if isinstance(results, dict):
            for k in results:
                values.append(results[k])
        elif isinstance(results, list):
            for tup in results:
                values.append(tup[1])

        return np.mean(values)

    below_thresh_indices = np.abs(data_matrix) < thresh
    data_matrix[below_thresh_indices] = 0
    if isinstance(data_matrix, np.ndarray):
        graph = networkx.convert_matrix.from_numpy_matrix(np.real(data_matrix))
    if isinstance(data_matrix, pd.DataFrame):
        graph = networkx.convert_matrix.from_pandas_adjacency(data_matrix)

    degree = list(graph.degree)
    global_eff = global_efficiency(graph)
    b_central = betweenness_centrality(graph)
    modularity = performance(graph, greedy_modularity_communities(graph))
    try:
        ecc = eccentricity(graph)
    except networkx.exception.NetworkXError:
        ecc = [(0, 0)]

    try:
        clust = average_clustering(graph)
    except networkx.exception.NetworkXError:
        clust = 0

    try:
        char_path = average_shortest_path_length(graph)
    except networkx.exception.NetworkXError:
        char_path = 0

    graph_dict = {'degree': _avg_values(degree),
                  'eccentricity': _avg_values(ecc),
                  'global_efficiency': global_eff,
                  'characteristic_path_length': char_path,
                  'betweenness_centrality': _avg_values(b_central),
                  'clustering_coefficient': clust,
                  'modularity': modularity}

    return graph_dict
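A hedged usage sketch for this function (the random matrix below is a stand-in, not data from the original; note that, like the function itself, it relies on NetworkX 2.x APIs such as from_numpy_matrix and performance):

# Sketch: summarize graph measures for a small random symmetric connectivity matrix.
# The matrix is illustrative only; real input would be e.g. a connectivity matrix.
import numpy as np

rng = np.random.default_rng(0)
mat = rng.random((10, 10))
mat = (mat + mat.T) / 2      # symmetrize
np.fill_diagonal(mat, 0)     # drop self-connections
print(calc_graph_measures(mat, thresh=0.3))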
Example #3
from networkx.algorithms.community import greedy_modularity_communities


def modularity_communities(G):
    """
    Finds communities that maximize modularity.

    Arguments:
        G (networkx.Graph): Graph for which communities will be found
    Returns:
        communities (list): List of tuples of nodes, where each tuple of nodes
            represents a community
    """
    communities = greedy_modularity_communities(G)
    return list(communities)
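A short usage sketch (the karate club graph is just a convenient built-in test graph, not part of the original):

# Sketch: detect communities in NetworkX's built-in Zachary karate club graph.
import networkx

G = networkx.karate_club_graph()
communities = modularity_communities(G)
print(len(communities))        # number of detected communities
print(sorted(communities[0]))  # nodes in the first community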
Example #4
def partition_calculate(G):

    from networkx.algorithms.community.modularity_max import greedy_modularity_communities

    # Map each node to the index of the community it belongs to.
    comp = list(greedy_modularity_communities(G))
    partition = dict()
    for j, communities in enumerate(comp):
        for x in communities:
            partition[x] = j
    return partition
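A minimal usage sketch (the graph choice is illustrative): the returned dict maps each node to a community index, the same layout that python-louvain's best_partition produces and that Example #6 below passes to community.modularity.

# Sketch: map nodes to community indices on a graph with an obvious two-community structure.
import networkx

G = networkx.barbell_graph(5, 2)   # two 5-cliques joined by a short path
partition = partition_calculate(G)
print(partition)                   # {node: community_index, ...}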
Example #5
def fast_greedy_find_communities(ntx_graph):
    """
    This function detects community structures in a graph using the Clauset-Newman-Moore algorithm
    :param ntx_graph: A graph created with networkx
    :return: Detected communities, as a list of frozensets of nodes
    """
    from networkx.algorithms.community import modularity_max

    print('Finding communities with fast-greedy (Clauset-Newman-Moore) algorithm.....')
    communities = modularity_max.greedy_modularity_communities(ntx_graph,
                                                               weight=None)

    # Return
    return communities
Example #6
def community_maker(dismat, threshold, tags=None):
    # Note: this example relies on names defined elsewhere in its source project:
    # make_graph(), Config.colors, the python-louvain `community` module, and
    # numpy/networkx/matplotlib imported as np/nx/plt.
    adjmat = dismat.copy()
    np.fill_diagonal(adjmat,
                     np.min(dismat))  # Set the diagonal elements to a small value so that they won't be zeroed out
    adjmat = adjmat.reshape((-1,))
    adjmat[adjmat > threshold] = 0
    # adjmat[adjmat > 0] = 1
    print("{} out of {} values set to zero".format(len(adjmat[adjmat == 0]), len(adjmat)))
    adjmat = adjmat.reshape(dismat.shape)
    #
    G = make_graph(adjmat,labels=tags)
    print(len(G.nodes))

    from networkx.algorithms.community.modularity_max import greedy_modularity_communities

    comp = list(greedy_modularity_communities(G))
    print(len(comp))
    shown_count = 1
    possibilities = []
    color_map = ['white' for x in range(len(G))]
    color = 0
    partition = dict()
    for j in range(len(comp)):
        communities = comp[j]
        print("Possibility", shown_count, ": ", end='')
        for x in communities:
            partition[x] = j
        print(communities)
        print(len(communities))
        possibilities.append(communities)
        indices = [i for i, x in enumerate(G.nodes) if x in communities]
        for i in indices:
            color_map[i] = Config.colors[color]
        color += 1
        shown_count += 1
    F = community.modularity(partition, G)
    print(F)
    plt.figure(figsize=(17, 15))
    pos = nx.spring_layout(G)
    plt.rcParams['font.sans-serif'] = ['SimHei']
    plt.rcParams['axes.unicode_minus'] = False
    print(color_map)
    nx.draw(G, pos, node_color=color_map, with_labels=True)
    plt.savefig('Graph.png')
    plt.show()
Example #7
from networkx.algorithms.community import greedy_modularity_communities


def greed_max_modularity(graph):
    # communities_info() is a helper from this example's source project (not shown here).
    communities = greedy_modularity_communities(graph)
    largest_community = communities_info(communities, graph)
    return largest_community
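The communities_info helper is not included in this example; a minimal stand-in, assuming it only reports counts and returns the largest community, might look like this:

# Hypothetical stand-in for the missing communities_info() helper (not part of the original).
# Assumption: it prints basic statistics and returns the largest detected community.
def communities_info(communities, graph):
    communities = [set(c) for c in communities]
    print(f'{len(communities)} communities in a graph with {graph.number_of_nodes()} nodes')
    return max(communities, key=len)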
# RandomNetwork, n, m, and `file` are defined earlier in the source script (not shown here).
random_network = RandomNetwork(n, m)

part = random_network.clustering()
mod_rand = random_network.modularity(part)
print(f'\nmod_random = {mod_rand}\n', flush=True)

#%% using networkx

from networkx.algorithms.bipartite.edgelist import read_edgelist
from networkx.generators.random_graphs import erdos_renyi_graph
from networkx.algorithms.community.modularity_max import greedy_modularity_communities
from networkx.algorithms.community.quality import modularity

netx = read_edgelist(file)

partx = greedy_modularity_communities(netx)
modx_real = modularity(G=netx, communities=partx)
print(f'\nmod_real_nx = {modx_real}')

p = (2. * m) / n**2
netx_er = erdos_renyi_graph(n=n, p=p)

partx = greedy_modularity_communities(netx_er)
modx_rand = modularity(G=netx_er, communities=partx)
print(f'\nmod_random_nx = {modx_rand}')

#%% data plotting

import pandas as pd
import seaborn as sns
import pylab as plt