Example no. 1
def find_and_print_network_communities(G, code_dict=None):
    """
    Finds network communities through modularity maximization and returns a dictionary of
    community members (by country name) keyed by community number.

    :param G: networkx Graph to find communities in
    :param code_dict: dictionary mapping country codes to names - if passed in, will use mappings for
                      recording community members
    :return: 1. dictionary with community numbers as keys and list of string country names as values
             2. modularity of discovered community partitions
    """

    comm_dict = partition(G)

    comm_members = {}
    for comm in set(comm_dict.values()):
        countries = [node for node in comm_dict if comm_dict[node] == comm]
        if code_dict is not None:
            countries = [code_dict[code] for code in countries]

        comm_members[comm] = countries

    return comm_members, get_modularity(G, comm_dict)
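# Usage sketch (not from the original example): assumes networkx and the
# modularity_maximization package are importable, and uses the karate club
# graph as a stand-in for the country network, so no code_dict is passed.
import networkx as nx
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity

G_demo = nx.karate_club_graph()
members, modularity = find_and_print_network_communities(G_demo)
for comm, nodes in members.items():
    print('Community %d: %s' % (comm, ', '.join(str(n) for n in nodes)))
print('Modularity: %.3f' % modularity)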
Example no. 2
import json
import re

import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity


def analyze_convert(gmlfile, outputfile, outputfile_format='json'):
    """
    Converts a GML file to JSON or GEXF, adding network statistics, community
    information, node and edge colors (with alpha), node size, and edge weight.

    # see: https://cambridge-intelligence.com/keylines-faqs-social-network-analysis/
    """
    print('Starting conversion\n')
    print(outputfile_format.upper(), 'output file selected')
    print('\nReading GML file:', gmlfile)
    di_graph = nx.read_gml('../../data/processed/' + gmlfile, label='id')

    # re-assign each node's id as a node attribute
    node_id_dict = {}
    for node_id in di_graph.nodes:
        node_id_dict[node_id] = int(node_id)
    nx.set_node_attributes(di_graph, name='id', values=node_id_dict)

    # find communities and assign
    print('Identifying communities...')
    comm_dict = partition(di_graph)

    print('\nModularity of this partition is %.3f' %
          get_modularity(di_graph, comm_dict))

    print('\nAssigning Communities...')

    # get unique set of communities
    comm_unique_set = set()
    for n, d in di_graph.nodes(data=True):
        d['mc'] = comm_dict[n]
        comm_unique_set.add(d['mc'])

    # create colormap
    cmap = plt.get_cmap('cool')
    colors = (cmap(np.linspace(0, 1, len(comm_unique_set)))) * 255
    colors = np.round(colors, decimals=0)

    # assign a color to each community group
    color_mapping = {}
    for counter, comm in enumerate(comm_unique_set):
        color_mapping[comm] = colors[counter]

    # apply each node's community color as an 'rgba(r,g,b,1)' string
    for n, d in di_graph.nodes(data=True):
        d['color'] = re.sub(
            r'\s+', '',
            np.array2string(color_mapping[d['mc']], separator=','))
        d['color'] = d['color'].replace('255.]', '1)')
        d['color'] = d['color'].replace('[', 'rgba(')
        d['color'] = d['color'].replace('.', '')

    # color each edge the same as its source node, with reduced alpha
    for source, target, edge_d in di_graph.edges(data=True):
        edge_d['color'] = di_graph.nodes[source]['color'].replace(',1)', ',0.1)')

    # set positions of nodes using layout algorithm
    print('\nCreating layout...')

    pos = nx.spring_layout(G=di_graph,
                           iterations=50,
                           weight='weight',
                           scale=5,
                           k=1)

    # positions from layout applied to node attributes
    for node, (x, y) in pos.items():
        di_graph.nodes[node]['x'] = float(x)
        di_graph.nodes[node]['y'] = float(y)

    print('\nCalculating network statistics...')

    # betweenness centrality
    bc = nx.betweenness_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='bc', values=bc)

    # degree centrality
    size = nx.degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='size', values=size)

    idc = nx.in_degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='idc', values=idc)

    odc = nx.out_degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='odc', values=odc)

    # eigen-vector centrality
    edc = nx.eigenvector_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='edc', values=edc)

    # closeness centrality
    cc = nx.closeness_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='cc', values=cc)

    # page rank
    pr = nx.pagerank(di_graph)
    nx.set_node_attributes(di_graph, name='pr', values=pr)

    # choose which output file to write
    if outputfile_format.upper() == 'JSON':

        print('\nExporting ' + outputfile + '.json')

        # create a dictionary in node-link format suitable for JSON serialization
        with open('../../data/processed/' + outputfile + '.json', 'w') as outfile:
            outfile.write(
                json.dumps(
                    nx.readwrite.json_graph.node_link_data(
                        G=di_graph,
                        attrs={
                            'link': 'edges',
                            'name': 'id',
                            'source': 'source',
                            'target': 'target'
                        })))
        print('Complete!')

    elif outputfile_format.upper() == 'GEXF':
        print('\nExporting GEXF file: ' + outputfile + '.gexf')
        nx.write_gexf(di_graph, '../../data/processed/' + outputfile + '.gexf')
        print('\nComplete!')

    else:
        print('Please enter a valid output file format: JSON or GEXF')
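# Hypothetical invocation sketch (the file names below are placeholders, not
# from the original example): analyze_convert reads
# '../../data/processed/<gmlfile>' and writes the converted output alongside it.
analyze_convert('trade_network.gml', 'trade_network_vis', outputfile_format='json')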
Example no. 3
import networkx as nx
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity

networkfile = "Feb2018toNov2018_new.gexf"
G = nx.read_gexf(networkfile)

comm_dict = partition(G)
print(get_modularity(G, comm_dict))
print(comm_dict)
for node in G.nodes():
    print(node)
nx.set_node_attributes(G, name='community', values=comm_dict)
# for comm in set(comm_dict.values()):
#     print("Community %d"%comm)
#     print(', '.join([node for node in comm_dict if comm_dict[node] == comm]))
#
#     for node in comm_dict:
#         if comm_dict[node] == comm:

nx.write_gexf(G, "Feb2018toNov2018_communities_new.gexf")
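# Optional check (a sketch, not part of the original script): reload the
# exported GEXF and count members per community using the 'community'
# attribute written above.
G_check = nx.read_gexf("Feb2018toNov2018_communities_new.gexf")
community_attr = nx.get_node_attributes(G_check, 'community')
for comm in sorted(set(community_attr.values())):
    members = [node for node, c in community_attr.items() if c == comm]
    print("Community %s: %d nodes" % (comm, len(members)))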
Example no. 4
        unpacked_community = unpack(community)
        label_propaged_graph = [*unpacked_community, ]
    return label_propaged_graph, save_for_drawings

# These two functions must be used with a randomly generated graph; for a graph read from file, use the convert function.




graph_of_communities_read, drawings = convert(H_read)

#graph_of_communities_random, drawings = generator_to_graph(H_random)



error_read_label_propagation = get_modularity(G_read,graph_of_communities_read)

#error_random = get_modularity(G_read,graph_of_communities_random)



print("Modularity = ", error_read_label_propagation)

#print("Modularity = ", error_random)


#Drawing
pos = nx.spring_layout(G_read)
cycol = cycle('bgrcmk')
plt.figure(1)
for community in drawings:
Example no. 5
    def getCommunitiesModularity(self):
        comm_dict = partition(self.WG)
        return get_modularity(self.WG, comm_dict)
#%%
print(nx.info(karate))


#%%
comm_dict = partition(karate)


#%%
for comm in set(comm_dict.values()):
    print("Community %d"%comm)
    print(', '.join([node for node in comm_dict if comm_dict[node] == comm]))


#%%
print('Modularity of such partition for karate is %.3f' % get_modularity(karate, comm_dict))

#%% [markdown]
# #### Jazz Network

#%%
jazz = nx.Graph(nx.read_pajek("data/jazz.net"))


#%%
print(nx.info(jazz))


#%%
comm_dict = partition(jazz)
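#%%
# Sketch of the natural follow-up cell, mirroring the karate cell above
# (not part of the original notebook excerpt).
print('Modularity of such partition for jazz is %.3f' % get_modularity(jazz, comm_dict))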
Example no. 7
cmd[2] = path_to_louvain_executable
cmd[3] = relative_path_to_file
cmd[4] = relative_path_to_output_file

call(cmd)

G_louvain = nx.read_edgelist(absolute_path_to_output_file)

# Takes a negligible amount of time, since G has already been processed by louvain.c
H_louvain = lp.label_propagation_communities(G_louvain)

graph_of_communities_louvain, drawings = convert(H_louvain)

error_louvain = get_modularity(G_louvain, graph_of_communities_louvain)

print("Total error of Louvain algorithm on random graph = ", error_louvain)

#Drawing
pos = nx.spring_layout(G_louvain)
cycol = cycle('bgrcmk')
plt.figure(1)
for community in drawings:
    nx.draw_networkx_nodes(G_louvain,
                           pos,
                           nodelist=community,
                           node_color=next(cycol),
                           node_size=1)

plt.figure(2)
Example no. 8
# -*- coding: utf-8 -*-
'''
https://zhiyzuo.github.io/python-modularity-maximization/doc/quick-start.html
'''

import networkx as nx
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity

G = nx.karate_club_graph()
nx.clustering(G)
# Partition the network into communities using modularity maximization
comm_dict = partition(G)
# for comm in set(comm_dict.values()):
#     print "Community %d" % comm
#     print ", ".join([str(node) for node in comm_dict if comm_dict[node] == comm])
print(comm_dict)
print("Modularity of such partition for karate is %.3f" % get_modularity(G, comm_dict))
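# Python 3 version of the commented-out loop above: print each community's members.
for comm in set(comm_dict.values()):
    print("Community %d" % comm)
    print(", ".join(str(node) for node in comm_dict if comm_dict[node] == comm))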