Esempio n. 1
0
def main():
    """Read an adjacency-matrix CSV named on the command line, partition the
    resulting graph into communities via modularity maximization, and print
    each community's members plus the partition's modularity."""
    # sys.argv is a list; compare its length, not the list itself
    # (list < int raises TypeError on Python 3).
    if len(sys.argv) < 2:
        print("Usage: python modularity.py name_of_my_adjMatrix.csv")
        exit()
    # Writes a sample 20-node network to network.csv (independent of argv[1]).
    createRandomNetwork("network.csv", 20)
    graph = matrixToNxGraph(sys.argv[1])
    comm_dict = mm.partition(graph)
    print(comm_dict)
    for comm in set(comm_dict.values()):
        print("Community %d" % comm)
        # Community labels are ints (formatted with %d above), so compare
        # directly; the original compared against str(comm), which can
        # never match and always printed an empty member list.
        print(', '.join(
            [node for node in comm_dict if comm_dict[node] == comm]))
    print('Modularity of such partition for graph is %.3f' %
          mm.get_modularity(graph, comm_dict))
Esempio n. 2
0
def find_and_print_network_communities(G, code_dict=None):
    """
    Finds network communities through modularity maximization and returns dictionary of community
    members by country name with community numbers as keys.

    :param G: networkx Graph to find communities in
    :param code_dict: dictionary mapping country codes to names - if passed in, will use mappings for
                      recording community members
    :return: 1. dictionary with community numbers as keys and list of string country names as values
             2. modularity of discovered community partitions
    """

    comm_dict = partition(G)

    def _members_of(community_id):
        # Nodes assigned to this community, optionally translated to names.
        nodes = [n for n in comm_dict if comm_dict[n] == community_id]
        if code_dict is None:
            return nodes
        return [code_dict[c] for c in nodes]

    comm_members = {cid: _members_of(cid) for cid in set(comm_dict.values())}

    return comm_members, get_modularity(G, comm_dict)
Esempio n. 3
0
def analyze_convert(gmlfile: str, outputfile: str, outputfile_format: str = 'json'):
    """
    Converts GML file to json or gexf while adding statistics and community information,
    node and edge coloring and alpha, node size and edge weight

    :param gmlfile: name of the GML file, read from ../../data/processed/
    :param outputfile: output file base name (extension is appended), written
                       to ../../data/processed/
    :param outputfile_format: 'json' or 'gexf' (case-insensitive); any other
                              value prints an error message and writes nothing
    :return: None -- output is written to disk as a side effect

    # see: https://cambridge-intelligence.com/keylines-faqs-social-network-analysis/
    """
    print('Starting conversion to JSON\n')
    print(outputfile_format.upper(), 'output file selected')
    print('\nReading GML file:', gmlfile)
    # label='id' keys nodes by their GML ids rather than their labels.
    di_graph = nx.read_gml('../../data/processed/' + gmlfile, label='id')

    # re-assign node id as attr
    # NOTE(review): Graph.node is the pre-2.4 networkx accessor; on newer
    # networkx this (and the .node uses below) must be .nodes -- confirm the
    # pinned networkx version.
    node_id_dict = {}
    for id in di_graph.node:
        node_id_dict[id] = int(id)
    nx.set_node_attributes(di_graph, name='id', values=node_id_dict)

    # find communities and assign
    print('Identifying communities...')
    comm_dict = partition(di_graph)

    print('\nModularity of such partition for network is %.3f' % \
          get_modularity(di_graph, comm_dict))

    print('\nAssigning Communities...')

    # get unique set of communities
    # 'mc' stores each node's community id (modularity class).
    comm_unique_set = set()
    for n, d in di_graph.nodes(data=True):
        d['mc'] = comm_dict[n]
        comm_unique_set.add(d['mc'])

    # create colormap
    # One evenly spaced 'cool' colour per community, RGBA floats scaled from
    # [0, 1] to [0, 255] and rounded.
    cmap = plt.get_cmap('cool')
    colors = (cmap(np.linspace(0, 1, len(comm_unique_set)))) * 255
    colors = np.round(colors, decimals=0)

    # assign colors to each community group
    color_mapping = {}
    counter = 0
    for i in list(comm_unique_set):
        color_mapping[i] = colors[counter]
        counter += 1

    # applying colors to nodes iteratively
    # The string surgery below rewrites numpy's '[r.,g.,b.,255.]' text into a
    # CSS-style 'rgba(r,g,b,1)' string.
    for n, d in di_graph.nodes(data=True):
        for group in color_mapping.keys():
            if d['mc'] == group:
                d['color'] = re.sub(
                    r'\s+', '',
                    np.array2string(color_mapping[group], separator=','))
                d['color'] = str.replace(d['color'], '255.]', '1)')
                d['color'] = str.replace(d['color'], '[', 'rgba(')
                d['color'] = str.replace(d['color'], '.', '')

    # loop through nodes and edges, if edge source == node id then color same
    # Edges inherit their source node's colour with alpha lowered to 0.1.
    # NOTE(review): this nested scan is O(nodes x edges); a single pass over
    # edges looking up each source node's colour would be linear.
    for n, node_d in di_graph.nodes(data=True):
        for source, target, edge_d in di_graph.edges(data=True):
            if source == n:
                edge_d['color'] = node_d['color']
                edge_d['color'] = edge_d['color'].replace(',1)', ',0.1)')

    # set positions of nodes using layout algorithm
    print('\nCreating layout...')

    pos = nx.spring_layout(G=di_graph,
                           iterations=50,
                           weight='weight',
                           scale=5,
                           k=1)

    # positions from layout applied to node attributes
    for node, (x, y) in pos.items():
        di_graph.node[node]['x'] = float(x)
        di_graph.node[node]['y'] = float(y)

    print('\nCalculating network statistics...')

    # betweeness centrality
    bc = nx.betweenness_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='bc', values=bc)

    # degree centrality
    # Stored as 'size' so downstream renderers can scale node size by degree.
    size = nx.degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='size', values=size)

    idc = nx.in_degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='idc', values=idc)

    odc = nx.out_degree_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='odc', values=odc)

    # eigen-vector centrality
    edc = nx.eigenvector_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='edc', values=edc)

    # closeness centrality
    cc = nx.closeness_centrality(di_graph)
    nx.set_node_attributes(di_graph, name='cc', values=cc)

    # page rank
    pr = nx.pagerank(di_graph)
    nx.set_node_attributes(di_graph, name='pr', values=pr)

    # choose which output file to write
    if outputfile_format.upper() == 'JSON':

        print('\nExporting ' + outputfile + '.json')

        # create a dictionary in a node-link format that is suitable for JSON serialization
        # attrs renames node-link keys: edge list becomes 'edges' and the
        # node name field is taken from the 'id' attribute.
        with open('../../data/processed/' + outputfile + '.json',
                  'w') as outfile:
            outfile.write(
                json.dumps(
                    nx.readwrite.json_graph.node_link_data(G=di_graph,
                                                           attrs={
                                                               'link': 'edges',
                                                               'name': 'id',
                                                               'source':
                                                               'source',
                                                               'target':
                                                               'target'
                                                           })))
        print('Complete!')

    elif outputfile_format.upper() == 'GEXF':
        print('\nExporting GEXF file...', outputfile, '.gexf')
        nx.write_gexf(di_graph, '../../data/processed/' + outputfile + '.gexf')
        print('\nComplete!')

    else:
        print('Please enter a valid output file format: JSON or GEXF')
import networkx as nx
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity

# Input network previously exported as GEXF.
networkfile = "Feb2018toNov2018_new.gexf"
G = nx.read_gexf(networkfile)

# Partition nodes into communities via modularity maximization.
comm_dict = partition(G)
# Python 3 print() calls -- the original used Python 2 print statements,
# which are syntax errors under Python 3 (the dialect the rest of this
# file is written in).
print(get_modularity(G, comm_dict))
print(comm_dict)
for node in G.nodes():
    print(node)
# Record each node's community id as a node attribute so it survives export.
nx.set_node_attributes(G, name='community', values=comm_dict)

nx.write_gexf(G, "Feb2018toNov2018_communities_new.gexf")
Esempio n. 5
0
#   BBall
#df = pd.read_csv('bb_net.csv')
#Graphtype = nx.Graph()
#G_fb = nx.convert_matrix.from_pandas_edgelist(df, source = 'Person 1'\
#                             ,target = 'Person 2',edge_attr = 'Weight',\
#                             create_using=Graphtype)
#durations = [i['Weight'] for i in dict(G_fb.edges).values()]
#d = dict(G_fb.degree)
#nx.draw(G_fb, nodelist=d.keys(), node_size=[v * 100 for v in d.values()])

# NOTE(review): G_fb is only built in the commented-out block above -- as
# written, this snippet assumes G_fb is defined elsewhere; verify.
print(nx.info(G_fb))
#%% - Sort the graph into different communities using Spectral Methods (Newman)
start_time = time.time()

# NOTE(review): rebinding `partition` to the returned dict shadows the
# imported partition() function, which cannot be called again afterwards.
partition = partition(G_fb)
print("Took {} seconds to detect communities".\
      format( (time.time() - start_time)))
#

# Number of detected communities (float for later arithmetic/plotting).
size = float(len(set(partition.values())))
pos = nx.spring_layout(G_fb)
count = 0.

start_time = time.time()

# - Draw the graph.
# NOTE(review): the loop body appears truncated here; only the counter
# increment is visible.
for com in set(partition.values()):

    count = count + 1.
Esempio n. 6
0
 def getCommunitiesModularity(self):
     """Partition self.WG into communities and return that partition's modularity."""
     return get_modularity(self.WG, partition(self.WG))
from modularity_maximization import partition
from modularity_maximization.utils import get_modularity

#%% [markdown]
# #### Undirected Network: Karate

#%%
# Load the karate-club network from Pajek format as an undirected graph.
karate = nx.Graph(nx.read_pajek("data/karate.net"))


#%%
print(nx.info(karate))


#%%
# Assign every node to a community via modularity maximization.
comm_dict = partition(karate)


#%%
# Print the membership of each detected community.
for community_id in set(comm_dict.values()):
    print("Community %d" % community_id)
    members = [n for n in comm_dict if comm_dict[n] == community_id]
    print(', '.join(members))


#%%
print('Modularity of such partition for karate is %.3f' % get_modularity(karate, comm_dict))

#%% [markdown]
# #### Jazz Network

#%%