Example #1
 def test_directed_modularity(self):
     "Directed Modularity matrix"
     B = numpy.matrix([[-0.2,  0.6,  0.8, -0.4, -0.4, -0.4],
                       [ 0. ,  0. ,  0. ,  0. ,  0. ,  0. ],
                       [ 0.7,  0.4, -0.3, -0.6,  0.4, -0.6],
                       [-0.2, -0.4, -0.2, -0.4,  0.6,  0.6],
                       [-0.2, -0.4, -0.2,  0.6, -0.4,  0.6],
                       [-0.1, -0.2, -0.1,  0.8, -0.2, -0.2]])
     node_permutation = [5, 1, 2, 3, 4, 6]
     idx_permutation = [4, 0, 1, 2, 3, 5]
     assert_equal(nx.directed_modularity_matrix(self.DG), B)
     assert_equal(nx.directed_modularity_matrix(self.DG, nodelist=node_permutation),
                  B[numpy.ix_(idx_permutation, idx_permutation)])
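The test above checks NetworkX's directed modularity matrix, B_ij = A_ij - k_out_i * k_in_j / m, where m is the total number of edges. A minimal sketch of that definition, using an edge list consistent with the matrix B above (the test fixture self.DG itself is not shown in this excerpt):

import numpy as np
import networkx as nx

DG = nx.DiGraph([(1, 2), (1, 3), (3, 1), (3, 2), (3, 5),
                 (4, 5), (4, 6), (5, 4), (5, 6), (6, 4)])
A = nx.to_numpy_array(DG, nodelist=sorted(DG))
k_out = A.sum(axis=1)
k_in = A.sum(axis=0)
m = A.sum()
B_manual = A - np.outer(k_out, k_in) / m  # B_ij = A_ij - k_out_i * k_in_j / m
print(np.allclose(B_manual, nx.directed_modularity_matrix(DG, nodelist=sorted(DG))))
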
Example #2
def modularity_spectrum(G):
    """Return eigenvalues of the modularity matrix of G.

    Parameters
    ----------
    G : Graph
       A NetworkX Graph or DiGraph

    Returns
    -------
    evals : NumPy array
      Eigenvalues

    See Also
    --------
    modularity_matrix

    References
    ----------
    .. [1] M. E. J. Newman, "Modularity and community structure in networks",
       Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006.
    """
    from scipy.linalg import eigvals
    if G.is_directed():
        return eigvals(nx.directed_modularity_matrix(G))
    else:
        return eigvals(nx.modularity_matrix(G))
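A minimal usage sketch for modularity_spectrum (the graph below is illustrative; SciPy must be installed for the eigenvalue computation):

import networkx as nx

G = nx.gnp_random_graph(50, 0.1, seed=42, directed=True)
evals = nx.modularity_spectrum(G)  # directed graphs use directed_modularity_matrix internally
print(evals.real[:5])              # eigvals returns complex values; imaginary parts may be nonzero
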
import time

import numpy as np
import networkx as nx
import igraph as ig


def graph_analysis(A,
                   hierarchy,
                   s_i,
                   rent,
                   analyze_path_length=False,
                   analyze_small_world=False):

    print('performing graph analysis ...')

    graph_data = dict()

    print('  initializing networkx object ...')
    G = nx.from_numpy_matrix(A, create_using=nx.DiGraph())
    tot_num_nodes = G.number_of_nodes()

    if analyze_path_length:
        print('  calculating average shortest path length ...')
        st = time.time()
        # networkx calculation of the average shortest path length,
        # nx.average_shortest_path_length(G, weight=None)
        spl = nx.average_shortest_path_length(G)
        print('    that took {:7.2}s'.format(time.time() - st))
        graph_data['average_shortest_path_length'] = spl

    print('  analyzing node degrees ...')
    in_degree = G.in_degree()
    out_degree = G.out_degree()

    in_degree_vec = np.zeros([tot_num_nodes])
    out_degree_vec = np.zeros([tot_num_nodes])

    tot_in_degree = 0
    tot_out_degree = 0

    for ii in range(tot_num_nodes):

        in_degree_vec[ii] = in_degree[ii]
        tot_in_degree += in_degree_vec[ii]

        out_degree_vec[ii] = out_degree[ii]
        tot_out_degree += out_degree[ii]

    if tot_in_degree != tot_out_degree:
        raise ValueError(
            '[_functions_network/graph_analysis] The total in-degree of the graph does not equal the total out-degree'
        )
    avg_in_degree = tot_in_degree / tot_num_nodes
    avg_out_degree = tot_out_degree / tot_num_nodes

    standard_deviation_in_degree = np.sqrt(
        np.sum((in_degree_vec - avg_in_degree)**2) / tot_num_nodes)
    standard_deviation_out_degree = np.sqrt(
        np.sum((out_degree_vec - avg_out_degree)**2) / tot_num_nodes)

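    # Erdos-Renyi reference: edge probability p = E / (n * (n - 1)) so the random
    # digraph has the same expected number of edges as G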
    print('  generating random graph for comparison ...')
    G__rand = nx.fast_gnp_random_graph(tot_num_nodes,
                                       tot_out_degree / (tot_num_nodes *
                                                         (tot_num_nodes - 1)),
                                       seed=None,
                                       directed=True)

    if analyze_path_length:
        print('  calculating random graph shortest path length ...')
        st = time.time()
        spl__rand = nx.average_shortest_path_length(G__rand)
        print('    that took {:7.2}s'.format(time.time() - st))
        graph_data['average_shortest_path_length__random_graph'] = spl__rand

    if analyze_small_world:
        print('  analyzing clustering ...')
        st = time.time()
        # clustering =  nx.clustering(G)
        average_clustering_coefficient = nx.average_clustering(G)
        average_clustering_coefficient__rand = nx.average_clustering(G__rand)
        print('    that took {:7.2}s for real network and random combined'.
              format(time.time() - st))
        graph_data[
            'average_clustering_coefficient'] = average_clustering_coefficient
        graph_data[
            'average_clustering_coefficient__random_graph'] = average_clustering_coefficient__rand

        print('  calculating small-world index ...')
        if analyze_path_length:
            # swi = (C / C_rand) / (L / L_rand); needs the path lengths computed above
            swi = (average_clustering_coefficient *
                   spl__rand) / (average_clustering_coefficient__rand * spl)
            print('    swi = {}'.format(swi))
            graph_data['small_world_index'] = swi
        else:
            print('    skipping small-world index; '
                  'analyze_path_length=True is required for spl and spl__rand')

    print('  performing rentian analysis ...')
    st = time.time()
    nlh = hierarchy['H__num_levels_hier']
    e_h_hp1 = np.zeros([nlh - 1])
    # print('np.shape(e_h_hp1) = {}'.format(np.shape(e_h_hp1)))
    connections_list = []

    for ii in range(tot_num_nodes):
        connections = np.where(A[ii, :])[0]
        connections_list.append(connections)

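        # for each outgoing edge ii -> connections[jj], climb the hierarchy until the
        # target node falls inside the same module as node ii, and tally the edge at that level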
        for jj in range(len(connections)):

            h = 0
            loop_breaker = 0
            while loop_breaker == 0 and h < nlh:
                # print('connections[jj] = {}'.format(connections[jj]))
                # print('np.asarray(s_i[''intra_modular_indices''][h+1][s_i[''map_to_upper''][ii][h]]) = {}'.format(np.asarray(s_i['intra_modular_indices'][h+1][s_i['map_to_upper'][ii][h]])))
                if connections[jj] in np.asarray(s_i['intra_modular_indices'][
                        h + 1][s_i['map_to_upper'][ii][h + 1]]):
                    e_h_hp1[h] += 1
                    loop_breaker = 1
                else:
                    h += 1

    # print('np.shape(e_h_hp1) = {}'.format(np.shape(e_h_hp1)))

    # fit inter-level edge counts e_h_hp1 vs. module size to a power law (Rent's rule) in log-log space

    num_nodes_per_module__dense = np.linspace(
        hierarchy['num_nodes_per_module'][0],
        hierarchy['num_nodes_per_module'][-2], 100)

    e_fit = np.polyfit(np.log10(hierarchy['num_nodes_per_module'][0:-1]),
                       np.log10(e_h_hp1), 1)
    rentian_prefactor = 10**(e_fit[1])
    rentian_exponent = e_fit[0]
    e_h_hp1__dense = rentian_prefactor * num_nodes_per_module__dense**rentian_exponent
    print('    that took {:7.2}s for rentian analysis'.format(time.time() -
                                                              st))
    print('    rent exponent = {:4.2f}, targeting {:4.2f}'.format(
        rentian_exponent, rent['exponent']))

    print('  calculating modularity matrix ...')
    B = nx.directed_modularity_matrix(G)

    g = ig.Graph.Adjacency(A.tolist())
    communities = ig.GraphBase.community_infomap(g,
                                                 edge_weights=None,
                                                 vertex_weights=None,
                                                 trials=10)

    # this throws an error
    # from networkx.algorithms.community import greedy_modularity_communities
    # communities = list(greedy_modularity_communities(G))

    # this doesn't work for digraphs
    # partition  = community_louvain.best_partition(G)
    # # draw the graph
    # pos = nx.spring_layout(G)
    # # color the nodes according to their partition
    # cmap = cm.get_cmap('viridis', max(partition.values()) + 1)
    # nx.draw_networkx_nodes(G, pos, partition.keys(), node_size=40, cmap=cmap, node_color=list(partition.values()))
    # nx.draw_networkx_edges(G, pos, alpha=0.5)
    # plt.show()

    graph_data['G'] = G
    graph_data['B'] = B
    graph_data['in_degree'] = in_degree_vec
    graph_data['out_degree'] = out_degree_vec
    graph_data['tot_in_degree'] = tot_in_degree
    graph_data['tot_out_degree'] = tot_out_degree
    graph_data['avg_in_degree'] = avg_in_degree
    graph_data['avg_out_degree'] = avg_out_degree
    graph_data['standard_deviation_in_degree'] = standard_deviation_in_degree
    graph_data['standard_deviation_out_degree'] = standard_deviation_out_degree
    # graph_data['clustering'] = clustering

    graph_data['communities'] = communities
    graph_data['connections'] = connections_list
    graph_data['e_h_hp1'] = e_h_hp1
    graph_data['num_nodes_per_module__dense'] = num_nodes_per_module__dense
    graph_data['e_h_hp1__dense'] = e_h_hp1__dense
    graph_data['rentian_prefactor'] = rentian_prefactor
    graph_data['rentian_exponent'] = rentian_exponent

    return graph_data
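
The Rentian fit above is a straight-line fit in log-log space, E ~ c * N**p, where N is the number of nodes per module, E the corresponding edge count, and p the Rent exponent. A standalone sketch of that step with made-up values (the arrays below are illustrative, not results):

import numpy as np

num_nodes_per_module = np.array([8, 32, 128, 512])  # hypothetical module sizes per hierarchy level
e_h_hp1 = np.array([20, 55, 160, 450])              # hypothetical edge counts per level

slope, intercept = np.polyfit(np.log10(num_nodes_per_module), np.log10(e_h_hp1), 1)
rentian_exponent = slope             # p in E ~ c * N**p
rentian_prefactor = 10 ** intercept  # c
print(rentian_exponent, rentian_prefactor)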


#%% scraps

# proximity_factor = 1
# A = np.zeros([num_nodes,num_nodes])
# for ii in range(num_nodes):
#     k_out_ii = node_degrees[ii].astype(int)
#     r_out_ii__vec = np.random.exponential(decay_length,k_out_ii) # exponential spatial decay
#     # print('ii = {} of {}, k_out_ii = {}, len(r_out_ii__vec) = {}'.format(ii+1,num_nodes,k_out_ii,len(r_out_ii__vec)))
#     for r_out_ii in r_out_ii__vec:
#         tracker = 0
#         candidate_nodes = np.where( np.abs( R_mat[ii,:] - r_out_ii ) <= proximity_factor  )[0]
#         # print('here0')
#         if len(candidate_nodes) > 0:
#             while tracker == 0:

#                 # print('len(candidate_nodes) = {}'.format(len(candidate_nodes)))
#                 rand_ind = np.random.randint(0,len(candidate_nodes),1)
#                 # print('candidate_nodes = {}, rand_ind[0] = {}, candidate_nodes[rand_ind[0]] = {}'.format(candidate_nodes,rand_ind[0],candidate_nodes[rand_ind[0]]))

#                 if A[ii,candidate_nodes[rand_ind[0]]] == 0:
#                     A[ii,candidate_nodes[rand_ind[0]]] = 1
#                     tracker = 1
#                     # print('here1')
#                 elif A[ii,candidate_nodes[rand_ind[0]]] == 1:
#                     candidate_nodes = np.delete(candidate_nodes,rand_ind[0])
#                     # print('here2')
#                     if len(candidate_nodes) == 0:
#                         tracker = 1

# plot_A(A)