Example #1
def features_matrix(graph, anchors, use_dist=True, use_pgrs=True,
                    use_pgr=True, use_comm=False, use_comm_centr=False):
    node_feats = []
    n = len(graph)
    if use_dist:
        dists = dict(nx.all_pairs_shortest_path_length(graph))  # returns an iterator in NetworkX >= 2.0
    if use_pgr:
        pageranks = nx.pagerank(graph)  # pagerank_numpy was removed in NetworkX 3.0
    if use_pgrs:
        pgr_anchor = [anchored_pagerank(graph, anchor) for anchor in anchors]
    if use_comm_centr:
        communicability_centrality = nx.subgraph_centrality(graph)  # renamed from communicability_centrality in NetworkX 2.0
    if use_comm:
        communicability = nx.communicability(graph)

    for node in graph.nodes():
        assert node == len(node_feats)
        feats = []
        if use_dist:
            feats += [dists[node][anchor] for anchor in anchors]
        if use_pgrs:
            feats += [pgr[node]*n for pgr in pgr_anchor]
        if use_pgr:
            feats.append(pageranks[node]*n)
        if use_comm_centr:
            feats.append(communicability_centrality[node])
        if use_comm:
            feats += [communicability[node][anchor] for anchor in anchors]

        node_feats.append(np.array(feats))
    return node_feats
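A quick smoke test (hypothetical, not from the source repo): anchored_pagerank is an external helper defined elsewhere, so use_pgrs is disabled here.

import networkx as nx
import numpy as np

# Hypothetical usage; nodes must be labeled 0..n-1 for the assert above to hold.
graph = nx.path_graph(6)
anchors = [0, 5]  # arbitrary anchor nodes
feats = features_matrix(graph, anchors, use_pgrs=False)
print(len(feats), feats[0].shape)  # 6 vectors: distance to each anchor plus scaled PageRank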
Example #2
def features_dict(graph, anchors, use_dist=True, use_pgrs=True,
                    use_pgr=True, use_comm=False, use_comm_centr=False):
    node_feats = {}
    n = len(graph)
    if use_dist:
        # dists = nx.all_pairs_shortest_path_length(graph)
        dists = dists_to_anchors(graph, anchors)
    if use_pgr:
        pageranks = nx.pagerank(graph)  # pagerank_numpy was removed in NetworkX 3.0
    if use_pgrs:
        # pgr_anchor = [anchored_pagerank(graph, anchor) for anchor in anchors]
        pgr_anchor = pageranks_to_anchors(graph, anchors)
    if use_comm_centr:
        communicability_centrality = nx.subgraph_centrality(graph)  # renamed from communicability_centrality in NetworkX 2.0
    if use_comm:
        communicability = nx.communicability(graph)

    for node in graph.nodes():
        feats = []
        if use_dist:
            feats += [dists[node][anchor] for anchor in anchors]
        if use_pgrs:
            feats += [pgr_anchor[anchor][node]*n
                      for anchor in range(len(anchors))]
            # feats += [pgr[node]*n for pgr in pgr_anchor]
        if use_pgr:
            feats.append(pageranks[node]*n)
        if use_comm_centr:
            feats.append(communicability_centrality[node])
        if use_comm:
            feats += [communicability[node][anchor] for anchor in anchors]

        node_feats[node] = np.array(feats)
    return node_feats
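features_dict leans on helpers that live elsewhere in the repo. A minimal sketch of what they presumably compute, inferred from how they are indexed above (dists[node][anchor], pgr_anchor[anchor_index][node]); these are assumptions, not the original implementations.

import networkx as nx

def dists_to_anchors(graph, anchors):
    # assumed behavior: one BFS per anchor, reindexed as dists[node][anchor]
    per_anchor = {a: nx.single_source_shortest_path_length(graph, a)
                  for a in anchors}
    return {node: {a: per_anchor[a][node] for a in anchors}
            for node in graph.nodes()}

def anchored_pagerank(graph, anchor):
    # assumed behavior: PageRank personalized on a single anchor node
    return nx.pagerank(graph, personalization={anchor: 1.0})

def pageranks_to_anchors(graph, anchors):
    # assumed behavior: one personalized PageRank dict per anchor, in order
    return [anchored_pagerank(graph, a) for a in anchors]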
def edge_centrality(net):
    values = {}

    bet = nx.edge_betweenness_centrality(net, normalized=True)  # edge_betweenness was renamed in NetworkX 2.0
    flow = nx.edge_current_flow_betweenness_centrality(net, normalized=True)
    load = nx.edge_load_centrality(net)  # edge_load was renamed in NetworkX 2.0
    com = nx.communicability(net)
    bet_list = []
    flow_list = []
    load_list = []
    com_list = []
    for edge, value in bet.items():  # iteritems() is Python 2 only
        origin, end = edge
        # flow stores each edge under one orientation; default to 0.0 so max() is safe
        value_flow = max(flow.get(edge, 0.0), flow.get((end, origin), 0.0))
        values[edge] = [value, value_flow, load.get(edge), com.get(origin).get(end)]
        bet_list.append(value)
        flow_list.append(value_flow)
        load_list.append(load.get(edge))
        com_list.append(com.get(origin).get(end))
    file3 = open("bl.csv", 'w')
    for xt in [bet_list, load_list, flow_list, com_list]:
        for yt in [bet_list, load_list, flow_list, com_list]:
            corr(xt, yt, file3)
        print()
        file3.write("\n")
    file3.close()
    return values
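corr is another repo-local helper. A plausible stand-in, assuming it writes the Pearson correlation of two equal-length series as one CSV cell:

import numpy as np

def corr(xs, ys, out):
    # assumed behavior: one correlation coefficient per cell, comma-terminated
    out.write("%f," % np.corrcoef(xs, ys)[0, 1])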
    def metrics(self):
        degree_centrality = nx.degree_centrality(self.g)
        closeness_centrality = nx.closeness_centrality(self.g, distance=self.weight)
        communicability = nx.communicability(self.g)
        load_centrality = nx.load_centrality(self.g, weight=self.weight)
        nodes_pagerank = nx.pagerank(self.g, weight=self.weight)

        return degree_centrality, closeness_centrality, communicability, load_centrality, nodes_pagerank
def process_data(denom=100000, round=0):
    f = csv.reader(open("../applab_new_6.csv", 'r'), delimiter=',')  # 'rb' is Python 2 csv usage
    db = nx.DiGraph()
    full_users = set()
    i = 0
    uniquect = 0
    for line in f:
        if i % 100000 == 0: print("processed", i, "lines")
        if i == 1000: break
        sender, receiver, date, time, duration, cost, location, region = [x.strip() for x in line]
        if sender not in full_users:
            uniquect += 1
            full_users.add(sender)
            if uniquect <= 2:  # % denom - round == 0:
                db.add_node(sender)
                if not db.has_node(receiver):
                    db.add_node(receiver)
        else:
            if not db.has_node(receiver):
                db.add_node(receiver)

        if db.has_edge(sender, receiver):
            db[sender][receiver]['weight'] += int(duration)
        else:
            db.add_edge(sender, receiver, weight=int(duration))
        i += 1
    # pickle.dump(db, open("users_networkx.p" % str(round), "wb"))
    # print("degree assortativity coeff:", nx.degree_assortativity_coefficient(db))
    # print("average degree connectivity:", nx.average_degree_connectivity(db))
    # print("k nearest neighbors:", nx.k_nearest_neighbors(db))
    print("calculating deg cent")
    deg_cent = nx.degree_centrality(db)  # sorted(nx.degree_centrality(db).items(), key=lambda x: x[1])
    print("calculating in deg cent")
    in_deg_cent = nx.in_degree_centrality(db)  # sorted(nx.in_degree_centrality(db).items(), key=lambda x: x[1])
    print("calculating out deg cent")
    out_deg_cent = nx.out_degree_centrality(db)  # sorted(nx.out_degree_centrality(db).items(), key=lambda x: x[1])
    print("closeness cent")
    closeness_cent = nx.closeness_centrality(db)  # sorted(nx.closeness_centrality(db).items(), key=lambda x: x[1])
    # print("betweenness cent")
    # btwn_cent = nx.betweenness_centrality(db)
    print("done")
    w = open("../output/user_network_stats.csv", 'w')
    # btwn_cent is commented out above, so write only the five populated columns
    w.write("uid,deg_cent,in_deg_cent,out_deg_cent,closeness_cent\n")
    for user in deg_cent.keys():
        try:
            w.write("%s,%s,%s,%s,%s\n" % (user, deg_cent[user], in_deg_cent[user],
                                          out_deg_cent[user], closeness_cent[user]))
        except KeyError:  # skip users missing from any centrality dict
            pass
    w.close()
    print("drawing...")
    nx.draw(db)
    plt.savefig("path.pdf")
    print("done!")
    print("edge betweenness centrality:", nx.edge_betweenness_centrality(db))
    # communicability is defined for undirected graphs only
    print("communicability:", nx.communicability(db.to_undirected()))
    print("communicability centrality:", nx.subgraph_centrality(db.to_undirected()))
    def compute_metric(self, metric='dijkstra'):

        if metric == 'dijkstra':
            print("computing the distance matrix on the graph")
            self.distance = dict(nx.all_pairs_dijkstra_path_length(self.G))
        else:
            print("computing communicability..")
            self.distance = nx.communicability(self.G)
            print("done!")

        return 1
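Note that the two branches store different kinds of quantities in self.distance: Dijkstra yields a true distance, while communicability is a pairwise similarity. If downstream code expects distance-like values, a rough inversion such as this sketch (hypothetical, not part of the original class) may be needed:

def communicability_to_dissimilarity(comm):
    # invert similarities so larger communicability means "closer";
    # assumes a connected graph, where every communicability value is positive
    return {u: {v: 1.0 / c for v, c in row.items()} for u, row in comm.items()}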
Example #8
def getSubGraphComm(network, this_TimeStamp = 0):
    onGraph = getOnGraph(network, this_TimeStamp, False)
    # communicability of the largest connected component only
    giant_component = max(nx.connected_components(onGraph), key=len)
    nodes = list(giant_component)
    commMat = np.zeros((network.numOfWires, network.numOfWires))
    subComm = nx.communicability(onGraph.subgraph(giant_component))
    for i in nodes:
        for j in nodes:
            commMat[i, j] = subComm[i][j]
    return commMat
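getOnGraph and network.numOfWires are repo-specific. The same giant-component communicability matrix, illustrated self-contained on a plain NetworkX graph with integer labels (a sketch under those assumptions):

import networkx as nx
import numpy as np

G = nx.Graph([(0, 1), (1, 2), (3, 4)])  # two components; {0, 1, 2} is the giant one
giant = max(nx.connected_components(G), key=len)
comm = nx.communicability(G.subgraph(giant))
mat = np.zeros((G.number_of_nodes(), G.number_of_nodes()))
for i in giant:
    for j in giant:
        mat[i, j] = comm[i][j]
print(mat.round(2))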
Example #9
def GetCommunicability(G, csvfile):
    with open(csvfile, 'w') as file:
        nodes = sorted(nx.nodes(G))
        comm = nx.communicability(G)
        # Write header
        for n in nodes:
            file.write(',' + str(n))
        file.write('\n')
        # Create and write body; diagonal cells are left empty
        for n in nodes:
            s = str(n)
            for k in nodes:
                s += ','
                if n != k:
                    s += str(comm[n][k])
            file.write(s + '\n')
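A quick way to exercise GetCommunicability (the graph and file name here are made up):

import networkx as nx

G = nx.cycle_graph(5)
GetCommunicability(G, 'communicability.csv')  # writes a 5x5 matrix with an empty diagonal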
Example #10
def set_capacities_edge_communicability(topology, capacities,
                                        capacity_unit='Mbps'):
    """
    Set link capacities proportionally to edge communicability centrality of
    the link.

    Parameters
    ----------
    topology : Topology
        The topology to which link capacities will be set
    capacities : list
        A list of all possible capacity values
    capacity_unit : str, optional
        The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..)
    """
    communicability = nx.communicability(topology)
    centrality = {(u, v): communicability[u][v]
                  for (u, v) in topology.edges()}
    _set_capacities_proportionally(topology, capacities, centrality,
                                   capacity_unit=capacity_unit)
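A hedged usage sketch, assuming the fnss conventions this function appears to follow (the topology generator and capacity pool are illustrative):

import fnss

topology = fnss.erdos_renyi_topology(30, 0.2)  # hypothetical 30-node topology
set_capacities_edge_communicability(topology, [100, 1000, 10000], capacity_unit='Mbps')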
Example #11
def set_capacities_edge_communicability(topology, capacities,
                                        capacity_unit='Mbps'):
    """
    Set link capacities proportionally to edge communicability centrality of
    the link.

    Parameters
    ----------
    topology : Topology
        The topology to which link capacities will be set
    capacities : list
        A list of all possible capacity values
    capacity_unit : str, optional
        The unit in which capacity value is expressed (e.g. Mbps, Gbps etc..)
    """
    communicability = nx.communicability(topology)
    centrality = {(u, v): communicability[u][v]
                  for (u, v) in topology.edges_iter()}  # NetworkX 1.x API; on 2.x+ use edges()
    _set_capacities_proportionally(topology, capacities, centrality,
                                   capacity_unit=capacity_unit)
Example #12
def method1(df):
    df['authors_parsed'] = df['authors_parsed'].apply(
        lambda x: ast.literal_eval(x)[:3])  # parse the stored string back into a list
    all_authors = sum(df['authors_parsed'], [])
    # concatenate all the author lists
    authors_names = [' '.join(x[:-1]) for x in all_authors]
    authors_names = pd.DataFrame(authors_names)
    top10_names = authors_names[0].value_counts().index.values[:10].tolist()
    print(top10_names)
    G = nx.Graph()
    n = 0
    for i in range(len(df)):
        authors = df.iloc[i]['authors_parsed']
        authors = [' '.join(x[:-1]) for x in authors]
        # print(authors)
        if list(set(authors) & set(top10_names)):
            n += 1
            for author in authors[1:]:
                G.add_edge(authors[0], author)  # link the first author to each co-author
    print(n)
    # count the connected components in the co-authorship graph
    print(nx.number_connected_components(G))
    nx.draw(G, with_labels=True)

    degree_sequence = sorted([d for n, d in G.degree()], reverse=True)
    dmax = max(degree_sequence)
    plt.loglog(degree_sequence, "b-", marker="o")
    plt.title("Degree rank plot")
    plt.ylabel("degree")
    plt.xlabel("rank")
    # draw graph in inset
    plt.axes([0.45, 0.45, 0.45, 0.45])
    Gcc = G.subgraph(
        sorted(nx.connected_components(G), key=len, reverse=True)[0])

    pos = nx.spring_layout(Gcc)
    plt.axis("off")
    nx.draw_networkx_nodes(Gcc, pos, node_size=20)
    nx.draw_networkx_edges(Gcc, pos, alpha=0.4)
    plt.show()
Example #13
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt


G = nx.Graph()
G.add_edges_from([(1, 2), (1, 3), (2, 3), (1, 4), (4, 5)])
G2 = nx.generators.random_graphs.fast_gnp_random_graph(10, .3)
# nx.adjacency_matrix(G)
L = nx.laplacian_matrix(G).toarray()  # L = D - A; laplacian() was renamed, and eig() needs a dense array
# np.linalg.eigvals(L)
np.linalg.eig(L)
res = nx.laplacian_spectrum(G)
print(res)

print(nx.normalized_laplacian_matrix(G).toarray())
c = nx.communicability(G)

# drawing
nx.draw(G)  # default uses spring_layout: force-directed
# same as:
# pos = nx.spring_layout(G)
# nx.draw(G, pos)
nx.draw_random(G)
nx.draw_spectral(G)
plt.savefig('path.png')  # save before show(), which clears the figure
plt.show()
plt.cla()

# random graph
G = nx.generators.random_graphs.random_regular_graph(6, 50)
plt.show()
Example #14
def get_comm(G):
    GU = G.to_undirected()
    return nx.communicability(GU)
Example #15
def getCommMat(network):
    adjMat = network.connectivity.adj_matrix
    G = nx.from_numpy_array(adjMat)
    comm = nx.communicability(G)
    # reindex the dict-of-dicts into a dense (n, n) array
    commMat = np.array([[comm[i][j] for j in range(len(G))]
                        for i in range(len(G))])
    return commMat
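Sanity check: for an undirected graph, nx.communicability returns the entries of exp(A), so getCommMat should agree with scipy's matrix exponential (assuming node labels 0..n-1):

from scipy.linalg import expm
import networkx as nx
import numpy as np

G = nx.karate_club_graph()
comm = nx.communicability(G)
commMat = np.array([[comm[i][j] for j in range(len(G))] for i in range(len(G))])
assert np.allclose(commMat, expm(nx.to_numpy_array(G)))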
print(tree.edges(data=True))
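The snippets below assume a weighted edge list g_data (the tree printed above also comes from an earlier, unshown step). A hypothetical stand-in to make them runnable:

g_data = [(1, 2, 0.5), (2, 3, 1.0), (3, 4, 0.25), (1, 4, 2.0)]  # (u, v, weight) triples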

# shortest path
G = nx.path_graph(5)  # the chain 0-1-2-3-4
print(nx.dijkstra_path(G, 0, 4))

# shortest paths between all pairs of nodes
G = nx.Graph()
G.add_weighted_edges_from(g_data)
gen = nx.all_pairs_shortest_path(G)
print(dict(gen))

# communicability between every pair of nodes
G = nx.Graph()
G.add_weighted_edges_from(g_data)
print(nx.communicability(G))

# list the isolated nodes in the graph
G = nx.Graph()
G.add_edge(1, 2)
G.add_node(3)
print(list(nx.isolates(G)))

# traversal
G = nx.Graph()
G.add_weighted_edges_from(g_data)
d_gen = nx.dfs_edges(G, 1)  # depth-first search over edges, starting from node 1
b_gen = nx.bfs_edges(G, 1)
print(list(d_gen), list(b_gen))
print(nx.dfs_tree(G, 1).nodes())  # depth-first search over nodes
Example #17
    def centrality(self):
        result = {}
        result['degree_centrality'] = nx.degree_centrality(self.graph)

        if self.directed == 'directed':
            result['in_degree_centrality'] = nx.in_degree_centrality(
                self.graph)
            result['out_degree_centrality'] = nx.out_degree_centrality(
                self.graph)

        result['closeness_centrality'] = nx.closeness_centrality(self.graph)
        result['betweenness_centrality'] = nx.betweenness_centrality(
            self.graph)

        # stringify tuple keys so the edge dicts can be JSON-encoded
        stringify_temp = {}
        temp = nx.edge_betweenness_centrality(self.graph)
        for key in temp.keys():
            stringify_temp[str(key)] = temp[key]
        result['edge_betweenness_centrality'] = stringify_temp

        if self.directed == 'undirected':
            result[
                'current_flow_closeness_centrality'] = nx.current_flow_closeness_centrality(
                    self.graph)
            result[
                'current_flow_betweenness_centrality'] = nx.current_flow_betweenness_centrality(
                    self.graph)

            stringify_temp = {}
            temp = nx.edge_current_flow_betweenness_centrality(self.graph)
            for key in temp.keys():
                stringify_temp[str(key)] = temp[key]
            result['edge_current_flow_betweenness_centrality'] = stringify_temp

            result[
                'approximate_current_flow_betweenness_centrality'] = nx.approximate_current_flow_betweenness_centrality(
                    self.graph)
            result['eigenvector_centrality'] = nx.eigenvector_centrality(
                self.graph)
            result[
                'eigenvector_centrality_numpy'] = nx.eigenvector_centrality_numpy(
                    self.graph)
            result['katz_centrality'] = nx.katz_centrality(self.graph)
            result['katz_centrality_numpy'] = nx.katz_centrality_numpy(
                self.graph)
            result['communicability'] = nx.communicability(self.graph)
            result['communicability_exp'] = nx.communicability_exp(self.graph)
            # communicability_centrality was renamed subgraph_centrality in NetworkX 2.0
            result['communicability_centrality'] = nx.subgraph_centrality(
                self.graph)
            result['communicability_centrality_exp'] = nx.subgraph_centrality_exp(
                self.graph)
            result[
                'communicability_betweenness_centrality'] = nx.communicability_betweenness_centrality(
                    self.graph)
            result['estrada_index'] = nx.estrada_index(self.graph)

        result['load_centrality'] = nx.load_centrality(self.graph)

        stringify_temp = {}
        temp = nx.edge_load_centrality(self.graph)  # edge_load was renamed in NetworkX 2.0
        for key in temp.keys():
            stringify_temp[str(key)] = temp[key]
        result['edge_load'] = stringify_temp
        result['dispersion'] = nx.dispersion(self.graph)

        fname_centra = self.DIR + '/centrality.json'
        with open(fname_centra, "w") as f:
            json.dump(result, f, cls=SetEncoder, indent=2)
        print(fname_centra)
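SetEncoder is a repo-local helper. A minimal sketch consistent with its use in json.dump (an assumption: it presumably just serializes sets as lists):

import json

class SetEncoder(json.JSONEncoder):
    def default(self, obj):
        # assumed behavior: make sets JSON-serializable
        if isinstance(obj, set):
            return list(obj)
        return super().default(obj)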
Example #18
def function(input):

    if input == 1:
        clustering_coefficient = nx.clustering(G)
        clustering_coefficient = normalise(clustering_coefficient)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            clustering_coefficient.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            clustering_coefficient, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_clustering_coefficient.txt")
        write_to_file(dev_data, baseDir + "dev_clustering_coefficient.txt")
        write_to_file(test_data, baseDir + "test_clustering_coefficient.txt")

    if input == 2:
        betweenness_centrality = nx.betweenness_centrality(G, normalized=True)
        betweenness_centrality = normalise(betweenness_centrality)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            betweenness_centrality.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            betweenness_centrality, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_betweenness_centrality.txt")
        write_to_file(dev_data, baseDir + "dev_betweenness_centrality.txt")
        write_to_file(test_data, baseDir + "test_betweenness_centrality.txt")

    if input == 3:
        closeness_centrality = nx.closeness_centrality(G, normalized=True)
        closeness_centrality = normalise(closeness_centrality)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            closeness_centrality.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            closeness_centrality, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_closeness_centrality.txt")
        write_to_file(dev_data, baseDir + "dev_closeness_centrality.txt")
        write_to_file(test_data, baseDir + "test_closeness_centrality.txt")

    if input == 4:
        average_neighbor_degree = nx.average_neighbor_degree(G)
        average_neighbor_degree = normalise(average_neighbor_degree)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            average_neighbor_degree.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            average_neighbor_degree, train_keys, dev_keys, test_keys)
        write_to_file(train_data,
                      baseDir + "train_average_neighbor_degree.txt")
        write_to_file(dev_data, baseDir + "dev_average_neighbor_degree.txt")
        write_to_file(test_data, baseDir + "test_average_neighbor_degree.txt")

    if input == 5:
        degree_centrality = nx.degree_centrality(G)
        degree_centrality = normalise(degree_centrality)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            degree_centrality.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            degree_centrality, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_degree_centrality.txt")
        write_to_file(dev_data, baseDir + "dev_degree_centrality.txt")
        write_to_file(test_data, baseDir + "test_degree_centrality.txt")

    if input == 6:
        load_centrality = nx.load_centrality(G, normalized=True)
        load_centrality = normalise(load_centrality)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            load_centrality.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            load_centrality, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_load_centrality.txt")
        write_to_file(dev_data, baseDir + "dev_load_centrality.txt")
        write_to_file(test_data, baseDir + "test_load_centrality.txt")

    if input == 7:
        shortest_path_length_dict = dict(nx.shortest_path_length(G))  # iterator in NetworkX >= 2.0
        shortest_path_length = {}
        for key_1 in shortest_path_length_dict:
            for key_2 in shortest_path_length_dict[key_1]:
                shortest_path_length[
                    str(key_1) + "\t" +
                    str(key_2)] = shortest_path_length_dict[key_1][key_2]
        shortest_path_length = normalise(shortest_path_length)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            shortest_path_length.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            shortest_path_length, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_shortest_path_length.txt")
        write_to_file(dev_data, baseDir + "dev_shortest_path_length.txt")
        write_to_file(test_data, baseDir + "test_shortest_path_length.txt")

    if input == 8:
        jaccard_coefficient = nx.jaccard_coefficient(G)
        jaccard_coefficient_dict = {}
        for u, v, p in jaccard_coefficient:
            jaccard_coefficient_dict[str(u) + "\t" + str(v)] = p
        jaccard_coefficient_dict = normalise(jaccard_coefficient_dict)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            jaccard_coefficient_dict.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            jaccard_coefficient_dict, train_keys, dev_keys, test_keys)
        write_to_file(train_data,
                      baseDir + "train_jaccard_coefficient_dict.txt")
        write_to_file(dev_data, baseDir + "dev_jaccard_coefficient_dict.txt")
        write_to_file(test_data, baseDir + "test_jaccard_coefficient_dict.txt")

    if input == 9:
        katz_centrality = nx.katz_centrality(G)
        katz_centrality = normalise(katz_centrality)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            katz_centrality.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            katz_centrality, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_katz_centrality.txt")
        write_to_file(dev_data, baseDir + "dev_katz_centrality.txt")
        write_to_file(test_data, baseDir + "test_katz_centrality.txt")

    if input == 10:
        pagerank = nx.pagerank(G)
        pagerank = normalise(pagerank)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            pagerank.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            pagerank, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_pagerank.txt")
        write_to_file(dev_data, baseDir + "dev_pagerank.txt")
        write_to_file(test_data, baseDir + "test_pagerank.txt")

    if input == 11:
        communicability_dict = nx.communicability(G)
        communicability = {}
        # flatten the pairwise dict-of-dicts so normalise sees scalar values,
        # mirroring the shortest-path case above
        for key_1 in communicability_dict:
            for key_2 in communicability_dict[key_1]:
                communicability[str(key_1) + "\t" +
                                str(key_2)] = communicability_dict[key_1][key_2]
        communicability = normalise(communicability)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            communicability.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            communicability, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_communicability.txt")
        write_to_file(dev_data, baseDir + "dev_communicability.txt")
        write_to_file(test_data, baseDir + "test_communicability.txt")

    if input == 12:
        degree = dict(G.degree())  # DegreeView must be materialized before normalising
        degree = normalise(degree)
        train_keys, dev_keys, test_keys = create_train_test_dev_split(
            degree.keys())
        train_data, dev_data, test_data = write_train_test_dev(
            degree, train_keys, dev_keys, test_keys)
        write_to_file(train_data, baseDir + "train_degree.txt")
        write_to_file(dev_data, baseDir + "dev_degree.txt")
        write_to_file(test_data, baseDir + "test_degree.txt")
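normalise is assumed throughout this example. A plausible sketch: min-max scaling of a dict's values into [0, 1].

def normalise(d):
    # hypothetical implementation; guards against a zero range
    lo, hi = min(d.values()), max(d.values())
    rng = (hi - lo) or 1.0
    return {k: (v - lo) / rng for k, v in d.items()}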
def features_part2(info):
    """
    third set of features.
    """
    G = info['G']
    n = info['num_nodes']
    num_units = info['num_units']
    edges = info['edges']
    nedges = len(edges)

    H = G.to_undirected()

    res = dict()
    cc = nx.closeness_centrality(G)
    res['closeness_centrality'] = cc[n - 1]
    res['closeness_centrality_mean'] = np.mean(list(cc.values()))

    bc = nx.betweenness_centrality(G)
    res['betweenness_centrality_mean'] = np.mean(list(bc.values()))

    cfcc = nx.current_flow_closeness_centrality(H)
    res['current_flow_closeness_centrality_mean'] = np.mean(list(
        cfcc.values()))

    cfbc = nx.current_flow_betweenness_centrality(H)
    res['current_flow_betweenness_centrality_mean'] = np.mean(
        list(cfbc.values()))

    soc = nx.second_order_centrality(H)
    res['second_order_centrality_mean'] = np.mean(list(soc.values())) / n

    cbc = nx.communicability_betweenness_centrality(H)
    res['communicability_betweenness_centrality_mean'] = np.mean(
        list(cbc.values()))

    comm = nx.communicability(H)
    res['communicability'] = np.log(comm[0][n - 1])
    res['communicability_start_mean'] = np.log(np.mean(list(comm[0].values())))
    res['communicability_end_mean'] = np.log(
        np.mean(list(comm[n - 1].values())))

    res['radius'] = nx.radius(H)
    res['diameter'] = nx.diameter(H)
    res['local_efficiency'] = nx.local_efficiency(H)
    res['global_efficiency'] = nx.global_efficiency(H)
    res['efficiency'] = nx.efficiency(H, 0, n - 1)

    pgr = nx.pagerank(G)  # pagerank_numpy was removed in NetworkX 3.0
    res['page_rank'] = pgr[n - 1]
    res['page_rank_mean'] = np.mean(list(pgr.values()))

    cnstr = nx.constraint(G)
    res['constraint_mean'] = np.mean(list(cnstr.values())[:-1])

    effsize = nx.effective_size(G)
    res['effective_size_mean'] = np.mean(list(effsize.values())[:-1])

    cv = np.array(list(nx.closeness_vitality(H).values()))
    cv[cv < 0] = 0
    res['closeness_vitality_mean'] = np.mean(cv) / n

    res['wiener_index'] = nx.wiener_index(H) / (n * (n - 1) / 2)

    A = nx.to_numpy_array(G)
    expA = expm(A)  # scipy.linalg.expm, assumed imported at module level
    res['expA'] = np.log(expA[0, n - 1])
    res['expA_mean'] = np.log(np.mean(expA[np.triu_indices(n)]))

    return res
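A hypothetical info dict to exercise features_part2 (keys inferred from the reads above; the undirected graph must be connected for the radius, diameter, and current-flow metrics):

import networkx as nx
from scipy.linalg import expm  # features_part2 uses expm at the end

G = nx.path_graph(6, create_using=nx.DiGraph)  # small connected digraph, nodes 0..5
info = {'G': G, 'num_nodes': G.number_of_nodes(),
        'num_units': 1, 'edges': list(G.edges())}
print(features_part2(info))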