def com_bet(net):
    return distriCentra(
        nx.communicability_betweenness_centrality(net,
                                                  normalized=True).values(),
        nx.communicability_betweenness_centrality(star(net),
                                                  normalized=True).values(),
        'communicability_betweenness')
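# The helpers `distriCentra` and `star` come from elsewhere in this repo and
# are not shown. Below is a rough, assumed sketch (not the original code):
# `star` plausibly builds a star graph on the same number of nodes as `net`
# to act as a maximally centralized baseline, and `distriCentra` compares the
# two centrality distributions. Note also that recent NetworkX releases
# dropped the `normalized` keyword here; the values are always normalized.
import networkx as nx

def star(net):
    # Assumed helper: star graph with the same number of nodes as `net`.
    return nx.star_graph(net.number_of_nodes() - 1)

def distriCentra(values, star_values, name):
    # Assumed helper: summarize the observed distribution against the
    # star-graph baseline (here simply as a ratio of maxima).
    observed, baseline = max(values), max(star_values)
    return {name: observed / baseline if baseline else 0.0}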
def centrality(net):
    values = {}
    # NOTE: recent NetworkX releases dropped the `normalized` keyword from
    # closeness, current-flow closeness and communicability betweenness
    # (those values are always normalized now).
    close = nx.closeness_centrality(net)
    eigen = nx.eigenvector_centrality_numpy(net)
    page = nx.pagerank(net)
    bet = nx.betweenness_centrality(net, normalized=True)
    flow_c = nx.current_flow_closeness_centrality(net)
    flow_b = nx.current_flow_betweenness_centrality(net, normalized=True)
    load = nx.load_centrality(net, normalized=True)
    com_c = nx.subgraph_centrality(net)  # called communicability_centrality before NetworkX 2.0
    com_b = nx.communicability_betweenness_centrality(net)
    degree = dict(net.degree())  # dict() so .values() works on a NetworkX 2.x DegreeView

    measures = [bet, load, degree, page, flow_b, com_c, com_b, eigen, close, flow_c]
    with open("bl.csv", 'w') as file3:
        # write a pairwise correlation matrix of all centrality measures
        for xt in measures:
            for yt in measures:
                corr(xt.values(), yt.values(), file3)
            file3.write("\n")
    # for key, item in close.items():
    #     values[key] = [impo.get(key), bet.get(key), flow_b.get(key),
    #                    load.get(key), com_c.get(key), com_b.get(key)]
    return values
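# The `corr` helper used above is not shown. A minimal sketch, assuming it
# writes a Pearson correlation as one comma-separated cell of bl.csv:
from scipy.stats import pearsonr

def corr(xs, ys, out):
    r, _p = pearsonr(list(xs), list(ys))  # correlation of two centrality vectors
    out.write("%.4f," % r)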
Example No. 4
def roda_funcoes_grafo(nx_grafo):
    # Connectivity (under "Approximations and Heuristics")
    print('Connectivity')
    print(nx.all_pairs_node_connectivity(nx_grafo))
    # Degree centrality (under "Centrality")
    print('Centrality degree')
    print(nx.degree_centrality(nx_grafo))
    # Closeness (under "Centrality")
    print('Closeness')
    print(nx.closeness_centrality(nx_grafo, distance='weight'))
    # (Shortest-path) betweenness (under "Centrality")
    print('Shortest path betweenness')
    print(
        nx.betweenness_centrality(nx_grafo, normalized=False, weight='weight'))
    # Communicability betweenness (undirected graphs only)
    print('Communicability Betweenness')
    print(
        nx.communicability_betweenness_centrality(nx_grafo.to_undirected(),
                                                  normalized=False))
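# A minimal usage sketch for the function above; the toy graph and its
# weights are made up for illustration.
import networkx as nx

g = nx.DiGraph()
g.add_weighted_edges_from([("a", "b", 1.0), ("b", "c", 2.0), ("a", "c", 0.5)])
roda_funcoes_grafo(g)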
Example No. 5
def plot_graph(g, genres):
    # remove the most ambiguous genre name, such as "j-pop" or "rock".
    vague_genre = search_top_genre(genres)
    for node, genre in genres.items():
        if vague_genre in genre:
            genres[node].remove(vague_genre)
    # set colors by genre: each node is represented by its first remaining genre
    groups = []
    for group in genres.values():
        groups.append(group[0])
    print("groups:", groups)

    groupset = list(set(groups))
    print("group set:", len(groupset), groupset)

    colorlist = ["red", "orange", "yellow", "green", "blue", "purple", "cyan",
                 "magenta", "crimson", "indigo", "aqua", "royalblue"]
    # assign one color per distinct genre (colors recycle past 12 genres);
    # the original iterated over all nodes, reassigning genres repeatedly
    color_combinations = {genre: colorlist[i % len(colorlist)]
                          for i, genre in enumerate(groupset)}
    print("color combs:", color_combinations)

    colormap = [color_combinations[group] for group in groups]
    print("colormap:", colormap)

    # dummy scatter points so each genre/color pair shows up in the legend
    for k, v in color_combinations.items():
        plt.scatter(0, 0, c=v, label=k)
    plt.legend()

    # node size scales with communicability betweenness centrality
    centrality = nx.communicability_betweenness_centrality(g)
    pos = nx.spring_layout(g, iterations=200, k=0.7)
    node_size = [5000 * size for size in centrality.values()]
    nx.draw_networkx_nodes(g, pos=pos, node_size=node_size, alpha=0.5,
                           nodelist=g.nodes, node_color=colormap)
    nx.draw_networkx_labels(g, pos=pos, font_size=10, font_color='black', alpha=0.8)
    nx.draw_networkx_edges(g, pos=pos, alpha=0.5, edge_color="gray")
    return g
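# `search_top_genre` is defined elsewhere. Judging by its use above, it
# returns the most frequent (and therefore least informative) genre label.
# A minimal sketch under that assumption:
from collections import Counter

def search_top_genre(genres):
    counts = Counter(tag for tags in genres.values() for tag in tags)
    return counts.most_common(1)[0][0]  # the genre shared by the most nodes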
    def _calculate(self, include: set, is_regression=False):
        self._features = nx.communicability_betweenness_centrality(self._gnx)
Example No. 8
import networkx as nx
import os

G = nx.Graph()    # Create an empty graph with no nodes and no edges.
file = os.path.join("write your pathname")    # Path to the data file (placeholder)
with open(file) as p:    # Open the data file
    next(p)    # ignore the first row (header) of the dataset
    for line in p:    # iterate over the dataset
        s = line.split()    # Break the line into columns
        G.add_edge(s[0], s[1], weight=int(s[2]))    # Add edges and weights from the dataset
communicability_betweenness_centrality_dic = nx.communicability_betweenness_centrality(G)    # Returns a dict of node -> centrality
with open("Communicability_Betweenness_Centrality_Output.txt", "w") as f:    # Create the output text file
    # Write a header title
    f.write("\t\t\t\t\t\t\t\t\t************************************\t\t\tCommunicability Betweenness Centrality Output\t\t\t************************************" + "\n")
    for k, v in communicability_betweenness_centrality_dic.items():    # Iterate over the dictionary
        f.write(str(k) + ": " + str(v) + "\n")    # Write keys and values to the file
Example No. 9
def add_communicability_betweenness_node(graf):
    print("Adding communicability betweenness to nodes")
    cmb_dict = nx.communicability_betweenness_centrality(graf)
    # NetworkX >= 2.0 takes (G, values, name); 1.x used (G, name, values)
    nx.set_node_attributes(graf, cmb_dict, 'cmb')
Example No. 10
def graph_summary(
    G: nx.Graph,
    summary_statistics: List[str] = [
        "degree",
        "betweenness_centrality",
        "closeness_centrality",
        "eigenvector_centrality",
        "communicability_betweenness_centrality",
    ],
    custom_data: Optional[Union[pd.DataFrame, pd.Series]] = None,
    plot: bool = False,
) -> pd.DataFrame:
    """Returns a summary of the graph in a dataframe.

    :param G: NetworkX graph to get summary of.
    :type G: nx.Graph
    :param plot: Whether or not to plot the summary as a heatmap, defaults to ``False``.
    :type plot: bool
    :return: Dataframe of summary or plot.
    :rtype: pd.DataFrame
    """
    col_list = []
    col_names = []
    if "degree" in summary_statistics:
        degrees = pd.DataFrame(nx.degree(G))
        degrees.columns = ["node", "degree"]
        degrees.index = degrees["node"]
        degrees = degrees["degree"]
        col_list.append(degrees)
        col_names.append("degree")
    if "betweenness_centrality" in summary_statistics:
        betweenness = pd.Series(nx.betweenness_centrality(G))
        col_list.append(betweenness)
        col_names.append("betweenness_centrality")
    if "closeness_centrality" in summary_statistics:
        closeness = pd.Series(nx.closeness_centrality(G))
        col_list.append(closeness)
        col_names.append("closeness_centrality")
    if "eigenvector_centrality" in summary_statistics:
        eigenvector = pd.Series(nx.eigenvector_centrality(G))
        col_list.append(eigenvector)
        col_names.append("eigenvector_centrality")
    if "communicability_betweenness_centrality" in summary_statistics:
        communicability = pd.Series(
            nx.communicability_betweenness_centrality(G))
        col_list.append(communicability)
        col_names.append("communicability_betweenness_centrality")

    df = pd.DataFrame(col_list).T
    df.columns = col_names
    df.index.name = "node"

    # Add custom data if provided; `is not None` avoids pandas' ambiguous
    # truth-value error when custom_data is a DataFrame
    if custom_data is not None:
        df = pd.concat([df, custom_data], axis=1)

    if plot:
        return px.imshow(df.T)

    # Node IDs are assumed to be of the form "chain:residue_type:position"
    chain = [node_id.split(":")[0] for node_id in df.index]
    residue_type = [node_id.split(":")[1] for node_id in df.index]
    position = [node_id.split(":")[2] for node_id in df.index]
    df["residue_type"] = residue_type
    df["position"] = position
    df["chain"] = chain

    return df
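# A usage sketch for graph_summary; the node IDs below are invented but
# follow the "chain:residue_type:position" convention the function parses.
import networkx as nx

G = nx.Graph()
G.add_edges_from([("A:GLY:1", "A:ALA:2"), ("A:ALA:2", "A:SER:3")])
print(graph_summary(G, summary_statistics=["degree", "closeness_centrality"]))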
Example No. 11
def mergegraph(graphs, pos_old, labels_old, edge_prob=0.3, edge_num=0.4):
    nodes = []
    edges = []
    pos = {}
    node_cnt = 0
    val = 0.9
    # quadrant offsets; supports up to four input graphs
    shift_value = [[-val, val], [val, val], [-val, -val], [val, -val]]

    comm_labels = []

    for i, g in enumerate(graphs):
        tmp_nodes = list(g.nodes())
        tmp_edges = list(g.edges())

        comm_labels += [i] * len(tmp_nodes)

        # map each original node label to a fresh global ID; the original
        # {k: node_cnt + i for k, i in enumerate(...)} only worked when node
        # labels were exactly 0..n-1
        node_map = {n: node_cnt + idx for idx, n in enumerate(tmp_nodes)}
        node_cnt += len(tmp_nodes)

        new_nodes = [node_map[n] for n in tmp_nodes]
        new_edges = [(node_map[u], node_map[v]) for u, v in tmp_edges]

        for k, v in pos_old[i].items():
            pos_old[i][k][0] += shift_value[i][0]
            pos_old[i][k][1] += shift_value[i][1]

        new_pos = {node_map[n]: v for n, v in pos_old[i].items()}

        nodes += new_nodes
        edges += new_edges
        pos.update(new_pos)

    G = nx.DiGraph()
    G.add_edges_from(edges)

    random.shuffle(nodes)
    l = int(edge_num * len(nodes))
    u = nodes[0:l]
    random.shuffle(nodes)
    v = nodes[0:l]

    for s, t in zip(u, v):
        if random.random() < edge_prob:
            G.add_edge(s, t)
            G.add_edge(t, s)
    nodes_deg = [G.degree[i] for i in G.nodes()]

    centrality = nx.closeness_centrality(G)
    labels_central = get_labels(centrality)
    print('centrality done!')

    inf_cent = nx.information_centrality(G.to_undirected())
    labels_inf_central = get_labels(inf_cent)
    print('info centrality done!')

    betweenness = nx.betweenness_centrality(G.to_undirected())
    labels_betweenness = get_labels(betweenness)
    print('betweenness done!')

    loads = nx.load_centrality(G.to_undirected())
    labels_load = get_labels(loads)
    print('load centrality done!')

    cmm_bet = nx.communicability_betweenness_centrality(G.to_undirected())
    labels_cmm_bet = get_labels(cmm_bet)
    print('commu betweenness done!')

    sce = nx.subgraph_centrality_exp(G.to_undirected())
    labels_sce = get_labels(sce)
    print('subgraph centrality done!')

    harm = nx.harmonic_centrality(G.to_undirected())
    labels_harm = get_labels(harm)
    print('harmonic done!')

    lrc = {
        v: nx.local_reaching_centrality(G.to_undirected(), v)
        for v in G.nodes()
    }
    labels_lrc = get_labels(lrc)
    print('lrc done!')

    unq_lbl = np.unique(nodes_deg)
    lbl_map = {unq_lbl[i]: i for i in range(len(unq_lbl))}
    labels = [lbl_map[k] for k in nodes_deg]
    return G, pos, labels, comm_labels, labels_central, labels_inf_central, labels_betweenness, labels_load, labels_cmm_bet, labels_sce, labels_harm, labels_lrc
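# `get_labels` is not shown. From its use (turning a node -> score dict into
# discrete class labels), a plausible sketch bins the scores into quantiles;
# the bin count is an assumption.
import numpy as np

def get_labels(score_dict, n_bins=4):
    scores = np.array(list(score_dict.values()))
    edges = np.quantile(scores, np.linspace(0, 1, n_bins + 1)[1:-1])
    return list(np.digitize(scores, edges))  # one label per node, in dict order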
Example No. 12
def calc_centralities(G,org_name,string_version):
    print("Calculating centralities")
    centrality_measures = {}
    string_location=f.string_version_data(string_version)[0]
    print(string_location)
    if os.path.isfile('centrality_data/%s/%s.cent'%(string_location,org_name)):
        print("Using cached centrality data")
        file=open('centrality_data/%s/%s.cent'%(string_location,org_name))
        lines=file.readlines()
        centrality_list=lines.pop(0).strip().split(' ')
        centrality_list.pop(0)
        
        for i,centrality in enumerate(centrality_list):
            centrality_measures[centrality]={}

        for line in lines:
            value_list=line.split(' ')
            for i,centrality in enumerate(centrality_list):
                # print("%d. %s" % (i+1,centrality))
                centrality_measures[centrality][value_list[0]]=float(value_list[i+1])
    else:
        
        print("1. Degree centrality")
        centrality_measures['Degree_Centrality']=nx.degree_centrality(G)
        
        print("2. Closeness centrality")
        centrality_measures['Closeness_Centrality']=Counter(nx.algorithms.centrality.closeness_centrality(G))
        
        print("3. Betweenness centrality")
        centrality_measures['Betweenness_Centrality']=Counter(nx.algorithms.centrality.betweenness_centrality(G))
        
        print("4. Clustering coefficient")
        centrality_measures['Clustering_Co-efficient']=Counter(nx.clustering(G))
        
        print("5. Eigenvector centrality")
        centrality_measures['Eigenvector_Centrality']= nx.eigenvector_centrality(G)
        
        print("6. Subgraph centrality")
        centrality_measures["Subgraph_Centrality"]=nx.subgraph_centrality(G)
        
        print("7. Information centrality")
        centrality_measures["Information_Centrality"]=nx.current_flow_closeness_centrality(f.trim_graph(G))
        
        print("8. Clique Number")
        cliq={}
        for i in G.nodes():
           cliq[i]=nx.node_clique_number(G,i)
        centrality_measures["Clique_Number"]=cliq
        
        print("9. Edge clustering coefficient")
        edge_clus_coeff={}
        for n in G.nodes:
            edge_clus_coeff[n]=0
            for e in G.edges(n):
                num=len(list(nx.common_neighbors(G,e[0],e[1])))
                den=(min(G.degree(e[0]),G.degree(e[1]))-1)
                if den==0:
                    den=1
                edge_clus_coeff[n]+=num/den
    
        centrality_measures['Edge_Clustering_Coefficient']=edge_clus_coeff
        
        print("10. Page Rank")
        centrality_measures['Page_Rank']=nx.pagerank(G)
        
        print("11. Random Walk Betweenness Centrality")
        centrality_measures["Random_Walk_Betweenness_Centrality"]=nx.current_flow_betweenness_centrality(f.trim_graph(G))
        
        print("12. Load Centrality")
        centrality_measures["Load_Centrality"]=nx.load_centrality(G)
        
        print("13. Communicability Betweenness")
        centrality_measures["Communicability_Betweenness"]=nx.communicability_betweenness_centrality(f.trim_graph(G))
        
        print("14. Harmonic Centrality")
        centrality_measures["Harmonic_Centrality"]=nx.harmonic_centrality(G)
            
        print("15. Reaching Centrality")
        reach_cent={}
        for node in G.nodes:
            reach_cent[node] = nx.local_reaching_centrality(G,node)
        centrality_measures["Reaching_Centrality"]=reach_cent
        
        print("16. Katz Centrality(not calculated)")
    #   centrality_measures["Katz_Centrality"]=nx.katz_centrality(G)
    
        datafile=open("refex_props/%s.refex" % (org_name))
        sample_line=datafile.readline()
        s= sample_line.strip().split(' ')
        for x in range(1,len(s)):
            centrality_measures["refex#%d" % (x)]={}                
        for line in datafile:
            props=line.strip().split(" ")
            props=[i.strip('\t') for i in props]
            for x in range(1,len(s)):
                centrality_measures["refex#%d" % (x)][props[0]]=float(props[x])

        datafile=open("refex_rider_props/%s.riderproperties" % (org_name))
        sample_line=datafile.readline()
        s= sample_line.strip().split(' ')
        s.pop(1)
        print(len(s))
        for x in range(1,len(s)):
            centrality_measures["refex_rider#%d" % (x)]={}                
        
        for line in datafile:
            props=line.strip().split(" ")
            props.pop(1)
            for x in range(1,len(props)):

                centrality_measures["refex_rider#%d" % (x)][props[0]]=float(props[x])
    
     
        with open('centrality_data/%s/%s.cent'%(string_location,org_name),'w') as file:
            file.write(str(org_name)+' ')
            centrality_list=list(centrality_measures)
            for x in centrality_list:
                file.write(str(x)+' ')

            for node in G.nodes:
                file.write('\n'+node+' ')
                for x in centrality_list:
                    if node not in centrality_measures[x]:
                        file.write('-1 ')
                    else:
                        file.write(str(centrality_measures[x][node])+' ')
    return centrality_measures
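# The module `f` is external. `f.trim_graph` is presumably needed because the
# current-flow and communicability measures require a connected graph; a
# sketch under that assumption:
import networkx as nx

def trim_graph(G):
    largest = max(nx.connected_components(G), key=len)  # largest connected component
    return G.subgraph(largest).copy()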
Example No. 13
import networkx as nx
import pandas as pd
from bokeh.palettes import YlOrRd

df = pd.read_csv(
    'C:/Users/Meenu/PycharmProjects/CS590/CS590-Yelp/usernetwork1.csv')
df['distance'] = 1 / df['strength']
df_user = pd.read_csv(
    'C:/Users/Meenu/PycharmProjects/CS590/CS590-Yelp/userdetails1.csv')
del df_user['Unnamed: 0']

G = nx.from_pandas_edgelist(df, 'user1', 'user2', ['strength', 'distance'])
print(nx.number_connected_components(G))
nx.set_node_attributes(G, df_user.set_index('user_id').to_dict('index'))
nx.set_node_attributes(G, dict(G.degree(weight='strength')), 'WDegree')
nx.set_node_attributes(G, nx.betweenness_centrality(G, weight='distance'),
                       'bwcentral')
nx.set_node_attributes(G, nx.communicability_betweenness_centrality(G),
                       'ccentral')

# col = ['#FFFFFF', '#93CCB9', '#4D9980', '#24745A', '#074A34', '#002217']
col = YlOrRd[8]

# G.node was removed in NetworkX 2.4; use G.nodes instead
for u in G.nodes():
    if G.nodes[u]['friend'] < 730:
        G.nodes[u]['friend'] = col[7]
    elif G.nodes[u]['friend'] < (730 * 2):
        G.nodes[u]['friend'] = col[6]
    elif G.nodes[u]['friend'] < (730 * 3):
        G.nodes[u]['friend'] = col[5]
    elif G.nodes[u]['friend'] < (730 * 4):
        G.nodes[u]['friend'] = col[4]
    elif G.nodes[u]['friend'] < (730 * 5):
Example No. 14
def features_part2(info):
    """
    third set of features.
    """
    G = info['G']
    n = info['num_nodes']
    num_units = info['num_units']
    edges = info['edges']
    nedges = len(edges)

    H = G.to_undirected()

    res = dict()
    cc = nx.closeness_centrality(G)
    res['closeness_centrality'] = cc[n - 1]
    res['closeness_centrality_mean'] = np.mean(list(cc.values()))

    bc = nx.betweenness_centrality(G)
    res['betweenness_centrality_mean'] = np.mean(list(bc.values()))

    cfcc = nx.current_flow_closeness_centrality(H)
    res['current_flow_closeness_centrality_mean'] = np.mean(list(
        cfcc.values()))

    cfbc = nx.current_flow_betweenness_centrality(H)
    res['current_flow_betweenness_centrality_mean'] = np.mean(
        list(cfbc.values()))

    soc = nx.second_order_centrality(H)
    res['second_order_centrality_mean'] = np.mean(list(soc.values())) / n

    cbc = nx.communicability_betweenness_centrality(H)
    res['communicability_betweenness_centrality_mean'] = np.mean(
        list(cbc.values()))

    comm = nx.communicability(H)
    res['communicability'] = np.log(comm[0][n - 1])
    res['communicability_start_mean'] = np.log(np.mean(list(comm[0].values())))
    res['communicability_end_mean'] = np.log(
        np.mean(list(comm[n - 1].values())))

    res['radius'] = nx.radius(H)
    res['diameter'] = nx.diameter(H)
    res['local_efficiency'] = nx.local_efficiency(H)
    res['global_efficiency'] = nx.global_efficiency(H)
    res['efficiency'] = nx.efficiency(H, 0, n - 1)

    pgr = nx.pagerank(G)  # was nx.pagerank_numpy, removed in NetworkX 3.0
    res['page_rank'] = pgr[n - 1]
    res['page_rank_mean'] = np.mean(list(pgr.values()))

    cnstr = nx.constraint(G)
    res['constraint_mean'] = np.mean(list(cnstr.values())[:-1])

    effsize = nx.effective_size(G)
    res['effective_size_mean'] = np.mean(list(effsize.values())[:-1])

    cv = np.array(list(nx.closeness_vitality(H).values()))
    cv[cv < 0] = 0
    res['closeness_vitality_mean'] = np.mean(cv) / n

    res['wiener_index'] = nx.wiener_index(H) / (n * (n - 1) / 2)

    A = nx.to_numpy_array(G)
    expA = expm(A)  # requires `from scipy.linalg import expm`
    res['expA'] = np.log(expA[0, n - 1])
    res['expA_mean'] = np.log(np.mean(expA[np.triu_indices(n)]))

    return res
Example No. 15
    def centrality(self):
        result = {}
        result['degree_centrality'] = nx.degree_centrality(self.graph)

        if self.directed == 'directed':
            result['in_degree_centrality'] = nx.in_degree_centrality(
                self.graph)
            result['out_degree_centrality'] = nx.out_degree_centrality(
                self.graph)

        result['closeness_centrality'] = nx.closeness_centrality(self.graph)
        result['betweenness_centrality'] = nx.betweenness_centrality(
            self.graph)

        # stringify tuple keys so the result can be serialized to JSON
        stringify_temp = {}
        temp = nx.edge_betweenness_centrality(self.graph)
        for key in temp.keys():
            stringify_temp[str(key)] = temp[key]
        result['edge_betweenness_centrality'] = stringify_temp

        if self.directed == 'undirected':
            result[
                'current_flow_closeness_centrality'] = nx.current_flow_closeness_centrality(
                    self.graph)
            result[
                'current_flow_betweenness_centrality'] = nx.current_flow_betweenness_centrality(
                    self.graph)

            stringify_temp = {}
            temp = nx.edge_current_flow_betweenness_centrality(self.graph)
            for key in temp.keys():
                stringify_temp[str(key)] = temp[key]
            result['edge_current_flow_betweenness_centrality'] = stringify_temp

            result[
                'approximate_current_flow_betweenness_centrality'] = nx.approximate_current_flow_betweenness_centrality(
                    self.graph)
            result['eigenvector_centrality'] = nx.eigenvector_centrality(
                self.graph)
            result[
                'eigenvector_centrality_numpy'] = nx.eigenvector_centrality_numpy(
                    self.graph)
            result['katz_centrality'] = nx.katz_centrality(self.graph)
            result['katz_centrality_numpy'] = nx.katz_centrality_numpy(
                self.graph)
            result['communicability'] = nx.communicability(self.graph)
            result['communicability_exp'] = nx.communicability_exp(self.graph)
            # communicability_centrality(_exp) were renamed
            # subgraph_centrality(_exp) in NetworkX 2.0
            result['communicability_centrality'] = nx.subgraph_centrality(
                self.graph)
            result['communicability_centrality_exp'] = nx.subgraph_centrality_exp(
                self.graph)
            result[
                'communicability_betweenness_centrality'] = nx.communicability_betweenness_centrality(
                    self.graph)
            result['estrada_index'] = nx.estrada_index(self.graph)

        result['load_centrality'] = nx.load_centrality(self.graph)

        stringify_temp = {}
        temp = nx.edge_load_centrality(self.graph)  # nx.edge_load before NetworkX 2.0
        for key in temp.keys():
            stringify_temp[str(key)] = temp[key]
        result['edge_load'] = stringify_temp
        result['dispersion'] = nx.dispersion(self.graph)

        fname_centra = self.DIR + '/centrality.json'
        with open(fname_centra, "w") as f:
            json.dump(result, f, cls=SetEncoder, indent=2)
        print(fname_centra)
Example No. 16
nx.draw_spectral(galexy_network,
                 labels=galexy_id2word,
                 font_family=font_name,
                 **option)
plt.subplot(224)
plt.title('Spring Layout', fontsize=20)
nx.draw_spring(galexy_network,
               labels=galexy_id2word,
               font_family=font_name,
               **option)

plt.show()

# Degree centrality
nx.degree_centrality(galexy_network)

# Eigenvector centrality (prestige)
nx.eigenvector_centrality(galexy_network, weight='weight')

# Closeness centrality
nx.closeness_centrality(galexy_network, distance='weight')

# Current Flow Closeness
nx.current_flow_closeness_centrality(galexy_network)

# Current Flow Betweenness
nx.current_flow_betweenness_centrality(galexy_network)

# Communicability Betweenness
nx.communicability_betweenness_centrality(galexy_network)
Example No. 17
logging.basicConfig(level=logging.INFO,
                    format='%(module)s:%(levelname)s:%(asctime)s:%(message)s',
                    handlers=[
                        logging.FileHandler("../logs/report.log"),
                        logging.StreamHandler()
                    ])
logging.info(args)

net = utils.read_network(args.in_net_path)

t0 = time()

print('Calculating...')

if args.measure == 'degree':
    cent = nx.degree_centrality(net)
elif args.measure == 'betweenness':
    cent = nx.betweenness_centrality(net)
elif args.measure == 'current':
    cent = nx.approximate_current_flow_betweenness_centrality(net, solver='lu')
elif args.measure == 'communicability':
    cent = nx.communicability_betweenness_centrality(net)

logging.info('Completed calculating measure {}. Time: {}'.format(
    args.measure,
    time() - t0))

cent_df = pd.DataFrame(list(cent.items()), columns=['entrez', args.measure])

cent_df.to_csv(args.out_path, sep='\t', index=False)
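# The script reads `args.in_net_path`, `args.measure` and `args.out_path`
# from a parser defined elsewhere. A minimal parser consistent with those
# attribute names (a sketch, not the original) could look like:
import argparse

parser = argparse.ArgumentParser(description='Compute one centrality measure for a network.')
parser.add_argument('--in-net-path', required=True)   # becomes args.in_net_path
parser.add_argument('--measure', default='degree',
                    choices=['degree', 'betweenness', 'current', 'communicability'])
parser.add_argument('--out-path', required=True)      # becomes args.out_path
args = parser.parse_args()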
    def _calculate(self, include: set):
        self._features = nx.communicability_betweenness_centrality(self._gnx.to_undirected())
Example No. 19
# Node closeness: the average shortest-path distance between a node and
# every other node in the graph.
# clo_ctl = nx.closeness_centrality(G)
# print(clo_ctl)

# A good indicator of "flow-center" nodes.
# Current-flow closeness centrality treats edges as resistors and nodes as
# the junctions between them (how much flow passes through a node on paths
# between pairs of nodes).
clo_flow_ctl = nx.current_flow_closeness_centrality(G)
print(sum(clo_flow_ctl.values()))

# print(max(clo_flow_ctl, key=lambda x: clo_flow_ctl[x]))
# print(max(clo_flow_ctl, key=clo_flow_ctl.get))
print(sorted(clo_flow_ctl, key=lambda x: clo_flow_ctl[x]))

# Communicability (relational) centrality
com_centre = nx.communicability_betweenness_centrality(G)

# print(com_centre)
print(sum(com_centre.values()))
print(sorted(com_centre, key=lambda x: com_centre[x]))

# Link analysis
# PageRank: ranks nodes from the link structure (degrees) of the graph
# page_rank = nx.pagerank(G)
# print(page_rank)
# print(sum(page_rank.values()))
# page_rank_num = nx.pagerank_numpy(G)
# print(page_rank_num)

# Link-prediction algorithms
# Resource allocation: how a hub with several peripheral nodes distributes
# resources (split evenly across its edges/degree)
Example No. 20
def communicability_betweenness(uG, ni, nj, rand_node):
    # computes centrality for the whole graph, then returns the scores for
    # nj and a random reference node (ni is unused here)
    d = nx.communicability_betweenness_centrality(uG)
    return d[nj], d[rand_node]
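# Each call above recomputes the centrality of the whole graph just to read
# two entries. If this gets queried repeatedly on the same graph, a cached
# variant (a sketch; caching by object identity is an assumption) avoids
# repeating the expensive matrix-exponential work:
_cbc_cache = {}

def communicability_betweenness_cached(uG, ni, nj, rand_node):
    key = id(uG)  # valid only while the same graph object is alive
    if key not in _cbc_cache:
        _cbc_cache[key] = nx.communicability_betweenness_centrality(uG)
    d = _cbc_cache[key]
    return d[nj], d[rand_node]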