def test_closeness_centrality(self):
    """Bipartite closeness centrality matches hand-computed values on the
    P4, K3 and C4 fixtures."""
    fixtures = [
        (self.P4, [1, 3], {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}),
        (self.K3, [0, 1, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}),
        (self.C4, [0, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}),
    ]
    for graph, part, expected in fixtures:
        result = bipartite.closeness_centrality(graph, part)
        assert_equal(result, expected)
def test_closeness_centrality(self):
    """Check bipartite closeness centrality on the P4, K3 and C4 fixtures."""
    result = bipartite.closeness_centrality(self.P4, [1, 3])
    assert_equal(result, {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3})
    result = bipartite.closeness_centrality(self.K3, [0, 1, 2])
    assert_equal(result, {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0})
    result = bipartite.closeness_centrality(self.C4, [0, 2])
    assert_equal(result, {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0})
def test_closeness_centrality(self):
    """Closeness centrality on the standard fixtures, plus a degenerate
    two-node edgeless graph (result is {1: 0.0} for either partition)."""
    cases = [
        (self.P4, [1, 3], {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}),
        (self.K3, [0, 1, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}),
        (self.C4, [0, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}),
    ]
    for graph, part, expected in cases:
        assert_equal(bipartite.closeness_centrality(graph, part), expected)
    # Two isolated nodes, no edges.
    G = nx.Graph()
    G.add_node(0)
    G.add_node(1)
    assert_equal(bipartite.closeness_centrality(G, [0]), {1: 0.0})
    assert_equal(bipartite.closeness_centrality(G, [1]), {1: 0.0})
def test_closeness_centrality(self):
    """Closeness centrality on P4/K3/C4 and on a two-node edgeless graph."""
    for graph, part, expected in (
        (self.P4, [1, 3], {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}),
        (self.K3, [0, 1, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}),
        (self.C4, [0, 2], {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}),
    ):
        assert bipartite.closeness_centrality(graph, part) == expected
    # Degenerate graph: two isolated nodes, no edges; both partition
    # choices yield {1: 0.0}.
    G = nx.Graph()
    G.add_node(0)
    G.add_node(1)
    assert bipartite.closeness_centrality(G, [0]) == {1: 0.0}
    assert bipartite.closeness_centrality(G, [1]) == {1: 0.0}
def calculate_centrality(fp, centrality_type, perm_maps):
    """Build an app/permission bipartite graph from
    ./txt_critical_perms/apps_file/<fp>.txt and return
    (centrality_result, is_connected) for the requested centrality_type
    ('degree', 'closeness', 'betweenness' or 'clustering').

    Returns None implicitly when centrality_type matches no branch or
    when the selected branch raises.

    NOTE(review): reconstructed from a collapsed one-line source; Python 2
    syntax (print statements) preserved as-is.
    """
    print '%s : start to read %s.txt '%(centrality_type, fp)
    g = nx.Graph()
    # Progress-reporting state handed to p_percent (total, counter, percent).
    i_t = 100000
    i_i = 0
    p = 0
    # NOTE(review): this handle is never closed -- consider a with-block.
    f = codecs.open('./txt_critical_perms/apps_file/%s.txt'%(fp), 'r', encoding='utf-8')
    l = f.readline()
    l = f.readline()  # second readline discards the first (header) row
    while l:
        p, i_i = p_percent(p, i_i, i_t, 10)
        ls = l.split('\t')
        app_id = ls[0].strip().lower()
        perm_id = ls[1].strip().lower()
        g.add_node(app_id, bipartite=0)   # top partition: apps
        g.add_node(perm_id, bipartite=1)  # bottom partition: permissions
        g.add_edge(app_id, perm_id)
        l = f.readline()
    is_connect = nx.is_connected(g)
    print u'end read: %s'%(fp), is_connect
    # Split the two partitions by the 'bipartite' node attribute instead of
    # the commented-out bipartite.sets call.
    #node_data, node_app = bipartite.sets(g)
    node_data = set(n for n, d in g.nodes(data=True) if d['bipartite'] == 1)
    node_app = set(g) - node_data
    ## degree centrality of the permission partition
    if centrality_type == 'degree':
        try:
            centrality = bipartite.degree_centrality(g, node_data)
            result = get_centrality_out(fp, node_data, node_app, centrality, centrality_type, perm_maps)
            return result, is_connect
        except Exception as e:
            print '** error in centrality: %s : %s'%(centrality_type, fp), e
    ## closeness centrality (unnormalized, app partition as container)
    if centrality_type == 'closeness':
        try:
            centrality = bipartite.closeness_centrality(g, node_app, normalized=False)
            result = get_centrality_out(fp, node_data, node_app, centrality, centrality_type, perm_maps)
            return result, is_connect
        except Exception as e:
            print '**** error in centrality : %s : %s'%(centrality_type, fp), e
    ## betweenness centrality (app partition as container)
    if centrality_type == 'betweenness':
        try:
            centrality = bipartite.betweenness_centrality(g, node_app)
            result = get_centrality_out(fp, node_data, node_app, centrality, centrality_type, perm_maps)
            return result, is_connect
        except Exception as e:
            print '**** error in centrality : %s : %s'%(centrality_type, fp), e
    ## bipartite clustering of the permission partition, 'dot' mode
    if centrality_type == 'clustering':
        try:
            centrality = bipartite.clustering(g, node_data, mode='dot')
            result = get_centrality_out(fp, node_data, node_app, centrality, centrality_type, perm_maps)
            return result, is_connect
        except Exception as e:
            print '**** error in centrality : %s : %s'%(centrality_type, fp), e
def info_network(G):
    """Print assorted summary statistics and centrality values for graph G.

    NOTE(review): Python 2 print statements preserved; reconstructed from a
    collapsed one-line source.
    """
    from networkx.algorithms import bipartite
    from decimal import Decimal  # NOTE(review): imported but unused in this body
    print G.number_of_nodes()
    print G.number_of_edges()
    print "average_neighbor_degree"
    dict = nx.average_neighbor_degree(G)  # shadows the builtin 'dict'
    list1 = dict.keys()
    list2 = dict.values()
    print list1
    print list2
    print "degree_assortativity_coefficient"
    print nx.degree_assortativity_coefficient(G)
    print "degree_pearson_correlation_coefficient"
    print nx.degree_pearson_correlation_coefficient(G)
    # print nx.k_nearest_neighbors(G)
    print "STOP HERE"
    print "bipartite.closeness_centrality(G,G.node)"
    # NOTE(review): G.node is the pre-2.4 NetworkX node-dict attribute; here
    # it is passed as the "container of nodes in one partition" argument,
    # i.e. every node -- confirm this is intended.
    dict2 = bipartite.closeness_centrality(G, G.node)
    list3 = dict2.values()
    print list3
    print "nx.degree_centrality(G)"
    dict3 = nx.degree_centrality(G)
    list4 = dict3.values()
    print list4
    print "nx.betweenness_centrality(G)"
    dict4 = nx.betweenness_centrality(G)
    list5 = dict4.values()
    print list5
    print "hits_numpy"
    dict5 = nx.hits_numpy(G)
    print dict5
def test_davis_closeness_centrality(self):
    """Closeness centrality for the Davis Southern Women graph, checked
    to two decimal places against published values."""
    expected = {
        "E8": 0.85,
        "E9": 0.79,
        "E7": 0.73,
        "Nora Fayette": 0.80,
        "Evelyn Jefferson": 0.80,
        "Theresa Anderson": 0.80,
        "E6": 0.69,
        "Sylvia Avondale": 0.77,
        "Laura Mandeville": 0.73,
        "Brenda Rogers": 0.73,
        "Katherina Rogers": 0.73,
        "E5": 0.59,
        "Helen Lloyd": 0.73,
        "E3": 0.56,
        "Ruth DeSand": 0.71,
        "Verne Sanderson": 0.71,
        "E12": 0.56,
        "Myra Liddel": 0.69,
        "E11": 0.54,
        "Eleanor Nye": 0.67,
        "Frances Anderson": 0.67,
        "Pearl Oglethorpe": 0.67,
        "E4": 0.54,
        "Charlotte McDowd": 0.60,
        "E10": 0.55,
        "Olivia Carleton": 0.59,
        "Flora Price": 0.59,
        "E2": 0.52,
        "E1": 0.52,
        "Dorothy Murchison": 0.65,
        "E13": 0.52,
        "E14": 0.52,
    }
    computed = bipartite.closeness_centrality(self.davis, self.top_nodes)
    for label, want in expected.items():
        assert almost_equal(want, computed[label], places=2)
def test_davis_closeness_centrality(self):
    """Davis Southern Women closeness centrality agrees with reference
    values to two decimal places."""
    G = self.davis
    scores = bipartite.closeness_centrality(G, self.top_nodes)
    reference = {
        'E8': 0.85, 'E9': 0.79, 'E7': 0.73,
        'Nora Fayette': 0.80, 'Evelyn Jefferson': 0.80,
        'Theresa Anderson': 0.80, 'E6': 0.69,
        'Sylvia Avondale': 0.77, 'Laura Mandeville': 0.73,
        'Brenda Rogers': 0.73, 'Katherina Rogers': 0.73,
        'E5': 0.59, 'Helen Lloyd': 0.73, 'E3': 0.56,
        'Ruth DeSand': 0.71, 'Verne Sanderson': 0.71,
        'E12': 0.56, 'Myra Liddel': 0.69, 'E11': 0.54,
        'Eleanor Nye': 0.67, 'Frances Anderson': 0.67,
        'Pearl Oglethorpe': 0.67, 'E4': 0.54,
        'Charlotte McDowd': 0.60, 'E10': 0.55,
        'Olivia Carleton': 0.59, 'Flora Price': 0.59,
        'E2': 0.52, 'E1': 0.52, 'Dorothy Murchison': 0.65,
        'E13': 0.52, 'E14': 0.52,
    }
    for name, expected_value in reference.items():
        assert almost_equal(expected_value, scores[name], places=2)
def compute_centrality(nets=None, names=None):
    """Compute bipartite centrality measures (degree, betweenness, closeness)
    plus eigenvector centrality for each network and pickle the combined
    results to the module-level results_dir.

    nets  : iterable of graphs; defaults to networks_by_year().
    names : labels zipped with nets; defaults to default_years.
    """
    datet = datetime.datetime.today()
    date = datet.strftime("%Y%m%d%H%M")  # run timestamp embedded in the filename
    if names is None:
        names = default_years
    if nets is None:
        nets = networks_by_year()
    result = {}
    for name, G in zip(names, nets):
        result[name] = {}
        print("computing centrality for {}".format(name))
        # Developer partition: nodes flagged with bipartite == 1.
        devs = set(n for n, d in G.nodes(data=True) if d['bipartite']==1)
        result[name]['deg'] = bp.degree_centrality(G, devs)
        try:
            result[name]['bet'] = bp.betweenness_centrality(G, devs)
        except ZeroDivisionError:
            # Presumably degenerate partition sizes break the normalization;
            # fall back to an empty mapping rather than aborting the run.
            result[name]['bet'] = dict()
        result[name]['clos'] = bp.closeness_centrality(G, devs)
        result[name]['ev'] = nx.eigenvector_centrality_numpy(G)
    # NOTE(review): reconstructed from a collapsed one-line source -- the
    # write below may originally sit inside the loop; the final file contents
    # are identical either way because fname is fixed for the whole run.
    # 'name' here is the last label produced by the loop above.
    fn = 'years' if name == 2014 else 'branches'
    fname = "{0}/bipartite_centrality_{1}_{2}.pkl".format(results_dir, fn, date)
    utils.write_results_pkl(result, fname)
def test_davis_closeness_centrality(self):
    """Closeness centrality on the Davis graph matches reference values
    to two decimal places (nose-style assert_almost_equal)."""
    G = self.davis
    clos = bipartite.closeness_centrality(G, self.top_nodes)
    answer = {
        'E8': 0.85,
        'E9': 0.79,
        'E7': 0.73,
        'Nora Fayette': 0.80,
        'Evelyn Jefferson': 0.80,
        'Theresa Anderson': 0.80,
        'E6': 0.69,
        'Sylvia Avondale': 0.77,
        'Laura Mandeville': 0.73,
        'Brenda Rogers': 0.73,
        'Katherina Rogers': 0.73,
        'E5': 0.59,
        'Helen Lloyd': 0.73,
        'E3': 0.56,
        'Ruth DeSand': 0.71,
        'Verne Sanderson': 0.71,
        'E12': 0.56,
        'Myra Liddel': 0.69,
        'E11': 0.54,
        'Eleanor Nye': 0.67,
        'Frances Anderson': 0.67,
        'Pearl Oglethorpe': 0.67,
        'E4': 0.54,
        'Charlotte McDowd': 0.60,
        'E10': 0.55,
        'Olivia Carleton': 0.59,
        'Flora Price': 0.59,
        'E2': 0.52,
        'E1': 0.52,
        'Dorothy Murchison': 0.65,
        'E13': 0.52,
        'E14': 0.52,
    }
    for node, value in answer.items():
        assert_almost_equal(value, clos[node], places=2)
# NOTE(review): Python 2 script fragment reconstructed from a collapsed
# one-line source. The add_edges_from call references 'row', presumably the
# loop variable of an enclosing data-loading loop not visible here -- confirm.
G.add_edges_from([(row[0],row[1])],weight=row[2])
print "average_neighbor_degree"
print nx.average_neighbor_degree(G)
print "degree_assortativity_coefficient"
print nx.degree_assortativity_coefficient(G)
print "degree_pearson_correlation_coefficient"
print nx.degree_pearson_correlation_coefficient(G)
#print nx.k_nearest_neighbors(G)
print "bipartite.closeness_centrality"
# NOTE(review): G.node is the pre-2.4 NetworkX node dict, passed here as
# the partition container (i.e. all nodes) -- verify against the NetworkX
# version in use.
print bipartite.closeness_centrality(G,G.node)
print "degree_centrality"
print nx.degree_centrality(G)
print "betweenness_centrality"
print nx.betweenness_centrality(G)
print "k_nearest_neighbors"
print nx.k_nearest_neighbors(G)
#print nx.current_flow_closeness_centrality(G, normalized=True, weight='weight', dtype='float', solver='lu')
#centrality=nx.eigenvector_centrality(G)
#print(['%s %0.2f'%(node,centrality[node]) for node in centrality])
#print nx.eigenvector_centrality(G, max_iter=100, tol=1e-02, nstart=None)
def closeness_centrality(self):
    """Compute bipartite closeness centrality of self.G over self.nodes
    and cache the result on self.closeness_centrality_dict."""
    scores = bi.closeness_centrality(self.G, self.nodes)
    self.closeness_centrality_dict = scores
# NOTE(review): Python 2 script fragment reconstructed from a collapsed
# line; the first print references 'c', presumably a loop variable from an
# enclosing loop outside this view -- confirm.
print('%d %s' % (W2.degree(c, weight='weight'), c))
# NOTE(review): nx.draw returns None, so this print emits 'None' after
# rendering -- likely unintended.
print nx.draw(W2, node_color='b', edge_color='r', with_labels=True)
plt.savefig("davisontoclubsratio.png") # save as png
plt.show() # display
print
print
# Degree summary stats
deg = bipartite.degree_centrality(g, clubs)
# Betweenness summary stats
bc = bipartite.betweenness_centrality(g, clubs)
# Closeness summary stats
cc = bipartite.closeness_centrality(g, clubs)
# Min/max trackers for the degree scores (9999 acts as a +infinity sentinel).
maxdeg = 0
mindeg = 9999
mindegwomen = []
maxdegwomen = []
degarray = []
# Same trackers for betweenness.
maxbc = 0
minbc = 9999
minbcwomen = []
maxbcwomen = []
bcarray = []
# Trackers for closeness; the accumulation continues past this fragment.
maxcc = 0
mincc = 9999
# ADD PLOTS OF DEGREE DISTRIBUTION CONSIDERING EACH EDGE TYPE (3 PLOTS) # Connectedness numSCC = nx.number_strongly_connected_components(network) numWCC = nx.number_weakly_connected_components(network) # Clustering # No C3 clustering by definition of bipartite, elaborate and explain C4 during talk cluster1 = nx.square_clustering( network) # No clustering because edges only go from users to designs cluster2 = bipartite.clustering( network) # No clustering because edges only go from users to designs # Centrality Measures # Do these factor in directedness!!!!!!!!!!!!!!!!!!!!!!!!!??????????????????????? closeness_centrality = bipartite.closeness_centrality(network, users) total_closeness_centrality = 0 for key, value in closeness_centrality.items(): total_closeness_centrality += value avg_closeness_centrality = total_closeness_centrality / len( closeness_centrality) degree_centrality = bipartite.degree_centrality(network, users) total_degree_centrality = 0 for key, value in degree_centrality.items(): total_degree_centrality += value avg_degree_centrality = total_degree_centrality / len(degree_centrality) betweenness_centrality = bipartite.betweenness_centrality(network, users) total_betweenness_centrality = 0 for key, value in betweenness_centrality.items():
# NOTE(review): Python 2 fragment reconstructed from a collapsed line;
# appears to be the interior of a network-info routine -- G and nx come
# from the enclosing scope.
dict = nx.average_neighbor_degree(G)  # shadows the builtin 'dict'
list1 = dict.keys()
list2 = dict.values()
print list1
print list2
print "degree_assortativity_coefficient"
print nx.degree_assortativity_coefficient(G)
print "degree_pearson_correlation_coefficient"
print nx.degree_pearson_correlation_coefficient(G)
#print nx.k_nearest_neighbors(G)
print "STOP HERE"
print "bipartite.closeness_centrality(G,G.node)"
# NOTE(review): G.node is the pre-2.4 NetworkX node dict, used here as the
# partition container (all nodes) -- verify against the NetworkX version.
dict2 = bipartite.closeness_centrality(G,G.node)
list3 = dict2.values()
print list3
print "nx.degree_centrality(G)"
dict3 = nx.degree_centrality(G)
list4 = dict3.values()
print list4
print "nx.betweenness_centrality(G)"
dict4 = nx.betweenness_centrality(G)
list5 = dict4.values()
print list5
#print nx.current_flow_closeness_centrality(G, normalized=True, weight='weight', dtype='float', solver='lu')
#centrality=nx.eigenvector_centrality(G)