def test_find_cores(self):
    """Nodes of self.G, grouped by core number, match the known layout."""
    core = nx.find_cores(self.G)
    expected = {
        0: [21],
        1: [17, 18, 19, 20],
        2: [9, 10, 11, 12, 13, 14, 15, 16],
        3: [1, 2, 3, 4, 5, 6, 7, 8],
    }
    for val in range(4):
        members = sorted(n for n in core if core[n] == val)
        assert nodes_equal(members, expected[val])
def filterOnCores(self, mincorenum):
    """Drop every node whose core number is below ``mincorenum``, then prune empties."""
    kcores = NX.find_cores(self)
    too_shallow = [
        node
        for (node, corenum) in list(kcores.items())
        if corenum < mincorenum
    ]
    self.remove_nodes_from(too_shallow)
    self.filterEmptyNodes()
def test_find_cores2(self):
    """Core numbers of self.H partition its nodes into the expected groups."""
    core = nx.find_cores(self.H)
    expected = ([0], [1, 3], [2, 4, 5, 6])
    for val, want in enumerate(expected):
        assert_equal(sorted(n for n in core if core[n] == val), want)
def filterOnCores(self, mincorenum):
    """Remove all nodes with core number strictly less than ``mincorenum``."""
    kcores = NX.find_cores(self)
    doomed = []
    for node, corenum in kcores.items():
        if corenum < mincorenum:
            doomed.append(node)
    self.remove_nodes_from(doomed)
    self.filterEmptyNodes()
def test_find_cores2(self):
    """Core numbers of self.H partition its nodes into the expected groups."""
    cores = nx.find_cores(self.H)
    # Comprehension replaces the manual append loop — same values, clearer.
    nodes_by_core = [sorted(k for k in cores if cores[k] == val)
                     for val in range(3)]
    assert_equal(nodes_by_core[0], [0])
    assert_equal(nodes_by_core[1], [1, 3])
    assert_equal(nodes_by_core[2], [2, 4, 5, 6])
def test_find_cores2(self):
    """find_cores assigns the expected core number to every node of H."""
    core = nx.find_cores(self.H)
    expected = {0: [0], 1: [1, 3], 2: [2, 4, 5, 6]}
    for val in range(3):
        found = sorted(n for n in core if core[n] == val)
        assert_equal(found, expected[val])
def test_find_cores(self):
    """Membership of each k-core shell of self.G matches the fixture."""
    core = nx.find_cores(self.G)
    shells = {}
    for node, num in core.items():
        shells.setdefault(num, []).append(node)
    assert_equal(sorted(shells.get(0, [])), [21])
    assert_equal(sorted(shells.get(1, [])), [17, 18, 19, 20])
    assert_equal(sorted(shells.get(2, [])), [9, 10, 11, 12, 13, 14, 15, 16])
    assert_equal(sorted(shells.get(3, [])), [1, 2, 3, 4, 5, 6, 7, 8])
def test_find_cores2(self):
    """Core numbers of self.H partition its nodes into the expected groups."""
    cores = nx.find_cores(self.H)
    # PEP 8 spacing applied; the manual append loop is now a comprehension.
    nodes_by_core = [sorted(k for k in cores if cores[k] == val)
                     for val in range(3)]
    assert_equal(nodes_by_core[0], [0])
    assert_equal(nodes_by_core[1], [1, 3])
    assert_equal(nodes_by_core[2], [2, 4, 5, 6])
def test_find_cores(self):
    """Nodes of self.G, grouped by core number, match the known layout."""
    cores = nx.find_cores(self.G)
    # Comprehension replaces the manual append loop — same values, clearer.
    nodes_by_core = [sorted(k for k in cores if cores[k] == val)
                     for val in range(4)]
    assert_equal(nodes_by_core[0], [21])
    assert_equal(nodes_by_core[1], [17, 18, 19, 20])
    assert_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16])
    assert_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8])
def test_find_cores(self):
    """Nodes of self.G, grouped by core number, match the known layout."""
    cores = nx.find_cores(self.G)
    # PEP 8 spacing applied; the manual append loop is now a comprehension.
    nodes_by_core = [sorted(k for k in cores if cores[k] == val)
                     for val in range(4)]
    assert_equal(nodes_by_core[0], [21])
    assert_equal(nodes_by_core[1], [17, 18, 19, 20])
    assert_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16])
    assert_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8])
def prune(graph, cutoff=1):
    '''
    Only keep nodes with a connectivity >= cutoff.

    Returns the subgraph of ``graph`` induced by the nodes whose
    core number (in the undirected projection) is at least ``cutoff``.
    '''
    # The to_undirected function is tempting, but it copies all the
    # data, which is unnecessary for this algorithm.
    ugraph = nx.Graph()
    ugraph.add_nodes_from(graph)
    # BUG FIX: Graph.edges_iter() was removed in NetworkX 2.0.
    # graph.edges() yields the same (u, v) pairs on both 1.x (list)
    # and 2.x (EdgeView), so this works across versions.
    ugraph.add_edges_from(graph.edges())
    cores = nx.find_cores(ugraph)
    core_nodes = [n for n in graph.nodes() if cores[n] >= cutoff]
    return graph.subgraph(core_nodes)
def get_table(net):
    """Build a DataFrame of per-node metrics for ``net``.

    Columns: degree, degree centrality, coreness and eigenvector
    centrality, indexed by node.
    """
    print('getting coreness...')
    coreness = nx.find_cores(net)
    print('getting degrees...')
    degrees = dict(nx.degree(net))
    print('getting degree centrality...')
    degree_cent = nx.degree_centrality(net)
    print('getting eigenvector centrality...')
    eigen = nx.eigenvector_centrality_numpy(net)
    print('compiling table...')
    columns = {
        'degree': degrees,
        'degree_cent': degree_cent,
        'coreness': coreness,
        'eigenvector': eigen,
    }
    return pd.DataFrame(columns, index=list(net.nodes()))
colors[idx], label="node-ids = " + str(node)) ax.legend(loc=0) fig3.tight_layout() fig3.savefig('./average_prevalence_2.png') # Task 4 start_node = np.random.randint(0, num_of_nodes, 20) infected_list = [] for idx, node in enumerate(start_node): infected_link = list() infected = SI(network, sorted_flights, node, 0.5) infected_list.append(infected) kshell = np.array(list(nx.find_cores(network).values())) clustering = np.array(list(nx.clustering(network).values())) degree = np.array(list(nx.degree(network).values())) strength = np.array(list(nx.degree(network, weight='weight').values())) betweenness = np.array( list(nx.betweenness_centrality(network, normalized=True).values())) closeness = np.array(list(nx.closeness_centrality(network).values())) centrality = np.array( [kshell, clustering, degree, strength, betweenness, closeness]) start_node = np.random.randint(0, num_of_nodes, 50) infected_list = [] for idx, node in enumerate(start_node): infected = SI(network, sorted_flights, node, 0.5) infected_list.append(infected)
def test_trivial(self):
    """Empty graph"""
    empty = nx.Graph()
    result = nx.find_cores(empty)
    assert_equal(result, {})
def test_trivial(self):
    """Empty graph"""
    # PEP 8: space after the comma (matches the sibling test's formatting).
    G = nx.Graph()
    assert_equal(nx.find_cores(G), {})
# --- Exploratory script section: moderator activity + network metric histograms ---
# Rows whose 'degree' entry is literally False mark mods absent from the
# comment graph that month; everyone else has a real degree value.
missing = mod_table[mod_table['degree'] == False]
present = mod_table[mod_table['degree'] != False]
print(f'there are {len(missing)} mods who did not make comments that month')
scatter_table(author_table)
scatter_table(present)
# Restrict the post graph to its giant component before tabulating metrics.
post_giant = get_giant(posts)
post_table = get_table(post_giant)
scatter_table(post_table)
# Histograms comparing raw metrics on the full graphs against the
# giant-component tables (post_table / author_table built above).
plt.hist(list(dict(nx.degree(posts)).values()))
plt.hist(list(dict(nx.degree_centrality(posts)).values()))
plt.hist(list(dict(nx.find_cores(posts)).values()))
plt.hist(post_table['coreness'])
plt.hist(list(dict(nx.eigenvector_centrality_numpy(posts)).values()))
plt.hist(post_table['eigenvector'])
plt.hist(list(dict(nx.degree(authors)).values()))
plt.hist(list(dict(nx.degree_centrality(authors)).values()))
plt.hist(list(dict(nx.find_cores(authors)).values()))
plt.hist(author_table['coreness'])
plt.hist(list(dict(nx.eigenvector_centrality_numpy(authors)).values()))
plt.hist(author_table['eigenvector'])
# post data
# Load June 2017 CMV submissions; timestamps arrive as epoch seconds.
post_data = pd.read_pickle(
    '/Users/emg/Programming/GitHub/comment-authors/cmv_17_06_posts.pkl')
post_data['time'] = pd.to_datetime(post_data['created_utc'], unit='s')
def test_trivial(self):
    """Empty graph"""
    graph = nx.Graph()
    result = nx.find_cores(graph)
    assert result == {}
def handle(self, *args, **options):
    # Management command (Python 2 / NetworkX 1.x era: `print` statement,
    # dict.iteritems, Graph.edge, nx.to_agraph, connected_component_subgraphs).
    # Builds a faculty<->expertise bipartite graph, projects it onto expertise
    # co-occurrence, clusters the projection and renders it with pygraphviz.
    # NOTE(review): indentation below is reconstructed from a
    # whitespace-collapsed source — verify nesting against the original file.
    members = FacultyMember.actives.filter(expertise__facultymember__isnull=False).distinct()
    # graph1: bipartite graph of faculty names and their expertise areas.
    graph1 = nx.Graph()
    for member in members:
        graph1.add_node(member.full_name())
        for area in member.expertise.all():
            if not graph1.has_node(area.name):
                graph1.add_node(area.name)
            graph1.add_edge(member.full_name(), area.name)
    expertise = Expertise.objects.all()
    #cores = nx.find_cores(graph1)
    #sorted_cores = sorted(cores.iteritems(),key=operator.itemgetter(1))
    #print sorted_cores
    # graph2: expertise co-occurrence projection — two areas are linked when
    # at least one faculty member holds both; edge "weight" counts members.
    graph2 = nx.Graph()
    for member in members:
        if graph1.has_node(member.full_name()):
            a_node = graph1[member.full_name()]
            subgraph = nx.Graph()
            keys = a_node.keys()
            subgraph.add_nodes_from(keys)
            # All unordered pairs of this member's expertise areas.
            for index, node in enumerate(keys):
                for v in range(index + 1, len(keys)):
                    weight = 1
                    if graph2.has_edge(node, keys[v]):
                        # Pair seen before: bump the accumulated weight.
                        graph2.edge[node][keys[v]]["weight"] += 1
                    if subgraph.has_edge(node, keys[v]):
                        subgraph.edge[node][keys[v]]["weight"] = weight
                    else:
                        subgraph.add_edge(node, keys[v], weight=weight)
            #subgraph = nx.complete_graph(0,create_using=subgraph)
            graph2 = nx.compose(graph2, subgraph)
    # Prune edges supported by only a single faculty member.
    for edge in graph2.edges():
        #print edge, graph2[edge[0]][edge[1]]["weight"]
        if graph2[edge[0]][edge[1]]["weight"] == 1:
            graph2.remove_edge(edge[0], edge[1])
    # Core decomposition of the pruned projection, sorted by core number.
    cores = nx.find_cores(graph2)
    sorted_cores = sorted(cores.iteritems(), key=operator.itemgetter(1))
    print sorted_cores
    agraph = nx.to_agraph(graph2)
    # id = 0
    # nodes = []
    # for index,core in enumerate(sorted_cores):
    #     nodes.append(core[0])
    #     if core[1] != id or index == len(sorted_cores)-1:
    #         id = core[1]
    #         sub = agraph.subgraph(nbunch=nodes,
    #                               name="cluster%d" % (id),
    #                               style='filled',
    #                               color='lightgrey',
    #                               label='cluster %d' % (id))
    #         nodes = []
    import graphutils
    # Cluster the largest connected component of the projection.
    graph2, subs = graphutils.computeSubgraphClusters(nx.connected_component_subgraphs(graph2)[0], 0, 5)
    agraph2 = nx.to_agraph(graph2)
    # One pygraphviz cluster subgraph per computed cluster.
    for index, sub in enumerate(subs):
        sub = agraph2.subgraph(nbunch=sub.nodes(),
                               name="cluster%d" % (index),
                               style='filled',
                               color='#ccccff',
                               fontcolor='#000033',
                               fontname="Gotham Rounded",
                               label='cluster %d' % (index + 1))
    #agraph.graph_attr.update(layout="neato",bgcolor="#000000", bb = "0,0,822,810", viewport="1440,810,1,411,405", overlap="scale")
    agraph2.graph_attr.update(layout="fdp", bgcolor="#ffffff", overlap="scale", splines="true", bb="10,7.5", size="10,7.5", page="11,8.5", maxiter="2400")
    # Custom font may be unavailable; fall back to default when setting it fails.
    try:
        agraph2.node_attr.update(color="#000033", fontcolor="#000033", shape="plaintext", fontsize="10", fontname="Gotham Rounded")
    except:
        agraph2.node_attr.update(color="#000033", fontcolor="#000033", shape="plaintext", fontsize="10")
    agraph2.edge_attr.update(color="#333366", fontcolor="#333333", arrowhead="dot", arrowtail="dot", arrowsize="0.5")
    agraph2.layout(prog="fdp")
    # Optional positional args: dot output path, then PDF render path.
    if len(args) > 0:
        agraph2.write(args[0])
    if len(args) > 1:
        agraph2.draw(args[1], format="pdf")
def immunization(network, sorted_flights):
    # Compare immunization strategies for an SI epidemic on a flight network:
    # pick the top-10 nodes by several centrality measures, immunize them,
    # run the SI simulation from random seeds, and plot average prevalence
    # over time for each strategy into ./immunization.png.
    p = 0.5  # infection probability passed to SI()
    colors = ['r-', 'g-', 'b-', 'c-', 'm-', 'y-', 'k-', '#50f386']
    num_iter = 20  # SI runs averaged per strategy
    num_nodes = network.number_of_nodes()
    # Top-10 nodes by k-shell (core number).
    kshell = sorted(nx.find_cores(network).items(),
                    key=operator.itemgetter(1),
                    reverse=True)[0:10]
    immunized_node_kshell = {tup[0] for tup in kshell}
    # Top-10 by clustering coefficient.
    clustering = sorted(nx.clustering(network).items(),
                        key=operator.itemgetter(1),
                        reverse=True)[0:10]
    immunized_node_clustering = {tup[0] for tup in clustering}
    # Top-10 by (unweighted) degree.
    degree = sorted(list(nx.degree(network).items()),
                    key=operator.itemgetter(1),
                    reverse=True)[0:10]
    immunized_node_degree = {tup[0] for tup in degree}
    # Top-10 by strength (weighted degree).
    strength = sorted(list(nx.degree(network, weight='weight').items()),
                      key=operator.itemgetter(1),
                      reverse=True)[0:10]
    immunized_node_strength = {tup[0] for tup in strength}
    # Top-10 by betweenness centrality.
    betweenness = sorted(nx.betweenness_centrality(network, normalized=True).items(),
                         key=operator.itemgetter(1),
                         reverse=True)[0:10]
    immunized_node_betweenness = {tup[0] for tup in betweenness}
    # Top-10 by (weighted) closeness centrality.
    closeness = sorted(nx.closeness_centrality(network, distance='weight').items(),
                       key=operator.itemgetter(1),
                       reverse=True)[0:10]
    immunized_node_closeness = {tup[0] for tup in closeness}
    # Baseline: 10 uniformly random nodes (duplicates collapse in the set).
    immunized_node_random = set(np.random.randint(0, num_nodes, 10))
    # social network strategy
    # "Acquaintance" immunization: pick random nodes, immunize a random
    # neighbor of each, until 10 distinct neighbors are collected.
    immunized_node_social = set()
    while len(immunized_node_social) < 10:
        random = np.random.randint(0, num_nodes, 10)
        for node in random:
            rnd_neighbor = np.random.choice(list(network.neighbors(node)))
            immunized_node_social.add(rnd_neighbor)
    # Union of all immunized sets — seeds must avoid every one of them so
    # each strategy is tested from the same (never-immunized) seeds.
    all_immunized_nodes = immunized_node_social\
        .union(immunized_node_kshell)\
        .union(immunized_node_clustering)\
        .union(immunized_node_degree)\
        .union(immunized_node_strength)\
        .union(immunized_node_betweenness)\
        .union(immunized_node_closeness)\
        .union(immunized_node_random)
    # *************
    available_nodes = list(set(range(0, num_nodes)) - all_immunized_nodes)
    seed_list = np.random.choice(available_nodes, 20)
    # Simulation time window spans from the first departure to the last arrival.
    first_departure = sorted_flights[0]["StartTime"]
    last_arrival = max(sorted_flights["EndTime"])
    num_nodes = network.number_of_nodes()
    fig5 = plt.figure()
    ax = fig5.add_subplot(111)
    plt.xticks(rotation=45)
    ax.set_xlabel("Time")
    ax.set_ylabel("Average Prevalence")
    # Evenly spaced sample times, converted to matplotlib date numbers for
    # plot_date (epoch seconds -> string -> matplotlib float dates).
    time_step = np.linspace(first_departure, last_arrival, num_nodes)
    time_step_converted = [
        dt.datetime.fromtimestamp(date).strftime('%Y-%m-%d %H:%M')
        for date in time_step
    ]
    time_step_converted = md.datestr2num(time_step_converted)
    strategies = {
        'kshell': immunized_node_kshell,
        'clustering': immunized_node_clustering,
        'degree': immunized_node_degree,
        'strength': immunized_node_strength,
        'betweenness': immunized_node_betweenness,
        'closeness': immunized_node_closeness,
        'random': immunized_node_random,
        'social': immunized_node_social
    }
    # One averaged prevalence curve per strategy.
    for idx, strategy in enumerate(strategies.keys()):
        immunized_nodes = strategies[strategy]
        infected_times = []
        for i in range(num_iter):
            seed_node = seed_list[i]
            infected = SI(network, sorted_flights, seed_node, p,
                          immunized_nodes)
            infected.sort()
            infected_times.append(infected)
        avg_prevalence = prevalence(infected_times, num_iter, num_nodes,
                                    time_step)
        ax.plot_date(time_step_converted,
                     avg_prevalence,
                     colors[idx],
                     label=strategy)
    ax.legend(loc=0)
    fig5.tight_layout()
    fig5.savefig('./immunization.png')
    return None
def kcoreness_iter(self, nodes=None):
    """Iterate the k-coreness parameter; core numbers are computed once and cached."""
    # Lazily populate the cache on first use.
    self.cores = self.cores or nx.find_cores(self)
    return self.generic_networkx_parameter_iter(
        self.__kcoreness_func, 'kcoreness', self.cores, nodes)
def find_community(graph):
    """Return the nodes of ``graph`` as a list.

    NOTE(review): despite the name, this keeps only the *keys* of the
    node -> core-number mapping from ``nx.find_cores`` — i.e. node ids,
    not any community assignment.
    """
    core_numbers = nx.find_cores(graph)
    return [node for node in core_numbers]
def kcoreness(self, nodes=None):
    """Return the k-coreness parameter computed from this graph's core numbers."""
    core_numbers = nx.find_cores(self)
    return self.generic_networkx_parameter(
        self.__kcoreness_func, 'kcoreness', core_numbers, nodes)
def cores(G):
    """Return the core numbers of G's nodes (values of the node -> core mapping)."""
    core_of = nx.find_cores(G)
    return core_of.values()