g = standardize(WS2D(NODES, edges, radius, weak_ties))
print '# Edges %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(und(g)))
fi(g)  # Fill incoming edges
sys.stdout.flush()

# ## Execute centrality measures

# In[5]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
top_pr = [a for a, b in topk(pagerank, seed)]

# In[6]:

print '# Eigenvector Centrality...',
cscores, diffsum = ec(g)
top_eigenc = [a for a, b in topk(cscores, seed)]
print '# Done'

# In[7]:

print '# Betweenness centrality...',
bet = betweenness(g)
top_bet = [a for a, b in topk(bet, seed)]
print '# Done'
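# These cells rank the score dictionaries with topk (and, in later scripts,
# lastk) from lesson4. The lesson4 source is not shown here; below is a
# minimal sketch of what those helpers plausibly look like, assuming scores
# maps node -> float and that both return (node, score) pairs. The _sketch
# names are hypothetical.
import heapq

def topk_sketch(scores, k):
    # k (node, score) pairs with the highest scores, best first
    return heapq.nlargest(k, scores.items(), key=lambda item: item[1])

def lastk_sketch(scores, k):
    # k (node, score) pairs with the lowest scores, worst first
    return heapq.nsmallest(k, scores.items(), key=lambda item: item[1])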
from mylesson5 import eigenvector_centrality as ec
from lesson4 import topk  # topk is called below but was missing an import
from dgraph import diameter
from dgraph import readGraph
from dgraph import Page_Rank as pr
from dgraph import fill_incoming as fi
import networkx as nx

seed = 100
g = readGraph('wiki-Vote.txt')
# G = nx.from_dict_of_lists(g)
# print 'NetworkX Page Rank'
# print [a for a, b in topk(nx.pagerank(G, alpha=1.0e-6, tol=1.0e-10), 10)]
# print [a for a, b in topk(nx.eigenvector_centrality(G), 10)]
# g = {0: [2, 3, 4], 1: [0, 2, 3], 2: [1], 3: [0, 4], 4: [0]}
fi(g)
print 'Incoming edges stored'
# print 'Nodes: ', len(g.keys())
# print 'Diameter: ', diameter(g)
print 'Page Rank execution...'
# print 'Triangles: ', ctD(g)
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-8)  # alpha = 0.00001
print iterations, ' iterations. Error:', err
print 'Page Rank'
print topk(pagerank, seed)
# print 'Eigenvector Centrality'
# cscores, diffsum = ec(g)
# print [a for a, b in topk(cscores, 10)]
# bet = betweenness(g)
# print 'Betweenness centrality'
# print [a for a, b in topk(bet, 10)]
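# Page_Rank (pr) comes from dgraph, whose source is not shown here. Below is
# a rough power-iteration sketch with the same (ranks, iterations, err)
# interface, assuming g maps node -> list of out-neighbours, every endpoint
# appears as a key, alpha is the teleport probability, and eps is the L1
# stopping threshold. The real dgraph routine (e.g. its use of the
# incoming-edges dictionary built by fill_incoming) may differ.
def page_rank_sketch(g, alpha=1.0e-5, eps=1.0e-6, max_iterations=100):
    n = len(g)
    ranks = dict((u, 1.0 / n) for u in g)
    for iterations in xrange(1, max_iterations + 1):
        new = dict((u, alpha / n) for u in g)  # teleport mass
        for u in g:
            out = g[u]
            if out:
                share = (1.0 - alpha) * ranks[u] / len(out)
                for v in out:
                    new[v] += share
            else:
                # dangling node: spread its mass uniformly over all nodes
                share = (1.0 - alpha) * ranks[u] / n
                for v in new:
                    new[v] += share
        err = sum(abs(new[u] - ranks[u]) for u in g)  # L1 change
        ranks = new
        if err < eps:
            break
    return ranks, iterations, err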
# In[6]:

g = GenWSGridGraph(NODES, edges, radius, weak_ties)
print '# Edges %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(und(g)))
fi(g)  # Fill incoming edges dictionary

# ## Execute centrality measures

# In[12]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
top_pr = [a for a, b in topk(pagerank, seed)]

# In[13]:

print '# Eigenvector Centrality...',
ecscores, _ = ec(g)
top_eigenc = [a for a, b in topk(ecscores, seed)]
print '# Done'

# In[14]:

print '# Betweenness centrality...',
bet = betweenness(g)
top_bet = [a for a, b in topk(bet, seed)]
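# ec (eigenvector_centrality, from mylesson5) returns a score dictionary
# plus a convergence residual. Here is a minimal power-iteration sketch
# under that interface, assuming g maps node -> list of neighbours; the
# mylesson5 version may normalise or terminate differently.
import math

def eigenvector_centrality_sketch(g, max_iterations=100, eps=1.0e-6):
    scores = dict((u, 1.0) for u in g)
    diffsum = 0.0
    for _ in xrange(max_iterations):
        # one step of x <- A x, then rescale to unit Euclidean norm
        new = dict((u, 0.0) for u in g)
        for u in g:
            for v in g[u]:
                new[v] += scores[u]
        norm = math.sqrt(sum(x * x for x in new.values())) or 1.0
        new = dict((u, x / norm) for u, x in new.items())
        diffsum = sum(abs(new[u] - scores[u]) for u in g)
        scores = new
        if diffsum < eps:
            break
    return scores, diffsum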
# In[6]:

g = GenWSGridGraph(NODES, edges, radius, weak_ties)
print '# Edges %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(und(g)))
fi(g)  # Fill incoming edges dictionary

# ## Execute centrality measures

# In[12]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
top_pr = [a for a, b in topk(pagerank, NODES)]

# In[13]:

print '# Eigenvector Centrality...',
ecscores, _ = ec(g)
top_eigenc = [a for a, b in topk(ecscores, NODES)]
print '# Done'

# In[14]:

print '# Betweenness centrality...',
bet = betweenness(g)
top_bet = [a for a, b in topk(bet, NODES)]
#!/usr/bin/python
import sys

from lesson3 import readGraph, count2Paths, diameter
from lesson4 import betweenness, cb_max, topk

# A 5-node path graph 0-1-2-3-4: the middle node should get the highest
# betweenness score.
simple_graph = {}
simple_graph[0] = {1}
simple_graph[1] = {0, 2}
simple_graph[2] = {1, 3}
simple_graph[3] = {2, 4}
simple_graph[4] = {3}

cb = betweenness(simple_graph, True)
# graph = readGraph(sys.argv[1])
# cb = betweenness(graph)
k = 10
print 'Top ', k, '=>', topk(cb, k)
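# betweenness comes from lesson4 and is not shown here; the meaning of its
# second argument is also not visible, so the sketch below guesses it is a
# normalisation flag. This is a compact version of Brandes' algorithm for
# unweighted graphs stored as node -> set of neighbours.
from collections import deque

def betweenness_sketch(g, normalize=False):
    cb = dict((v, 0.0) for v in g)
    for s in g:
        # BFS from s, counting shortest paths (sigma) and predecessors
        stack, queue = [], deque([s])
        preds = dict((v, []) for v in g)
        sigma = dict((v, 0) for v in g); sigma[s] = 1
        dist = dict((v, -1) for v in g); dist[s] = 0
        while queue:
            v = queue.popleft()
            stack.append(v)
            for w in g[v]:
                if dist[w] < 0:
                    dist[w] = dist[v] + 1
                    queue.append(w)
                if dist[w] == dist[v] + 1:
                    sigma[w] += sigma[v]
                    preds[w].append(v)
        # accumulate dependencies in order of non-increasing distance
        delta = dict((v, 0.0) for v in g)
        while stack:
            w = stack.pop()
            for v in preds[w]:
                delta[v] += (float(sigma[v]) / sigma[w]) * (1.0 + delta[w])
            if w != s:
                cb[w] += delta[w]
    if normalize:
        n = len(g)
        scale = 1.0 / ((n - 1) * (n - 2)) if n > 2 else 1.0
        for v in cb:
            cb[v] *= scale
    return cb

# On the path graph above, betweenness_sketch gives node 2 the top score.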
# In[4]:

g = rdbg(NODES, p, edges)
print '# Edges %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(und(g)))
fi(g)  # Fill incoming edges
sys.stdout.flush()

# ## Execute centrality measures

# In[6]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
top_pr = [a for a, b in topk(pagerank, NODES)]

# In[7]:

print '# Eigenvector Centrality...',
cscores, diffsum = ec(g)
top_eigenc = [a for a, b in topk(cscores, NODES)]
print '# Done'

# In[8]:

print '# Betweenness centrality...',
bet = betweenness(g)
top_bet = [a for a, b in topk(bet, NODES)]
print '# Done'
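# countEdges, und and ac are used throughout these scripts but defined
# elsewhere in the repo. A plausible sketch, assuming g maps node -> a
# collection of neighbours: und builds an undirected view, countEdges counts
# unordered adjacent pairs (which would explain the "countEdges(g) * 2"
# printed for directed graphs), and ac averages the local clustering
# coefficients. The repo's own conventions may differ.
def und_sketch(g):
    u = {}
    for v in g:
        u.setdefault(v, set())
        for w in g[v]:
            u[v].add(w)
            u.setdefault(w, set()).add(v)
    return u

def count_edges_sketch(g):
    # half the number of stored adjacency entries
    return sum(len(g[v]) for v in g) // 2

def ac_sketch(g):
    # average clustering coefficient of an undirected graph
    total = 0.0
    for v in g:
        neigh = list(g[v])
        k = len(neigh)
        if k < 2:
            continue  # nodes with < 2 neighbours contribute 0
        links = sum(1 for i in xrange(k) for j in xrange(i + 1, k)
                    if neigh[j] in g[neigh[i]])
        total += 2.0 * links / (k * (k - 1))
    return total / len(g)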
#!/usr/bin/python
import sys

from lesson4 import topk, lastk
from dgraph import readGraph
from dgraph import Page_Rank as pr
from dgraph import Independent_Cascade as ic

seed = int(sys.argv[1])
g = readGraph('wiki-Vote.txt')
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-6)  # alpha = 0.00001
# print pagerank
print 'Page Rank. %s iterations. %s accuracy' % (iterations, err)
top = [a for a, b in topk(pagerank, seed)]
last = [a for a, b in lastk(pagerank, seed)]
print 'Top', seed
print [(u, pagerank[u]) for u in top]
print 'Last', seed
print [(u, pagerank[u]) for u in last]

adopters, haters, steps = ic(g, top)
print 'Independent Cascade Model: TOP', seed
print 'Final Adopters:', len(adopters)
print 'Final Haters:', len(haters)
print '# Iterations:', steps

adopters, haters, steps = ic(g, last)
print 'Independent Cascade Model: LAST', seed
print 'Final Adopters:', len(adopters)
print 'Final Haters:', len(haters)
print '# Iterations:', steps
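# Independent_Cascade (ic) comes from dgraph and returns (adopters, haters,
# steps); its exact semantics are not shown here. One plausible reading,
# assuming each fresh adopter gets a single chance to convert each
# out-neighbour with probability p, and "haters" are nodes that were
# targeted but never adopted. The function name and the parameter p are
# assumptions.
import random

def independent_cascade_sketch(g, seeds, p=0.1):
    adopters = set(seeds)
    targeted = set()
    frontier = list(adopters)
    steps = 0
    while frontier:
        steps += 1
        new_frontier = []
        for u in frontier:
            for v in g[u]:
                if v in adopters:
                    continue
                targeted.add(v)
                if random.random() <= p:  # one activation attempt per edge
                    adopters.add(v)
                    new_frontier.append(v)
        frontier = new_frontier
    haters = targeted - adopters
    return adopters, haters, steps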
nodes = len(g.keys())
print '# Cascade Expansion Wiki-Vote.txt'
print '# Edges = %d\tAverage Clustering = %f' % (countEdges(g), ac(und(g)))
fi(g)  # Fill incoming edges dictionary
sys.stdout.flush()

# ## Execute centrality measures

# In[8]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
top_pr = [a for a, b in topk(pagerank, nodes)]

# In[9]:

print '# Eigenvector Centrality...',
cscores, diffsum = ec(g)
top_eigenc = [a for a, b in topk(cscores, nodes)]
print '# Done'

# In[10]:

print '# Betweenness centrality...',
bet = betweenness(g)
top_bet = [a for a, b in topk(bet, nodes)]
print '# Done'
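# fill_incoming (fi) is called before every Page Rank run to build the
# incoming-edges dictionary. A sketch of the idea, assuming g maps node ->
# list of out-neighbours; the dgraph version presumably stores the result
# on the graph itself rather than returning it, so treat this as
# illustrative only.
def fill_incoming_sketch(g):
    incoming = dict((v, []) for v in g)
    for u in g:
        for v in g[u]:
            incoming.setdefault(v, []).append(u)
    return incoming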