for i in xrange(100):

    NODES = 7115
    edges = random.randint(75000, 125000)
    radius = 2
    weak_ties = [i * 5 for i in xrange(0, 4)]
    seed = 100

    # ##Create a Watts-Strogatz 2D direct graph

    # In[4]:

    g = standardize(WS2D(NODES, edges, radius, weak_ties))
    print '# Edges %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(
        und(g)))
    fi(g)  # Fill incoming edges

    sys.stdout.flush()

    # ## Execute centrality measures

    # In[5]:

    print '# Page Rank execution...'
    pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
    print '#', iterations, ' iterations. Error:', err
    top_pr = [a for a, b in topk(pagerank, seed)]

    # In[6]:

    print '# Eigenvector Centrality...',
from mylesson5 import eigenvector_centrality as ec
from dgraph import diameter
from dgraph import readGraph
from dgraph import Page_Rank as pr
from dgraph import fill_incoming as fi
import networkx as nx

# Number of top-ranked nodes to report.
seed = 100
# Load the wiki-Vote directed graph (adjacency structure) from disk.
g = readGraph('wiki-Vote.txt')

# Exploratory NetworkX cross-checks and a tiny hand-made graph, kept for
# reference but disabled:
# G = nx.from_dict_of_lists(g)
# print 'NetworkX Page Rank'
# print [a for a,b in topk(nx.pagerank(G, alpha=1.0e-6, tol=1.0e-10), 10)]
# print [a for a,b in topk(nx.eigenvector_centrality(G), 10)]
# g = {0: [2, 3, 4], 1: [0, 2, 3], 2: [1], 3: [0, 4], 4: [0]}
fi(g)  # fill_incoming: precompute incoming-edge information for each node
print 'Incoming edges stored'
# print 'Nodes: ', len(g.keys())
# print 'Diameter: ', diameter(g)
print 'Page Rank execution...'
# print 'Triangles: ', ctD(g)
# Tighter eps (1.0e-8) than the other sections of this file (1.0e-3).
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-8) # alpha = 0.00001
print iterations, ' iterations. Error:', err
print 'Page Rank'
# Print the `seed` highest-ranked (node, score) pairs.
print topk(pagerank, seed)
# Other centrality measures, disabled:
# print 'Eigenvector Centrality'
# cscores, diffsum = ec(g)
# print [a for a, b in topk(cscores, 10)]
# bet = betweenness(g)
# print 'Betweennes centrality'
# print [a for a, b in topk(bet, 10)]

# ## Set Parameters

# In[3]:

# Number of top-ranked nodes to keep from the PageRank ranking.
seed = 100

# ## Read the wiki-Vote graph

# In[4]:

g = readGraph('wiki-Vote.txt')
print '# Wiki-Vote.txt'
# countEdges(g) * 2 : presumably reports directed edge count -- TODO confirm
# against the other sections, which disagree on the factor of 2.
print '# Edges = %d\tAverage Clustering = %f' % (countEdges(g) * 2, ac(und(g)))
fi(g)  # Fill incoming edges dictionary

sys.stdout.flush()

# ## Execute centrality measures

# In[8]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#', iterations, ' iterations. Error:', err
# Keep only the node ids of the top `seed` ranked nodes.
top_pr = [a for a, b in topk(pagerank, seed)]

# In[9]:

print '# Eigenvector Centrality...',
# ## Set Parameters

# In[3]:



# ## Read the wiki-Vote graph for the cascade-expansion experiment

# In[4]:

g = readGraph('wiki-Vote.txt')
# Total node count; used below as the k of topk, i.e. rank ALL nodes.
nodes = len(g.keys())

print '# Cascade Expansion Wiki-Vote.txt'
# NOTE(review): countEdges(g) is NOT multiplied by 2 here, unlike every other
# section of this file -- confirm whether this is intentional.
print '# Edges = %d\tAverage Clustering = %f'% (countEdges(g), ac(und(g)))
fi(g) # Fill incoming edges dictionary


sys.stdout.flush()

# ## Execute centrality measures

# In[8]:

print '# Page Rank execution...'
pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
print '#',iterations, ' iterations. Error:', err
# Full ranking (all `nodes` nodes), not just a top-100 slice.
top_pr = [a for a,b in topk(pagerank, nodes)]

# In[9]:
# In[3]:
# Run 100 trials on random directed graphs with a random edge budget and
# rank each graph's nodes by PageRank.
print '# 100 Random Direct Graphs'
for i in xrange(100):
    NODES = 7115                           # node count (same as wiki-Vote)
    edges = random.randint(75000, 125000)  # random edge budget per trial
    p = 0.5 # probability
    seed = 100                             # size of the top-k ranking kept


    # ##Create a Random Direct Graph

    # In[4]:

    # rdbg: random directed graph builder -- presumably (nodes, p, edges);
    # semantics of `p` vs `edges` not visible here, TODO confirm.
    g = rdbg(NODES, p, edges)
    print '# Edges %d\tAverage Clustering = %f' % (countEdges(g)*2,ac(und(g)))
    fi(g) # Fill incoming edges

    sys.stdout.flush()


    # ## Execute centrality measures

    # In[6]:

    print '# Page Rank execution...'
    pagerank, iterations, err = pr(g, alpha=1.0e-5, eps=1.0e-3)
    print '#', iterations, ' iterations. Error:', err
    # Keep only the node ids of the top `seed` ranked nodes.
    top_pr = [a for a,b in topk(pagerank, seed)]


    # In[7]: