#! /usr/bin/python

import sys
from page_rank import PageRankCalculator

if __name__ == '__main__':

    # Use the graph file given on the command line, or default to the large set
    if len(sys.argv) > 1:
        fileName = sys.argv[1]
    else:
        fileName = "Graph_LargeSet"

    # create the three output files: perplexity log, top-50 PageRank list, top-50 in-link list
    fo = open("PerplexityValues.txt", 'wb')
    fo.write("PerplexityValues at different iterations till convergence\n")
    fh = open("Top50_PageRank.txt", 'wb')
    fh.write("Top 50 URLs with high page ranks \n")
    fd = open("Top50_InLinks.txt", 'wb')
    fd.write("Top 50 URLs with high in-links \n")

    # create a PageRankCalculator for the input graph and run the
    # iteration until the PageRank values converge
    # ----------------------------------------------------------------
    prc = PageRankCalculator(fileName, fh)
    prc.iterateTillConvergence()
    # ----------------------------------------------------------------

    # Collect statistics about the converged graph: per-URL rank,
    # in-link counts, and counts of sources/sinks.
    urlPR = {}          # url -> final PageRank value
    urlInLink = {}      # url -> number of in-links
    noInLinkC = 0       # URLs with no in-links (sources)
    noOutLinkC = 0      # URLs with no out-links (sinks)
    reducedPR = 0
    totalURLS = prc.N
    initialPR = 1 / float(totalURLS)    # uniform starting rank of 1/N
    for key, value in prc.urls.iteritems():
        urlPR[key] = value.rank
        urlInLink[key] = len(value.inList)
        if len(value.inList) == 0:
            noInLinkC += 1
        if len(value.outList) == 0:
            noOutLinkC += 1
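
    # NOTE: the original source appears to be truncated here. What follows is a
    # minimal, assumed sketch of the remaining output step, inferred from the
    # file headers written above (top 50 by PageRank, top 50 by in-links);
    # the record format below is an assumption, not the original author's code.

    # Top 50 URLs by PageRank
    for url, rank in sorted(urlPR.iteritems(), key=lambda kv: kv[1], reverse=True)[:50]:
        fh.write(url + " | " + str(rank) + "\n")

    # Top 50 URLs by in-link count
    for url, count in sorted(urlInLink.iteritems(), key=lambda kv: kv[1], reverse=True)[:50]:
        fd.write(url + " | " + str(count) + "\n")

    # Close all three output files
    fo.close()
    fh.close()
    fd.close()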
#! /usr/bin/python
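
# Runs PageRank on the small 6-node graph for a fixed 100 iterations and logs
# the per-node ranks after iterations 1, 10, and 100 to RankAtIterations.txt.
# Usage: python <this file> [graph_file]   (defaults to "Graph_SmallSet")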

import sys
from page_rank import PageRankCalculator

if __name__ == '__main__':

    # The input graph consists of 6 nodes
    if len(sys.argv) > 1:
        fileName = sys.argv[1]
    else:
        fileName = "Graph_SmallSet"
    fh = open("RankAtIterations.txt", 'wb')
    prc = PageRankCalculator(fileName, fh)
    # Run the PageRank update for 100 iterations
    for i in range(1, 101):
        prc.calculatePROneIteration()
        # Record PageRank values after 1, 10, 100 iterations
        if i == 1 or i == 10 or i == 100:
            fh.write("Iteration: " + str(i) + "\n")
            print "Iteration No. : ", i
            for key, value in prc.urls.iteritems():
                print "Node: ", key, " | Rank: ", value.rank
                fh.write("Node: " + key + " | Rank: " + str(value.rank) + '\n')