def randon_main(g, folder):
    """Generate the random network from *g* and plot its giant-component
    and efficiency metrics into *folder*.

    Metric modes follow the project convention: 1 = giant component,
    2 = efficiency.
    """
    network = gen(g)

    # Giant-component metrics (mode 1)
    giant = extractor(network.copy(), 1)
    graphBuilder(network, giant[1], giant[0], 'GiantRandon', folder, 1)

    # Efficiency metrics (mode 2)
    eff = extractor(network.copy(), 2)
    graphBuilder(network, eff[1], eff[0], 'EficRandon', folder, 2)
def processDay(data, day):
    """Run UCS and IDA* on the search problem built for *day* (0-based)
    and return per-algorithm costs, report strings and perf tuples.

    Returns:
        (ucsCost, idaCost, str_ucs, str_ida, ucsPerf, idaPerf) where the
        perf tuples are (nodes visited, wall-clock seconds).
    """
    label = day + 1  # human-friendly 1-based day number used in all messages
    print(f'\nStarting Processing Day: {label}\n')

    # Build the day's search problem.
    sp = graphBuilder(data, day)
    str_ucs = f'\nUCS Result for Day {label}: \n'
    str_ida = f'\nIDA* Result for Day {label}: \n'

    # ---- UCS ----
    start = time.time()
    availActions = searchProblem.ucs(sp)
    ucstDiff = time.time() - start
    # sflag=True gives the actual cost; the plain call is also recorded.
    ucsCost = sp.GetPathCost(availActions, sflag=True)
    ucsResult = (sp.visited, ucstDiff, sp.GetPathCost(availActions),
                 ucsCost, availActions)
    print('UCS Result: \n')
    print('\t' + str(ucsResult))
    str_ucs += '\t' + str(ucsResult) + '\n'
    ucsPerf = (sp.visited, ucstDiff)

    # ---- IDA* ----
    start = time.time()
    availActions, costEstimation = searchProblem.idaStarCallback(sp)
    idatDiff = time.time() - start
    idaCost = sp.GetPathCost(availActions, sflag=True)
    idaResult = (sp.visited, idatDiff, costEstimation, idaCost, availActions)
    print('\nIDA Result: \n')
    print('\t' + str(idaResult))
    str_ida += '\t' + str(idaResult) + '\n'
    idaPerf = (sp.visited, idatDiff)

    print(f'\nDone for day: {label}\n\n')
    return (ucsCost, idaCost, str_ucs, str_ida, ucsPerf, idaPerf)
def processDay(data, day):
    """Execute both search strategies (UCS, then IDA*) for one day.

    Builds the day's problem, times each search, derives the real path
    cost (sflag=True), and returns costs plus printable summaries and
    (visited, elapsed) performance tuples for each algorithm.
    """
    dayNo = str(day + 1)
    print('\nStarting Processing Day: ' + dayNo + '\n')

    # problem builder
    sp = graphBuilder(data, day)
    str_ucs = '\nUCS Result for Day ' + dayNo + ': \n'
    str_ida = '\nIDA* Result for Day ' + dayNo + ': \n'

    # UCS Segment -------------------------------------------------------
    before = time.time()
    availActions = searchProblem.ucs(sp)
    ucstDiff = time.time() - before
    actualCost = sp.GetPathCost(availActions, sflag=True)
    ucsCost = actualCost
    # day result: (visited, elapsed, reported cost, actual cost, actions)
    ucsResult = (sp.visited, ucstDiff, sp.GetPathCost(availActions),
                 actualCost, availActions)
    print('UCS Result: \n')
    print('\t' + str(ucsResult))
    str_ucs += '\t' + str(ucsResult) + '\n'
    ucsPerf = (sp.visited, ucstDiff)

    # IDA* Segment ------------------------------------------------------
    before = time.time()
    availActions, costEstimation = searchProblem.idaStarCallback(sp)
    idatDiff = time.time() - before
    actualCost = sp.GetPathCost(availActions, sflag=True)
    idaCost = actualCost
    idaResult = (sp.visited, idatDiff, costEstimation, actualCost,
                 availActions)
    print('\nIDA Result: \n')
    print('\t' + str(idaResult))
    str_ida += '\t' + str(idaResult) + '\n'
    idaPerf = (sp.visited, idatDiff)

    print('\nDone for day: ' + dayNo + '\n\n')
    # return is consumed by the caller's aggregation step
    return (ucsCost, idaCost, str_ucs, str_ida, ucsPerf, idaPerf)
def grapher(g):
    """Export CSVs and render the three metric plots (giant component,
    efficiency, total flow) for network *g* under the Vinicios folders.

    Modes follow the project convention: 1 = giant, 2 = efficiency,
    3 = total flow.
    """
    jobs = (
        ('giant', 1, 'GiantComponent', "Vinicios/Graphs/Giant"),
        ('eff',   2, 'Efficiency',     "Vinicios/Graphs/Effc"),
        ('flow',  3, 'TotalFlow',      "Vinicios/Graphs/Flow"),
    )
    for tag, mode, title, outDir in jobs:
        csv_gen(g.copy(), "Vinicios", tag, mode)
        metrics = extractor(g.copy(), mode)
        graphBuilder(g, metrics[1], metrics[0], title, outDir, mode)
def main(network):
    """Build each named network in *network* and plot its giant-component,
    efficiency and total-flow metrics into the GeneratedGraph folders.
    """
    plots = (
        (1, 'GeneratedGraph/GiantComponent'),
        (2, 'GeneratedGraph/Efficiency'),
        (3, 'GeneratedGraph/TotalFlow'),
    )
    for net in network:
        g = _main(net)
        for mode, outDir in plots:
            # csv_gen export intentionally left disabled, as in the original
            metrics = extractor(g.copy(), mode)
            graphBuilder(g, metrics[1], metrics[0], net, outDir, mode)
# -*- coding: utf-8 -*- """ socialGamers.py This part is designed to support the data found in Section 4.A of the report. @author: realhire """ from graphBuilder import graphBuilder gamerTopic = {'140mafia': 2634, 'spymaster': 2304, 'vampirebite': 0} i = 21 print "Gathering day ", i, "'s data..." m = graphBuilder("tc21.txt") print "apply topic filer and build the graph..." m.topic_filter(gamerTopic) print "outputing target file..." m.outputToGexf() print "socialGamer info analyzed complete. Results in", m.outfile ''' # This part is a demonstration of topic collector and degree filter print "find today's top ten topic in: ", m.infile, "..." m.todayTopTenHashtag() print "Today's top ten hashtags are: ", m.todayTopic print "processing reply, retweet and hashtag: ", m.infile, "..." m.processReplyRetweetHashtag(m.todayTopic) degreeRange = [0, float("inf")] print "filter out nodes not in degree range: [", degreeRange[0], ", ", degreeRange[1], "]..."
# -*- coding: utf-8 -*- """ mvp.py Find Twitter MVP and check whether their topic overlap with their follower/sub scribers; topic @author: realhire """ from graphBuilder import graphBuilder i = 21 print "Gathering day ", str(i) + "'s data" m = graphBuilder("tc30.txt") print "Build the graph, all nodes included..." m.processReplyRetweetHashtag() print "Find today's MVP:..." m.todayTopTenMVP() for mvp in m.MVP: print mvp, ':\tTopic: ', m.G.node[mvp] """ ''' Singer Trey ''' interestedUser = '******' print "Now we are interested in", interestedUser print "This account's topics are:", list(m.G.node[interestedUser]) print "Filter out all user not connected to", interestedUser m.connection_filter(interestedUser, removeTargetUser=True) print "outputing target file..." m.outputToGexf("Day"+str(i)+"MVP"+interestedUser+".gexf") print "MVP connection info analyzed complete. Results in", m.outfile """