import sys import fileinput import itertools import pickle from locations import LocationGraph #python build_network.py out/user_checkins.out out/austin.out out/coordinates.p out/gowalla_net #users = pickle.load(open("out/" + sys.argv[1], "rb")) users = {} austin = {} coords = pickle.load(open(sys.argv[3], "rb")) out = sys.argv[4] Gowalla = LocationGraph() fmt = "%Y-%m-%d %H:%M:%S" austin_weight = .24 epsilon = 1 if len(sys.argv) < 2: print "Filename required" else: for line in fileinput.input(sys.argv[1]): line = line.split("|") u = line.pop(0) users[u] = [] while len(line) > 0: entry = line.pop(0) entry = entry.split(">") l = users[u] l.append(entry[0])
#locations=pickle.load(open(sys.argv[7], 'rb')) states = pickle.load(open(sys.argv[5], 'rb')) #need to update with actual census data population = {} census = pickle.load(open(sys.argv[6], 'rb')) total_pop = 0 for p in census: total_pop += census[p] for p in census: population[p] = (census[p] / float(total_pop)) * n #print population network = LocationGraph() network.load(sys.argv[7]) print "Dictionaries loaded." params = str(beta) + "-" + str(shape) + "-" + str(scale) matrix = open(sys.argv[8] + "_" + params + ".out", 'w') countID = 0 infected = {} latent = {} recovered = [] def genID(): global countID countID += 1
#locations=pickle.load(open(sys.argv[7], 'rb')) states = pickle.load(open(sys.argv[8], 'rb')) #need to update with actual census data population = {} census = pickle.load(open(sys.argv[9], 'rb')) total_pop = 0 for p in census: total_pop += census[p] for p in census: population[p] = (census[p] / float(total_pop)) * n #print population network = LocationGraph() network.load(sys.argv[10]) print "Dictionaries loaded." params = str(beta) + "-" + str(shape) + "-" + str(scale) matrix = open(sys.argv[11] + "_" + params + ".out", 'w') countID = 0 infected = {} recovered = [] def genID(): global countID countID += 1 return countID
# Build an airline network from a quarterly flight CSV and dump its
# transition matrix and node list.
import fileinput
import sys
import csv
import pickle
from locations import LocationGraph
#python build_airlinenet.py [quarter.csv] [airports.p] [quarter_net.csv] [nodes.out]
airports = pickle.load(open(sys.argv[2], 'rb'))   # presumably a collection of airport codes; confirm against producer
writer = csv.writer(open(sys.argv[3], 'wb'), delimiter=',')
out = open(sys.argv[4], 'wb')
epsilon = 1   # weight used by make_connected to link isolated components
airnet = LocationGraph()
# CSV columns used: [2]=origin airport, [5]=destination airport, [7]=weight.
# Quotes are stripped before splitting because fields are quoted in the dump.
for l in fileinput.input(sys.argv[1]):
    line = l.replace('"', '').split(',')
    if line[2] in airports and line[5] in airports:
        airnet.add_edge(line[2], line[5], float(line[7]))
# Make sure every known airport appears even if it had no flights.
for a in airports:
    if a not in airnet.nodes():
        airnet.add_node(a)
airnet.make_connected(epsilon)
T = airnet.transition_matrix(airports)
for row in T:
    writer.writerow(row)
# NOTE(review): this loop is truncated in the visible chunk -- its body
# (presumably writing node names to `out`) is not shown here.
for n in airnet.nodes():
#locations=pickle.load(open(sys.argv[7], 'rb')) states = pickle.load(open(sys.argv[8], 'rb')) #need to update with actual census data population = {} census = pickle.load(open(sys.argv[9], 'rb')) total_pop = 0 for p in census: total_pop += census[p] for p in census: population[p] = (census[p] / float(total_pop)) * n #print population network = LocationGraph() network.load(sys.argv[10]) print "Dictionaries loaded." params = str(beta) + "-" + str(shape) + "-" + str(scale) matrix = open(sys.argv[11]+"_"+params+".out", 'w') countID = 0 infected = {} recovered = [] def genID(): global countID countID += 1 return countID
#locations=pickle.load(open(sys.argv[7], 'rb')) states = pickle.load(open(sys.argv[5], 'rb')) #need to update with actual census data population = {} census = pickle.load(open(sys.argv[6], 'rb')) total_pop = 0 for p in census: total_pop += census[p] for p in census: population[p] = (census[p] / float(total_pop)) * n #print population network = LocationGraph() network.load(sys.argv[7]) print "Dictionaries loaded." params = str(beta) + "-" + str(shape) + "-" + str(scale) matrix = open(sys.argv[8]+"_"+params+".out", 'w') countID = 0 infected = {} latent = {} recovered = [] def genID(): global countID countID += 1 return countID
"""Dump the row-normalised transition matrix of a location graph.

Each matrix entry [i, j] is the weight of the edge i -> j divided by the
total outgoing edge weight of node i.
"""
import sys
import fileinput
import pickle
import csv
from locations import LocationGraph
import numpy as np

#python transprob_matrix.py out/gowalla_net trans_prob.csv city_list.p
network = LocationGraph()
network.load(sys.argv[1])

cities = network.nodes()
n_cities = len(cities)
T = np.zeros(shape=(n_cities, n_cities))
for i, src in enumerate(cities):
    # Normalise each row by the node's total outgoing weight.
    denom = float(network.total_edge_weights(src))
    for j, dst in enumerate(cities):
        T[i, j] = network.edge_weight(src, dst) / denom

writer = csv.writer(open(sys.argv[2], 'wb'), delimiter=',')
for row in T:
    writer.writerow(row)
# Persist the node ordering so the matrix rows can be interpreted later.
pickle.dump(cities, open(sys.argv[3], 'wb'))