def I210_metrics(alphas):
    net, d, node, features = load_I210_modified()
    d[:, 2] = d[:, 2] / 4000.
    net2, small_capacity = multiply_cognitive_cost(net, features, 3000., 100.)
    save_metrics(alphas, net, net2, d, features, small_capacity, \
        'data/I210_modified/test_{}.csv', 'data/I210_modified/out.csv', skiprows=1)

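# multiply_cognitive_cost is defined elsewhere in this repo; below is a minimal
# sketch of its assumed behavior, inferred from the call sites in this module:
# links whose capacity (features column 0) falls below `thres` get their cost
# coefficients (columns 3 and up) scaled by `cog_cost`, and a 0/1 mask of these
# "local" links is returned alongside the modified network. Illustrative only,
# not the actual implementation.
def multiply_cognitive_cost_sketch(net, features, thres, cog_cost):
    net2 = np.copy(net)
    small_capacity = (features[:, 0] < thres).astype(float)
    mask = small_capacity == 1.0
    net2[mask, 3:] = net2[mask, 3:] * cog_cost
    return net2, small_capacity
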
def I210_parametric_study(alphas):
    # load the network and its properties
    g_r, d, node, feat = load_I210_modified()
    # modify the costs on the non-routed network
    g_nr, small_capacity = multiply_cognitive_cost(g_r, feat, 3000., 100.)
    # divide the demand by 4000 to speed up computation
    d[:, 2] = d[:, 2] / 4000.
    for alpha in alphas:
        if alpha == 0.0:
            print 'non-routed = 1.0, routed = 0.0'
            f_nr = solver_3(g_nr, d, max_iter=1000, stop=1e-3)
            fs = np.zeros((f_nr.shape[0], 2))
            fs[:, 0] = f_nr
        elif alpha == 1.0:
            print 'non-routed = 0.0, routed = 1.0'
            f_r = solver_3(g_r, d, max_iter=1000, stop=1e-3)
            fs = np.zeros((f_r.shape[0], 2))
            fs[:, 1] = f_r
        else:
            print 'non-routed = {}, routed = {}'.format(1 - alpha, alpha)
            d_nr, d_r = heterogeneous_demand(d, alpha)
            fs = gauss_seidel([g_nr, g_r], [d_nr, d_r], solver_3, max_iter=1000, \
                stop=1e-3, stop_cycle=1e-3, q=50, past=20)
        np.savetxt('data/I210_modified/test_{}.csv'.format(int(alpha*100)), fs, \
            delimiter=',', header='f_nr,f_r')

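# A hypothetical driver (not in the original code) showing how the two I210
# functions above are meant to be chained: the parametric study writes one
# data/I210_modified/test_*.csv file per alpha, and I210_metrics aggregates
# them; the 10%-step alpha grid is an example choice.
def run_I210_modified_study():
    alphas = np.linspace(0., 1., 11)
    I210_parametric_study(alphas)
    I210_metrics(alphas)
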
def I210_metrics(alphas):
    net, d, node, features = load_I210()
    d[:, 2] = d[:, 2] / 4000.
    net2, small_capacity = multiply_cognitive_cost(net, features, 3000., 100.)
    save_metrics(alphas, net, net2, d, features, small_capacity, \
        'data/I210_attack/test_{}.csv', 'data/I210_attack/out.csv', skiprows=1)

def parametric_study(alphas, g, d, node, geometry, thres, cog_cost, output, \
        stop=1e-2, stop_cycle=1e-2):
    g_nr, small_capacity = multiply_cognitive_cost(g, geometry, thres, cog_cost)
    if isinstance(alphas, (float, int)):
        alphas = [alphas]
    for alpha in alphas:
        # special cases where the game is in fact homogeneous
        if alpha == 0.0:
            print 'non-routed = 1.0, routed = 0.0'
            f_nr = solver_3(g_nr, d, max_iter=1000, display=1, stop=stop)
            fs = np.zeros((f_nr.shape[0], 2))
            fs[:, 0] = f_nr
        elif alpha == 1.0:
            print 'non-routed = 0.0, routed = 1.0'
            f_r = solver_3(g, d, max_iter=1000, display=1, stop=stop)
            fs = np.zeros((f_r.shape[0], 2))
            fs[:, 1] = f_r
        # otherwise run the heterogeneous solver
        else:
            print 'non-routed = {}, routed = {}'.format(1 - alpha, alpha)
            d_nr, d_r = heterogeneous_demand(d, alpha)
            fs = gauss_seidel([g_nr, g], [d_nr, d_r], solver_3, max_iter=1000, \
                display=1, stop=stop, stop_cycle=stop_cycle, q=50, past=20)
        np.savetxt(output.format(int(alpha*100)), fs, \
            delimiter=',', header='f_nr,f_r')

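# heterogeneous_demand is defined elsewhere in this repo; below is a minimal
# sketch of its assumed behavior, inferred from the call sites and the printed
# 'non-routed = 1-alpha, routed = alpha' messages: split each OD pair's demand
# (column 2) into a non-routed share and a routed share. Illustrative only.
def heterogeneous_demand_sketch(d, alpha):
    d_nr = np.copy(d)
    d_r = np.copy(d)
    d_nr[:, 2] = (1. - alpha) * d[:, 2]  # demand of non-routed users
    d_r[:, 2] = alpha * d[:, 2]          # demand of routed users
    return d_nr, d_r
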
def LA_local_non_routed_costs(alphas, input, output):
    net, demand, node, features = load_LA_3()
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    # zero out the cost coefficients of non-local (large-capacity) links so
    # that only costs incurred on local streets are counted
    net_local = np.copy(net)
    for row in range(net.shape[0]):
        if small_capacity[row] == 0.0:
            net_local[row, 3:] = net_local[row, 3:] * 0.
    OD_non_routed_costs(alphas, net_local, net2, demand, input, output, verbose=1)

def LA_metrics(alphas, input, output):
    net, d, node, features = load_LA_3()
    d[:, 2] = d[:, 2] / 4000.
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    save_metrics(alphas, net, net2, d, features, small_capacity, input, \
        output, skiprows=1, \
        length_unit='Meter', time_unit='Second')

def LA_metrics_attack(alphas, input, output, beta):
    # beta is assumed to be encoded in the caller's input/output paths; the
    # cognitive-cost call takes the same four arguments as elsewhere
    net, d, node, features = load_LA_4()
    d[:, 2] = d[:, 2] / 4000.
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    save_metrics(alphas, net, net2, d, features, small_capacity, input, \
        output, skiprows=1, \
        length_unit='Meter', time_unit='Second')

def LA_ue_K(factors, thres, cog_cost, output):
    '''
    parametric study for computing equilibrium flows with different demand
    factors and cognitive costs
    '''
    net, demand, node, geom = load_LA_3()
    demand[:, 2] = demand[:, 2] / 4000.
    net2, small_capacity = multiply_cognitive_cost(net, geom, thres, cog_cost)
    single_class_parametric_study(factors, output, net2, demand)

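# Hypothetical usage of LA_ue_K (not in the original code): equilibria for a
# range of demand scaling factors, with local streets (capacity < 1000)
# perceived as 3000x more costly; the output template is an assumed path.
def run_LA_ue_K_example():
    LA_ue_K([0.5, 1.0, 1.5, 2.0], 1000., 3000., 'data/LA/ue_K_{}.csv')
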
def chicago_metrics(alphas):
    '''
    study the test_*.csv files generated by chicago_parametric_study()
    in particular, display the average costs for each type of user
    '''
    net, d, node, features = load_chicago()
    d[:, 2] = d[:, 2] / 2000.  # technically, it's 2*demand/4000
    net2, small_capacity = multiply_cognitive_cost(net, features, 2000., 1000.)
    save_metrics(alphas, net, net2, d, features, small_capacity, \
        'data/chicago/test_{}.csv', 'data/chicago/out.csv', skiprows=1)

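# Hypothetical Chicago workflow (not in the original code), assuming
# chicago_parametric_study(alphas) has already generated the
# data/chicago/test_*.csv files read below; the alpha grid is an example.
def run_chicago_example():
    alphas = np.linspace(0., 1., 11)
    chicago_metrics(alphas)
    chicago_non_routed_costs(alphas)
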
def LA_OD_free_flow_costs(thres, cog_costs, output, verbose=0):
    '''
    computes OD costs (free-flow travel times) for non-routed users under
    different levels of cognitive costs for links with capacity under thres
    '''
    net, demand, node, geom = load_LA_3()
    costs = []
    for K in cog_costs:
        net2, small_capacity = multiply_cognitive_cost(net, geom, thres, K)
        costs.append(net2[:, 3])
    free_flow_OD_costs(net, costs, demand, output, verbose)

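# Hypothetical call (not in the original code): free-flow OD costs for
# non-routed users at several cognitive-cost multipliers on links with
# capacity under 1000; the output path is an assumption.
def run_LA_OD_free_flow_example():
    LA_OD_free_flow_costs(1000., [1., 10., 100., 1000., 3000.],
                          'data/LA/ff_od_costs.csv', verbose=1)
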
def LA_free_flow_costs(thres, cog_costs):
    '''
    study comparing the OD costs of all-or-nothing assignment when
    costs = travel times versus costs with multiplicative cognitive costs
    '''
    net, demand, node, geom = load_LA_2()
    g = construct_igraph(net)
    g2 = construct_igraph(net)
    od = construct_od(demand)
    # average free-flow cost per unit of demand under all-or-nothing on true costs
    print np.array(g.es["weight"]).dot(all_or_nothing(g, od)) \
        / (np.sum(demand[:, 2]) * 60.)
    for K in cog_costs:
        net2, small_capacity = multiply_cognitive_cost(net, geom, thres, K)
        g2.es["weight"] = net2[:, 3]
        # true travel times evaluated on flows assigned with cognitive costs
        print np.array(g.es["weight"]).dot(all_or_nothing(g2, od)) \
            / (np.sum(demand[:, 2]) * 60.)

def save_metrics_beta_LA(alphas, betas, thres, input, output, skiprows=0, \
        length_unit='Mile', time_unit='Minute'):
    out = np.zeros((len(alphas) * len(betas), 13))
    for j, beta in enumerate(betas):
        net, d, node, features = LA_metrics_attacks_all(beta, thres)
        net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
        subset = small_capacity
        # each beta fills its own block of len(alphas) rows in out
        offset = j * len(alphas)
        a = 0
        if alphas[0] == 0.0:
            alpha = 0.0
            print 'compute for nr = {}, r = {}'.format(1 - alphas[0], alphas[0])
            fs = np.loadtxt(input.format(int(alpha*100), int(beta*100)), \
                delimiter=',', skiprows=skiprows)
            f = np.sum(fs, axis=1)
            compute_metrics_beta(0.0, beta, f, net, d, features, subset, out, offset, \
                length_unit=length_unit, time_unit=time_unit)
            a = 1
        b = 1 if alphas[-1] == 1.0 else 0
        for i, alpha in enumerate(alphas[a:len(alphas) - b]):
            print 'compute for nr = {}, r = {}'.format(1 - alpha, alpha)
            fs = np.loadtxt(input.format(int(alpha*100), int(beta*100)), \
                delimiter=',', skiprows=skiprows)
            f = np.sum(fs, axis=1)
            compute_metrics_beta(alpha, beta, f, net, d, features, subset, out, \
                offset + i + a, fs=fs, length_unit=length_unit, time_unit=time_unit)
        if alphas[-1] == 1.0:
            alpha = 1.0
            print 'compute for nr = {}, r = {}'.format(1 - alphas[-1], alphas[-1])
            fs = np.loadtxt(input.format(int(alpha*100), int(beta*100)), \
                delimiter=',', skiprows=skiprows)
            f = np.sum(fs, axis=1)
            compute_metrics_beta(1.0, beta, f, net, d, features, subset, out, \
                offset + len(alphas) - 1, net2=net2, \
                length_unit=length_unit, time_unit=time_unit)
    colnames = 'ratio_routed,beta,tt_non_routed,tt_routed,tt,tt_local,tt_non_local,'
    colnames = colnames + 'gas,gas_local,gas_non_local,vmt,vmt_local,vmt_non_local'
    np.savetxt(output, out, delimiter=',', \
        header=colnames, \
        comments='')

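# Hypothetical call (not in the original code): note that the input template
# must contain two format slots, filled with int(alpha*100) and int(beta*100);
# all paths and grids below are example choices.
def run_save_metrics_beta_example():
    alphas = [0.0, 0.25, 0.5, 0.75, 1.0]
    betas = [0.5, 1.0, 2.0]
    save_metrics_beta_LA(alphas, betas, 1000., 'data/LA_attack/test_{}_{}.csv', \
        'data/LA_attack/out.csv', skiprows=1, \
        length_unit='Meter', time_unit='Second')
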
def main():
    for alpha in [.75]:
    # for alpha in np.linspace(0, .49, 50):
    # for alpha in np.linspace(.5, .99, 50):
        print "ALPHA:", alpha
        start_time2 = timeit.default_timer()
        graph = np.loadtxt('data/LA_net.csv', delimiter=',', skiprows=1)
        demand = np.loadtxt('data/LA_od_2.csv', delimiter=',', skiprows=1)
        graph[10787, -1] = graph[10787, -1] / (1.5**4)
        graph[3348, -1] = graph[3348, -1] / (1.2**4)
        node = np.loadtxt('data/LA_node.csv', delimiter=',')
        features = extract_features('data/LA_net.txt')
        # graph = np.loadtxt('data/Chicago_net.csv', delimiter=',', skiprows=1)
        # demand = np.loadtxt('data/Chicago_od.csv', delimiter=',', skiprows=1)
        # node = np.loadtxt('data/Chicago_node.csv', delimiter=',', skiprows=1)
        # features = extract_features('data/ChicagoSketch_net.txt')
        # features = table in the format [[capacity, length, FreeFlowTime]]
        # alpha = .2  # also known as r
        thres = 1000.
        cog_cost = 3000.
        demand[:, 2] = 0.5 * demand[:, 2] / 4000
        g_nr, small_capacity = multiply_cognitive_cost(graph, features, thres, cog_cost)
        d_nr, d_r = heterogeneous_demand(demand, alpha)
        fs, hs, n_d = fw_heterogeneous_1([graph, g_nr], [d_r, d_nr], alpha, max_iter=30, display=1)
        print n_d
        output = {'f': fs, 'h': hs, 'n_d': n_d}
        with open('graph_stuff/LA_net_od_2_alpha_{}.txt'.format(alpha), 'w') as outfile:
        # with open('graph_stuff/Chicago_net_od_2_alpha_{}.txt'.format(alpha), 'w') as outfile:
            outfile.write(pickle.dumps(output))
        # end of timer
        elapsed2 = timeit.default_timer() - start_time2
        print("Execution took %s seconds" % elapsed2)

def total_link_flows(alphas, input, output):
    '''
    output numpy array with total link flows (non-routed + routed) of the form:
    link_id,from,to,capacity,length,fftt,local,X0,...,X100
    '''
    net, demand, node, features = load_LA_2()
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    links = net.shape[0]
    n_alpha = len(alphas)
    out = np.zeros((links, 7 + n_alpha))
    out[:, :3] = net[:, :3]
    out[:, 3:6] = features
    out[:, 6] = small_capacity
    col_alphas = ','.join(['X' + str(int(alpha * 100)) for alpha in alphas])
    columns = 'link_id,from,to,capacity,length,fftt,local,' + col_alphas
    for i, alpha in enumerate(alphas):
        fs = np.loadtxt(input.format(int(alpha * 100)), delimiter=',', skiprows=1)
        out[:, i + 7] = np.sum(fs, 1)
    np.savetxt(output, out, delimiter=',', header=columns, comments='')

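# Hypothetical call (not in the original code): collect total link flows
# across the alpha sweep into one table; the input template must match the
# test_*.csv files written by the LA parametric study.
def run_total_link_flows_example():
    alphas = np.linspace(0., 1., 11)
    total_link_flows(alphas, 'data/LA/test_{}.csv', 'data/LA/link_flows.csv')
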
def LA_non_routed_costs(alphas, input, output):
    net, demand, node, features = load_LA_3()
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    OD_non_routed_costs(alphas, net, net2, demand, input, output, verbose=1)

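# Hypothetical usage (not in the original code) contrasting the two cost
# computations: LA_non_routed_costs uses full link costs, while
# LA_local_non_routed_costs zeroes out non-local links so that only cost
# incurred on local streets is counted; paths are example choices.
def run_LA_non_routed_costs_example():
    alphas = np.linspace(0., 1., 11)
    LA_non_routed_costs(alphas, 'data/LA/test_{}.csv', 'data/LA/nr_costs.csv')
    LA_local_non_routed_costs(alphas, 'data/LA/test_{}.csv',
                              'data/LA/nr_local_costs.csv')
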
def chicago_non_routed_costs(alphas):
    net, demand, node, features = load_chicago()
    net2, small_capacity = multiply_cognitive_cost(net, features, 2000., 1000.)
    OD_non_routed_costs(alphas, net, net2, demand, 'data/chicago/test_{}.csv',
                        'data/chicago/non_routed_costs.csv')

def LA_metrics_attack_2(alphas, input, output, thres, beta):
    net, d, node, features = LA_metrics_attacks_all(beta, thres)
    net2, small_capacity = multiply_cognitive_cost(net, features, 1000., 3000.)
    save_metrics(alphas, net, net2, d, features, small_capacity, input, \
        output, skiprows=1, \
        length_unit='Meter', time_unit='Second')