Example #1
def run_simple_iteration(G, ground_motion, demand, multi):
  #G is a graph; demand is a dictionary keyed by source and then target, giving demand per weekday; multi is a boolean that is True if G is a multigraph (can have two parallel edges between the same nodes). a usage sketch follows this function.
  #change edge properties
  newG, capacities = damage_network(G, ground_motion, multi) #returns the damaged graph and the list of post-damage bridge capacities
  num_out = sum(x < 100 for x in capacities) #a capacity below 100 means the bridge is out
#  util.write_list(time.strftime("%Y%m%d")+'_bridges_scen_1.txt', capacities)   
  #get max flow
  start = time.time()
  #node 5753 is in superdistrict 12, which is santa clara county, and node 3144 is in superdistrict 18, which is alameda county. roughly these are san jose and oakland
  #node 7619 is in superdistrict 1 (7493 is also), which is sf, and node 3144 is in superdistrict 18, which is alameda county. roughly these are san francisco and oakland
  s = '5753'
  t = '7493' #2702 
  flow = nx.max_flow(newG, s, t, capacity='capacity') #not supported by multigraph
  print 'time to get max flow: ', time.time() - start
#  flow = -1 
  #get ave. shortest path
#  start = time.time()
  sp_dict = nx.single_source_dijkstra_path_length(newG,'7619',weight='distance')
  sp = sum(sp_dict.values())/float(len(sp_dict.values()))
  sp2 = 0
  for target in demand: #average shortest path over just the demand nodes
    sp2 += sp_dict[target]
  sp2 = sp2 / float(len(demand))
#  print 'time to get shortest path: ', time.time() - start
  newG = util.clean_up_graph(newG, multi)
  return (num_out, flow, sp, sp2) 
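#a minimal usage sketch of run_simple_iteration; the node ids and demand values here are
#illustrative assumptions, and only the top-level demand keys (node ids) are read above:
#  G = nx.read_gpickle('input/graphMTC_CentroidsLength6.gpickle')
#  demand = {'3144': {'7493': 240}} #keyed by source and then target, per the note above
#  num_out, flow, sp, sp2 = run_simple_iteration(G, ground_motion_row, demand, False)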
Example #2
def compute_road_performance(G, damaged_bridges_internal, demand, no_damage_travel_time, no_damage_vmt, no_damage_flow, no_damage_shortest_path, master_dict_ret, index):
	'''computes network performance after damaging the network based on which bridges are damaged'''
	start_time = time.time()

	if G is None:
		G = get_graph()
	#figure out road network damage
	if len(damaged_bridges_internal) > 0:
		print 'damaged bridges: ', damaged_bridges_internal

		G, road_bridges_out = damage_highway_network(damaged_bridges_internal, G, master_dict_ret, index)

		#figure out impact (performance metrics)
		flow, shortest_paths, travel_time, vmt = measure_performance(G, demand)
		G = util.clean_up_graph(G) #absolutely critical: otherwise, damage from this scenario gets added to damage from previous scenarios! (a sketch of this clean-up step follows this function)
	else: #no bridges are damaged, so no need to do all the calculations
		flow = no_damage_flow
		shortest_paths = no_damage_shortest_path
		travel_time = no_damage_travel_time
		vmt = no_damage_vmt
		road_bridges_out = 0

	print 'total network performance calculation time: ', time.time() - start_time
	return index, road_bridges_out, flow, shortest_paths, travel_time, vmt
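#a minimal sketch of what util.clean_up_graph presumably does (an assumption based on the
#capacity_0/t_0/flow edge attributes used in the ITA examples below, not the actual util.py):
#restore each edge to its undamaged state so one scenario's damage does not leak into the next.
def clean_up_graph_sketch(G):
  for u, v, data in G.edges(data=True):
    data['capacity'] = data['capacity_0'] #undo bridge damage
    data['t_a'] = data['t_0'] #undo congestion from trip assignment
    data['flow'] = 0 #clear assigned trips
  return G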
def compute_performance(scenario, G, index, demand, no_damage_travel_time, no_damage_vmt):
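  #imports are local to this function so that parallel python (pp) can ship it to worker processes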
  import time
  import pickle
  import pdb
  import networkx 
  import util
  from travel_main_simple_simplev3 import damage_bridges
  from travel_main_simple_simplev3 import measure_performance
  from travel_main_simple_simplev3 import damage_network
  from travel_main_simple_simplev3 import get_graph
  start_time = time.time()

  if G is None:
    G = get_graph()

  #figure out bridge damage for each scenario
  damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges = damage_bridges(scenario) #e.g., [1, 89, 598]; num_damaged_bridges counts highway bridges only

  #figure out network damage and output Cube files to this effect
  G = damage_network(damaged_bridges_internal, damaged_bridges_new, G, time.strftime("%Y%m%d")+'_filesForCube/', index)

  #figure out impact (performance metrics)
  flow, shortest_paths, travel_time, vmt = measure_performance(G, num_damaged_bridges, demand, no_damage_travel_time, no_damage_vmt)
  G = util.clean_up_graph(G)
  print 'total scenario time: ', time.time() - start_time
  return damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges, flow, shortest_paths, travel_time, vmt
def main():
  '''can change the number of epsilons below'''
  seed(0) #set seed
  simple = False  #simple is just %bridges out, which is computationally efficient
  number_of_highway_bridges = 1743
  numeps = 3 #the number of epsilons
  tol = 0.00001 #the minimum annual rate that you care about in the original event set (the weight now is the original annual rate / number of epsilons per event)
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids.csv') #we take just a percentage of this demand in ita.py: to get morning flows, take 5.3% of daily driver values, i.e., 11.5/(4.5*6+11.5*10+14*4+4.5*4), from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf
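  #worked check of that 5.3% figure: 4.5*6 + 11.5*10 + 14*4 + 4.5*4 = 27 + 115 + 56 + 18 = 216,
  #and 11.5/216 = 0.053, i.e., the peak morning hour carries about 5.3% of daily driver trips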
  #figure out ground motions
  lnsas, weights = ground_motions(numeps, tol, '/Users/mahalia/Documents/matlab/Research/Herbst2011/output_data/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_3eps.txt')
  bart_dict = transit_to_damage.make_bart_dict()
  muni_dict = transit_to_damage.make_muni_dict()
  set_main_path('/Users/mahaliamiller/Desktop/trn/transit_lines/', None) #TODO: need to change THREE file paths (these plus bart)

  print 'the number of ground motion events we are considering: ', len(lnsas)
  index = 0
  bridge_array = []
  travel_index_times = []

  # G = nx.read_gpickle("input/graphMTC_noCentroidsLength15.gpickle")
  G = nx.read_gpickle("input/graphMTC_CentroidsLength6.gpickle")
  #directed graph, with only one edge between nodes
  G = nx.freeze(G) #prevents edges or nodes from being added or deleted
  print 'am I a multi graph? ', G.is_multigraph()
  no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/')
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/transit/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/transit/')
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/modCapacities/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/modCapacities/')

  for scenario in lnsas:
    print index
    #figure out bridge damage for each scenario
    damaged_bridges, num_bridges_out = damage_bridges(scenario) #e.g., [1, 89, 598] #num_bridges_out is highway bridges only
    bridge_array.append(damaged_bridges)

    #figure out network damage and output Cube files to this effect
    G = damage_network(damaged_bridges, G, time.strftime("%Y%m%d")+'_filesForCube/', index)

    #figure out impact (performance metrics)
    flow, shortest_paths, travel_time, vmt = measure_performance(G, damaged_bridges, demand, no_damage_travel_time, no_damage_vmt)
    travel_index_times.append((index, num_bridges_out, flow, shortest_paths, travel_time, vmt, num_bridges_out/float(number_of_highway_bridges)))
    G = util.clean_up_graph(G)
    index += 1

    # if index%3909 == 0:
    if index%100 == 0:
      save_results(bridge_array, travel_index_times, int(index/float(3909)))

  test(numeps, lnsas, damaged_bridges, G, num_bridges_out, flow, shortest_paths, travel_time, vmt)
Example #5
def run_iteration(G, ground_motion, demand):
  #change edge properties
  newG = damage_network(G, ground_motion)

  #call ita
  start = time.time()
#  print 'starting iterative travel assignment'
  it = ita.ITA(newG,demand) #assign trips on the damaged network, not the original one
  newG = it.assign()
  print 'time to assign: ', time.time()-start
#  for n,nbrsdict in newG.adjacency_iter():
#    for nbr,keydict in nbrsdict.items():
#      for key,eattr in keydict.items():
#        if eattr['flow']>0:
#          print (n, nbr, eattr['flow'])
  travel_time = util.find_travel_time(newG)
  vmt = util.find_vmt(newG)
#  print 'travel time: ', travel_time
#  print 'vmt: ', util.find_vmt(G) #in the undamaged case, this should be around 172 million (http://www.mtc.ca.gov/maps_and_data/datamart/stats/vmt.htm)
  newG = util.clean_up_graph(newG)
  return (travel_time, vmt)
Example #6
def run_iteration(G, ground_motion, demand, damagedG, clean_up=True):
  '''this function runs iterative traffic assignment to find the vehicle miles traveled (VMT) and the travel time'''
  if damagedG is None:
    print 'damaging network'
    damagedG = damage_network(G, ground_motion)
  travel_time = -1
  vmt = -1
  #call ita
  # start = time.time()
  # it = ita.ITA(damagedG,demand)
  # newG = it.assign()
  # print 'time to assign: ', time.time()-start
  # for n,nbrsdict in newG.adjacency_iter():
  #   for nbr,keydict in nbrsdict.items():
  #     for key,eattr in keydict.items():
  #       if eattr['flow']>0:
  #         print (n, nbr, eattr['flow'])
  # travel_time = util.find_travel_time(damagedG) #this should be a little less than 252850.3941 hours, i.e., 910,261,418.76 seconds
  # vmt = util.find_vmt(damagedG) #in the undamaged case, this should be around 172 million (http://www.mtc.ca.gov/maps_and_data/datamart/stats/vmt.htm) over the course of a day, so multiply by 0.053 (see the demand note in main) to get the morning share: roughly 8-9 million vehicle-miles
  if clean_up:
    damagedG = util.clean_up_graph(damagedG)
  return (travel_time, vmt)
Example #7
def run_simple_iteration(G, ground_motion, demand, multi, j, targets, clean_up = True):
  #G is a graph (expected here not to be a multigraph); demand is a dictionary keyed by source and then target, giving demand per weekday; multi is a boolean that is True if G is a multigraph (can have two parallel edges between the same nodes)
  #change edge properties
  newG, capacities = damage_network(G, ground_motion, multi) #returns the damaged graph and the list of post-damage bridge capacities
  num_out = sum(x < 100 for x in capacities) #a capacity below 100 means the bridge is out
  update_bridge_damage_dataset(capacities)
  if j in targets:
    affected_bridges = []
    for i in range(len(capacities)):
      if capacities[i] < 100:
        if (i+1) not in SPECIALLY_RETROFITTED_BRIDGES:
          affected_bridges.append(str(i+1))
    util.write_list('20130902_modifyingCapacity/' + time.strftime("%Y%m%d")+'_modifyingCapacitytab' + str(j) + '.txt', affected_bridges) 
  #get max flow
  start = time.time()
  #node 5753 is in superdistrict 12, which is santa clara county, and node 3144 is in superdistrict 18, which is alameda county. roughly these are san jose and oakland
  #node 7619 is in superdistrict 1 (7493 is also), which is sf, and node 3144 is in superdistrict 18, which is alameda county. roughly these are san francisco and oakland
  s = '3144'
  t = '7493' #2702 
  try:
    flow = nx.max_flow(newG, s, t, capacity='capacity') #not supported by multigraph
  except nx.exception.NetworkXError as e:
    print 'found an ERROR: ', e
    flow = -1
    print s in newG
    print t in newG
    print len(newG.nodes())
    print len(newG.edges())
  # sp_dict = nx.single_source_dijkstra_path_length(newG,'7493',weight='distance')
  # sp = sum(sp_dict.values())/float(len(sp_dict.values()))
  # sp2 = 0
  # for target in demand.keys():
  #   sp2 += sp_dict[target]
  # sp2 = sp2 / float(len(demand.keys()))
  sp = 0
  sp2 = 0
  if clean_up:
    newG = util.clean_up_graph(newG)
  return (num_out, flow, sp, sp2, newG) 
Example #8
def main():
	'''this is the main function that runs from ground-motion intensity map to network performance measure. You will need to adjust various things below, such as the ground-motion files and the performance-measure info; you should not need to change, however, the functions that they call'''
	seed_num = 0 #USER ADJUSTS THIS! other value examples: 1,2, 11, 14, ...
	random.seed(seed_num) #set random number generator seed so we can repeat this process

	#################################################################
	################## ground-motion intensity map data #######################
	#load the earthquake info
	#just for demonstration, this does ONLY THREE ground-motion intensity maps
	#sa_matrix = util.read_2dlist('input/sample_ground_motion_intensity_map_JUST_THREE.txt',delimiter='\t')
	#this does approx. 2000 ground-motion intensity maps. These are hazard consistent.
	#sa_matrix = util.read_2dlist('input/sample_ground_motion_intensity_maps_road_only_filtered.txt',delimiter='\t')
	#GB: this does 25 hazard-consistent maps
	sa_matrix = util.read_2dlist('input/subset_maps_25.txt', delimiter='\t')
	lnsas = []
	magnitudes = []
	for row in sa_matrix:
		print row[4:]
		lnsas.append([log(float(sa)) for sa in row[4:]])
		magnitudes.append(float(row[2]))
	print 'You are considering %d ground-motion intensity maps.' % len(lnsas)
	print 'You are considering %d different site locations.' % len(lnsas[0])

	################## component (bridge) damage map data #######################
	sets = 1 # number of bridge damage maps per ground-motion intensity map. USER ADJUSTS THIS! other value examples: 3,9,18
	targets = range(0, len(lnsas)*sets) #define the damage map IDs you want to consider. Note: this currently does not require modification. Just change the number of sets above.
	print 'You are considering %d different damage maps (%d per ground-motion intensity map).' % (len(targets), sets)
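	#worked example of the indexing above, assuming the 25 intensity maps loaded earlier:
	#with len(lnsas) = 25 and sets = 3, targets = range(0, 75), and damage map i is built
	#from ground-motion intensity map i % 25 (see the lnsas[i%len(lnsas)] job submission below)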
	#first load the all-purpose dictionary linking info about the bridges
	#with open('input/20140114_master_bridge_dict.pkl','rb') as f:
	with open('input/master_bridge_dict_ret.pkl','rb') as f:
		master_dict_ret = pickle.load(f) #has 1743 keys. One per highway bridge. (NOT BART)
		'''
		dict where the key ranges from 1 to 1889 and the value is another dictionary with the following keys:
		loren_row_number: the row number in the original table that has info on all CA bridges (where the header line is row 0)
		original_id: the original id (1-1889)
		new_id: the new id that excludes filtered-out bridges (1-1743). Bridges are filtered out if (a) they have no seismic capacity data and are not a transbay bridge, or (b) they were not located by Jessica (no edge list). This id is the column number for the lnsa simulations.
		jessica_id: the id number Jessica used; it is also the number in ArcGIS.
		a_b_pairs_direct: list of (a,b) tuples that would be directly impacted by bridge damage (bridge is carrying these roads)
		a_b_pairs_indirect: ditto but roads under the indirectly impacted bridges
		edge_ids_direct: edge object IDS for edges that would be directly impacted by bridge damage
		edge_ids_indirect: ditto but roads under the indirectly impacted bridges
		mod_lnSa: median Sa for the moderate damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		ext_lnSa: median Sa for the extensive damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		com_lnSa: median Sa for the complete damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		'''
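	#worked example of the fragility model documented above (an illustration only, not the
	#actual damage_bridges() internals): P(damage >= extensive | Sa) = Phi((ln(Sa) - ext_lnSa)/0.6),
	#so a bridge shaken at exactly its extensive-damage median Sa has Phi(0) = 0.5, i.e., a 50%
	#chance of reaching at least the extensive damage state in one realization; in code:
	#  p_ext = 0.5 * (1 + math.erf((lnsa - bridge['ext_lnSa']) / (0.6 * math.sqrt(2))))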
	num_of_interest_bridges = len(master_dict_ret)
	num_of_total_bridges = len(master_dict_ret)

	# network damage map data
	G = get_graph()
	assert not G.is_multigraph(), 'You want a directed graph without multiple edges between nodes'

	################## network performance map data #######################
	#compute what the travel time and vehicle-miles-traveled values are without any damage
	demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids_dummies.csv') #we take just a percentage of this demand in ita.py: to get morning flows, take 5.3% of daily driver values, i.e., 11.5/(4.5*6+11.5*10+14*4+4.5*4), from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf. Note: these are vehicle-driver trips only (not transit, biking, walking, etc.)
	#pre-compute the network performance measures when there is no damage to save time later
	no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
	no_damage_flow = compute_flow(G)
	no_damage_shortest_path = -1
	G = util.clean_up_graph(G) #so the trips assigned don't hang around

	# GB ADDITION
	print no_damage_travel_time
	print no_damage_vmt

	#################################################################
	################## actually run damage map creation #######################
	ppservers = ()    #starting a super cool parallelization
	# Creates jobserver with automatically detected number of workers
	job_server = pp.Server(ppservers=ppservers)
	print "Starting pp with", job_server.get_ncpus(), "workers"
	# set up jobs
	jobs = []
	for i in targets:
		jobs.append(job_server.submit(compute_damage, (lnsas[i%len(lnsas)], master_dict_ret, targets[i], ), modules = ('random', 'math', ), depfuncs = (damage_bridges, )))

	# get the results that have already run
	bridge_array_new = []
	bridge_array_internal = []
	indices_array = [] # GB: stores the index of the damage map being considered
	bridge_array_hwy_num = [] # GB: stores the number of damaged road bridges per damage map

	for job in jobs:
		(index, damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges_road) = job()
		bridge_array_internal.append(damaged_bridges_internal)
		bridge_array_new.append(damaged_bridges_new)
		indices_array.append(index)
		bridge_array_hwy_num.append(num_damaged_bridges_road)
	save_results_0(bridge_array_internal, bridge_array_new, int((i + 1)/float(len(lnsas))), seed_num) #save temp; i is the last index from the job-submission loop above
	# GB ADDITION
	# print jobs
	print 'bridge array internal ='
	print bridge_array_internal
	# print bridge_array_new
	# print 'Indices array'
	# print indices_array
	# print bridge_array_hwy_num

	#
	print 'Great. You have made damage maps'
	# #################################################################
	# ################## actually run performance measure realization creation #######################
	ppservers = ()
	# Creates jobserver with automatically detected number of workers
	job_server = pp.Server(ppservers=ppservers)
	print "Starting pp with", job_server.get_ncpus(), "workers"
	# set up jobs
	jobs = []

	for i in targets:
		jobs.append(job_server.submit(compute_road_performance, (None, bridge_array_internal[i], demand, no_damage_travel_time, no_damage_vmt, no_damage_flow, no_damage_shortest_path, master_dict_ret, targets[i], ), modules = ('networkx', 'time', 'pickle', 'pdb', 'util', 'random', 'math', 'ita', ), depfuncs = (get_graph, add_superdistrict_centroids, damage_bridges, damage_highway_network, measure_performance, compute_flow, compute_shortest_paths, compute_tt_vmt, ))) # functions, modules

	# get the results that have already run and save them
	travel_index_times = []
	#print jobs
	i = 0
	for job in jobs:
		(index,  road_bridges_out, flow, shortest_paths, travel_time, vmt) = job()

		#print indices_array[i]
		#print index
		print travel_time

		assert indices_array[i] == index, 'the damage maps should correspond to the performance measure realizations'
		assert bridge_array_hwy_num[i] == road_bridges_out, 'we should also have the same number of hwy bridges out'
		travel_index_times.append((index, road_bridges_out, flow, shortest_paths, travel_time, vmt, road_bridges_out/float(num_of_interest_bridges), len(bridge_array_new[i])/float(num_of_total_bridges), magnitudes[index%len(magnitudes)]))

		#save as you go
		if i%len(lnsas) == 0:
			save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(len(lnsas))), seed_num)
		i += 1

	#save an extra time at the very end
	save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(len(lnsas))), seed_num) #save again when totally done
	print 'Great. You have calculated network performance. Good job!'
def main():
  '''can change the number of epsilons below'''
  seed(0) #set seed 
  simple = False  #simple is just %bridges out, which is computationally efficient
  number_of_highway_bridges = 1743
  numeps = 3 #the number of epsilons
  tol = 0.00001 #the minimum annual rate that you care about in the original event set (the weight now is the original annual rate / number of epsilons per event)
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids_dummies.csv') #we take just a percentage of this demand in ita.py: to get morning flows, take 5.3% of daily driver values, i.e., 11.5/(4.5*6+11.5*10+14*4+4.5*4), from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf
  #figure out ground motions
  # lnsas, weights = ground_motions(numeps, tol, 'input/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_1epsFake.txt')
  lnsas, weights, magnitudes = ground_motions(numeps, tol, 'input/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_3eps.txt')
  # with open ('input/20140114_lnsas_1epsFake.pkl', 'wb') as f:
  #   pickle.dump(lnsas, f)
  with open ('input/20140114_magnitudes_3eps.pkl', 'wb') as f:
    pickle.dump(magnitudes, f)
  with open('input/20140114_lnsas_3eps.pkl','rb') as f:
    lnsas = pickle.load(f)
  # with open('input/20140114_lnsas_1epsFake.pkl','rb') as f:
  #   lnsas = pickle.load(f)
  print 'the number of ground motion events we are considering: ', len(lnsas)
  print 'first length: ', len(lnsas[0])

  bart_dict = transit_to_damage.make_bart_dict()
  muni_dict = transit_to_damage.make_muni_dict()

  bridge_array_new = []
  bridge_array_internal = []
  travel_index_times = []
  # G = nx.read_gpickle("input/graphMTC_noCentroidsLength15.gpickle")
  G = get_graph()

  print 'am I a multi graph? I really do not want to be!', G.is_multigraph() #a multigraph can hold multiple parallel edges between the same two nodes, which we do not want here
  no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
  G = util.clean_up_graph(G)
  # make_directories(range(len(lnsas)))
  # transit_to_damage.set_main_path('input/trn/transit_lines/', 'input/trncopy/transit_lines/') #TODO: need to change THREE file paths (these plus bart)

  # run in SERIES
  #---------------------------------------------
  # targets = [0, 5000]
  # # targets = range(len(lnsas))
  # for i in targets:
  #   print i
  #   start = time.time()
  #   damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges, flow, shortest_paths, travel_time, vmt = compute_performance(lnsas[i], G, i, demand, no_damage_travel_time, no_damage_vmt)
  #   bridge_array_internal.append(damaged_bridges_internal)
  #   bridge_array_new.append(damaged_bridges_new)
  #   travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
  #   print 'time for one: ', time.time() - start
  #   if i%3909 == 0:
      # save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))
  
  #   # scenario = lnsas[i]
  #   # #figure out bridge damage for each scenario
  #   # damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges = damage_bridges(scenario) #e.g., [1, 89, 598] #num_bridges_out is highway bridges only
  #   # bridge_array_internal.append(damaged_bridges_internal)
  #   # bridge_array_new.append(damaged_bridges_new)

  #   # #figure out network damage and output Cube files to this effect
  #   # G = damage_network(damaged_bridges_internal, damaged_bridges_new, G, time.strftime("%Y%m%d")+'_filesForCube/', i)

  #   # #figure out impact (performance metrics)
  #   # flow, shortest_paths, travel_time, vmt = measure_performance(G, num_damaged_bridges, demand, no_damage_travel_time, no_damage_vmt)
  #   # travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
  #   # G = util.clean_up_graph(G)
  #   # # if i%3909 == 0:
  #   # if i%1 == 0:
  #   #   save_results(bridge_array_internal, bridge_array_new, travel_index_times, int(i/float(3909)))

  # # #---------------------------------------------

  # # # run in PARALLEL
  # # # #---------------------------------------------
  ppservers = ()    
  # Creates jobserver with automatically detected number of workers
  job_server = pp.Server(ppservers=ppservers)
  print "Starting pp with", job_server.get_ncpus(), "workers"
  start_time = time.time()
  # set up jobs
  jobs = []
  targets = range(3909, len(lnsas))
  # targets = [0, 33, 5000]
  # for i in range(len(lnsas)):
  for i in targets:
    jobs.append(job_server.submit(compute_performance, (lnsas[i], None, i, demand, no_damage_travel_time, no_damage_vmt, ), modules = ('networkx', ))) # functions, modules
  # get results
  # if len(jobs) != len(lnsas):
  #   pdb.set_trace() # error checking!
  index = 0
  for job in jobs:
    (damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges, flow, shortest_paths, travel_time, vmt) = job()
    i = targets[index]
    print 'target id: ', i
    bridge_array_internal.append(damaged_bridges_internal)
    bridge_array_new.append(damaged_bridges_new)
    travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
    if i%3909 == 0:
      save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))
    index += 1

  # #---------------------------------------------
  save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))
Example #10
def main():
  #get graph info
  G = nx.MultiDiGraph()
  G.add_node(1)
  G.add_node(2)
  G.add_edge(1,2,capacity_0=1000,capacity=1000,t_0=15,t_a=15,flow=0, distance=10)
  G.add_edge(1,2,capacity_0=3000,capacity=3000,t_0=20,t_a=20,flow=0, distance=10)
  #get od info. This is in the format of a dict keyed by origin and then destination, like demand[sd1][sd2] = 200000.
  demand = {}
  demand[1] = {}
  demand[1][2] = 8000

  #call ita
  it = ITA(G,demand)
  newG = it.assign()
  print newG
  for n,nbrsdict in newG.adjacency_iter():
    for nbr,keydict in nbrsdict.items():
      for key,eattr in keydict.items():
        print (n, nbr, eattr['flow'])
  print 'should have flow of 3200 and 4800'
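  #for reference: a standard BPR-style link performance function that an ITA implementation
  #plausibly uses to update t_a from the assigned flow (an assumption about ita.py, not
  #something this file confirms):
  #  t_a = t_0 * (1 + 0.15 * (flow / capacity) ** 4)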

  #try another one
  G = nx.MultiDiGraph()
  G.add_node('A')
  G.add_node('B')
  G.add_node('C')
  G.add_node('D')
  G.add_node('E')
  #first type
  G.add_edge('A','B',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('A','D',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('D','F',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('E','C',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('B','A',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('D','A',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('F','D',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  G.add_edge('C','E',capacity_0=5000,capacity=1000,t_0=15,t_a=15,flow=0)
  #second type
  G.add_edge('B','D',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('D','B',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('A','E',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('E','A',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('E','F',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('F','E',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('F','C',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  G.add_edge('C','F',capacity_0=5000,capacity=3000,t_0=20,t_a=20,flow=0)
  
  #get od info. This is in the format of a dict keyed by origin and then destination, like demand[sd1][sd2] = 200000.
  demand = {}
  demand['A'] = {}
  demand['C'] = {}
  demand['B'] = {}
  demand['A']['B'] = 4000
  demand['A']['C'] = 5000
  demand['C']['B'] = 2000
  demand['B']['A'] = 1000


  #call ita
  it = ITA(G,demand)
  newG = it.assign()
  print newG
  for n,nbrsdict in newG.adjacency_iter():
    for nbr,keydict in nbrsdict.items():
      for key,eattr in keydict.items():
        print (n, nbr, eattr['flow'])
  print 'and now clean up the graph'
  newG = util.clean_up_graph(newG)
  for n,nbrsdict in newG.adjacency_iter():
    for nbr,keydict in nbrsdict.items():
      for key,eattr in keydict.items():
        print (n, nbr, eattr['flow'])
        print (n, nbr, eattr['capacity'])
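  #after clean-up, each edge's flow should be back to 0 and its capacity restored to the
  #undamaged value (an expectation consistent with the clean-up sketch earlier in this document)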