Example 1
def main():
  seed(0) #set seed
  #get graph info
  G = nx.read_gpickle("input/graphMTC_CentroidsLength6.gpickle") #noCentroidsLength15.gpickle") #does not have centroidal links. There is also the choice of a proper multidigraph: nx.read_gpickle("input/graphMTC_CentroidsLength5.gpickle")
  G = nx.freeze(G) #prevents edges or nodes to be added or deleted
  #get od info. This is in format of a dict keyed by od, like demand[sd1][sd2] = 200000.
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids.csv')
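  #a hedged usage sketch (the key types are an assumption, not confirmed by bd.build_demand):
  #demand[sd1][sd2] gives the sd1 -> sd2 trip count, so the total trips leaving sd1
  #would be sum(demand[sd1].values())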
  #get earthquake info
  q = QuakeMaps('input/20130210_mtc_total_lnsas3.pkl', 'input/20130210_mtc_magnitudes3.pkl', 'input/20130210_mtc_faults3.pkl', 'input/20130210_mtc_weights3.pkl', 'input/20130210_mtc_scenarios3.pkl') #the older 20130107_mtc_*1.pkl files can be substituted here; argument order is (totalfilename, magfilename, faultfilename, weightsfilename, scenariofilename)
  print 'weights: ', q.weights
  q.num_sites = len(q.lnsas[0])
  #determine which scenarios you want to run
  good_indices = pick_scenarios(q.lnsas, q.weights)
  
  travel_index_times = []
  index = 0
  #loop over scenarios
  print 'size of lnsas: ', len(q.lnsas)
  for scenario in q.lnsas: #each 'scenario' has 1557 values of lnsa, i.e. one per site
    if index in good_indices:
      print 'index: ', index
      (bridges, flow, path, path2) = run_simple_iteration(G, scenario, demand, False)
      travel_index_times.append((index, bridges, flow, path, path2))
#      print 'new travel times: ', travel_index_times
      if index % 1000 == 0:
        util.write_2dlist(time.strftime("%Y%m%d")+'_bridges_flow_paths4.txt',travel_index_times)
    index += 1 #IMPORTANT
  util.write_2dlist(time.strftime("%Y%m%d")+'_bridges_flow_paths4.txt',travel_index_times)
  print 'the number of scenarios I considered doing: ', index
  print 'the number of scenarios I actually did: ', len(travel_index_times)
Example 2
def main():
  seed(0) #set seed
  #get graph info
  G = nx.read_gpickle("input/graphMTC_CentroidsLength5.gpickle") #noCentroidsLength15.gpickle") #does not have centroidal links
  print '|V| = ', len(G.nodes())
  print '|E| = ', len(G.edges())
  G = nx.freeze(G) #prevents edges or nodes from being added or deleted
  #get OD info. This is in the format of a nested dict keyed by OD pair, e.g. demand[sd1][sd2] = 200000.
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids.csv')
  #get earthquake info
  q = QuakeMaps('input/20130210_mtc_total_lnsas3.pkl', 'input/20130210_mtc_magnitudes3.pkl', 'input/20130210_mtc_faults3.pkl', 'input/20130210_mtc_weights3.pkl', 'input/20130210_mtc_scenarios3.pkl') #the older 20130107_mtc_*1.pkl files can be substituted here; argument order is (totalfilename, magfilename, faultfilename, weightsfilename, scenariofilename)


  q.num_sites = len(q.lnsas[0])
  #determine which scenarios you want to run
  good_indices = pick_scenarios(q.lnsas, q.weights)
  
  travel_index_times = []
  index = 0
  #loop over scenarios
  for scenario in q.lnsas: #each 'scenario' has 1557 values of lnsa, i.e. one per site
    if index in good_indices:
      print 'index: ', index
      (travel_time, vmt) = run_iteration(G, scenario, demand)
      travel_index_times.append((index, travel_time, vmt))
#      print 'new travel times: ', travel_index_times
      if index % 100 == 0:
        util.write_2dlist(time.strftime("%Y%m%d")+'_travel_time.txt',travel_index_times)
    index += 1 #IMPORTANT
  util.write_2dlist(time.strftime("%Y%m%d")+'_travel_time.txt',travel_index_times)
Example 3
def main():
  '''You can change the number of epsilons (numeps) below.'''
  seed(0) #set seed
  simple = False  #simple is just %bridges out, which is computationally efficient
  number_of_highway_bridges = 1743
  numeps = 3 #the number of epsilons
  tol = 0.00001 #the minimum annual rate that you care about in the original event set (the weight now is the original annual rate / number of epsilons per event)
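  #worked example of the weighting described above (an illustration, not from the source):
  #an event with an original annual rate of 0.003 split into numeps = 3 epsilon
  #realizations yields three maps, each with weight 0.003/3 = 0.001; events whose
  #original annual rate falls below tol are presumably dropped from consideration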
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids.csv') #we just take a percentage in ita.py: to get morning flows, take 5.3% of daily driver values, i.e. 11.5/(4.5*6+11.5*10+14*4+4.5*4) from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf
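  #checking the arithmetic in the comment above: 4.5*6 + 11.5*10 + 14*4 + 4.5*4 = 216,
  #and 11.5/216 is about 0.053, i.e. the 5.3% morning share of daily driver trips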
  #figure out ground motions
  lnsas, weights = ground_motions(numeps, tol, '/Users/mahalia/Documents/matlab/Research/Herbst2011/output_data/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_3eps.txt')
  bart_dict = transit_to_damage.make_bart_dict()
  muni_dict = transit_to_damage.make_muni_dict()
  set_main_path('/Users/mahaliamiller/Desktop/trn/transit_lines/', None) #TODO: need to change THREE file paths (these plus bart)

  print 'the number of ground motion events we are considering: ', len(lnsas)
  index = 0
  bridge_array = []
  travel_index_times = []

  # G = nx.read_gpickle("input/graphMTC_noCentroidsLength15.gpickle")
  G = nx.read_gpickle("input/graphMTC_CentroidsLength6.gpickle")
   # Directed! only one edge between nodes
  G = nx.freeze(G) #prevents edges or nodes from being added or deleted
  print 'am I a multi graph? ', G.is_multigraph()
  no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
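  #the undamaged travel time and VMT computed here serve as baselines; measure_performance
  #below takes them as arguments, presumably to compare damaged-case metrics against them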
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/')
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/transit/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/transit/')
  if not os.path.isdir(time.strftime("%Y%m%d")+'_filesForCube/modCapacities/'):
    os.mkdir(time.strftime("%Y%m%d")+'_filesForCube/modCapacities/')

  for scenario in lnsas:
    print index
    #figure out bridge damage for each scenario
    damaged_bridges, num_bridges_out = damage_bridges(scenario) #e.g., [1, 89, 598] #num_bridges_out is highway bridges only
    bridge_array.append(damaged_bridges)

    #figure out network damage and output Cube files to this effect
    G = damage_network(damaged_bridges, G, time.strftime("%Y%m%d")+'_filesForCube/', index)

    #figure out impact (performance metrics)
    flow, shortest_paths, travel_time, vmt = measure_performance(G, damaged_bridges, demand, no_damage_travel_time, no_damage_vmt)
    travel_index_times.append((index, num_bridges_out, flow, shortest_paths, travel_time, vmt, num_bridges_out/float(number_of_highway_bridges)))
    G = util.clean_up_graph(G)
    index += 1

    # if index%3909 == 0:
    if index%100 == 0:
      save_results(bridge_array, travel_index_times, int(index/float(3909)))
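      #3909 appears to be the number of base scenarios in the event set (cf. the input
      #filename 'SF2_mtc_total_3909scenarios_...'), so this batch id increments per full pass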

  test(numeps, lnsas, damaged_bridges, damaged_graph, num_bridges_out, flow, shortest_paths, travel_time, vmt)
Example 4
def main():
  '''You can change the number of epsilons (numeps) below.'''
  seed(0) #set seed
  simple = False #simple is just %bridges out, which is computationally efficient
  #get graph info
  # G = nx.read_gpickle("input/graphMTC_CentroidsLength6.gpickle") #another option; there is also the choice of a proper multidigraph: nx.read_gpickle("input/graphMTC_CentroidsLength5.gpickle")
  G = nx.read_gpickle("input/graphMTC_CentroidsLength6highways.gpickle") #directed! only one edge between nodes; the noCentroidsLength15 version does not have centroidal links
  # G1 = nx.read_gpickle("input/graphMTC_CentroidsLength5.gpickle") #undirected, multiple edges. It is a little funky because it has two links between A and B and two between B and A, so is that double-counting?
  # '''a multigraph: An undirected graph class that can store multiedges.
  #   Multiedges are multiple edges between two nodes.  Each edge
  #   can hold optional data or attributes.
  #   A MultiGraph holds undirected edges.  Self loops are allowed.'''
  print 'nodes: ', len(G.nodes())
  G = nx.freeze(G) #prevents edges or nodes from being added or deleted
  # G1 = nx.freeze(G1)
  #get OD info. This is in the format of a nested dict keyed by OD pair, e.g. demand[sd1][sd2] = 200000.
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids.csv') #we just take a percentage in ita.py: to get morning flows, take 5.3% of daily driver values, i.e. 11.5/(4.5*6+11.5*10+14*4+4.5*4) from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf
  #get earthquake info #UPDATED May 23, 2013
  #TODO
  q = QuakeMaps('input/20130612_mtc_total_lnsas5.pkl', 'input/20130612_mtc_magnitudes5.pkl', 'input/20130612_mtc_faults5.pkl', 'input/20130612_mtc_weights5.pkl', 'input/20130612_mtc_scenarios5.pkl') #the older 20130107_mtc_*1.pkl or 20130210_mtc_*3.pkl files can be substituted here; argument order is (totalfilename, magfilename, faultfilename, weightsfilename, scenariofilename)
  q.num_sites = len(q.lnsas[0])
  numeps = 5 #CHANGE THIS to match the number of epsilons in the input files!
  #determine which scenarios you want to run
  good_indices = pick_scenarios(q.lnsas, q.weights, True, numeps)
  targets = good_indices #the scenarios (indices between 0 and 2110) for which you want to save the damaged bridge data; a hand-picked list such as [12, 35, 55, 71, 75, 82, 86, 87, 88, 106, 108, 115, 121, 231, 241, 247, 256, 258, 260, 261, 676, 730, 733, 1231, 1548] also works
  print 'the number of scenarios for which I want to save bridge info: ', len(targets)

  travel_index_times = []
  index = 0
  good_index = 0
  # pdb.set_trace()
  #figure out what the travel time and vmt are if no damage to any bridges
  no_damage_travel_time = -1
  no_damage_vmt = -1
  found_no_damage = False
  for scenario in q.lnsas: #each 'scenario' has one lnsa value per site
    while not found_no_damage:
      (bridges, flow, path, path2, newG) = run_simple_iteration(G, scenario, demand, False, good_index, targets, True) #since we are looking for the no-damage case, it is ok to clean up
      if bridges == 0:
        found_no_damage = True
        print 'found a case with no damage, so saving those results to save work later on'
        (no_damage_travel_time, no_damage_vmt) = run_iteration(G, scenario, demand, newG)
    break #once the no-damage baseline is found, the remaining scenarios need not be scanned

  #loop over scenarios
  print 'size of lnsas: ', len(q.lnsas)
  for scenario in q.lnsas: #each 'scenario' has one lnsa value per site
    if index in good_indices:
      print 'index: ', index
      if simple:
        (bridges, flow, path, path2, newG) = run_simple_iteration(G, scenario, demand, False, good_index, targets)
        travel_index_times.append((index, bridges, flow, path, path2, -1, -1, bridges/float(q.num_sites), -1))
      else:
        (bridges, flow, path, path2, newG) = run_simple_iteration(G, scenario, demand, False, good_index, targets, False) #doesn't clean up the damage
        print 'what i found for bridges: ', bridges
        if bridges == 0:
          travel_time = no_damage_travel_time
          vmt = no_damage_vmt
        else:
          print 'attempting new'
          (travel_time, vmt) = run_iteration(G, scenario, demand, newG, True)
        print 'what i have for (tt, vmt): ', (travel_time, vmt)
        travel_index_times.append((index, bridges, flow, path, path2, travel_time, vmt, bridges/float(q.num_sites), -1))
      good_index += 1
        # travel_index_times.append((index, travel_time, vmt))
#      print 'new travel times: ', travel_index_times
    if index % 1000 == 0:
      print 'index: ', index
      util.write_2dlist(time.strftime("%Y%m%d")+'_bridges_flow_paths_5eps_extensive.txt',travel_index_times)
    index += 1 #IMPORTANT
  util.write_2dlist(time.strftime("%Y%m%d")+'_bridges_flow_paths_5eps_extensive.txt',travel_index_times)
  print 'the number of scenarios I considered doing: ', index
  print 'the number of scenarios I actually did: ', len(travel_index_times)
  print 'i.e.: ', good_index
  print 'and now, I will save a dataset of damaged bridges in each scenario'
  util.write_2dlist(time.strftime("%Y%m%d")+'_damaged_bridges_5eps_extensive.txt',BRIDGE_DAMAGE_DATASET)
  with open(time.strftime("%Y%m%d")+'_damaged_bridges_5eps_extensive.pkl', 'wb') as f:
    pickle.dump(BRIDGE_DAMAGE_DATASET, f)
Example 5
def main():
	'''This is the main function that runs from a ground-motion intensity map to a network performance measure. You will need to adjust various things below, such as the ground-motion files and the performance measure info. You should not, however, need to change the functions they call.'''
	seed_num = 0 #USER ADJUSTS THIS! other value examples: 1,2, 11, 14, ...
	random.seed(seed_num) #set random number generator seed so we can repeat this process

	#################################################################
	################## ground-motion intensity map data #######################
	#load the earthquake info
	#just for demonstration, this does ONLY THREE ground-motion intensity maps
	#sa_matrix = util.read_2dlist('input/sample_ground_motion_intensity_map_JUST_THREE.txt',delimiter='\t')
	#this does approx. 2000 ground-motion intensity maps. These are hazard consistent.
	#sa_matrix = util.read_2dlist('input/sample_ground_motion_intensity_maps_road_only_filtered.txt',delimiter='\t')
	#GB: this does 25 hazard-consistent maps
	sa_matrix = util.read_2dlist('input/subset_maps_25.txt', delimiter='\t')
	lnsas = []
	magnitudes = []
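	#assumed row layout, inferred from the parsing below: columns 0-3 hold event metadata
	#(with the magnitude in column 2), and columns 4 onward hold one Sa value per site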
	for row in sa_matrix:
		print row[4:]
		lnsas.append([log(float(sa)) for sa in row[4:]])
		magnitudes.append(float(row[2]))
	print 'You are considering %d ground-motion intensity maps.' % len(lnsas)
	print 'You are considering %d different site locations.' % len(lnsas[0])

	################## component (bridge) damage map data #######################
	sets = 1 # number of bridge damage maps per ground-motion intensity map. USER ADJUSTS THIS! other value examples: 3,9,18
	targets = range(0, len(lnsas)*sets) #define the damage map IDs you want to consider. Note: this currently does not require modification. Just change the number of sets above.
	print 'You are considering %d different damage maps (%d per ground-motion intensity map).' % (len(targets), sets)
	#first load the all-purpose dictionary linking info about the bridges
	#with open('input/20140114_master_bridge_dict.pkl','rb') as f:
	with open('input/master_bridge_dict_ret.pkl','rb') as f:
		master_dict_ret = pickle.load(f) #has 1743 keys. One per highway bridge. (NOT BART)
		'''
		dict where the key ranges from 1 to 1889 and the value is another dictionary with the following keys:
		loren_row_number: the row number in the original table that has info on all CA bridges (where the header line is row 0)
		original_id: the original id (1-1889)
		new_id: the new id that excludes filtered-out bridges (1-1743). Bridges are filtered out if (a) they have no seismic capacity data and are not the transbay bridge, or (b) they were not located by Jessica (no edge list). This new id is the column number for the lnsa simulations.
		jessica_id: the id number Jessica used. It is also the number in ArcGIS.
		a_b_pairs_direct: list of (a,b) tuples that would be directly impacted by bridge damage (bridge is carrying these roads)
		a_b_pairs_indirect: ditto but roads under the indirectly impacted bridges
		edge_ids_direct: edge object IDS for edges that would be directly impacted by bridge damage
		edge_ids_indirect: ditto but roads under the indirectly impacted bridges
		mod_lnSa: median Sa for the moderate damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		ext_lnSa: median Sa for the extensive damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		com_lnSa: median Sa for the complete damage state. the dispersion (beta) for the lognormal distribution is 0.6. (See hazus/mceer method)
		'''
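		#hedged fragility sketch based on the HAZUS/MCEER convention noted above (treating
		#ext_lnSa as the log-median its name suggests): the probability of reaching or
		#exceeding, e.g., the extensive damage state at ground motion sa would be
		#P = Phi((log(sa) - ext_lnSa)/0.6), with Phi the standard normal CDF and beta = 0.6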
	num_of_interest_bridges = len(master_dict_ret)
	num_of_total_bridges = len(master_dict_ret)

	# network damage map data
	G = get_graph()
	assert G.is_multigraph() == False, 'You want a directed graph without multiple edges between nodes'

	################## network performance map data #######################
	#compute what the travel time and vehicle-miles-traveled values are without any damage
	demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids_dummies.csv') #we just take a percentage in ita.py: to get morning flows, take 5.3% of daily driver values, i.e. 11.5/(4.5*6+11.5*10+14*4+4.5*4) from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf. Note: these are vehicle-driver trips only (not transit, biking, walking, etc.)
	#pre-compute the network performance measures when there is no damage to save time later
	no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
	no_damage_flow = compute_flow(G)
	no_damage_shortest_path = -1
	G = util.clean_up_graph(G) #so the trips assigned don't hang around

	# GB ADDITION
	print no_damage_travel_time
	print no_damage_vmt

	#################################################################
	################## actually run damage map creation #######################
	ppservers = ()    #starting a super cool parallelization
	# Creates jobserver with automatically detected number of workers
	job_server = pp.Server(ppservers=ppservers)
	print "Starting pp with", job_server.get_ncpus(), "workers"
	# set up jobs
	jobs = []
	for i in targets:
		jobs.append(job_server.submit(compute_damage, (lnsas[i%len(lnsas)], master_dict_ret, targets[i], ), modules = ('random', 'math', ), depfuncs = (damage_bridges, )))
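	#note on the pp API as used here: submit() returns a callable job object, and calling
	#job() later (as in the results loop below) blocks until that job's result is available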

	# get the results that have already run
	bridge_array_new = []
	bridge_array_internal = []
	indices_array = [] # GB: stores index of damage map being considered (or GM intensity map? unclear)
	bridge_array_hwy_num = [] # GB: stores num_damaged_bridges_road for each damage map

	for job in jobs:
		(index, damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges_road) = job()
		bridge_array_internal.append(damaged_bridges_internal)
		bridge_array_new.append(damaged_bridges_new)
		indices_array.append(index)
		bridge_array_hwy_num.append(num_damaged_bridges_road)
	save_results_0(bridge_array_internal, bridge_array_new, int((i + 1)/float(len(lnsas))), seed_num) #save temp
	# GB ADDITION
	# print jobs
	print 'bridge array internal ='
	print bridge_array_internal
	# print bridge_array_new
	# print 'Indices array'
	# print indices_array
	# print bridge_array_hwy_num

	print 'Great. You have made damage maps'
	# #################################################################
	# ################## actually run performance measure realization creation #######################
	ppservers = ()
	# Creates jobserver with automatically detected number of workers
	job_server = pp.Server(ppservers=ppservers)
	print "Starting pp with", job_server.get_ncpus(), "workers"
	# set up jobs
	jobs = []

	for i in targets:
		jobs.append(job_server.submit(compute_road_performance, (None, bridge_array_internal[i], demand, no_damage_travel_time, no_damage_vmt, no_damage_flow, no_damage_shortest_path, master_dict_ret, targets[i], ), modules = ('networkx', 'time', 'pickle', 'pdb', 'util', 'random', 'math', 'ita', ), depfuncs = (get_graph, add_superdistrict_centroids, damage_bridges, damage_highway_network, measure_performance, compute_flow, compute_shortest_paths, compute_tt_vmt, ))) # functions, modules

	# get the results that have already run and save them
	travel_index_times = []
	#print jobs
	i = 0
	for job in jobs:
		(index,  road_bridges_out, flow, shortest_paths, travel_time, vmt) = job()

		#print indices_array[i]
		#print index
		print travel_time

		assert indices_array[i] == index, 'the damage maps should correspond to the performance measure realizations'
		assert bridge_array_hwy_num[i] == road_bridges_out, 'we should also have the same number of hwy bridges out'
		travel_index_times.append((index, road_bridges_out, flow, shortest_paths, travel_time, vmt, road_bridges_out/float(num_of_interest_bridges), len(bridge_array_new[i])/float(num_of_total_bridges), magnitudes[index%len(magnitudes)]))

		#save as you go
		if i%len(lnsas) == 0:
			save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(len(lnsas))), seed_num)
		i += 1

	#save an extra time at the very end
	save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(len(lnsas))), seed_num) #save again when totally done
	print 'Great. You have calculated network performance. Good job!'
Example 6
def main():
  '''You can change the number of epsilons (numeps) below.'''
  seed(0) #set seed
  simple = False  #simple is just %bridges out, which is computationally efficient
  number_of_highway_bridges = 1743
  numeps = 3 #the number of epsilons
  tol = 0.00001 #the minimum annual rate that you care about in the original event set (the weight now is the original annual rate / number of epsilons per event)
  demand = bd.build_demand('input/BATS2000_34SuperD_TripTableData.csv', 'input/superdistricts_centroids_dummies.csv') #we just take a percentage in ita.py: to get morning flows, take 5.3% of daily driver values, i.e. 11.5/(4.5*6+11.5*10+14*4+4.5*4) from Figure S10 of http://www.nature.com/srep/2012/121220/srep01001/extref/srep01001-s1.pdf
  #figure out ground motions
  # lnsas, weights = ground_motions(numeps, tol, 'input/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_1epsFake.txt')
  lnsas, weights, magnitudes = ground_motions(numeps, tol, 'input/SF2_mtc_total_3909scenarios_1743bridgesPlusBART_3eps.txt')
  # with open ('input/20140114_lnsas_1epsFake.pkl', 'wb') as f:
  #   pickle.dump(lnsas, f)
  with open ('input/20140114_magnitudes_3eps.pkl', 'wb') as f:
    pickle.dump(magnitudes, f)
  with open('input/20140114_lnsas_3eps.pkl','rb') as f:
    lnsas = pickle.load(f)
  # with open('input/20140114_lnsas_1epsFake.pkl','rb') as f:
  #   lnsas = pickle.load(f)
  print 'the number of ground motion events we are considering: ', len(lnsas)
  print 'first length: ', len(lnsas[0])

  bart_dict = transit_to_damage.make_bart_dict()
  muni_dict = transit_to_damage.make_muni_dict()

  bridge_array_new = []
  bridge_array_internal = []
  travel_index_times = []
  # G = nx.read_gpickle("input/graphMTC_noCentroidsLength15.gpickle")
  G = get_graph()

  print 'am I a multi graph? I really do not want to be!', G.is_multigraph() #a MultiGraph is an undirected graph class that can store multiple (parallel) edges between two nodes; we want a simple directed graph instead
  no_damage_travel_time, no_damage_vmt = compute_tt_vmt(G, demand)
  G = util.clean_up_graph(G)
  # make_directories(range(len(lnsas)))
  # transit_to_damage.set_main_path('input/trn/transit_lines/', 'input/trncopy/transit_lines/') #TODO: need to change THREE file paths (these plus bart)

  # run in SERIES
  #---------------------------------------------
  # targets = [0, 5000]
  # # targets = range(len(lnsas))
  # for i in targets:
  #   print i
  #   start = time.time()
  #   damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges, flow, shortest_paths, travel_time, vmt = compute_performance(lnsas[i], G, i, demand, no_damage_travel_time, no_damage_vmt)
  #   bridge_array_internal.append(damaged_bridges_internal)
  #   bridge_array_new.append(damaged_bridges_new)
  #   travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
  #   print 'time for one: ', time.time() - start
  #   if i%3909 == 0:
      # save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))
  
  #   # scenario = lnsas[i]
  #   # #figure out bridge damage for each scenario
  #   # damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges = damage_bridges(scenario) #e.g., [1, 89, 598] #num_bridges_out is highway bridges only
  #   # bridge_array_internal.append(damaged_bridges_internal)
  #   # bridge_array_new.append(damaged_bridges_new)

  #   # #figure out network damage and output Cube files to this effect
  #   # G = damage_network(damaged_bridges_internal, damaged_bridges_new, G, time.strftime("%Y%m%d")+'_filesForCube/', i)

  #   # #figure out impact (performance metrics)
  #   # flow, shortest_paths, travel_time, vmt = measure_performance(G, num_damaged_bridges, demand, no_damage_travel_time, no_damage_vmt)
  #   # travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
  #   # G = util.clean_up_graph(G)
  #   # # if i%3909 == 0:
  #   # if i%1 == 0:
  #   #   save_results(bridge_array_internal, bridge_array_new, travel_index_times, int(i/float(3909)))

  # # #---------------------------------------------

  # # # run in PARALLEL
  # # # #---------------------------------------------
  ppservers = ()    
  # Creates jobserver with automatically detected number of workers
  job_server = pp.Server(ppservers=ppservers)
  print "Starting pp with", job_server.get_ncpus(), "workers"
  start_time = time.time()
  # set up jobs
  jobs = []
  targets = range(3909, len(lnsas))
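  #starting at 3909 skips the first full pass over the 3909 base scenarios, presumably
  #because that batch was already run; with numeps = 3 there are up to 3*3909 maps in lnsas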
  # targets = [0, 33, 5000]
  # for i in range(len(lnsas)):
  for i in targets:
    jobs.append(job_server.submit(compute_performance, (lnsas[i], None, i, demand, no_damage_travel_time, no_damage_vmt, ), modules = ('networkx', ))) # functions, modules
  # get results
  # if len(jobs) != len(lnsas):
  #   pdb.set_trace() # error checking!
  index = 0
  for job in jobs:
    (damaged_bridges_internal, damaged_bridges_new, num_damaged_bridges, flow, shortest_paths, travel_time, vmt) = job()
    i = targets[index]
    print 'target id: ', i
    bridge_array_internal.append(damaged_bridges_internal)
    bridge_array_new.append(damaged_bridges_new)
    travel_index_times.append((i, num_damaged_bridges, flow, shortest_paths, travel_time, vmt, num_damaged_bridges/float(number_of_highway_bridges), magnitudes[i]))
    if i%3909 == 0:
      save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))
    index += 1

  # #---------------------------------------------
  save_results(bridge_array_internal, bridge_array_new, travel_index_times, int((i + 1)/float(3909)))