def _extract_detector_data(self):
    """Extracts detector data from the xml files under <output_dir>/detector.

    Each detector .xml file is parsed and saved as a .pkl trajectory file in
    the detector_trajectory/ subfolder, then per-detector figures are written
    to the detector_fig/ subfolder.
    """
    parser = simulation_data_parser.SimulationDataParser()
    plotter = map_visualizer.MapVisualizer()
    detector_dir = os.path.join(self._output_dir, 'detector/')
    trajectory_dir = os.path.join(detector_dir, 'detector_trajectory/')
    if not file_util.exists(trajectory_dir):
        file_util.mkdir(trajectory_dir)
    # Convert every detector xml file into a pickled trajectory file.
    xml_names = [name for name in os.listdir(detector_dir)
                 if name.endswith('.xml')]
    for xml_name in xml_names:
        pkl_path = os.path.join(trajectory_dir,
                                os.path.splitext(xml_name)[0] + '.pkl')
        xml_path = os.path.join(detector_dir, xml_name)
        print('Save file: ', pkl_path)
        parser.get_and_save_detector_data(xml_path, pkl_path)
    # Creates figures for individual detector.
    figure_dir = os.path.join(detector_dir, 'detector_fig/')
    if not file_util.f_exists(figure_dir):
        file_util.f_mkdir(figure_dir)
    plotter.plot_individual_detector(trajectory_dir, figure_dir)
def plot_traveling_time(self):
    """Plots average trip duration as a function of departure time.

    Reads 'output/tripinfo.xml', bins trips by their intended departure time
    (actual departure minus insertion delay, in hours) and plots the mean
    trip duration within each bin. The figure is saved to
    <output_dir>/traveling_time_hist.pdf.
    """
    visualizer = map_visualizer.MapVisualizer()
    data_parser = simulation_data_parser.SimulationDataParser()
    tripinfo_file = 'output/tripinfo.xml'
    tripinfo = data_parser.get_tripinfo_attribute_to_trips(tripinfo_file)
    # 49 edges -> 48 equal bins covering the first 12 hours.
    bins = np.linspace(0, 12, 49)
    # Intended departure time [h]: actual departure minus the delay imposed
    # before the vehicle could be inserted.
    positions_on_edge = (np.array(tripinfo['depart']) -
                         np.array(tripinfo['departDelay'])) / 3600
    values_on_edge = np.array(tripinfo['duration'])
    # Bug fix: the original printed len(values_on_edge) twice; print both
    # array lengths so a mismatch between the two attributes is visible.
    print(len(positions_on_edge), len(values_on_edge))
    bin_mean, bin_boundary = visualizer._histogram_along_edge(
        values_on_edge, positions_on_edge, bins=bins)
    fig = pylab.figure(figsize=(8, 6))
    fig.add_subplot(111)
    pylab.plt.plot(bin_boundary[:-1], bin_mean)
    pylab.plt.xlabel('Time [h]')
    pylab.plt.xlim(0, 10)
    pylab.plt.ylim(0, 10000)
    pylab.plt.ylabel('Average traveling time.')
    pylab.savefig(os.path.join(self._output_dir, 'traveling_time_hist.pdf'))
def visualize_fcd_on_map(self):
    """Plot metric maps.

    Loads every edge_id -> trajectory pickle under <output_dir>/trajectory/
    and renders one speed map per time interval into trajectory_fig/.
    Pay attention to the map.
    """
    net = sumolib.net.readNet(self._sumo_net_file)
    visualizer = map_visualizer.MapVisualizer(net)
    plot_edges = net.getEdges()
    trajectory_folder = os.path.join(self._output_dir, 'trajectory/')
    output_folder = os.path.join(trajectory_folder, 'trajectory_fig/')
    if not file_util.f_exists(output_folder):
        file_util.f_mkdir(output_folder)
    trajectory_file_list = os.listdir(trajectory_folder)
    for trajectory_file in trajectory_file_list:
        if not trajectory_file.endswith('.pkl'):
            continue
        trajectory_pkl_file = os.path.join(trajectory_folder, trajectory_file)
        print('Loading file: ', trajectory_pkl_file)
        edge_id_to_trajectory = file_util.load_variable(trajectory_pkl_file)
        print('Time range: ', edge_id_to_trajectory['time_interval'])
        # Fix: build the output path with os.path.join instead of string
        # concatenation, so it stays correct even without a trailing slash.
        output_figure_path = os.path.join(
            output_folder,
            'speed_map_%s_%s.pdf' %
            (int(edge_id_to_trajectory['time_interval'][0]),
             int(edge_id_to_trajectory['time_interval'][1])))
        visualizer.plot_edge_trajectory_histogram_on_map(
            plot_edges,
            edge_id_to_trajectory,
            output_figure_path=output_figure_path,
            plot_max_speed=13.4112)  # 13.4112 m/s == 30 mph.
def plot_save_detector_data_reverse(self):
    """Plots detector data.

    Paradise evacuation edges:
        '27323694.1622',   # Skyway Rd.
        '37625137#0.49'    # Skyway Rd reverse.
        '10293408#4',      # Neal Rd.
        '-184839999#0',    # Clark Rd.
        '-538864403#0'     # Pentz Rd.
    """
    self._extract_detector_data()
    detector_trajectory_folder = os.path.join(
        self._output_dir, 'detector/detector_trajectory/')
    output_figure_folder = os.path.join(
        self._output_dir, 'detector/detector_fig/')
    if not file_util.exists(output_figure_folder):
        file_util.mkdir(output_figure_folder)
    visualizer = map_visualizer.MapVisualizer()
    # One list of detector pickles per evacuation road; a flow-density figure
    # is produced for each group.
    detector_pkl_file_names_by_group = [
        # Skyway Rd, both directions.
        ['e1Detector_27323694_0_0.pkl',
         'e1Detector_27323694_1_1.pkl',
         'e1Detector_37625137#1_0_6.pkl',
         'e1Detector_37625137#1_1_7.pkl'],
        # Neal Rd.
        ['e1Detector_10293408#4_0_2.pkl'],
        # Clark Rd.
        ['e1Detector_-184839999#0_0_3.pkl',
         'e1Detector_-184839999#0_1_4.pkl',
         'e1Detector_-184839999#0_2_8.pkl',
         'e1Detector_-184839999#0_3_9.pkl'],
        # Pentz Rd.
        ['e1Detector_-538864403#0_0_5.pkl',
         'e1Detector_-538864403#0_1_10.pkl']]
    # Consistency fix: use os.path.join (as the cumulative-flow code below
    # already does) instead of string concatenation.
    detector_pkl_files_by_group = [
        [os.path.join(detector_trajectory_folder, file_name)
         for file_name in group]
        for group in detector_pkl_file_names_by_group]
    visualizer.plot_detector_flow_density_by_group(
        detector_pkl_files_by_group,
        ['Skyway', 'Neal_Rd', 'Clark_Rd', 'Pentz_Rd'],
        output_figure_folder=output_figure_folder)
    # Cumulative vehicle flow.
    detector_pkl_files = [
        'e1Detector_27323694_0_0.pkl',
        'e1Detector_27323694_1_1.pkl',
        'e1Detector_37625137#1_0_6.pkl',
        'e1Detector_37625137#1_1_7.pkl',
        'e1Detector_10293408#4_0_2.pkl',
        'e1Detector_-184839999#0_0_3.pkl',
        'e1Detector_-184839999#0_1_4.pkl',
        'e1Detector_-184839999#0_2_8.pkl',
        'e1Detector_-184839999#0_3_9.pkl',
        'e1Detector_-538864403#0_0_5.pkl',
        'e1Detector_-538864403#0_1_10.pkl']
    detector_pkl_files = [os.path.join(detector_trajectory_folder, filename)
                          for filename in detector_pkl_files]
    visualizer.plot_detector_arrival_time_by_group(
        detector_pkl_files, output_figure_folder)
def setUp(self):
    """Creates a scratch output dir and the shared test fixtures."""
    super(MapVisualizerTests, self).setUp()
    # Temporary directory for any figures/files the tests write.
    self._output_dir = tempfile.mkdtemp(dir=absltest.get_default_test_tmpdir())
    # Test-data inputs: floating car data, simulation summary, and routes.
    self._fcd_file = _load_file(_TESTDATA_DIR, _FCD_FILE_NAME)
    self._summary_file = _load_file(_TESTDATA_DIR, _SUMMARY_FILE_NAME)
    self._route_file = _load_file(_TESTDATA_DIR, _ROUTE_FILE_NAME)
    mtv_map_file = _load_file(_TESTDATA_DIR, _MTV_MAP_FILE_NAME)
    net = sumolib.net.readNet(mtv_map_file)
    # NOTE(review): attribute name misspells "visualizer"; kept as-is since
    # sibling tests (outside this view) may reference it.
    self._map_visualier = map_visualizer.MapVisualizer(net)
    self._data_parser = simulation_data_parser.SimulationDataParser()
def plot_path(sumo_net_file):
    """Plots the shortest path for each hard-coded origin/destination pair."""
    net = sumolib.net.readNet(sumo_net_file)
    visualizer = map_visualizer.MapVisualizer(net)
    # Both paths start from the same origin edge.
    od_pairs = (
        ('-12183460#1', '23797526'),
        ('-12183460#1', '35869652'),
    )
    for origin, destination in od_pairs:
        print(visualizer.plot_shortest_path(origin, destination))
def plot_map(self, output_file_name):
    """Draws the residential and service edges of the network in two colors."""
    net = sumolib.net.readNet(self._sumo_net_file)
    visualizer = map_visualizer.MapVisualizer(net)
    # Each layer is (edges, color, 0.2); see MapVisualizer.plot_edges for the
    # meaning of the third field.
    edge_layers = [
        (net.filterEdges(['highway.residential']), 'lime', 0.2),
        (net.filterEdges(['highway.service']), 'darkgreen', 0.2),
    ]
    figure_path = os.path.join(self._output_dir, output_file_name)
    visualizer.plot_edges(edge_layers, output_figure_path=figure_path)
def scenarios_detector_comparison(output_dir):
    """Compare different scenarios.

    Plots cumulative vehicle counts over time: the demand curve plus the
    detector arrival curves of three scenarios (reverse roads, auto routing,
    2s baseline). Saves the figure to
    <output_dir>/scenarios_arrival_comparison.pdf.
    """
    # NOTE(review): data_parser is constructed but never used in this function.
    data_parser = simulation_data_parser.SimulationDataParser()
    visualizer = map_visualizer.MapVisualizer()
    fig = pylab.figure(figsize=(8, 6))
    ax = fig.add_subplot(111)
    # Demand curve: cumulative number of vehicles scheduled to depart.
    load_input = file_util.load_variable(
        'Paradise_reverse_roads/demands/demands_taz_tuple.pkl')
    load_input = sorted(load_input)
    demand_time_line, _, demand_car_count = list(zip(*load_input))
    cumulative_values = np.cumsum(demand_car_count)
    pylab.plt.plot(np.array(demand_time_line) / 3600, cumulative_values)
    # print(cumulative_values[-1])
    # print(np.sum(demand_car_count))
    # visualizer.add_pertentage_interception_lines(
    #     np.array(demand_time_line) / 3600, demand_car_count, [0.5, .9, .95])
    # Arrival curve: reverse-roads scenario.
    detector_trajectory_folder = 'Paradise_reverse_roads/output/detector/detector_trajectory/'
    (time_line, arrival_car_count) = file_util.load_variable(
        detector_trajectory_folder + 'all_arrival_flow.pkl')
    cumulative_values = np.cumsum(arrival_car_count)
    print(cumulative_values[-1])
    # NOTE(review): this curve plots time_line as-is, while the auto-routing
    # curve below divides by 3600 — confirm which unit all_arrival_flow.pkl
    # stores; the x-axis label assumes hours.
    pylab.plt.plot(time_line, cumulative_values)
    visualizer.add_pertentage_interception_lines(
        time_line, arrival_car_count, [0.5, .9, .95])
    # Arrival curve: auto-routing scenario.
    detector_trajectory_folder = 'Paradise_auto_routing/output/detector/detector_trajectory/'
    (time_line, arrival_car_count) = file_util.load_variable(
        detector_trajectory_folder + 'all_arrival_flow.pkl')
    cumulative_values = np.cumsum(arrival_car_count)
    print(cumulative_values[-1])
    pylab.plt.plot(time_line / 3600, cumulative_values)
    # visualizer.add_pertentage_interception_lines(
    #     time_line, arrival_car_count, [0.5, .9, .95])
    # Arrival curve: 2s-baseline scenario (also plotted without /3600).
    detector_trajectory_folder = 'Paradise_2s_baseline/output/detector/detector_trajectory/'
    (time_line, arrival_car_count) = file_util.load_variable(
        detector_trajectory_folder + 'all_arrival_flow.pkl')
    cumulative_values = np.cumsum(arrival_car_count)
    print(cumulative_values[-1])
    pylab.plt.plot(time_line, cumulative_values)
    pylab.plt.xlabel('Time [h]')
    pylab.plt.ylabel('Cummulative vehicles')
    ax.autoscale_view(True, True, True)
    pylab.savefig(os.path.join(output_dir, 'scenarios_arrival_comparison.pdf'))
def _analyze_summary_demands_vs_evacuation(self, demand_file, summary_file,
                                           output_dir=None):
    """Plot summary vs demands.

    Computes the cumulative demand curve and the cumulative evacuation
    ("ended") curve, both as fractions of the total demanded vehicles, plus
    the gap area between the two curves. If output_dir is given, also saves
    a comparison figure there.
    """
    parser = simulation_data_parser.SimulationDataParser()
    visualizer = map_visualizer.MapVisualizer()

    # Demand side: cumulative fraction of vehicles scheduled to depart [h].
    demands = sorted(file_util.load_variable(demand_file),
                     key=lambda x: x.time)
    demand_car_count = [entry.num_cars for entry in demands]
    total_cars = sum(demand_car_count)
    demand_time_line = np.array([entry.time for entry in demands]) / 3600
    demand_cumulative_values = np.cumsum(demand_car_count) / total_cars

    # Evacuation side: cumulative fraction of vehicles that ended their trip.
    summary = parser.parse_summary_file(summary_file)
    summary_time_line = np.array(summary['time']) / 3600
    summary_cumulative_values = np.array(summary['ended']) / total_cars

    # Scalar gap between the two cumulative curves.
    gap_area = visualizer.calculate_gap_area_between_cummulative_curves(
        demand_time_line, demand_cumulative_values,
        summary_time_line, summary_cumulative_values)

    result = (demand_time_line, demand_cumulative_values,
              summary_time_line, summary_cumulative_values, gap_area)
    if not output_dir:
        return result

    # Plot demands v.s. evacuation.
    fig = pylab.figure(figsize=(8, 6))
    ax = fig.add_subplot(111)
    pylab.plt.plot(demand_time_line, demand_cumulative_values,
                   label='Demands')
    pylab.plt.plot(summary_time_line, summary_cumulative_values,
                   label='Evacuation')
    visualizer.add_pertentage_interception_lines(
        summary_time_line, summary_cumulative_values, [0.5, .9, .95])
    pylab.plt.xlabel('Time [h]')
    pylab.plt.ylabel('Cummulative percentage of total vehicles')
    pylab.plt.legend()
    ax.autoscale_view(True, True, True)
    pylab.savefig(os.path.join(output_dir, 'evacuation_curve.pdf'))
    return result
def scenarios_summary_comparison(output_dir):
    """Compares Mill Valley evacuation curves across scenarios.

    Plots the cumulative demand curve and the cumulative fraction of
    evacuated ("ended") vehicles for three scenarios, then saves the figure
    to <output_dir>/MV_evacuation_curve_std_0.5_comparison.pdf.
    """
    data_parser = simulation_data_parser.SimulationDataParser()
    visualizer = map_visualizer.MapVisualizer()
    fig = pylab.figure(figsize=(8, 6))
    ax = fig.add_subplot(111)

    # Demand curve: cumulative fraction of vehicles scheduled to depart.
    demands = file_util.load_variable(
        'MillValley_template/demands/demands_taz_tuple_std_0.5_portion_1.pkl')
    sorted_demands = sorted(demands, key=lambda x: x.time)
    demand_time_line = [x.time for x in sorted_demands]
    demand_car_count = [x.num_cars for x in sorted_demands]
    total_cars = sum(demand_car_count)
    cumulative_values = np.cumsum(demand_car_count) / total_cars
    pylab.plt.plot(np.array(demand_time_line) / 3600, cumulative_values,
                   label='Demands')

    # One "ended vehicles" curve per scenario; the repetitive copy-pasted
    # stanzas of the original are folded into this data-driven loop.
    scenario_summaries = [
        ('MillValley_RevRd_noTFL/output_std_0.5_portion_1/summary.xml',
         'New scenario'),
        ('MillValley_auto_routing_baseline/output_std_0.5_portion_1/summary.xml',
         'Baseline auto-routing'),
        ('MillValley_shortest_path_baseline/output_std_0.5_portion_1/summary.xml',
         'Baseline fixed path'),
    ]
    for summary_file, label in scenario_summaries:
        summary = data_parser.parse_summary_file(summary_file)
        time_line = np.array(summary['time']) / 3600
        cumulative_values = np.array(summary['ended']) / total_cars
        pylab.plt.plot(time_line, cumulative_values, label=label)

    # As in the original, the percentage interception lines are drawn only
    # for the last plotted scenario.
    visualizer.add_pertentage_interception_lines(
        time_line, cumulative_values, [0.5, .9, .95])
    pylab.plt.xlabel('Time [h]')
    pylab.plt.ylabel('Cummulative vehicles')
    ax.autoscale_view(True, True, True)
    pylab.plt.legend(loc='lower right')
    pylab.savefig(
        os.path.join(output_dir, 'MV_evacuation_curve_std_0.5_comparison.pdf'))
def plot_path(_):
    """Plots the restricted shortest path between two hard-coded edges."""
    net = sumolib.net.readNet(FLAGS.sumo_net_file)
    visualizer = map_visualizer.MapVisualizer(net)
    origin = net.getEdge('-8953730#1')
    destination = net.getEdge('514320223')
    pair_name = origin.getID() + '_' + destination.getID()
    print(pair_name)
    path_edges, route_length = net.getRestrictedShortestPath(
        origin, destination, vehicleClass='passenger')
    # Path drawn in dark blue, origin in lime, destination in red.
    layers = [(path_edges, 'darkblue', 1),
              ([origin], 'lime', 1),
              ([destination], 'red', 1)]
    visualizer.plot_edges(
        layers, output_figure_path=pair_name + '_path.pdf')
    print(route_length)
def generate_evacuation_taz_demands(
    self, residential_car_density, serving_car_density,
    demand_mean_hours, demand_stddev_hours, population_portion):
    """Generates evacuation TAZ demands.

    Samples departure times from a Gamma distribution for residential and
    parking (service) roads, saves the demand tuples and a departure-time
    histogram under <output_dir>/demands, and writes an auto-routing
    .rou.xml demand file targeting the 'exit_taz' TAZ.
    """
    # TODO(yusef): Fix map + total number of cars.
    # To make the demands consistent, use the default map, paradise_type.net.xml
    # as the input map instead of the reversed. For Paradise map, an easy way to
    # check is that the total number of cars is 11072.
    net = sumolib.net.readNet(self._sumo_net_file)
    traffic_generator = random_traffic_generator.RandomTrafficGenerator(net)
    visualizer = map_visualizer.MapVisualizer(net)
    print('Generating TAZ demands with STD: ', demand_stddev_hours,
          ' Portion: ', population_portion)
    # Demands from residential roads.
    residential_edge_type = ['highway.residential']
    residential_edges = net.filterEdges(residential_edge_type)
    demand_mean_seconds = demand_mean_hours * 60 * 60
    demand_stddev_seconds = demand_stddev_hours * 60 * 60
    time_sampler_parameters = random_traffic_generator.TimeSamplerGammaMeanStd(
        demand_mean_seconds, demand_stddev_seconds)
    car_per_meter_residential = residential_car_density * population_portion
    # Seed the RNG immediately before sampling so runs are reproducible.
    np.random.seed(FLAGS.random_seed)
    residential = traffic_generator.create_evacuation_auto_routing_demands(
        residential_edges, time_sampler_parameters, car_per_meter_residential)
    # Demands from parking roads.
    parking_edge_type = ['highway.service']
    parking_edges = net.filterEdges(parking_edge_type)
    time_sampler_parameters = random_traffic_generator.TimeSamplerGammaMeanStd(
        demand_mean_seconds, demand_stddev_seconds)
    car_per_meter_parking = serving_car_density * population_portion
    parking = traffic_generator.create_evacuation_auto_routing_demands(
        parking_edges, time_sampler_parameters, car_per_meter_parking)
    all_demands = residential + parking
    departure_time_points = [x.time for x in all_demands]
    cars_per_time_point = [x.num_cars for x in all_demands]
    # Convert departure times from seconds to hours for plotting.
    departure_time_points = np.array(departure_time_points) / 3600
    print('TAZ demands. Total vehicles: ', sum(cars_per_time_point))
    # TODO(yusef): reconcile.
    demands_dir = os.path.join(self._output_dir, _DEMANDS)
    file_util.f_makedirs(demands_dir)
    # Output file names encode the std and population portion parameters.
    output_hist_figure_path = os.path.join(
        demands_dir,
        'departure_time_histogram_taz_std_%s_portion_%s.pdf' %
        (demand_stddev_hours, population_portion))
    output_cumulative_figure_path = os.path.join(
        demands_dir,
        'departure_time_cumulative_taz_std_%s_portion_%s.pdf' %
        (demand_stddev_hours, population_portion))
    pkl_file = os.path.join(
        demands_dir,
        'demands_taz_tuple_std_%s_portion_%s.pkl' %
        (demand_stddev_hours, population_portion))
    routes_file = os.path.join(
        demands_dir,
        'demands_taz_std_%s_portion_%s.rou.xml' %
        (demand_stddev_hours, population_portion))
    # Output the demand xml file.
    visualizer.plot_demands_departure_time(
        departure_time_points,
        cars_per_time_point,
        output_hist_figure_path=output_hist_figure_path,
        output_cumulative_figure_path=output_cumulative_figure_path)
    file_util.save_variable(pkl_file, all_demands)
    exit_taz = 'exit_taz'
    traffic_generator.write_evacuation_vehicle_auto_routing_demands(
        all_demands, exit_taz, routes_file)
def generate_evacuation_shortest_path_demands(
    self, residential_car_density, serving_car_density,
    evacuation_edges, demand_mean_hours, demand_stddev_hours,
    population_portion):
    """Generates evacuation demands.

    Like the TAZ variant, but each demand is routed along the precomputed
    restricted shortest path toward the evacuation exits instead of relying
    on auto-routing. Outputs are written under <output_dir>/demands.
    """
    net = sumolib.net.readNet(self._sumo_net_file)
    traffic_generator = random_traffic_generator.RandomTrafficGenerator(net)
    visualizer = map_visualizer.MapVisualizer(net)
    print('Generating TAZ demands with STD: ', demand_stddev_hours,
          ' Portion: ', population_portion)
    # Calculate the distance to the evacuation exits.
    # One shortest-paths tree (and its length map) per exit edge.
    evacuation_path_trees = {}
    evacuation_path_length = {}
    for exit_edge in evacuation_edges:
        evacuation_path_trees[exit_edge], evacuation_path_length[exit_edge] = (
            net.getRestrictedShortestPathsTreeToEdge(exit_edge))
    # Demands from residential roads.
    residential_edge_type = ['highway.residential']
    residential_edges = net.filterEdges(residential_edge_type)
    demand_mean_seconds = demand_mean_hours * 60 * 60
    demand_stddev_seconds = demand_stddev_hours * 60 * 60
    time_sampler_parameters = random_traffic_generator.TimeSamplerGammaMeanStd(
        demand_mean_seconds, demand_stddev_seconds)
    car_per_meter_residential = residential_car_density * population_portion
    # Seed the RNG immediately before sampling so runs are reproducible.
    np.random.seed(FLAGS.random_seed)
    residential = traffic_generator.create_evacuation_shortest_path_demands(
        residential_edges, time_sampler_parameters, car_per_meter_residential,
        evacuation_edges, evacuation_path_trees, evacuation_path_length)
    # Demands from parking roads.
    parking_edge_type = ['highway.service']
    parking_edges = net.filterEdges(parking_edge_type)
    time_sampler_parameters = random_traffic_generator.TimeSamplerGammaMeanStd(
        demand_mean_seconds, demand_stddev_seconds)
    car_per_meter_parking = serving_car_density * population_portion
    parking = traffic_generator.create_evacuation_shortest_path_demands(
        parking_edges, time_sampler_parameters, car_per_meter_parking,
        evacuation_edges, evacuation_path_trees, evacuation_path_length)
    all_demands = residential + parking
    departure_time_points = [x.time for x in all_demands]
    cars_per_time_point = [x.num_cars for x in all_demands]
    # Convert departure times from seconds to hours for plotting.
    departure_time_points = np.array(departure_time_points) / 3600
    print('Shortest path demands. Total vehicles: ',
          sum(cars_per_time_point))
    # Output the demand xml file.
    demands_dir = os.path.join(self._output_dir, _DEMANDS)
    file_util.f_makedirs(demands_dir)
    # Output file names encode the std and population portion parameters.
    output_hist_figure_path = os.path.join(
        demands_dir,
        'departure_time_histogram_shortest_path_std_%s_portion_%s.pdf' %
        (demand_stddev_hours, population_portion))
    output_cumulative_figure_path = os.path.join(
        demands_dir,
        'departure_time_cumulative_shortest_path_std_%s_portion_%s.pdf' %
        (demand_stddev_hours, population_portion))
    pkl_file = os.path.join(
        demands_dir,
        'demands_shortest_path_tuple_std_%s_portion_%s.pkl' %
        (demand_stddev_hours, population_portion))
    routes_file = os.path.join(
        demands_dir,
        'demands_shortest_path_std_%s_portion_%s.rou.xml' %
        (demand_stddev_hours, population_portion))
    visualizer.plot_demands_departure_time(
        departure_time_points,
        cars_per_time_point,
        output_hist_figure_path=output_hist_figure_path,
        output_cumulative_figure_path=output_cumulative_figure_path)
    file_util.save_variable(pkl_file, all_demands)
    traffic_generator.write_evacuation_vehicle_path_demands(
        all_demands, routes_file)
def scenarios_summary_comparison(output_dir):
    """Compares Paradise evacuation curves for different road-block times.

    Plots the cumulative demand curve, the no-block scenario, and one curve
    per road-block start time, then saves the figure to
    <output_dir>/evacuation_curve_std_0.7_road_block_comparison.pdf.
    """
    data_parser = simulation_data_parser.SimulationDataParser()
    # Kept for parity with the sibling comparison functions (unused here).
    visualizer = map_visualizer.MapVisualizer()
    fig = pylab.figure(figsize=(8, 6))
    ax = fig.add_subplot(111)

    # Demand curve: cumulative fraction of vehicles scheduled to depart.
    demands = file_util.load_variable(
        'Paradise_template/demands/demands_taz_tuple_std_0.7.pkl')
    sorted_demands = sorted(demands, key=lambda x: x.time)
    demand_time_line = [x.time for x in sorted_demands]
    demand_car_count = [x.num_cars for x in sorted_demands]
    total_cars = sum(demand_car_count)
    cumulative_values = np.cumsum(demand_car_count) / total_cars
    pylab.plt.plot(np.array(demand_time_line) / 3600, cumulative_values,
                   ':', label='Demands', color='black')

    def _plot_ended_curve(summary_file, *fmt, **plot_kwargs):
        # Plots the cumulative fraction of "ended" vehicles from one summary.
        summary = data_parser.parse_summary_file(summary_file)
        time_line = np.array(summary['time']) / 3600
        ended_fraction = np.array(summary['ended']) / total_cars
        pylab.plt.plot(time_line, ended_fraction, *fmt, **plot_kwargs)

    _plot_ended_curve('Paradise_RevRd_noTFL/output_std_0.7/summary.xml',
                      '--', label='No block')
    # (road-block start [s], legend label, jet colormap position). Labels are
    # kept exactly as in the original figure. NOTE(review): 21600 s is 6 h and
    # 18000 s is 5 h, so the 't=5 h'/'t=4 h' labels look off by one hour —
    # confirm the intended label semantics before changing them.
    blocked_scenarios = [
        (21600, 't=5 h', 1 / 6),
        (18000, 't=4 h', 2 / 6),
        (10800, 't=3 h', 3 / 6),
        (7200, 't=2 h', 4 / 6),
        (3600, 't=1 h', 5 / 6),
        (0, 't=0 h', 0.95),
    ]
    for block_seconds, label, cmap_position in blocked_scenarios:
        _plot_ended_curve(
            'Paradise_RevRd_noTFL_road_blocker/'
            'output_std_0.7_road_block_%d/summary.xml' % block_seconds,
            label=label, color=pylab.plt.cm.jet(cmap_position))

    pylab.plt.xlabel('Time [h]')
    pylab.plt.ylabel('Cummulative vehicles')
    ax.autoscale_view(True, True, True)
    pylab.plt.xlim(1, 6)
    pylab.plt.legend(loc='lower right')
    pylab.savefig(
        os.path.join(output_dir,
                     'evacuation_curve_std_0.7_road_block_comparison.pdf'))
def __init__(self, sumo_net=None):
    """Initializer.

    Args:
      sumo_net: Optional SUMO road network object; forwarded to the
        MapVisualizer used for plotting.
    """
    # Network this instance operates on (may be None).
    self._net = sumo_net
    self._map_visualizer = map_visualizer.MapVisualizer(sumo_net)