def main():
    """Cut routes/trips down to the edges of a reduced network.

    Reads the reduced net, optionally the original net, filters bus stops
    to the valid area, and writes the cut routes (sorted by depart).
    """
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    # print() call replaces the py2-only print statement (works on py2 and py3)
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route

    busStopEdges = {}
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        for busStop in parse(options.additional_input, 'busStop'):
            # lane id "<edge>_<index>" -> edge id
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                busStops.write(busStop.toXML('    '))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # provenance comment for the generated file
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write('<%s xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n' % output_type)
        num_routes = 0
        for _, v in vehicles:
            num_routes += 1
            writer(f, v)
        f.write('</%s>\n' % output_type)
        print("Wrote %s %s" % (num_routes, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(cut_routes(edges, orig_net, options, busStopEdges), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main(options):
    """Reduce routes/trips to the edges of the given (cut) network.

    Writes filtered bus stops when requested and emits the cut vehicles
    sorted by departure time.
    """
    net = readNet(options.network)
    valid_edges = set([e.getID() for e in net.getEdges()])
    original_net = readNet(options.orig_net) if options.orig_net is not None else None
    print("Valid area contains %s edges" % len(valid_edges))

    if options.trips:
        element_name, element_writer = 'trips', write_trip
    else:
        element_name, element_writer = 'routes', write_route

    stop_edges = {}
    stops_file = None
    if options.stops_output:
        stops_file = codecs.open(options.stops_output, 'w', encoding='utf8')
        stops_file.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
            'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        for busStop in parse(options.additional_input, 'busStop'):
            # lane id "<edge>_<index>" -> edge id
            stop_edge = busStop.lane[:-2]
            stop_edges[busStop.id] = stop_edge
            if stops_file is not None and stop_edge in valid_edges:
                stops_file.write(busStop.toXML('    '))
    if stops_file is not None:
        stops_file.write('</additional>\n')
        stops_file.close()

    def write_to_file(vehicles, f):
        # provenance comment plus the routes-file XML skeleton
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(('<%s xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
                 'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n') % element_name)
        written = 0
        for _, vehicle in vehicles:
            written += 1
            element_writer(f, vehicle)
        f.write('</%s>\n' % element_name)
        print("Wrote %s %s" % (written, element_name))

    if options.big:
        # stream unsorted output to a temp file, then sort it out of memory
        unsorted_name = options.output + ".unsorted"
        with codecs.open(unsorted_name, 'w', encoding='utf8') as f:
            write_to_file(cut_routes(valid_edges, original_net, options, stop_edges), f)
        sort_routes.main([unsorted_name, '--big', '--outfile', options.output])
    else:
        all_vehicles = sorted(cut_routes(valid_edges, original_net, options, stop_edges),
                              key=lambda item: item[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(all_vehicles, f)
def main():
    """Cut routes down to the reduced network and write trips or routes.

    The output is sorted in memory, or on disk via sort_routes when
    --big is given.
    """
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    # print() call replaces the py2-only print statement
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        start_tag = "<trips>"
        end_tag = "</trips>"
        output_type = "trips"
        writer = write_trip
    else:
        start_tag = "<routes>"
        end_tag = "</routes>"
        output_type = "routes"
        writer = write_route

    def write_to_file(routes, f):
        comment = "<!-- generated with %s for %s from %s -->" % (
            os.path.basename(__file__),
            options.network,
            options.routeFiles,
        )
        # f.write replaces the py2-only "print >> f" redirection syntax
        f.write(comment + "\n")
        f.write(start_tag + "\n")
        num_routes = 0
        for route in routes:
            num_routes += 1
            writer(f, *route)
        f.write(end_tag + "\n")
        print("Wrote %s %s" % (num_routes, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with open(tmpname, "w") as f:
            write_to_file(cut_routes(edges, orig_net, options), f)
        # sort out of memory
        sort_routes.main([tmpname, "--big", "--outfile", options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options))
        routes.sort()
        with open(options.output, "w") as f:
            write_to_file(routes, f)
def __init__(self, cost_attribute, pessimism=0, network_file=None):
    """Initialize the edge-cost memory.

    :param cost_attribute: the cost attribute to parse (i.e. 'traveltime')
    :param pessimism: exponent-style weight applied to cost errors
    :param network_file: optional net file used to seed free-flow travel times
    """
    # the cost attribute to parse (i.e. 'traveltime')
    # NOTE(review): .decode("utf8") assumes a bytes argument (py2 str);
    # under py3 a str argument would raise AttributeError — confirm callers.
    self.cost_attribute = cost_attribute.decode("utf8")
    # the duaIterate iteration index
    self.iteration = None
    # the main data store: for every interval and edge id we store costs and
    # whether data was seen in the last call of load_costs()
    # start -> (edge_id -> EdgeMemory)
    self.intervals = defaultdict(dict)
    # the intervall length (only known for certain if multiple intervals
    # have been seen)
    self.interval_length = 214748  # SUMOTIME_MAXSTRING
    # the intervall currently being parsed
    self.current_interval = None
    # the combined weigth of all previously loaded costs
    self.memory_weight = 0.0
    # update is done according to: memory * memory_factor + new * (1 -
    # memory_factor)
    self.memory_factor = None
    # differences between the previously loaded costs and the memorized
    # costs
    self.errors = None
    # some statistics
    self.num_loaded = 0
    self.num_decayed = 0
    # travel times without obstructing traffic
    # XXX could use the minimum known traveltime
    self.traveltime_free = defaultdict(lambda: 0)
    if network_file is not None:
        # build a map of default weights for decaying edges assuming the
        # attribute is traveltime
        self.traveltime_free = dict(
            [(e.getID(), e.getLength() / e.getSpeed())
             for e in readNet(network_file).getEdges()]
        )
    self.pessimism = pessimism
def main(args):
    """Generate polygons for routes/vehicles found in the given route files.

    With --standalone, standalone <route> elements are read; otherwise the
    first route of each <vehicle> is used.
    """
    options = parse_args(args)
    net = readNet(options.net)
    known_ids = set()

    def unique_id(cand, index=0):
        # append "#<n>" suffixes until the candidate id is unused
        cand2 = cand
        if index > 0:
            cand2 = "%s#%s" % (cand, index)
        if cand2 in known_ids:
            return unique_id(cand, index + 1)
        else:
            known_ids.add(cand2)
            return cand2

    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for routefile in options.routefiles:
            print("parsing %s" % routefile)
            if options.standalone:
                for route in parse(routefile, 'route'):
                    # print("found veh", vehicle.id)
                    generate_poly(net, unique_id(route.id), options.colorgen(),
                                  options.layer, options.geo,
                                  route.edges.split(), options.blur, outf)
            else:
                for vehicle in parse(routefile, 'vehicle'):
                    # print("found veh", vehicle.id)
                    generate_poly(net, unique_id(vehicle.id), options.colorgen(),
                                  options.layer, options.geo,
                                  vehicle.route[0].edges.split(), options.blur, outf)
        outf.write('</polygons>\n')
def main():
    """Compare route lengths between two route files and report statistics."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    first_lengths = {}
    second_lengths = {}
    diff_stats = Statistics("route length difference", histogram=True,
                            scale=options.binwidth)
    # pass 1: lengths of every vehicle in the first file
    for vehicle in parse(options.routeFile1, 'vehicle'):
        first_lengths[vehicle.id] = getRouteLength(net, vehicle)
    # pass 2: lengths in the second file, accumulating the differences
    for vehicle in parse(options.routeFile2, 'vehicle'):
        route_length = getRouteLength(net, vehicle)
        second_lengths[vehicle.id] = route_length
        diff_stats.add(route_length - first_lengths[vehicle.id], vehicle.id)
    print(diff_stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in diff_stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            ordered = sorted([(second_lengths[veh_id] - first_lengths[veh_id], veh_id)
                              for veh_id in first_lengths.keys()])
            for diff, veh_id in ordered:
                f.write("%s %s\n" % (diff, veh_id))
def main(args):
    """Generate route polygons, optionally scaling line width by frequency."""
    options = parse_args(args)
    net = readNet(options.net, withInternal=options.internal)
    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        if options.scaleWidth is None:
            # one polygon per route, default width
            for route_id, edges in parseRoutes(options):
                generate_poly(options, net, route_id, options.colorgen(), edges, outf)
        else:
            # aggregate identical edge sequences; width grows with occurrence count
            tally = {}
            for route_id, edges in parseRoutes(options):
                key = tuple(edges)
                if key in tally:
                    tally[key][0] += 1
                else:
                    tally[key] = [1, route_id]
            for edge_seq, (occurrences, route_id) in tally.items():
                generate_poly(options, net, route_id, options.colorgen(),
                              edge_seq, outf,
                              lineWidth=options.scaleWidth * occurrences,
                              params={'count': str(occurrences)})
        outf.write('</polygons>\n')
def __init__(self, netfile, lanewise=True, undirected_graph=False,
             routefile=None, addlfiles=None, seed=None, binfile='sumo'):
    """Wrap a SUMO network plus additional files for graph-based analysis.

    :param netfile: path to the SUMO .net.xml file
    :param lanewise: build the graph per lane rather than per edge
    :param undirected_graph: use an undirected graph representation
    :param routefile: optional route file for the simulation
    :param addlfiles: a single additional-file path or a list of paths
    :param seed: random seed passed to the simulation
    :param binfile: SUMO binary name, resolved via checkBinary
    """
    self.netfile = netfile
    self.net = readNet(netfile)
    self.undirected_graph = undirected_graph
    self.lanewise = lanewise
    self.routefile = routefile
    self.seed = seed
    self.data_dfs = []
    self.detector_def_files = []
    self.tls_output_def_files = []
    self.other_addl_files = []
    # accept a single path as well as a list of paths
    if isinstance(addlfiles, six.string_types):
        addlfiles = [addlfiles]
    self.additional_files = addlfiles or []
    self.classify_additional_files()
    self.tls_list = self.net.getTrafficLights()
    # tl.getLinks() returns a dict with a consistent ordering of movements
    self.config_gen = self.get_config_generator()
    self.binfile = checkBinary(binfile)
    self.reset_graph()
def __init__(self, cost_attribute, pessimism=0, network_file=None):
    """Initialize the edge-cost memory.

    :param cost_attribute: the cost attribute to parse (i.e. 'traveltime')
    :param pessimism: exponent-style weight applied to cost errors
    :param network_file: optional net file used to seed free-flow travel times
    """
    # the cost attribute to parse (i.e. 'traveltime')
    # NOTE(review): .decode('utf8') assumes a bytes argument (py2 str);
    # under py3 a str argument would raise AttributeError — confirm callers.
    self.cost_attribute = cost_attribute.decode('utf8')
    # the duaIterate iteration index
    self.iteration = None
    # the main data store: for every interval and edge id we store costs and
    # whether data was seen in the last call of load_costs()
    # start -> (edge_id -> EdgeMemory)
    self.intervals = defaultdict(dict)
    # the intervall length (only known for certain if multiple intervals have been seen)
    self.interval_length = 214748  # SUMOTIME_MAXSTRING
    # the intervall currently being parsed
    self.current_interval = None
    # the combined weigth of all previously loaded costs
    self.memory_weight = 0.0
    # update is done according to: memory * memory_factor + new * (1 - memory_factor)
    self.memory_factor = None
    # differences between the previously loaded costs and the memorized costs
    self.errors = None
    # some statistics
    self.num_loaded = 0
    self.num_decayed = 0
    # travel times without obstructing traffic
    self.traveltime_free = defaultdict(
        lambda: 0)  # XXX could use the minimum known traveltime
    if network_file is not None:
        # build a map of default weights for decaying edges assuming the attribute is traveltime
        self.traveltime_free = dict([
            (e.getID(), e.getLength() / e.getSpeed())
            for e in readNet(network_file).getEdges()
        ])
    self.pessimism = pessimism
def e2_detector_graph(netfile, detector_file, undirected=False, lanewise=True):
    """Build a graph of e2 (lane-area) detectors linked by lane connections.

    :param netfile: path to the SUMO .net.xml file
    :param detector_file: additional file declaring the detectors
    :param undirected: build an undirected graph instead of a digraph
    :param lanewise: unused here; kept for interface compatibility
    :return: a networkx graph with one node per detector
    """
    net = readNet(netfile)
    tree = etree.parse(detector_file)

    if undirected:
        detector_graph = nx.Graph()
    else:
        detector_graph = nx.DiGraph()

    for element in tree.iter():
        if element.tag in ['e2Detector', 'laneAreaDetector']:
            det_id = element.get('id')
            info_dict = dict(element.items())
            detector_graph.add_node(det_id, **info_dict)

    # bugfix: Graph.node is a mapping, not callable — use nodes(data='lane'),
    # which yields (node, lane) pairs, to build the lane -> detector lookup
    lane_to_det = {lane: det for det, lane in detector_graph.nodes(data='lane')}

    for node in net.getNodes():
        for conn in node.getConnections():
            detector_graph.add_edge(lane_to_det[conn.getFromLane().getID()],
                                    lane_to_det[conn.getToLane().getID()],
                                    direction=conn.getDirection())
    return detector_graph
def run_ere(scenario_name, closed_roads, s_time, duration):
    """
    This is to enable the enroute event scenario using TraCI
    :param scenario_name: the name of the scenario
    :param closed_roads: the list of closed road id
    :param s_time: the starting time stamp for the road closure in seconds
    :param duration: the road closure duration in seconds
    """
    sumo_net = net.readNet(load_map(scenario_name))
    # remember the original speed limits so they can be restored later
    pre_sp_lim = []
    for i in closed_roads:
        pre_sp_lim.append(sumo_net.getEdge(i).getSpeed())
    # getCurrentTime() returns milliseconds; convert to seconds and make the
    # closure start relative to the current simulation time
    s_time += traci.simulation.getCurrentTime() / 1000
    e_time = s_time + duration
    while traci.simulation.getMinExpectedNumber() > 0:
        cur_step = traci.simulation.getCurrentTime() / 1000
        if cur_step == s_time:
            # "close" the roads by dropping the speed limit to near zero
            for i in closed_roads:
                traci.edge.setMaxSpeed(i, 0.1)
        if cur_step == e_time:
            # reopen the roads with their original speed limits
            for seq, rid in enumerate(closed_roads):
                traci.edge.setMaxSpeed(rid, pre_sp_lim[seq])
        traci.simulationStep()
    traci.close()
    sys.stdout.flush()
def main():
    """Compare route lengths of two route files and print difference stats."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    lengths1 = {}
    lengths2 = {}
    lengthDiffStats = Statistics(
        "route length difference", histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile1, 'vehicle'):
        lengths1[vehicle.id] = getRouteLength(net, vehicle)
    for vehicle in parse(options.routeFile2, 'vehicle'):
        lengths2[vehicle.id] = getRouteLength(net, vehicle)
        lengthDiffStats.add(
            lengths2[vehicle.id] - lengths1[vehicle.id], vehicle.id)
    # print() call replaces the py2-only print statement
    print(lengthDiffStats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in lengthDiffStats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            differences = sorted(
                [(lengths2[id] - lengths1[id], id) for id in lengths1.keys()])
            for diff, id in differences:
                f.write("%s %s\n" % (diff, id))
def main():
    """Command-line entry: aggregate weights or upload trip results to the DB."""
    argParser = ArgumentParser()
    db_manipulator.add_db_arguments(argParser)
    argParser.add_argument("-n", "--net-file",
                           help="specifying the net file of the scenario to use")
    argParser.add_argument("-k", "--simkey", default="test",
                           help="simulation key to use")
    argParser.add_argument("-l", "--limit", type=int,
                           help="maximum number of trips to retrieve")
    argParser.add_argument("--representatives", default="",
                           help="set the route alternatives file to read representative travel times from")
    argParser.add_argument("--real-trips", default="",
                           help="set the route file to read travel times for real trips from")
    argParser.add_argument("-a", "--all-pairs", default=False, action="store_true",
                           help="Only write the all pairs table")
    options, args = argParser.parse_known_args()
    # two positional arguments mean: aggregate an existing weight file directly
    if len(args) == 2:
        aggregate_weights(args[0], [float(x) for x in args[1].split(",")])
        return
    conn = db_manipulator.get_conn(options)
    if os.path.isfile(options.real_trips) and not options.all_pairs:
        upload_trip_results(conn, options.simkey, SP.OPTIONAL,
                            options.real_trips, options.limit)
    if os.path.isfile(options.representatives):
        # 0..86400: upload all-pairs travel times for a whole day
        tables = create_all_pairs(conn, options.simkey, SP.OPTIONAL)
        upload_all_pairs(conn, tables, 0, 86400, "passenger", options.real_trips,
                         options.representatives, readNet(options.net_file), [])
    conn.close()
def main(args):
    """Generate one polygon per vehicle route in the given route files."""
    options = parse_args(args)
    net = readNet(options.net)
    known_ids = set()

    def unique_id(cand, index=0):
        # append "#<n>" suffixes until the candidate id is unused
        cand2 = cand
        if index > 0:
            cand2 = "%s#%s" % (cand, index)
        if cand2 in known_ids:
            return unique_id(cand, index + 1)
        else:
            known_ids.add(cand2)
            return cand2

    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for routefile in options.routefiles:
            print("parsing %s" % routefile)
            for vehicle in parse(routefile, 'vehicle'):
                # print("found veh", vehicle.id)
                generate_poly(net, unique_id(vehicle.id), options.colorgen(),
                              options.layer, options.geo,
                              vehicle.route[0].edges.split(), options.blur, outf)
        outf.write('</polygons>\n')
def main():
    """Write one polygon per vehicle route in the route file."""
    options = parse_args()
    net = readNet(options.net)
    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for vehicle in parse(options.routefile, 'vehicle'):
            # the first route of the vehicle provides the edge sequence
            edge_ids = vehicle.route[0].edges.split()
            generate_poly(net, vehicle.id, options.colorgen(),
                          options.layer, edge_ids, outf)
        outf.write('</polygons>\n')
def main():
    """Cut routes down to the reduced network and write sorted output."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    # print() call replaces the py2-only print statement
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        start_tag = '<trips>'
        end_tag = '</trips>'
        output_type = 'trips'
        writer = write_trip
    else:
        start_tag = '<routes>'
        end_tag = '</routes>'
        output_type = 'routes'
        writer = write_route

    def write_to_file(routes, f):
        comment = '<!-- generated with %s for %s from %s -->' % (
            os.path.basename(__file__), options.network, options.routeFiles)
        # f.write replaces the py2-only "print >> f" redirection syntax
        f.write(comment + '\n')
        f.write(start_tag + '\n')
        num_routes = 0
        for route in routes:
            num_routes += 1
            writer(f, *route)
        f.write(end_tag + '\n')
        print("Wrote %s %s" % (num_routes, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with open(tmpname, 'w') as f:
            write_to_file(cut_routes(edges, orig_net, options), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options))
        routes.sort()
        with open(options.output, 'w') as f:
            write_to_file(routes, f)
def __init__(self, netfile):
    """Load the SUMO network and build the internal representation.

    :param netfile: path to the SUMO .net.xml file
    """
    self._sumonet = net.readNet(netfile)
    # containers populated by the processing passes below
    self._intersections = []
    self._links = []
    self._linkMap = []
    # three-pass construction: preprocess raw file, process the loaded
    # net, then post-process the derived structures
    self._preprocessNet(netfile)
    self._processNet()
    self._postprocessNet()
def main(netFile, outFile, radius, useTravelDist):
    """Write a TAZ file where each taz holds the edges reachable within radius.

    Returns the loaded net for reuse by the caller.
    """
    net = readNet(netFile, withConnections=False, withFoes=False)
    with open(outFile, 'w') as outf:
        outf.write('<tazs>\n')
        for taz, edges in computeBidiTaz(net, radius, useTravelDist):
            # stable output: edge ids are emitted in sorted order
            edge_ids = ' '.join(sorted([e.getID() for e in edges]))
            outf.write('    <taz id="%s" edges="%s"/>\n' % (taz.getID(), edge_ids))
        outf.write('</tazs>\n')
    return net
def main():
    """Collect statistics on a route attribute, optionally comparing two files."""
    options = get_options()
    net = None
    # named functions instead of lambda assignments (PEP 8 E731), matching
    # the style used elsewhere in this file
    if options.attribute == "length":
        net = readNet(options.network)

        def attribute_retriever(vehicle):
            return sum([net.getEdge(e).getLength()
                        for e in vehicle.route[0].edges.split()])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return float(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)

    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics("route %ss" % options.attribute,
                           histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = attribute_retriever(vehicle)
        if options.routeFile2 is None:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare route lengths between two files
        stats = Statistics("route %s difference" % options.attribute,
                           histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = attribute_retriever(vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def main():
    """Render each taz from the route file as a polygon."""
    # fixed seed keeps generated colors reproducible between runs
    random.seed(42)
    options = parse_args()
    net = readNet(options.net)
    with open(options.outfile, "w") as outf:
        outf.write("<polygons>\n")
        for taz in parse(options.routefile, "taz"):
            taz_edges = taz.edges.split()
            generate_poly(net, taz.id, options.colorgen(), options.layer,
                          taz_edges, outf)
        outf.write("</polygons>\n")
def main():
    """Write one polygon for every taz in the route file."""
    # fixed seed keeps generated colors reproducible between runs
    random.seed(42)
    options = parse_args()
    net = readNet(options.net)
    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for taz in parse(options.routefile, 'taz'):
            edge_ids = taz.edges.split()
            generate_poly(net, taz.id, options.colorgen(), options.layer,
                          edge_ids, outf)
        outf.write('</polygons>\n')
def get_edge_lengths(road_map_file_path):
    """Return the total lane length per vehicle class for the given network.

    For each class, lengths of all lanes of every edge that allows the
    class are summed (edge length times its lane count).
    """
    edges = net.readNet(road_map_file_path).getEdges()
    lengths = {}
    for vehicle in [u'passenger', u'motorcycle', u'bus', u'taxi']:
        # start value 0. keeps the result a float even when no edge matches
        lengths[vehicle] = sum(
            (edge.getLaneNumber() * edge.getLength()
             for edge in edges if edge.allows(vehicle)), 0.)
    return lengths
def main():
    """Collect statistics on a route attribute, optionally comparing two files."""
    options = get_options()
    net = None
    attribute_retriever = None
    # choose how to extract the requested attribute from a vehicle element
    if options.attribute == "length":
        net = readNet(options.network)

        def attribute_retriever(vehicle):
            return sum([net.getEdge(e).getLength() for e in vehicle.route[0].edges.split()])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return float(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)
    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics(
            "route %ss" % options.attribute, histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = attribute_retriever(vehicle)
        if options.routeFile2 is None:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare route lengths between two files
        stats = Statistics(
            "route %s difference" % options.attribute, histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = attribute_retriever(vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def __init__(self, network, nb_cars):
    """Load the network, generate a route file and start a TraCI simulation.

    :param network: path to the SUMO .net.xml file
    :param nb_cars: number of cars to generate routes for
    """
    self.network = net.readNet(network)
    self.nb_cars = nb_cars
    # id of the ego vehicle controlled by this class
    self.my_car_id = "my_car"
    # self.front_car_id = "front_car"
    self.generate_route_file()
    options = self.get_options()
    params = self.set_params(options)
    traci.start(params)
def __init__(self, net_file, giveway_value=1): self._net_obj = net.readNet(net_file, withPrograms=True, withConnections=True) # Information stored as dicts, which are simple and less likely to lead mistakes (intersection id can be used # each time to access correct data). self._tls_stages = stages_as_dict(self._net_obj, clean_stages=True) self._compatible_phases = stages_to_compatible_phases_matrix(self._tls_stages, giveway_value=giveway_value) # Conversion to numpy arrays which will be more efficient (some users may prefer this implementation). # More likely to lead to mistakes, but also opens up options for using map and reduce. self._tls_ids = self._tls_stages.keys() self._tls_stages_as_numpy_matrix = np.matrix([np.matrix(matrix) for matrix in self._tls_stages.values()]) # As key -> val order may differ in the other dictionary, we iter through the self._tls_ids list for the second numpy matrix self._compatible_phases_as_numpy_matrix = np.matrix([np.matrix(self._compatible_phases[tls_id]) for tls_id in self._tls_ids])
def main(options):
    """Partition the network's nodes with gpmetis and write per-part edge files.

    Expects the ``gpmetis`` binary on PATH; writes ``metisInputFile``, reads
    ``metisInputFile.part.<parts>`` and emits one ``edgesPart<i>`` file per
    partition.
    """
    net = readNet(options.network)
    nodes = net.getNodes()
    nodesDict = {}
    neighbors = []
    numNodes = len(nodes)
    numUndirectedEdges = 0
    for i in range(numNodes):
        nodesDict.update({nodes[i]: i})
        neighs = nodes[i].getNeighboringNodes()
        for n in neighs:
            # count each undirected edge only once: when the neighbour has
            # not been assigned an index yet
            if n not in nodesDict:
                numUndirectedEdges += 1
        neighbors.append(neighs)
    # write metis input file
    with codecs.open("metisInputFile", 'w', encoding='utf8') as f:
        f.write("%s %s\n" % (numNodes, numUndirectedEdges))
        for neighs in neighbors:
            # metis expects 1-based node indices
            f.write("%s\n" % (" ".join([str(i + 1) for i in [nodesDict[n] for n in neighs]])))
    # execute metis
    subprocess.call([
        "gpmetis", "-objtype=vol", "-contig", "metisInputFile", options.parts
    ])
    # get edges corresponding to partitions
    edges = [set() for _ in range(int(options.parts))]
    curr = 0
    with codecs.open("metisInputFile.part." + options.parts, 'r', encoding='utf8') as f:
        for line in f:
            # one line per node: the partition index it was assigned to
            part = int(line)
            nodeEdges = nodes[curr].getIncoming() + nodes[curr].getOutgoing()
            for e in nodeEdges:
                if e.getID() not in edges[part]:
                    edges[part].add(e.getID())
            curr += 1
    # write edges of partitions in separate files
    for i in range(len(edges)):
        with codecs.open("edgesPart" + str(i), 'w', encoding='utf8') as f:
            for eID in edges[i]:
                f.write("%s\n" % (eID))
def __init__(self, json_data, nogui=True, vehicle_mode_id=None, do_clean=False):
    """Initialize global settings, load the map and start a SUMO/TraCI run.

    :param json_data: parsed configuration dict ('general' and 'map' sections)
    :param nogui: start headless 'sumo' instead of 'sumo-gui'
    :param vehicle_mode_id: index used to label this TraCI connection
    :param do_clean: passed through to GeneralSettings.initialize
    """
    GeneralSettings.initialize(json_data['general'], do_clean)
    road_map_file_path = json_data['map']['map_location']
    RoadMapData.initialize(net.readNet(road_map_file_path),
                           json_data['map']['edges_occupancy_file'],
                           json_data['map']['landmarks_num'])
    # start SUMO and store connection
    self.conn_label = "v_mode_" + str(
        vehicle_mode_id) if vehicle_mode_id is not None else "sim_0"
    traci.start([
        checkBinary('sumo') if nogui else checkBinary('sumo-gui'), "-c",
        "{}/map.sumo.cfg".format(GeneralSettings.base_dir),
        "--no-warnings", "True",
        "--max-depart-delay", GeneralSettings.max_depart_delay
    ], label=self.conn_label)
    # NOTE(review): reaches into traci's private connection registry —
    # confirm no public accessor is available in the used traci version.
    self.conn = traci._connections[self.conn_label]
    # Init Road traffic control center
    self.rtc = RoadTrafficControl(self.conn)
    # Init Vehicle service
    self.vehicle_service = VehicleService(json_data, self.conn, self.rtc, vehicle_mode_id)
    self.rtc.set_vehicle_service_connection(self.vehicle_service)
    if GeneralSettings.debug_print:
        print('\n****** MAP STATISTICS ******')
        print('\tNumber of edges: {}'.format(
            len(RoadMapData.road_map.getEdges())))
        print('\tNumber of nodes: {}\n'.format(
            len(RoadMapData.road_map.getNodes())))
        print('\tTotal roads length: {}'.format(
            sum([
                edge.getLength() for edge in RoadMapData.road_map.getEdges()
            ])))
        print('\tTotal lanes length: {}'.format(
            sum([
                lane.getLength() for edge in RoadMapData.road_map.getEdges()
                for lane in edge.getLanes()
            ])))
def __init__(self):
    """Read configuration, load the SUMO net and build the road graph."""
    self.conf = config.Config()
    self.conf.readConfig(constants.CONFIG_FILE)
    self._options = self.__getOptions__()
    self.sumo_net = net.readNet(self.conf.network_file)
    self.original_network = nx.DiGraph()
    # ensure the output directory exists
    # (truthiness test replaces the "== False" comparison)
    if not os.path.isdir(self.conf.output_dir):
        print("there is not output directory...")
        os.mkdir(self.conf.output_dir)
        print("create output directory.")
    # choose the reader matching the configured network kind
    # (truthiness test replaces the "== True" comparison)
    if self.conf.real_net:
        netutil.readRealNetwork(self.sumo_net, self.original_network)
    else:
        netutil.readNetwork(self.sumo_net, self.original_network)
def main():
    """Collect route length statistics for one or two route files."""
    options = get_options()
    net = readNet(options.network)
    # removed: an unused local set of edge ids was computed here
    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics("route lengths", histogram=True,
                           scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = getRouteLength(net, vehicle)
        if options.routeFile2 is None:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare route lengths between two files
        stats = Statistics("route length difference", histogram=True,
                           scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = getRouteLength(net, vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def main(options):
    """Cut trips down to the valid area and write them sorted by depart."""
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    print("Valid area contains %s edges" % len(edges))

    def write_to_file(vehicles, f):
        # provenance comment plus the routes-file XML skeleton
        f.write(
            '<!-- generated with %s for %s from %s -->\n' %
            (os.path.basename(__file__), options.network, options.routeFiles))
        f.write((
            '<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
            'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n'
        ))
        num_trips = 0
        num_persons = 0
        for _, v in vehicles:
            if v.name == 'trip':
                num_trips += 1
            else:
                num_persons += 1
            # NOTE(review): `writer` is not defined in this function —
            # presumably a module-level callable; confirm it exists.
            writer(f, v)
        f.write('</routes>\n')
        if num_persons > 0:
            print("Wrote %s trips and %s persons" % (num_trips, num_persons))
        else:
            print("Wrote %s trips" % (num_trips))

    # collect taz ids that remain valid for trip filtering
    validTaz = set()
    if options.additional_input:
        for taz in parse(options.additional_input, 'taz'):
            validTaz.add(taz.id)
    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(cut_trips(edges, options, validTaz), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_trips(edges, options, validTaz))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main(options):
    """Cut trips down to the valid area and write them sorted by depart."""
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    print("Valid area contains %s edges" % len(edges))

    def write_to_file(vehicles, f):
        # provenance comment plus the routes-file XML skeleton
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(
            ('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
             'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n'))
        num_trips = 0
        num_persons = 0
        for _, v in vehicles:
            if v.name == 'trip':
                num_trips += 1
            else:
                num_persons += 1
            # NOTE(review): `writer` is not defined in this function —
            # presumably a module-level callable; confirm it exists.
            writer(f, v)
        f.write('</routes>\n')
        if num_persons > 0:
            print("Wrote %s trips and %s persons" % (num_trips, num_persons))
        else:
            print("Wrote %s trips" % (num_trips))

    # collect taz ids that remain valid for trip filtering
    validTaz = set()
    if options.additional_input:
        for taz in parse(options.additional_input, 'taz'):
            validTaz.add(taz.id)
    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(
                cut_trips(edges, options, validTaz), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_trips(edges, options, validTaz))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main():
    """Report route length statistics, optionally comparing two files."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    lengths = {}
    lengths2 = {}
    for vehicle in parse(options.routeFile, 'vehicle'):
        lengths[vehicle.id] = getRouteLength(net, vehicle)
    if options.routeFile2 is None:
        # statistics over a single file
        stats = Statistics("route lengths", histogram=True,
                           scale=options.binwidth)
        for veh_id, route_length in lengths.items():
            stats.add(route_length, veh_id)
    else:
        # per-vehicle difference statistics between the two files
        stats = Statistics("route length difference", histogram=True,
                           scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = getRouteLength(net, vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[veh_id] - lengths[veh_id], veh_id)
                        for veh_id in lengths.keys()]
            for val, veh_id in sorted(data):
                f.write("%s %s\n" % (val, veh_id))
def run_normal():
    """Run the configured number of simulation iterations across all modes."""
    options = get_options()
    with open(options.config_file) as json_file:
        json_data = json.load(json_file)

    # Clear output and statistics
    GeneralSettings.clear_output_dir(json_data['general']['statistics_output_dir'])
    GeneralSettings.clear_output_dir(json_data['general']['debug_output_dir'])

    num_of_iterations = json_data['general']['num_of_iterations']
    base_dir = json_data['general']['base_dir']
    road_map_file_path = json_data['map']['map_location']
    trip_settings = TripSettings(json_data['random_trips'])
    num_of_modes = len(json_data['vehicle_modes'])

    if num_of_iterations < 1:
        raise ValueError("Number of iterations must be greater than 0")

    edges = net.readNet(road_map_file_path).getEdges()

    # print() calls replace the py2-only print statements; the rest of this
    # block already used the function form
    print("Start of {} iterations".format(num_of_iterations))
    start = time.time()
    for i in range(0, num_of_iterations):
        # generate routes
        prepare_trips(base_dir, road_map_file_path, edges, trip_settings)
        lock = mp.Lock()
        func = partial(processor, options.config_file)
        # one worker process per vehicle mode
        processes = [Process(target=func, args=(lock, vehicle_mode))
                     for vehicle_mode in range(0, num_of_modes)]
        [p.start() for p in processes]
        [p.join() for p in processes]
        print("Finished {} iteration.".format(i))
    elapsed = time.time() - start
    print("Simulation elapsed seconds count: %02d" % elapsed)
    SimulationRunner.export_archive(base_dir)
    print("Finished!")
def __init__(self, netfile_filepath):
    """Load the SUMO network and build the intersection/TLS index mappings.

    Args:
        netfile_filepath: path to a SUMO ``.net.xml`` file; it is read with
            traffic-light programs and connections enabled.
    """
    netobj = net.readNet(netfile_filepath, withPrograms=True, withConnections=True)
    self._network_intersection_ids = []
    self._network_tls_ids = []
    # lane/edge -> index dictionaries (populated by the set methods below)
    self._network_intersection_input_lanes_to_indices_dict = None
    self._network_intersection_output_lanes_to_indices_dict = None
    self._network_intersection_output_edge_to_indices_dict = None
    self._network_intersection_input_lane_to_output_edge_to_link_index_dict = None
    # Ordered-array copies of the dicts above, used by the mapping functions
    self._network_intersection_input_lanes_to_indices = None
    self._network_intersection_output_lanes_to_indices = None
    self._network_intersection_output_edge_to_indices = None
    self._network_intersection_input_lane_to_output_edge_to_link_index = None
    # Populate the structures declared above
    self.set_intersection_in_and_out_lanes_and_ids(netobj)
    self.set_input_lane_to_output_edge_to_link_index_dict()
def get_edge_graph(netfile, undirected=False, additional_files=None):
    """Build a networkx graph whose nodes are the network's edges.

    Each node carries its lane IDs; graph edges follow the network's
    edge-to-edge connections. If ``additional_files`` is given (a path or a
    list of paths), detector definitions found there are attached to the
    node of the edge their lane belongs to.
    """
    sumo_net = readNet(netfile)
    graph = nx.Graph() if undirected else nx.DiGraph()
    for edge in sumo_net.getEdges():
        graph.add_node(edge.getID(),
                       lanes=[lane.getID() for lane in edge.getLanes()])
        for conn_list in edge.getOutgoing().values():
            for conn in conn_list:
                graph.add_edge(conn.getFrom().getID(),
                               conn.getTo().getID(),
                               direction=conn.getDirection())
    if additional_files is not None:
        edge_info = dict(graph.nodes.data())
        if isinstance(additional_files, str):
            additional_files = [additional_files]
        detector_tags = ('e1Detector', 'inductionLoop',
                         'e2Detector', 'laneAreaDetector')
        for addl_file in additional_files:
            tree = etree.parse(addl_file)
            for element in tree.iter():
                if element.tag in detector_tags:
                    lane_id = element.get('lane')
                    edge_id = sumo_net.getLane(lane_id).getEdge().getID()
                    info = dict(element.items())
                    info['type'] = element.tag
                    edge_info[edge_id][element.get('id')] = info
        nx.set_node_attributes(graph, edge_info)
    return graph
def main(options):
    """Cut routes, bus/train stops and TAZs down to a reduced network.

    Writes the kept stops/TAZs to ``options.stops_output`` and the cut
    routes (sorted by departure) to ``options.output``.
    """
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    orig_net = readNet(options.orig_net) if options.orig_net is not None else None
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route

    busStopEdges = {}
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
            'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        num_busstops = 0
        kept_busstops = 0
        num_taz = 0
        kept_taz = 0
        for busStop in parse(options.additional_input, ('busStop', 'trainStop')):
            num_busstops += 1
            # lane id minus the "_<index>" suffix yields the edge id
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                kept_busstops += 1
                if busStop.access:
                    busStop.access = [acc for acc in busStop.access
                                      if acc.lane[:-2] in edges]
                busStops.write(busStop.toXML(' ').decode('utf8'))
        for taz in parse(options.additional_input, 'taz'):
            num_taz += 1
            taz_edges = [e for e in taz.edges.split() if e in edges]
            if taz_edges:
                taz.edges = " ".join(taz_edges)
                if options.stops_output:
                    kept_taz += 1
                    busStops.write(taz.toXML(' '))
        if num_busstops > 0 and num_taz > 0:
            print("Kept %s of %s busStops and %s of %s tazs" % (
                kept_busstops, num_busstops, kept_taz, num_taz))
        elif num_busstops > 0:
            print("Kept %s of %s busStops" % (kept_busstops, num_busstops))
        elif num_taz > 0:
            print("Kept %s of %s tazs" % (kept_taz, num_taz))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # emit a full <routes> document, counting standalone routes and vehicles
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(
            ('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
             'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n'))
        num_routeRefs = 0
        num_vehicles = 0
        for _, v in vehicles:
            if v.name == 'route':
                num_routeRefs += 1
            else:
                num_vehicles += 1
            writer(f, v)
        f.write('</routes>\n')
        if num_routeRefs > 0:
            print("Wrote %s standalone-routes and %s vehicles" % (num_routeRefs, num_vehicles))
        else:
            print("Wrote %s %s" % (num_vehicles, output_type))

    if options.big:
        # write output unsorted, then sort out of memory
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(
                cut_routes(edges, orig_net, options, busStopEdges), f)
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main():
    """Score routes for implausibility and report/flag the implausible ones.

    Reads routes, reroutes them with duarouter to get shortest-path costs,
    computes an implausibility score per route, and writes restriction,
    XML, polygon and stdout reports for routes above the threshold.
    """
    options = get_options()
    if options.verbose:
        print("parsing network from", options.network)
    net = readNet(options.network, withInternal=True)
    read = 0
    routeInfos = {}  # id-> RouteInfo
    skipped = set()
    for routeFile in options.routeFiles:
        if options.verbose:
            print("parsing routes from", routeFile)
        idx = 0
        if options.standalone:
            for idx, route in enumerate(parse(routeFile, 'route')):
                if options.verbose and idx > 0 and idx % 100000 == 0:
                    print(idx, "routes read")
                addOrSkip(routeInfos, skipped, route.id, route, options.min_edges)
        else:
            if options.heterogeneous:
                for idx, vehicle in enumerate(parse(routeFile, 'vehicle')):
                    if options.verbose and idx > 0 and idx % 100000 == 0:
                        print(idx, "vehicles read")
                    addOrSkip(routeInfos, skipped, vehicle.id, vehicle.route[0], options.min_edges)
            else:
                # fast parsing: handle one vehicle per distinct id
                prev = (None, None)
                for vehicle, route in parse_fast_nested(routeFile, 'vehicle', 'id', 'route', 'edges'):
                    if prev[0] != vehicle.id:
                        if options.verbose and idx > 0 and idx % 500000 == 0:
                            print(idx, "vehicles read")
                        if prev[0] is not None:
                            addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
                        prev = (vehicle.id, route)
                        idx += 1
                if prev[0] is not None:
                    addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
        read += idx
    if options.verbose:
        print(read, "routes read", len(skipped), "short routes skipped")

    if options.verbose:
        print("calculating air distance and checking loops")
    for idx, ri in enumerate(routeInfos.values()):
        if options.verbose and idx > 0 and idx % 100000 == 0:
            print(idx, "routes checked")
        calcDistAndLoops(ri, net, options)

    prefix = os.path.commonprefix(options.routeFiles)
    duarouterOutput = prefix + '.rerouted.rou.xml'
    duarouterAltOutput = prefix + '.rerouted.rou.alt.xml'
    if os.path.exists(duarouterAltOutput) and options.reuse_routing:
        if options.verbose:
            print("reusing old duarouter file", duarouterAltOutput)
    else:
        if options.standalone:
            duarouterInput = prefix
            # generate suitable input file for duarouter
            duarouterInput += ".vehRoutes.xml"
            with open(duarouterInput, 'w') as outf:
                outf.write('<routes>\n')
                for rID, rInfo in routeInfos.items():
                    outf.write(' <vehicle id="%s" depart="0">\n' % rID)
                    outf.write(' <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                    outf.write(' </vehicle>\n')
                outf.write('</routes>\n')
        else:
            duarouterInput = ",".join(options.routeFiles)
        command = [sumolib.checkBinary('duarouter'), '-n', options.network,
                   '-r', duarouterInput, '-o', duarouterOutput, '--no-step-log',
                   '--routing-threads', str(options.threads),
                   '--routing-algorithm', 'astar', '--aggregate-warnings', '1']
        if options.verbose:
            command += ["-v"]
        if options.verbose:
            print("calling duarouter:", " ".join(command))
        subprocess.call(command)

    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        if vehicle.id in skipped:
            continue
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = sumolib.route.getLength(net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold or ri.edgeLoop or ri.nodeLoop:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    # write xml output
    if options.xmlOutput is not None:
        with open(options.xmlOutput, 'w') as outf:
            sumolib.writeXMLHeader(outf, "$Id$", options=options)  # noqa
            outf.write('<implausibleRoutes>\n')
            for score, rID, ri in sorted(implausible):
                edges = " ".join(ri.edges)
                outf.write(' <route id="%s" edges="%s" score="%s"/>\n' % (
                    rID, edges, score))
            outf.write('</implausibleRoutes>\n')

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        ignored = set([r.strip() for r in open(options.ignore_routes)])
        # BUG FIX: list entries are (score, id, info) tuples; compare the
        # route id (r[1]) against the ignore set — comparing the whole
        # tuple (as before) never matched, so nothing was ever ignored
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = prefix + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist, edgeLoop, nodeLoop)\n')
    for score, rID, ri in sorted(implausible):
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (ri.airDistRatio, ri.detourRatio, ri.detour,
                                                          ri.shortest_path_distance, ri.airDist,
                                                          ri.edgeLoop, ri.nodeLoop)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def __init__(self, netfile, weightfile=None):
    """Load a SUMO network and, if provided, an edge-weight file.

    Args:
        netfile: path to the network file.
        weightfile: optional path to a weight file; when given it is
            loaded immediately via ``load_weights``.
    """
    self.net = readNet(netfile)
    # routing cost defaults to travel time
    self.cost_attribute = 'traveltime'
    self.weightfile = weightfile
    if weightfile is not None:
        self.load_weights(weightfile)
def main(options):
    """Cut routes, stops, TAZs and public-transport lines to a reduced network.

    Writes kept stops/TAZs to ``options.stops_output``, optionally cuts the
    public-transport schedule (``options.pt_input``), and writes the cut
    routes to ``options.output`` (sorted by departure).
    """
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    orig_net = readNet(options.orig_net) if options.orig_net is not None else None
    print("Valid area contains %s edges" % len(edges))
    writer = write_trip if options.trips else write_route

    busStopEdges = {}
    if options.stops_output:
        busStops = io.open(options.stops_output, 'w', encoding="utf8")
        writeHeader(busStops, os.path.basename(__file__), 'additional')
    if options.additional_input:
        num_busstops = 0
        kept_busstops = 0
        num_taz = 0
        kept_taz = 0
        for busStop in parse(options.additional_input, ('busStop', 'trainStop')):
            num_busstops += 1
            # strip the "_<laneIndex>" suffix to obtain the edge id
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                kept_busstops += 1
                if busStop.access:
                    busStop.access = [acc for acc in busStop.access if acc.lane[:-2] in edges]
                busStops.write(busStop.toXML(u' '))
        for taz in parse(options.additional_input, 'taz'):
            num_taz += 1
            taz_edges = [e for e in taz.edges.split() if e in edges]
            if taz_edges:
                taz.edges = " ".join(taz_edges)
                if options.stops_output:
                    kept_taz += 1
                    busStops.write(taz.toXML(u' '))
        if num_busstops > 0 and num_taz > 0:
            print("Kept %s of %s busStops and %s of %s tazs" % (
                kept_busstops, num_busstops, kept_taz, num_taz))
        elif num_busstops > 0:
            print("Kept %s of %s busStops" % (kept_busstops, num_busstops))
        elif num_taz > 0:
            print("Kept %s of %s tazs" % (kept_taz, num_taz))
    if options.stops_output:
        busStops.write(u'</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # emit a full <routes> document, tallying written elements by tag
        writeHeader(f, os.path.basename(__file__), 'routes')
        numRefs = defaultdict(int)
        for _, v in vehicles:
            if options.trips and v.name == "vehicle":
                numRefs["trip"] += 1
            else:
                numRefs[v.name] += 1
            if v.name == "vType":
                f.write(v.toXML(u' '))
            else:
                writer(f, v)
        f.write(u'</routes>\n')
        if numRefs:
            print("Wrote", ", ".join(["%s %ss" % (k[1], k[0]) for k in sorted(numRefs.items())]))
        else:
            print("Wrote nothing")

    startEndEdgeMap = {}
    if options.pt_input:
        # temporarily reroute parsing to the public-transport input
        allRouteFiles = options.routeFiles
        options.routeFiles = [options.pt_input]
        startEndRouteEdge = {}
        with io.open(options.pt_output if options.pt_output else options.pt_input + ".cut",
                     'w', encoding="utf8") as f:
            writeHeader(f, os.path.basename(__file__), 'routes')
            for _, v in cut_routes(edges, orig_net, options, busStopEdges):
                f.write(v.toXML(u' '))
                if v.name == "route":
                    routeEdges = v.edges.split()
                    startEndRouteEdge[v.id] = (routeEdges[0], routeEdges[-1])
                elif isinstance(v.route, list):
                    routeEdges = v.route[0].edges.split()
                    startEndEdgeMap[v.line] = (routeEdges[0], routeEdges[-1])
                elif v.route is not None:
                    startEndEdgeMap[v.line] = startEndRouteEdge[v.route]
            f.write(u'</routes>\n')
        options.routeFiles = allRouteFiles

    if options.big:
        # write output unsorted, then sort out of memory
        tmpname = options.output + ".unsorted"
        with io.open(tmpname, 'w', encoding="utf8") as f:
            write_to_file(cut_routes(edges, orig_net, options, busStopEdges, startEndEdgeMap), f)
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges, startEndEdgeMap))
        routes.sort(key=lambda v: v[0])
        with io.open(options.output, 'w', encoding="utf8") as f:
            write_to_file(routes, f)
def main():
    """Score routes for implausibility and report the implausible ones.

    Reroutes the input with duarouter to obtain shortest-path costs, scores
    each route, then writes restrictions, polygons and a stdout report for
    routes whose score exceeds the threshold.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)

    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri

    for rInfo in routeInfos.values():
        # straight-line distance between first and last edge shape points
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist

    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write(' <vehicle id="%s" depart="0">\n' % rID)
                outf.write(' <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write(' </vehicle>\n')
            outf.write('</routes>\n')

    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert (routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(
                net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") %
                                     (vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    # "overall" typo fixed in the statistics label
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        ri.implausibility = (
            options.airdist_ratio_factor * ri.airDistRatio +
            options.detour_factor * ri.detour +
            options.detour_ratio_factor * ri.detourRatio +
            max(0, options.min_dist / ri.shortest_path_distance - 1) +
            max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        ignored = set([r.strip() for r in open(options.ignore_routes)])
        # BUG FIX: entries are (score, id, info) tuples; compare the route id
        # (r[1]) against the ignore set — the old tuple comparison never matched
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" %
              (len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write(
        'score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID,
                                             (ri.airDistRatio, ri.detourRatio,
                                              ri.detour, ri.shortest_path_distance,
                                              ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def main():
    """Score routes for implausibility and report the implausible ones.

    Variant that calls ``generate_poly`` with explicit geometry/blur
    arguments; otherwise identical workflow: duarouter reroute, score,
    write restrictions/polygons and a stdout report.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)

    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri

    for rInfo in routeInfos.values():
        # straight-line distance between first and last edge shape points
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist

    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write(' <vehicle id="%s" depart="0">\n' % rID)
                outf.write(' <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write(' </vehicle>\n')
            outf.write('</routes>\n')

    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    # "overall" typo fixed in the statistics label
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        ignored = set([r.strip() for r in open(options.ignore_routes)])
        # BUG FIX: entries are (score, id, info) tuples; compare the route id
        # (r[1]) against the ignore set — the old tuple comparison never matched
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(net, rID, colorgen(), 100, False, ri.edges, options.blur, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID,
                                             (ri.airDistRatio, ri.detourRatio, ri.detour,
                                              ri.shortest_path_distance, ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def __init__(self, environment, cars_number):
    """Set up the SUMO/traci simulation and initialise the Q-learning tables.

    Args:
        environment: path to the SUMO network file.
        cars_number: number of cars in the simulation.
    """
    # Initialise the environment from traci
    self.environment = net.readNet(environment)
    self.nbrCars = cars_number
    self.myControlledCarId = "my_car"
    self.generate_network_file()
    options = self.get_options()
    params = self.set_parameters(options)
    traci.start(params)
    self.totalRewards = 1
    # Learning hyper-parameters: alpha (learning rate), gamma (discount
    # factor), epsilon (exploration probability)
    self.alpha = 0.1
    self.gamma = 0.99
    self.epsilon = 0.1
    # State discretisation: each observed quantity is binned into
    # nb_values evenly spaced values rounded to the given decimals
    self.space_headway = {"min": 0., "max": 150., "decimals": 0, "nb_values": 6}
    self.relative_speed = {"min": -8.33, "max": 8.33, "decimals": 2, "nb_values": 6}
    self.speed = {"min": 0., "max": 13.89, "decimals": 2, "nb_values": 6}
    # Actions to take -- 1: accelerate, -1: decelerate, 0: nothing
    self.action = [1, -1, 0]
    # Map each discretised observation value to its table index
    self.index_space_headway = {
        round(value, self.space_headway.get('decimals')): position
        for position, value in enumerate(np.linspace(self.space_headway.get('min'),
                                                     self.space_headway.get('max'),
                                                     self.space_headway.get('nb_values')))}
    self.index_relative_speed = {
        round(value, self.relative_speed.get('decimals')): position
        for position, value in enumerate(np.linspace(self.relative_speed.get('min'),
                                                     self.relative_speed.get('max'),
                                                     self.relative_speed.get('nb_values')))}
    self.index_speed = {
        round(value, self.speed.get('decimals')): position
        for position, value in enumerate(np.linspace(self.speed.get('min'),
                                                     self.speed.get('max'),
                                                     self.speed.get('nb_values')))}
    self.index_action = {act: position for position, act in enumerate(self.action)}
    # 4-dimensional Q-table, initialised to zeros
    self.q = np.zeros([len(self.index_space_headway),
                       len(self.index_relative_speed),
                       len(self.index_speed),
                       len(self.index_action)])
def get_lane_graph(netfile, undirected=False, detector_def_files=None, tls_output_def_files=None):
    """Build a networkx graph whose nodes are the network's lanes.

    Graph edges follow lane-to-lane connections and carry their direction
    and traffic-light id. Detector definitions (``detector_def_files``) are
    attached to the node of the lane they sit on; TLS switch-time output
    definitions (``tls_output_def_files``) are attached to the connections
    (and incoming lanes) controlled by the corresponding traffic light.
    """
    sumo_net = readNet(netfile)
    graph = nx.Graph() if undirected else nx.DiGraph()
    for edge in sumo_net.getEdges():
        for lane in edge.getLanes():
            graph.add_node(lane.getID())

    tls_to_edges = {}
    for node in sumo_net.getNodes():
        for conn in node.getConnections():
            tls_id = conn.getTLSID()
            if tls_id not in tls_to_edges:
                tls_to_edges[tls_id] = []
            edge_from_to = (conn.getFromLane().getID(), conn.getToLane().getID())
            graph.add_edge(*edge_from_to, direction=conn.getDirection(), tls=tls_id)
            tls_to_edges[tls_id].append(edge_from_to)

    # sanity check: the per-node view must match the per-traffic-light view
    tls_to_edges_2 = {
        tl.getID(): [tuple([lane.getID() for lane in conn[:-1]])
                     for conn in tl.getConnections()]
        for tl in sumo_net.getTrafficLights()}
    assert tls_to_edges == tls_to_edges_2

    if detector_def_files is not None:
        if isinstance(detector_def_files, six.string_types):
            detector_def_files = [detector_def_files]
        for detfile in detector_def_files:
            # tag filtering assumes lxml's iterparse — stdlib ElementTree
            # does not accept a 'tag' argument here
            tree = etree.iterparse(detfile, tag=[
                'e1Detector', 'inductionLoop', 'e2Detector', 'laneAreaDetector'])
            for _, element in tree:
                lane_id = element.get('lane')
                if lane_id in graph.nodes:
                    if 'detectors' not in graph.node[lane_id]:
                        graph.node[lane_id]['detectors'] = {}
                    info = dict(element.items())
                    info['type'] = element.tag
                    graph.node[lane_id]['detectors'][element.get('id')] = info
                element.clear()

    if tls_output_def_files is not None:
        if isinstance(tls_output_def_files, six.string_types):
            tls_output_def_files = [tls_output_def_files]
        for tlsfile in tls_output_def_files:
            tree = etree.iterparse(tlsfile, tag='timedEvent')
            for _, element in tree:
                if element.get('type') == 'SaveTLSSwitchTimes':
                    tls_id = element.get('source')
                    for conn_edge in tls_to_edges[tls_id]:
                        graph.edges[conn_edge].update(
                            {'tls_output_info': dict(element.items())})
                        in_lane = conn_edge[0]
                        if 'tls_output_info' not in graph.nodes[in_lane]:
                            graph.nodes[in_lane].update(
                                {'tls_output_info': dict(element.items())})
                element.clear()
    return graph
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Print all node IDs of a SUMO network as one quoted, comma-separated list."""

from sumolib import net
import sys

if __name__ == '__main__':
    if len(sys.argv) < 2:
        # fixed: Python-2 print statements converted to print() calls
        print("Usage: " + sys.argv[0] + " <input network file>")
        sys.exit()
    sumo_net = net.readNet(sys.argv[1])
    nodes = sumo_net.getNodes()
    # join replaces the quadratic string concatenation; like the original,
    # nothing is printed when the network has no nodes
    if nodes:
        print('"' + ",".join(n.getID() for n in nodes) + '"')