def main(args):
    """Generate one polygon per route (or per vehicle's route) found in the
    given route files, writing them all to options.outfile."""
    options = parse_args(args)
    net = readNet(options.net)
    seen_ids = set()

    def unique_id(cand, index=0):
        # append '#<n>' with increasing n until the id is unused
        while True:
            candidate = cand if index == 0 else "%s#%s" % (cand, index)
            if candidate not in seen_ids:
                seen_ids.add(candidate)
                return candidate
            index += 1

    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for routefile in options.routefiles:
            print("parsing %s" % routefile)
            if options.standalone:
                # free-standing <route> elements
                for route in parse(routefile, 'route'):
                    generate_poly(net, unique_id(route.id), options.colorgen(),
                                  options.layer, options.geo,
                                  route.edges.split(), options.blur, outf)
            else:
                # routes embedded in <vehicle> elements
                for vehicle in parse(routefile, 'vehicle'):
                    generate_poly(net, unique_id(vehicle.id), options.colorgen(),
                                  options.layer, options.geo,
                                  vehicle.route[0].edges.split(), options.blur, outf)
        outf.write('</polygons>\n')
def write_diff(options):
    """Compare two tripinfo files and write per-vehicle attribute differences
    (new minus original); prints duration statistics afterwards."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime works just fine for floats
    conversions = dict([(a, parseTime) for a in attrs])
    vehicles_orig = OrderedDict(
        (v.id, v) for v in parse(options.orig, 'tripinfo', attr_conversions=conversions))
    origDurations = Statistics('original durations', histogram=options.useHist, scale=options.histScale)
    durations = Statistics('new durations', histogram=options.useHist, scale=options.histScale)
    durationDiffs = Statistics('duration differences', histogram=options.useHist, scale=options.histScale)
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(options.new, 'tripinfo', attr_conversions=conversions):
            if v.id not in vehicles_orig:
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
                continue
            vOrig = vehicles_orig.pop(v.id)
            diffs = [v.getAttribute(a) - vOrig.getAttribute(a) for a in attrs]
            durations.add(v.duration, v.id)
            origDurations.add(vOrig.duration, v.id)
            durationDiffs.add(v.duration - vOrig.duration, v.id)
            diffAttrs = ''.join(' %sDiff="%s"' % (a, x) for a, x in zip(attrs, diffs))
            f.write(' <vehicle id="%s"%s/>\n' % (v.id, diffAttrs))
        # whatever is left only occurred in the original file
        for leftover in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % leftover)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Compare vehicle route lengths between two route files on the same
    network and print/write difference statistics.

    (Python 2 style: uses print statements.)
    """
    options = get_options()
    net = readNet(options.network)
    # NOTE(review): 'edges' is computed but never used below - confirm intended
    edges = set([e.getID() for e in net.getEdges()])
    lengths1 = {}
    lengths2 = {}
    lengthDiffStats = Statistics(
        "route length difference", histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile1, 'vehicle'):
        lengths1[vehicle.id] = getRouteLength(net, vehicle)
    for vehicle in parse(options.routeFile2, 'vehicle'):
        lengths2[vehicle.id] = getRouteLength(net, vehicle)
        # assumes every vehicle in routeFile2 also occurs in routeFile1
        # (KeyError otherwise) - TODO confirm
        lengthDiffStats.add(
            lengths2[vehicle.id] - lengths1[vehicle.id], vehicle.id)
    print lengthDiffStats
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in lengthDiffStats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            # sorted per-vehicle differences, ascending by difference
            differences = sorted(
                [(lengths2[id] - lengths1[id], id) for id in lengths1.keys()])
            for diff, id in differences:
                f.write("%s %s\n" % (diff, id))
def write_persondiff(options):
    """Compare two personinfo files and write per-person plan-stat
    differences (new minus original); prints duration statistics."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength", "waitingTime"]
    conversions = dict([(a, parseTime) for a in attrs])
    persons_orig = OrderedDict(
        (p.id, p) for p in parse(options.orig, 'personinfo', attr_conversions=conversions))
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    statAttrs = ["duration", "walkTimeLoss", "rideWait", "walks", "accesses", "rides", "stops"]
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for p in parse(options.new, 'personinfo', attr_conversions=conversions):
            if p.id not in persons_orig:
                f.write(' <personinfo id="%s" comment="new"/>\n' % p.id)
                continue
            pOrig = persons_orig.pop(p.id)
            stats = plan_stats(p)
            statsOrig = plan_stats(pOrig)
            diffs = [a - b for a, b in zip(stats, statsOrig)]
            # stats[0] is the overall duration
            durations.add(stats[0], p.id)
            origDurations.add(statsOrig[0], p.id)
            durationDiffs.add(stats[0] - statsOrig[0], p.id)
            diffAttrs = ''.join(' %sDiff="%s"' % (a, x) for a, x in zip(statAttrs, diffs))
            f.write(' <personinfo id="%s"%s/>\n' % (p.id, diffAttrs))
        # whatever remains only occurred in the original file
        for leftover in persons_orig.keys():
            f.write(' <personinfo id="%s" comment="missing"/>\n' % leftover)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out):
    """Write per-edge attribute deltas (new - old) between two meandata files.

    Intervals and edges are paired positionally via zip, so both files must
    list the same intervals and edges in the same order (an assert guards
    the edge ids). Per-attribute Statistics are printed at the end.
    """
    diffStats = defaultdict(Statistics)
    with open(out, 'w') as f:
        f.write("<meandata>\n")
        for interval_old, interval_new in zip(parse(orig, 'interval'),
                                              parse(new, 'interval')):
            f.write(' <interval begin="%s" end="%s">\n' %
                    (interval_old.begin, interval_old.end))
            for edge_old, edge_new in zip(interval_old.edge, interval_new.edge):
                assert (edge_old.id == edge_new.id)
                f.write(' <edge id="%s"' % edge_old.id)
                for attr in edge_old._fields:
                    if attr == 'id':
                        continue
                    try:
                        delta = float(getattr(edge_new, attr)) - \
                            float(getattr(edge_old, attr))
                        diffStats[attr].add(delta, edge_old.id)
                        f.write(' %s="%s"' % (attr, delta))
                    # BUGFIX: was a bare 'except:', which also swallowed
                    # SystemExit/KeyboardInterrupt; only non-numeric or
                    # missing attributes should be skipped
                    except Exception:
                        pass
                f.write("/>\n")
            f.write("</interval>\n")
        f.write("</meandata>\n")
    for attr, stats in diffStats.items():
        stats.label = attr
        print(stats)
def main():
    """Collect statistics on a per-vehicle route attribute (length, depart
    or numEdges), optionally comparing two route files (file2 minus file1).
    """
    options = get_options()
    net = None
    attribute_retriever = None
    if options.attribute == "length":
        # only the 'length' attribute needs the network
        net = readNet(options.network)

        def attribute_retriever(vehicle):
            # sum of edge lengths along the vehicle's (first) route
            return sum([net.getEdge(e).getLength() for e in vehicle.route[0].edges.split()])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return float(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)
    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics(
            "route %ss" % options.attribute, histogram=True, scale=options.binwidth)
    # first file is always parsed; its values are kept for a possible diff
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = attribute_retriever(vehicle)
        if options.routeFile2 is None:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare route lengths between two files
        stats = Statistics(
            "route %s difference" % options.attribute, histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = attribute_retriever(vehicle)
            # assumes the vehicle also occurs in the first file - TODO confirm
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def write_diff(orig, new, out, earliest_out=None):
    """Write per-vehicle depart/arrival/exit-time differences (new - orig)
    between two route files whose routes carry 'exitTimes'.

    If earliest_out is given, also record for each difference value the
    earliest time at which it occurred.
    """
    attr_conversions = {"depart": parseTime, "arrival": parseTime}
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, 'vehicle',
                                                   attr_conversions=attr_conversions)])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<routeDiff>\n")
        for v in parse(new, 'vehicle', attr_conversions=attr_conversions):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = v.depart - vOrig.depart
                arrivalDiff = v.arrival - vOrig.arrival
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # BUGFIX: materialize as lists. In Python 3 map() returns a
                # one-shot iterator, so origExitTimes was exhausted by the zip
                # building exitTimesDiff and the earliest-diff loop below
                # never received any data.
                exitTimes = list(map(parseTime, v.route[0].exitTimes.split()))
                origExitTimes = list(map(parseTime, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [
                    e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(v.arrival - v.depart, v.id)
                origDurations.add(vOrig.arrival - vOrig.depart, v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(
                    earliest_diffs, departDiff, vOrig.depart, v.id + ' (depart)')
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write(''' <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n''' % (
                    v.id, departDiff, arrivalDiff, ' '.join(map(str, exitTimesDiff))))
                del vehicles_orig[v.id]
            else:
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Collect all edges referenced by routes, walks, trips and flows in the
    given route files and write them as an edge-selection file."""
    options = parse_args()
    edges = set()
    for routefile in options.routefiles:
        for route in parse_fast(routefile, 'route', ['edges']):
            edges.update(route.edges.split())
        for walk in parse_fast(routefile, 'walk', ['edges']):
            edges.update(walk.edges.split())
        # warn about potentially missing edges
        for trip in parse(routefile, ['trip', 'flow'], heterogeneous=True):
            edges.add(trip.attr_from)
            edges.add(trip.to)
            if trip.via is not None:
                edges.update(trip.via.split())
            print("Warning: Trip %s is not guaranteed to be connected within the extracted edges." % trip.id)
        for walk in parse_fast(routefile, 'walk', ['from', 'to']):
            edges.add(walk.attr_from)
            edges.add(walk.to)
            print("Warning: Walk from %s to %s is not guaranteed to be connected within the extracted edges." % (
                walk.attr_from, walk.to))
    with open(options.outfile, 'w') as outf:
        outf.writelines('edge:%s\n' % e for e in sorted(edges))
def main(tripinfos, lengthThreshold=0.1):
    """Report walk-factor statistics (average speed / maxSpeed) for all
    walks in a tripinfo file; zero-duration and very short walks are
    collected separately."""
    lengthThreshold = float(lengthThreshold)
    stats = Statistics('walkfactor')
    statsZeroDuration = Statistics('length of zero-duration walks')
    statsShort = Statistics('duration of short walks (length <= %s)' % lengthThreshold)
    numUnfinished = 0
    for personinfo in parse(tripinfos, 'personinfo'):
        if not personinfo.hasChild('walk'):
            continue
        for i, walk in enumerate(personinfo.walk):
            # a negative arrival marks an unfinished walk
            if walk.arrival[0] == '-':
                numUnfinished += 1
                continue
            walkID = "%s.%s" % (personinfo.id, i)
            duration = float(walk.duration)
            routeLength = float(walk.routeLength)
            if duration <= 0:
                statsZeroDuration.add(routeLength, walkID)
            elif routeLength <= lengthThreshold:
                statsShort.add(duration, walkID)
            else:
                walkFactor = (routeLength / duration) / float(walk.maxSpeed)
                stats.add(walkFactor, walkID)
    print(stats)
    if statsZeroDuration.count() > 0:
        print(statsZeroDuration)
    if statsShort.count() > 0:
        print(statsShort)
    if numUnfinished > 0:
        print("unfinished walks: %s" % numUnfinished)
def write_diff(options):
    """Write per-edge attribute deltas (new - old) between two meandata files.

    Intervals are paired positionally via zip; within an interval edges are
    matched by id and edges missing from the new file are skipped. For
    'std_*' attributes (unless --no-statistics) the values are combined as
    sqrt(new^2 + old^2); otherwise the delta may be reported relative to the
    old value or replaced by the GEH measure, depending on options.
    """
    diffStats = defaultdict(Statistics)
    with open(options.out, 'w') as f:
        f.write("<meandata>\n")
        for interval_old, interval_new in zip(
                parse(options.orig, 'interval', heterogeneous=True),
                parse(options.new, 'interval', heterogeneous=True)):
            f.write(' <interval begin="%s" end="%s">\n' % (interval_old.begin, interval_old.end))
            interval_new_edges = dict([(e.id, e) for e in interval_new.edge])
            for edge_old in interval_old.edge:
                edge_new = interval_new_edges.get(edge_old.id, None)
                if edge_new is None:
                    # edge only present in the old file: skip
                    continue
                assert (edge_old.id == edge_new.id)
                f.write(' <edge id="%s"' % edge_old.id)
                for attr in edge_old._fields:
                    if attr == 'id':
                        continue
                    try:
                        val_new = float(getattr(edge_new, attr))
                        val_old = float(getattr(edge_old, attr))
                        delta = val_new - val_old
                        if not options.no_statistics and attr.startswith(
                                'std_'):
                            # combine standard deviations instead of differencing
                            delta = math.sqrt(val_new**2 + val_old**2)
                        else:
                            if options.relative:
                                if val_old != 0:
                                    delta /= abs(val_old)
                                else:
                                    # avoid division by zero: report sentinel value
                                    delta = options.undefined
                            elif options.geh:
                                delta = geh(val_new, val_old)
                        diffStats[attr].add(delta, edge_old.id)
                        f.write(' %s="%s"' % (attr, delta))
                    except Exception:
                        # non-numeric attribute value: skip silently
                        pass
                f.write("/>\n")
            f.write(" </interval>\n")
        f.write("</meandata>\n")
    for attr, stats in diffStats.items():
        stats.label = attr
        print(stats)
def write_diff(orig, new, out, earliest_out=None):
    """Write per-vehicle depart/arrival/exit-time differences (new - orig)
    between two route files whose routes carry 'exitTimes'.

    If earliest_out is given, also record for each difference value the
    earliest time at which it occurred.
    """
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, 'vehicle')])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<routeDiff>\n")
        for v in parse(new, 'vehicle'):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = float(v.depart) - float(vOrig.depart)
                arrivalDiff = float(v.arrival) - float(vOrig.arrival)
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # BUGFIX: materialize as lists. In Python 3 map() returns a
                # one-shot iterator, so origExitTimes was exhausted by the zip
                # building exitTimesDiff and the earliest-diff loop below
                # never received any data.
                exitTimes = list(map(float, v.route[0].exitTimes.split()))
                origExitTimes = list(map(float, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [
                    e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(float(v.arrival) - float(v.depart), v.id)
                origDurations.add(
                    float(vOrig.arrival) - float(vOrig.depart), v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(
                    earliest_diffs, departDiff, vOrig.depart, v.id + ' (depart)')
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write(''' <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n''' % (
                    v.id, departDiff, arrivalDiff, ' '.join(map(str, exitTimesDiff))))
                del vehicles_orig[v.id]
            else:
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(options):
    """Compare two tripinfo files and write per-vehicle attribute diffs
    (new - original), optionally restricted to ids containing
    options.filterIDs; prints counts and duration statistics.
    """
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime works just fine for floats
    attr_conversions = dict([(a, parseTime) for a in attrs])
    vehicles_orig = OrderedDict([(v.id, v) for v in parse(options.orig, 'tripinfo',
                                                          attr_conversions=attr_conversions)])
    descr = ""
    if options.filterIDs:
        descr = " (%s)" % options.filterIDs
    origDurations = Statistics('original durations%s' % descr,
                               histogram=options.useHist, scale=options.histScale)
    durations = Statistics('new durations%s' % descr,
                           histogram=options.useHist, scale=options.histScale)
    durationDiffs = Statistics('duration differences%s new-old' % descr,
                               histogram=options.useHist, scale=options.histScale)
    numNew = 0
    numMissing = 0
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(options.new, 'tripinfo', attr_conversions=attr_conversions):
            if options.filterIDs and options.filterIDs not in v.id:
                # BUGFIX: a filtered vehicle may exist only in the new file;
                # the previous plain 'del' raised KeyError in that case
                vehicles_orig.pop(v.id, None)
                continue
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                diffs = [v.getAttribute(a) - vOrig.getAttribute(a) for a in attrs]
                durations.add(v.duration, v.id)
                origDurations.add(vOrig.duration, v.id)
                durationDiffs.add(v.duration - vOrig.duration, v.id)
                diffAttrs = ''.join([' %sDiff="%s"' % (a, x) for a, x in zip(attrs, diffs)])
                f.write(' <vehicle id="%s"%s/>\n' % (v.id, diffAttrs))
                del vehicles_orig[v.id]
            else:
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
                numNew += 1
        # NOTE(review): vehicles present only in the original file are reported
        # as missing even when they do not match filterIDs - confirm intended
        for id in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % id)
            numMissing += 1
        f.write("</tripDiffs>\n")
    if numMissing > 0:
        print("missing: %s" % numMissing)
    if numNew > 0:
        print("new: %s" % numNew)
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main(options):
    """Cut routes from larger route files down to the part that lies within
    the given network, optionally writing contained bus stops and sorting
    the resulting file by departure."""
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route
    busStopEdges = {}
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n'
        )
    if options.additional_input:
        for busStop in parse(options.additional_input, 'busStop'):
            # strip the lane index suffix to get the edge id
            # NOTE(review): '[:-2]' assumes a single-digit lane index - confirm
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                busStops.write(busStop.toXML(' '))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # document provenance of the generated file
        f.write(
            '<!-- generated with %s for %s from %s -->\n' % (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(
            '<%s xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n' % output_type)
        num_routes = 0
        for _, v in vehicles:
            num_routes += 1
            writer(f, v)
        f.write('</%s>\n' % output_type)
        print("Wrote %s %s" % (num_routes, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(cut_routes(edges, orig_net, options, busStopEdges), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        # sort in memory by departure time (first tuple element)
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def write_diff(orig, new, out, earliest_out=None):
    """Write per-vehicle depart/arrival/exit-time differences (new - orig)
    between two route files whose routes carry 'exitTimes'.

    If earliest_out is given, also record for each difference value the
    earliest time at which it occurred.
    """
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, "vehicle")])
    origDurations = Statistics("original durations")
    durations = Statistics("new durations")
    durationDiffs = Statistics("duration differences")
    with open(out, "w") as f:
        f.write("<routeDiff>\n")
        for v in parse(new, "vehicle"):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = float(v.depart) - float(vOrig.depart)
                arrivalDiff = float(v.arrival) - float(vOrig.arrival)
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # BUGFIX: materialize as lists. In Python 3 map() returns a
                # one-shot iterator, so origExitTimes was exhausted by the zip
                # building exitTimesDiff and the earliest-diff loop below
                # never received any data.
                exitTimes = list(map(float, v.route[0].exitTimes.split()))
                origExitTimes = list(map(float, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(float(v.arrival) - float(v.depart), v.id)
                origDurations.add(float(vOrig.arrival) - float(vOrig.depart), v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(earliest_diffs, departDiff, vOrig.depart, v.id + " (depart)")
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write(
                    """ <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n"""
                    % (v.id, departDiff, arrivalDiff, " ".join(map(str, exitTimesDiff)))
                )
                del vehicles_orig[v.id]
            else:
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, "w") as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Generate one polygon per vehicle route in the route file."""
    options = parse_args()
    net = readNet(options.net)
    with open(options.outfile, 'w') as outf:
        outf.write('<polygons>\n')
        for vehicle in parse(options.routefile, 'vehicle'):
            # a vehicle's route is the first <route> child
            route_edges = vehicle.route[0].edges.split()
            generate_poly(net, vehicle.id, options.colorgen(),
                          options.layer, route_edges, outf)
        outf.write('</polygons>\n')
def cut_routes(areaEdges, orig_net, options):
    """Generator: cut each vehicle route down to the part inside areaEdges,
    yielding (newDepart, id, type, edges) tuples and printing summary
    statistics at the end.

    (Python 2 style: uses print statements.)
    """
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    multiAffectedRoutes = 0  # routes which enter the sub-scenario multiple times
    teleportFactorSum = 0.0
    too_short = 0
    for routeFile in options.routeFiles:
        print "Parsing routes from %s" % routeFile
        for vehicle in parse(routeFile, "vehicle"):
            num_vehicles += 1
            edges = vehicle.route[0].edges.split()
            fromIndex = getFirstIndex(areaEdges, edges)
            if fromIndex is None:
                continue  # route does not touch the area
            toIndex = len(edges) - 1 - getFirstIndex(areaEdges, reversed(edges))
            # check for minimum length
            if toIndex - fromIndex + 1 < options.min_length:
                too_short += 1
                continue
            # check for connectivity
            if missingEdges(areaEdges, edges[fromIndex:toIndex], missingEdgeOccurences):
                multiAffectedRoutes += 1
                if options.disconnected_action == "discard":
                    continue
            # compute new departure
            if vehicle.route[0].exitTimes is None:
                if orig_net is not None:
                    # extrapolate new departure using default speed
                    newDepart = float(vehicle.depart) + sum(
                        [
                            (orig_net.getEdge(e).getLength()
                             / (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                            for e in edges[:fromIndex]
                        ]
                    )
                else:
                    # NOTE(review): this string literal was broken across a line
                    # in the source; rejoined here without other changes
                    print "Could not reconstruct new departure for vehicle '%s'. Using old departure" % vehicle.id
                    newDepart = vehicle.depart
            else:
                exitTimes = vehicle.route[0].exitTimes.split()
                departTimes = [vehicle.depart] + exitTimes[:-1]
                teleportFactor = len(departTimes) / float(len(edges))
                teleportFactorSum += teleportFactor
                # assume teleports were spread evenly across the vehicles route
                newDepart = int(float(departTimes[int(fromIndex * teleportFactor)]))
            yield (newDepart, vehicle.id, vehicle.type, edges[fromIndex : toIndex + 1])
            num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print "Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports)
    if too_short > 0:
        # NOTE(review): "Discared" typo kept intentionally (runtime string)
        print "Discared %s routes because they have less than %s edges" % (too_short, options.min_length)
    print "Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes
    printTop(missingEdgeOccurences)
def _parse_vehicle_info(routes):
    """Collect SUMO duration/distance statistics for vehicles and persons
    from an output file and build per-entity stat tuples.

    Returns a list of tuples: the '_'-separated parts of each id followed by
    brace-wrapped duration and length strings (format consumed downstream;
    presumably an array literal for a database - TODO confirm).
    """
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    stats = []
    for v in output.parse(routes, ('vehicle', 'person')):
        # skip background traffic and entities with a triggered (unknown) depart
        if not v.id.endswith(
                BACKGROUND_TRAFFIC_SUFFIX) and v.depart != "triggered":
            duration = float(v.arrival) - float(v.depart)
            length = float(v.routeLength) if v.routeLength else 0
            sumoTime.add(duration, v.id)
            sumoDist.add(length, v.id)
            if v.name == "vehicle":
                stats.append(
                    tuple(v.id.split('_')) +
                    ("{0,0,%s}" % duration, "{0,0,%s}" % length))
            else:
                # person plan: accumulate walk/ride stages
                walkLength = [0, 0]    # [before first ride, after/between rides]
                walkDuration = [0, 0]
                rideLength = 0
                rideEnd = float(v.depart)  # end time of the previous ride
                idx = 0                # flips to 1 once the first ride is seen
                initWait = 0
                transfers = 0
                transferTime = 0
                for stage in v.getChildList():
                    if stage.name == "walk":
                        walkLength[idx] += float(stage.routeLength)
                        walkDuration[idx] = float(
                            stage.exitTimes.split()[-1]) - rideEnd
                    elif stage.name == "ride":
                        if idx == 0:
                            idx = 1
                            # waiting before the first ride, minus the access walk
                            initWait = float(stage.depart) - float(
                                v.depart) - walkDuration[0]
                        else:
                            transfers += 1
                            transferTime += float(stage.depart) - rideEnd
                        rideEnd = float(stage.ended)
                        # intermediate walks are folded into the ride length
                        rideLength += float(stage.routeLength) + walkLength[1]
                        walkLength[1] = 0  # reset from intermediate walks
                if idx == 0:
                    # person never rode: plain walk entry
                    stats.append(
                        tuple(v.id.split('_')) +
                        ("{%s}" % duration, "{%s}" % walkLength[0]))
                else:
                    dur = (duration - sum(walkDuration) - initWait,
                           walkDuration[0], initWait, walkDuration[1],
                           transferTime)
                    length = (rideLength, walkLength[0], walkLength[1],
                              transfers)
                    stats.append(
                        tuple(v.id.split('_')) +
                        ("{0,0,0,0,%s,%s,%s,%s,%s}" % dur,
                         "{0,0,0,0,%s,%s,0,%s,%s}" % length))
    print("Parsed results for %s vehicles and persons" % len(stats))
    print(sumoTime)
    print(sumoDist)
    return stats
def main(options):
    """Cut routes from larger route files down to the part that lies within
    the given network, optionally writing contained bus stops and sorting
    the resulting file by departure."""
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    print("Valid area contains %s edges" % len(edges))
    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route
    busStopEdges = {}
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
            'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        for busStop in parse(options.additional_input, 'busStop'):
            # strip the lane index suffix to get the edge id
            # NOTE(review): '[:-2]' assumes a single-digit lane index - confirm
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                busStops.write(busStop.toXML(' '))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # document provenance of the generated file
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(
            ('<%s xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
             'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n') % output_type)
        num_routes = 0
        for _, v in vehicles:
            num_routes += 1
            writer(f, v)
        f.write('</%s>\n' % output_type)
        print("Wrote %s %s" % (num_routes, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(
                cut_routes(edges, orig_net, options, busStopEdges), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        # sort in memory by departure time (first tuple element)
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main():
    """Generate one polygon per TAZ found in the route file."""
    random.seed(42)  # deterministic colors across runs
    options = parse_args()
    net = readNet(options.net)
    with open(options.outfile, "w") as outf:
        outf.write("<polygons>\n")
        for taz in parse(options.routefile, "taz"):
            taz_edges = taz.edges.split()
            generate_poly(net, taz.id, options.colorgen(),
                          options.layer, taz_edges, outf)
        outf.write("</polygons>\n")
def main():
    """Report route length statistics for a single route file, or route
    length differences when a second file is given."""
    options = get_options()
    net = readNet(options.network)
    # NOTE: kept for parity with the original; not used below
    edges = set(e.getID() for e in net.getEdges())
    lengths = {}
    lengths2 = {}
    compare = options.routeFile2 is not None
    if not compare:
        # write statistics on a single route file
        stats = Statistics("route lengths", histogram=True, scale=options.binwidth)
    # the first file is always parsed; its values feed a possible diff
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = getRouteLength(net, vehicle)
        if not compare:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if compare:
        # compare route lengths between two files
        stats = Statistics("route length difference", histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            second_length = getRouteLength(net, vehicle)
            lengths2[vehicle.id] = second_length
            stats.add(second_length - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin_value, count in stats.histogram():
                f.write("%s %s\n" % (bin_value, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if compare:
                data = [(lengths2[vid] - lengths[vid], vid) for vid in lengths.keys()]
            else:
                data = [(v, k) for k, v in lengths.items()]
            data.sort()
            for val, vid in data:
                f.write("%s %s\n" % (val, vid))
def cut_routes(areaEdges, orig_net, options):
    """Generator: cut each vehicle route down to the part inside areaEdges,
    yielding (newDepart, id, type, edges) tuples and printing summary
    statistics at the end.

    (Python 2 style: uses print statements.)
    """
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda:0)
    multiAffectedRoutes = 0  # routes which enter the sub-scenario multiple times
    teleportFactorSum = 0.0
    too_short = 0
    for routeFile in options.routeFiles:
        print "Parsing routes from %s" % routeFile
        for vehicle in parse(routeFile, 'vehicle'):
            num_vehicles += 1
            edges = vehicle.route[0].edges.split()
            fromIndex = getFirstIndex(areaEdges, edges)
            if fromIndex is None:
                continue  # route does not touch the area
            toIndex = len(edges) - 1 - getFirstIndex(areaEdges, reversed(edges))
            # check for minimum length
            if toIndex - fromIndex + 1 < options.min_length:
                too_short += 1
                continue
            # check for connectivity
            if missingEdges(areaEdges, edges[fromIndex:toIndex], missingEdgeOccurences):
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    continue
            # compute new departure
            if vehicle.route[0].exitTimes is None:
                if orig_net is not None:
                    # extrapolate new departure using default speed
                    newDepart = (float(vehicle.depart) +
                                 sum([(orig_net.getEdge(e).getLength() /
                                       (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                      for e in edges[:fromIndex]]))
                else:
                    # NOTE(review): this string literal was broken across a line
                    # in the source; rejoined here without other changes
                    print "Could not reconstruct new departure for vehicle '%s'. Using old departure" % vehicle.id
                    newDepart = vehicle.depart
            else:
                exitTimes = vehicle.route[0].exitTimes.split()
                departTimes = [vehicle.depart] + exitTimes[:-1]
                teleportFactor = len(departTimes) / float(len(edges))
                teleportFactorSum += teleportFactor
                # assume teleports were spread evenly across the vehicles route
                newDepart = int(float(departTimes[int(fromIndex * teleportFactor)]))
            yield (newDepart, vehicle.id, vehicle.type, edges[fromIndex:toIndex+1])
            num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (1 - teleportFactorSum/num_returned)
    else:
        teleports = ""
    print "Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports)
    if too_short > 0:
        # NOTE(review): "Discared" typo kept intentionally (runtime string)
        print "Discared %s routes because they have less than %s edges" % (too_short, options.min_length)
    print "Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes
    printTop(missingEdgeOccurences)
def main():
    """Create a polygon for every TAZ found in the route file."""
    random.seed(42)  # deterministic colors across runs
    options = parse_args()
    net = readNet(options.net)
    header, footer = '<polygons>\n', '</polygons>\n'
    with open(options.outfile, 'w') as outf:
        outf.write(header)
        for taz in parse(options.routefile, 'taz'):
            generate_poly(net, taz.id, options.colorgen(), options.layer,
                          taz.edges.split(), outf)
        outf.write(footer)
def write_diff(options):
    """Write per-edge attribute deltas (new - old) between two meandata
    files.

    Intervals are paired positionally via zip; within an interval edges are
    matched by id and edges missing from the new file are skipped. Deltas
    may be reported relative to the old value or replaced by the GEH
    measure, depending on options.
    """
    diffStats = defaultdict(Statistics)
    with open(options.out, 'w') as f:
        f.write("<meandata>\n")
        for interval_old, interval_new in zip(
                parse(options.orig, 'interval', heterogeneous=True),
                parse(options.new, 'interval', heterogeneous=True)):
            f.write(' <interval begin="%s" end="%s">\n' % (interval_old.begin, interval_old.end))
            interval_new_edges = dict([(e.id, e) for e in interval_new.edge])
            for edge_old in interval_old.edge:
                edge_new = interval_new_edges.get(edge_old.id, None)
                if edge_new is None:
                    # edge only present in the old file: skip
                    continue
                assert(edge_old.id == edge_new.id)
                f.write(' <edge id="%s"' % edge_old.id)
                for attr in edge_old._fields:
                    if attr == 'id':
                        continue
                    try:
                        val_new = float(getattr(edge_new, attr))
                        val_old = float(getattr(edge_old, attr))
                        delta = val_new - val_old
                        if options.relative:
                            if val_old != 0:
                                delta /= abs(val_old)
                            else:
                                # avoid division by zero: report sentinel value
                                delta = options.undefined
                        elif options.geh:
                            delta = geh(val_new, val_old)
                        diffStats[attr].add(delta, edge_old.id)
                        f.write(' %s="%s"' % (attr, delta))
                    except Exception:
                        # non-numeric attribute value: skip silently
                        pass
                f.write("/>\n")
            f.write("</interval>\n")
        f.write("</meandata>\n")
    for attr, stats in diffStats.items():
        stats.label = attr
        print(stats)
def main():
    """Tally the expected usage of each route from the emitter flows and
    print usage statistics; optionally check against per-route restrictions
    and write under-used / over-restricted route ids to a file."""
    options = get_options()
    routes = defaultdict(list)
    if options.routes is not None:
        for route in parse(options.routes, 'route'):
            routes[route.edges].append(route.id)
    restrictions = {}
    if options.restrictionfile is not None:
        # each line: "<count> <edge list>", applying to all routes with those
        # edges; use a context manager so the file handle is closed
        with open(options.restrictionfile) as rf:
            for line in rf:
                count, edges = line.strip().split(None, 1)
                for rID in routes[edges]:
                    restrictions[rID] = int(count)
    routeUsage = defaultdict(int)
    for flow in parse(options.emitters, 'flow'):
        num = int(flow.number)
        if flow.route is None:
            # flow references a route distribution: split its count by probability.
            # BUGFIX: materialize the probabilities and compute the total once.
            # The old code called sum() on the map() iterator inside the
            # normalizing comprehension, which exhausted the iterator after the
            # first element in Python 3 and produced wrong usage counts.
            dist = flow.routeDistribution[0]
            probs = [float(p) for p in dist.probabilities.split()]
            total = sum(probs)
            for rID, p in zip(dist.routes.split(), probs):
                routeUsage[rID] += (p / total) * num
        else:
            routeUsage[flow.route] += num
    usage = Statistics("routeUsage")
    restrictUsage = Statistics("restrictedRouteUsage")
    for rID, count in routeUsage.items():
        usage.add(count, rID)
        if rID in restrictions:
            restrictUsage.add(count, rID)
    print(usage)
    print(restrictUsage, "total:", sum(restrictUsage.values))
    if options.unused_output is not None:
        with open(options.unused_output, 'w') as outf:
            for rID, count in routeUsage.items():
                if count <= options.threshold:
                    outf.write("%s\n" % rID)
                if rID in restrictions and count > restrictions[rID]:
                    outf.write("%s %s %s\n" % (rID, count, restrictions[rID]))
def write_stats(options):
    """Calculates the statistical values (as mean values and standard
    deviations) of all (numerical) attributes of the meandata files and
    writes them as a new meandata file to the output file."""
    parsed_input_files = []
    for input_file in options.input_list.replace(" ", "").replace('"', '').split(','):
        parsed_input_files.append(parse(input_file, 'interval', heterogeneous=True))
    with open(options.out, 'w') as f:
        f.write("<meandata>\n")
        # iterate the files' intervals in lockstep: one output interval each
        for interval_all in zip(*parsed_input_files):
            f.write(' <interval begin="%s" end="%s">\n' % (interval_all[0].begin, interval_all[0].end))
            edges = dict()  # edge.id -> edge (union over all files)
            all_edges = list()  # one {edge.id: edge} dict per input file
            for interval in interval_all:
                my_edges = dict([(e.id, e) for e in interval.edge])
                edges.update(my_edges)
                all_edges.append(my_edges)
            for id, edge in edges.items():
                f.write(' <edge id="%s"' % id)
                stats = defaultdict(Statistics)  # separate stats for each edge
                for attr in edge._fields:
                    if attr == 'id':
                        continue
                    if options.attributes_list and attr not in options.attributes_list:
                        continue
                    try:
                        for parsed in all_edges:
                            parsed_edge = parsed.get(id, None)
                            if parsed_edge is None:
                                # edge missing from this file: no sample
                                continue
                            try:
                                val = float(getattr(parsed_edge, attr))
                                stats[attr].add(val, id)
                            except Exception:
                                # non-numeric value: skip this file's sample
                                pass
                        mean, std = stats[attr].meanAndStdDev()
                        # CLEANUP: the former separate stats[attr].median()
                        # call was dead - its result was immediately
                        # overwritten by the quartiles() unpacking below
                        q1, median, q3 = stats[attr].quartiles()
                        # todo: add more statistic values here
                        f.write(' %s="%s"' % ("mean_" + attr, mean))
                        f.write(' %s="%s"' % ("std_" + attr, std))
                        f.write(' %s="%s"' % ("median_" + attr, median))
                        f.write(' %s="%s"' % ("q1_" + attr, q1))
                        f.write(' %s="%s"' % ("q3_" + attr, q3))
                    except Exception:
                        # attribute had no numeric samples at all
                        pass
                f.write("/>\n")
            f.write(" </interval>\n")
        f.write("</meandata>\n")
def write_persondiff(options):
    """Compare the personinfo entries of two runs and write per-person
    differences of the plan statistics; persons present in only one of the
    files are marked as "new" or "missing"."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength", "waitingTime"]
    attr_conversions = {a: parseTime for a in attrs}
    persons_orig = OrderedDict(
        (p.id, p) for p in parse(options.orig, 'personinfo', attr_conversions=attr_conversions))
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    statAttrs = ["duration", "walkTimeLoss", "rideWait", "walks", "accesses", "rides", "stops"]
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for p in parse(options.new, 'personinfo', attr_conversions=attr_conversions):
            if p.id not in persons_orig:
                f.write('    <personinfo id="%s" comment="new"/>\n' % p.id)
                continue
            pOrig = persons_orig.pop(p.id)
            stats = plan_stats(p)
            statsOrig = plan_stats(pOrig)
            diffs = [newVal - oldVal for newVal, oldVal in zip(stats, statsOrig)]
            durations.add(stats[0], p.id)
            origDurations.add(statsOrig[0], p.id)
            durationDiffs.add(stats[0] - statsOrig[0], p.id)
            diffAttrs = ''.join(' %sDiff="%s"' % pair for pair in zip(statAttrs, diffs))
            f.write('    <personinfo id="%s"%s/>\n' % (p.id, diffAttrs))
        for id in persons_orig.keys():
            f.write('    <personinfo id="%s" comment="missing"/>\n' % id)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(options):
    """Compare the tripinfo files given by options.orig and options.new,
    write per-vehicle attribute differences to options.output and print
    duration statistics (optionally as histograms)."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime works just fine for floats
    attr_conversions = {a: parseTime for a in attrs}
    vehicles_orig = OrderedDict(
        (v.id, v) for v in parse(options.orig, 'tripinfo', attr_conversions=attr_conversions))
    origDurations = Statistics('original durations', histogram=options.useHist, scale=options.histScale)
    durations = Statistics('new durations', histogram=options.useHist, scale=options.histScale)
    durationDiffs = Statistics('duration differences', histogram=options.useHist, scale=options.histScale)
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(options.new, 'tripinfo', attr_conversions=attr_conversions):
            vOrig = vehicles_orig.pop(v.id, None)
            if vOrig is None:
                # vehicle only exists in the new run
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
                continue
            durations.add(v.duration, v.id)
            origDurations.add(vOrig.duration, v.id)
            durationDiffs.add(v.duration - vOrig.duration, v.id)
            diffAttrs = ''.join(
                ' %sDiff="%s"' % (a, v.getAttribute(a) - vOrig.getAttribute(a)) for a in attrs)
            f.write('    <vehicle id="%s"%s/>\n' % (v.id, diffAttrs))
        # everything left over never showed up in the new run
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Print route-length statistics for one route file, or statistics of
    per-vehicle route-length differences when a second route file is given.

    Optionally writes a histogram file and a full per-vehicle listing.
    """
    options = get_options()
    net = readNet(options.network)
    # NOTE: the previous version also built the set of all edge ids here,
    # but it was never used; removed as dead work.
    lengths = {}
    lengths2 = {}
    for vehicle in parse(options.routeFile, 'vehicle'):
        lengths[vehicle.id] = getRouteLength(net, vehicle)
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics(
            "route lengths", histogram=True, scale=options.binwidth)
        for id, length in lengths.items():
            stats.add(length, id)
    else:
        # compare route lengths between two files
        stats = Statistics(
            "route length difference", histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = getRouteLength(net, vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def write_diff(orig, new, out, earliest_out=None): earliest_diffs = defaultdict(lambda: (uMax, None)) # diff -> (time, veh) vehicles_orig = dict([(v.id, v) for v in parse(orig, 'tripinfo')]) origDurations = Statistics('original durations') durations = Statistics('new durations') durationDiffs = Statistics('duration differences') with open(out, 'w') as f: f.write("<tripDiffs>\n") for v in parse(new, 'tripinfo'): if v.id in vehicles_orig: vOrig = vehicles_orig[v.id] departDiff = float(v.depart) - float(vOrig.depart) arrivalDiff = float(v.arrival) - float(vOrig.arrival) timeLossDiff = float(v.timeLoss) - float(vOrig.timeLoss) durationDiff = float(v.duration) - float(vOrig.duration) routeLengthDiff = float(v.routeLength) - \ float(vOrig.routeLength) durations.add(float(v.duration), v.id) origDurations.add(float(vOrig.duration), v.id) durationDiffs.add(durationDiff, v.id) f.write( ''' <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s" durationDiff="%s" routeLengthDiff="%s"/>\n''' % (v.id, departDiff, arrivalDiff, timeLossDiff, durationDiff, routeLengthDiff)) del vehicles_orig[v.id] else: f.write(' <vehicle id="%s" comment="new"/>\n' % v.id) for id in vehicles_orig.keys(): f.write(' <vehicle id="%s" comment="missing"/>\n' % id) f.write("</tripDiffs>\n") if earliest_out is not None: with open(earliest_out, 'w') as f: for diff in reversed(sorted(earliest_diffs.keys())): f.write("%s, %s\n" % (diff, earliest_diffs[diff])) print(origDurations) print(durations) print(durationDiffs)
def _parse_vehicle_info_taz(routes, start, end, vType):
    """Collect (fromTaz, toTaz, 0, travelTime, routeLength) tuples for all
    vehicles of the given vType departing within [start, end)."""
    stats = []
    if os.path.isfile(routes):
        for v in output.parse(routes, 'vehicle'):
            # skip background traffic and vehicles without a fixed depart time
            if v.id.endswith(BACKGROUND_TRAFFIC_SUFFIX) or v.depart == "triggered":
                continue
            depart = float(v.depart) % (24 * 3600)
            # vType is something like "passenger" and v.type "passenger_PHEMlight/PC_G_EU3"
            if start <= depart < end and v.type is not None and v.type.startswith(vType):
                fromTaz, toTaz = _parseTaz(v)
                stats.append((fromTaz, toTaz, 0,
                              float(v.arrival) - float(v.depart), float(v.routeLength)))
    print("Parsed taz results for %s vehicles" % len(stats))
    return stats
def write_diff(orig, new, out, earliest_out=None): earliest_diffs = defaultdict(lambda: (uMax, None)) # diff -> (time, veh) vehicles_orig = dict([(v.id, v) for v in parse(orig, 'tripinfo')]) origDurations = Statistics('original durations') durations = Statistics('new durations') durationDiffs = Statistics('duration differences') with open(out, 'w') as f: f.write("<tripDiffs>\n") for v in parse(new, 'tripinfo'): if v.id in vehicles_orig: vOrig = vehicles_orig[v.id] departDiff = float(v.depart) - float(vOrig.depart) arrivalDiff = float(v.arrival) - float(vOrig.arrival) timeLossDiff = float(v.timeLoss) - float(vOrig.timeLoss) durationDiff = float(v.duration) - float(vOrig.duration) routeLengthDiff = float(v.routeLength) - \ float(vOrig.routeLength) durations.add(float(v.duration), v.id) origDurations.add(float(vOrig.duration), v.id) durationDiffs.add(durationDiff, v.id) f.write(''' <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s" durationDiff="%s" routeLengthDiff="%s"/>\n''' % ( v.id, departDiff, arrivalDiff, timeLossDiff, durationDiff, routeLengthDiff)) del vehicles_orig[v.id] else: f.write(' <vehicle id="%s" comment="new"/>\n' % v.id) for id in vehicles_orig.keys(): f.write(' <vehicle id="%s" comment="missing"/>\n' % id) f.write("</tripDiffs>\n") if earliest_out is not None: with open(earliest_out, 'w') as f: for diff in reversed(sorted(earliest_diffs.keys())): f.write("%s, %s\n" % (diff, earliest_diffs[diff])) print(origDurations) print(durations) print(durationDiffs)
def write_diff(orig, new, out):
    """Compare tripinfo files 'orig' and 'new', write per-vehicle diffs of
    depart/arrival/timeLoss/duration/routeLength to 'out' and print
    duration statistics."""
    vehicles_orig = OrderedDict((v.id, v) for v in parse(orig, 'tripinfo'))
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    line = ('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s" '
            'durationDiff="%s" routeLengthDiff="%s"/>\n')
    with open(out, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(new, 'tripinfo'):
            vOrig = vehicles_orig.pop(v.id, None)
            if vOrig is None:
                # vehicle only exists in the new run
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
                continue
            newDuration = float(v.duration)
            oldDuration = float(vOrig.duration)
            durations.add(newDuration, v.id)
            origDurations.add(oldDuration, v.id)
            durationDiffs.add(newDuration - oldDuration, v.id)
            f.write(line % (v.id,
                            float(v.depart) - float(vOrig.depart),
                            float(v.arrival) - float(vOrig.arrival),
                            float(v.timeLoss) - float(vOrig.timeLoss),
                            newDuration - oldDuration,
                            float(v.routeLength) - float(vOrig.routeLength)))
        # leftovers never appeared in the new run
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def _parse_vehicle_info(routes):
    """Parse vehicle and person results, print duration/distance statistics
    and return a list of per-trip result tuples."""
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    stats = []
    for v in output.parse(routes, ('vehicle', 'person')):
        # skip background traffic and triggered departures
        if v.id.endswith(BACKGROUND_TRAFFIC_SUFFIX) or v.depart == "triggered":
            continue
        duration = float(v.arrival) - float(v.depart)
        length = float(v.routeLength) if v.routeLength else 0
        sumoTime.add(duration, v.id)
        sumoDist.add(length, v.id)
        stats.append(tuple(v.id.split('_')) +
                     ("{0,0,%s}" % duration, "{0,0,%s}" % length))
    print("Parsed results for %s vehicles and persons" % len(stats))
    print(sumoTime)
    print(sumoDist)
    return stats
def _get_all_pair_stats(roualt_file, net):
    """Parses a duarouter .rou.alt.xml output for travel times and calculates
    the route length. The file is supposed to contain only vehicles of a
    single vType. Yields (fromTaz, toTaz, 1, duration, distance) tuples."""
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    for vehicle in output.parse(roualt_file, 'vehicle'):
        bestRoute = vehicle.routeDistribution[0].route[0]
        duration = float(bestRoute.cost)
        distance = sum(net.getEdge(e).getLength() for e in bestRoute.edges.split())
        sumoTime.add(duration, vehicle.id)
        sumoDist.add(distance, vehicle.id)
        # progress report every 10000 parsed vehicles
        if sumoDist.count() % 10000 == 0:
            print("parsed %s taz representatives" % sumoDist.count())
        fromTaz, toTaz = _parseTaz(vehicle)
        yield fromTaz, toTaz, 1, duration, distance
    print(sumoTime)
    print(sumoDist)
def main(tag, attr, *xmlfiles):
    """Print statistics over attribute 'attr' of element 'tag' for every
    given xml file and show a boxplot comparing the files."""
    data = []
    for fname in xmlfiles:
        fileStats = Statistics('%s %s' % (tag, attr))
        for elem in parse(fname, tag):
            fileStats.add(float(elem.getAttribute(attr)), elem.id)
        print(fileStats)
        data.append(fileStats.values)
    # matplotlib may be unavailable; abort with the import error message
    try:
        import matplotlib.pyplot as plt
    except Exception as e:
        sys.exit(e)
    plt.figure()
    plt.xticks(range(len(xmlfiles)), xmlfiles)
    plt.ylabel("%s %s" % (tag, attr))
    plt.boxplot(data)
    plt.show()
def parseTimed(outf, options):
    """Aggregate depart/arrival/intermediate edge counts per time interval
    and write one interval element whenever the current period ends.

    Fix: the unsorted-departure check previously assigned lastDepart BEFORE
    comparing against it, so the warning could never fire.
    """
    departCounts = defaultdict(lambda: 0)
    arrivalCounts = defaultdict(lambda: 0)
    intermediateCounts = defaultdict(lambda: 0)
    lastDepart = 0
    period = options.period if options.period else options.end
    begin = options.begin
    periodEnd = options.period if options.period else options.end
    for elem in parse(options.routefile, options.elements2):
        depart = elem.depart if elem.depart is not None else elem.begin
        if depart != "triggered":
            depart = parseTime(depart)
            # compare BEFORE updating lastDepart so unsorted input is reported
            if depart < lastDepart:
                sys.stderr.write("Unsorted departure %s for %s '%s'" %
                                 (depart, elem.tag, elem.id))
            lastDepart = depart
            if depart < begin:
                continue
            if depart >= periodEnd or depart >= options.end:
                # flush the counts of the finished period
                description = "%s-%s " % (begin, periodEnd)
                writeInterval(outf, options, departCounts, arrivalCounts,
                              intermediateCounts, begin, periodEnd, description)
                periodEnd += period
                begin += period
            if depart >= options.end:
                break
        number = getFlowNumber(elem) if elem.name == 'flow' else 1
        src, dst, edges = getEdges(elem, options.taz)
        # filter on taz ids (or endpoints) when no edges are known
        filterBy = [src, dst] if options.taz or not edges else edges
        if not hasSubpart(filterBy, options.subparts):
            continue
        departCounts[src] += number
        arrivalCounts[dst] += number
        if options.intermediate:
            for e in edges:
                intermediateCounts[e] += number
    # flush the final (possibly unbounded) interval
    description = "%s-%s " % (begin, periodEnd) if periodEnd != END_UNLIMITED else ""
    if len(departCounts) > 0:
        writeInterval(outf, options, departCounts, arrivalCounts,
                      intermediateCounts, begin, lastDepart, description)
def main(options):
    """Cut trips down to the area covered by options.network and write them
    sorted by departure time."""
    net = readNet(options.network)
    edges = set(e.getID() for e in net.getEdges())
    print("Valid area contains %s edges" % len(edges))

    def write_to_file(vehicles, f):
        # header comment, root element, all elements, footer
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
                'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n')
        num_trips = 0
        num_persons = 0
        for _, v in vehicles:
            if v.name == 'trip':
                num_trips += 1
            else:
                num_persons += 1
            writer(f, v)
        f.write('</routes>\n')
        if num_persons > 0:
            print("Wrote %s trips and %s persons" % (num_trips, num_persons))
        else:
            print("Wrote %s trips" % (num_trips))

    validTaz = set()
    if options.additional_input:
        validTaz.update(taz.id for taz in parse(options.additional_input, 'taz'))

    if options.big:
        # write output unsorted, then sort out of memory
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(cut_trips(edges, options, validTaz), f)
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        # sort in memory by departure time
        routes = sorted(cut_trips(edges, options, validTaz), key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main(options):
    """Extract the trips contained in the configured network area and write
    them as a departure-sorted route file."""
    net = readNet(options.network)
    validEdges = set([e.getID() for e in net.getEdges()])
    print("Valid area contains %s edges" % len(validEdges))

    def dump(vehicles, outf):
        # write header, all elements and footer while counting trips/persons
        outf.write('<!-- generated with %s for %s from %s -->\n' %
                   (os.path.basename(__file__), options.network, options.routeFiles))
        outf.write('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
                   'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n')
        numTrips = 0
        numPersons = 0
        for _, element in vehicles:
            if element.name == 'trip':
                numTrips += 1
            else:
                numPersons += 1
            writer(outf, element)
        outf.write('</routes>\n')
        if numPersons > 0:
            print("Wrote %s trips and %s persons" % (numTrips, numPersons))
        else:
            print("Wrote %s trips" % (numTrips))

    validTaz = set()
    if options.additional_input:
        for taz in parse(options.additional_input, 'taz'):
            validTaz.add(taz.id)

    if options.big:
        # write output unsorted first, then sort on disk
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            dump(cut_trips(validEdges, options, validTaz), f)
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        # collect everything in memory and sort by departure time
        routes = list(cut_trips(validEdges, options, validTaz))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            dump(routes, f)
def parseTimed(outf, options):
    """Count depart/arrival/intermediate edges per time interval from the
    route elements and flush an interval whenever a period ends.

    Fix: the unsorted-departure check previously assigned lastDepart BEFORE
    comparing against it, so the warning could never fire.
    """
    departCounts = defaultdict(lambda: 0)
    arrivalCounts = defaultdict(lambda: 0)
    intermediateCounts = defaultdict(lambda: 0)
    lastDepart = 0
    period = options.period if options.period else options.end
    begin = options.begin
    periodEnd = options.period if options.period else options.end
    for elem in parse(options.routefile, options.elements2):
        depart = elem.depart
        if depart != "triggered":
            depart = parseTime(depart)
            # compare BEFORE updating lastDepart so unsorted input is reported
            if depart < lastDepart:
                sys.stderr.write("Unsorted departure %s for %s '%s'" %
                                 (depart, elem.tag, elem.id))
            lastDepart = depart
            if depart < begin:
                continue
            if depart >= periodEnd or depart >= options.end:
                # flush the counts of the finished period
                description = "%s-%s " % (begin, periodEnd)
                writeInterval(outf, options, departCounts, arrivalCounts,
                              intermediateCounts, begin, periodEnd, description)
                periodEnd += period
                begin += period
            if depart >= options.end:
                break
        if elem.route:
            edges = elem.route[0].edges.split()
            if not hasSubpart(edges, options.subparts):
                continue
            departCounts[edges[0]] += 1
            arrivalCounts[edges[-1]] += 1
            for e in edges:
                intermediateCounts[e] += 1
    # flush the final interval
    description = "%s-%s " % (begin, periodEnd)
    if len(departCounts) > 0:
        writeInterval(outf, options, departCounts, arrivalCounts,
                      intermediateCounts, begin, lastDepart, description)
def trackLanes(netstate, out):
    # Replays a netstate dump and records, for every vehicle, the sequence of
    # (time, lane) pairs, the arrival time (first step the vehicle is absent)
    # and the number of lane changes within an edge; writes <vehLanes> XML.
    # veh_id -> values
    laneTimes = defaultdict(list)  # veh_id -> [(time, laneID), ...]
    laneChanges = defaultdict(lambda: 0)  # veh_id -> number of lane changes
    lastEdge = defaultdict(lambda: None)  # veh_id -> edge of the previous entry
    arrivals = {}  # veh_id -> time at which the vehicle disappeared
    running = set()  # vehicles currently present in the network
    with open(out, 'w') as f:
        f.write("<vehLanes>\n")
        for timestep in parse(netstate, 'timestep'):
            seen = set()
            if timestep.edge is not None:
                for edge in timestep.edge:
                    if edge.lane is not None:
                        for lane in edge.lane:
                            if lane.vehicle is not None:
                                for vehicle in lane.vehicle:
                                    seen.add(vehicle.id)
                                    # record a new entry when the vehicle first
                                    # appears or when its lane changed
                                    if vehicle.id not in running or laneTimes[vehicle.id][-1][1] != lane.id:
                                        laneTimes[vehicle.id].append(
                                            (timestep.time, lane.id))
                                        running.add(vehicle.id)
                                        # same edge as before -> real lane change
                                        if lastEdge[vehicle.id] == edge.id:
                                            laneChanges[vehicle.id] += 1
                                    lastEdge[vehicle.id] = edge.id
            # vehicles that were running but are absent now have arrived
            for veh_id in running:
                if veh_id not in seen:
                    arrivals[veh_id] = timestep.time
            running = running - set(arrivals.keys())
        for veh_id, lt in laneTimes.items():
            f.write('    <vehicle id="%s" laneTimes="%s" arrival="%s" laneChanges="%s"/>\n' % (
                veh_id, ' '.join(['%s,%s' % (t, l) for t, l in lt]),
                arrivals.get(veh_id), laneChanges[veh_id]))
        f.write("</vehLanes>\n")
def trackLanes(netstate, out):
    """Replay a netstate dump and write per-vehicle lane histories.

    Records, for every vehicle, the sequence of (time, lane) pairs, the
    arrival time (first step the vehicle is absent) and the number of lane
    changes within an edge; results are written as <vehLanes> XML to 'out'.

    Fix: replaced the non-idiomatic ``not x in y`` tests with ``x not in y``
    (PEP 8); behavior is unchanged.
    """
    # veh_id -> values
    laneTimes = defaultdict(list)  # veh_id -> [(time, laneID), ...]
    laneChanges = defaultdict(lambda: 0)  # veh_id -> number of lane changes
    lastEdge = defaultdict(lambda: None)  # veh_id -> edge of the previous entry
    arrivals = {}  # veh_id -> time at which the vehicle disappeared
    running = set()  # vehicles currently present in the network
    with open(out, 'w') as f:
        f.write("<vehLanes>\n")
        for timestep in parse(netstate, 'timestep'):
            seen = set()
            if timestep.edge is not None:
                for edge in timestep.edge:
                    if edge.lane is not None:
                        for lane in edge.lane:
                            if lane.vehicle is not None:
                                for vehicle in lane.vehicle:
                                    seen.add(vehicle.id)
                                    # record a new entry when the vehicle first
                                    # appears or when its lane changed
                                    if vehicle.id not in running or laneTimes[vehicle.id][-1][1] != lane.id:
                                        laneTimes[vehicle.id].append(
                                            (timestep.time, lane.id))
                                        running.add(vehicle.id)
                                        # same edge as before -> real lane change
                                        if lastEdge[vehicle.id] == edge.id:
                                            laneChanges[vehicle.id] += 1
                                    lastEdge[vehicle.id] = edge.id
            # vehicles that were running but are absent now have arrived
            for veh_id in running:
                if veh_id not in seen:
                    arrivals[veh_id] = timestep.time
            running = running - set(arrivals.keys())
        for veh_id, lt in laneTimes.items():
            f.write('    <vehicle id="%s" laneTimes="%s" arrival="%s" laneChanges="%s"/>\n' % (
                veh_id, ' '.join(['%s,%s' % (t, l) for t, l in lt]),
                arrivals.get(veh_id), laneChanges[veh_id]))
        f.write("</vehLanes>\n")
def aggregate_weights(weights_in, timeline):
    # Aggregates the intervals of the meandata file 'weights_in' into the
    # coarser intervals given by 'timeline' (hours, offset by one day) and
    # writes a new meandata file with sample-weighted mean travel times.
    # Returns the name of the written file.
    with open(weights_in[:-4] + '_aggregated.xml', 'w') as weights_out:
        weights_out.write('<meandata_aggregated>\n')
        idx = 0
        samples = collections.defaultdict(float)  # edge id -> sum of sampledSeconds
        travel_time_ratios = collections.defaultdict(float)  # edge id -> sum of samples/traveltime

        def write_interval(begin, end):
            # closes the previous interval element (none exists for idx == 0)
            # and writes the aggregated edge travel times for [begin, end]
            if idx > 0:
                weights_out.write('    </interval>\n')
            weights_out.write(
                '    <interval begin="%s" end="%s">\n' % (begin, end))
            for e in sorted(samples.keys()):
                # sample-weighted harmonic mean: sum(samples) / sum(samples/tt)
                weights_out.write(
                    '        <edge id="%s" traveltime="%s"/>\n' %
                    (e, samples[e] / travel_time_ratios[e]))

        begin = 24 * 3600
        for interval in output.parse(weights_in, ['interval']):
            if interval.edge is not None:
                for edge in interval.edge:
                    if edge.traveltime is not None:
                        s = float(edge.sampledSeconds)
                        samples[edge.id] += s
                        travel_time_ratios[edge.id] += s / \
                            float(edge.traveltime)
            # timeline entries denote hours of the following day
            end = (timeline[idx] + 24) * 3600
            if float(interval.end) == end:
                write_interval(begin, end)
                idx += 1
                if idx == len(timeline):
                    break
                begin = end
                # start accumulating the next aggregation window from scratch
                samples.clear()
                travel_time_ratios.clear()
        if idx < len(timeline):
            # input ended before the timeline did: flush the remainder
            write_interval(begin, (timeline[idx] + 24) * 3600)
        weights_out.write('    </interval>\n</meandata_aggregated>\n')
    return weights_out.name
', variance_travel_time(s*volume)'\ ', total_travel_time(s*volume)'\ ', average_time_loss_in_congestion(s)'\ ', variance_time_loss_in_congestion(s)'\ ', total_time_loss_in_congestion(s*volume)'\ ', average_depart_delay(s)'\ ', variance_depart_delay(s)'\ ', total_depart_delay(s*volume)'\ ', average_total_travel_time+depart_delay(s)'\ ', variance_total_travel_time+depart_delay(s)'\ ', total_total_travel_time+depart_delay(s*volume)'\ for f in file_list: print re.sub(r'.*[/|](.+)_w([0-9]+)_i([0-9]+)\.xml', r"\1, \2, \3", f)+", ", # print f.replace('tripinfo', '').replace('.xml', ''),', ', parse_obj = output.parse(f, ["tripinfo"]) alist = [tripinfo for tripinfo in parse_obj] print output.average(alist, 'duration'),', ', print output.variance(alist, 'duration'),', ', print output.sum(alist, 'duration'),', ', print output.average(alist, 'waitSteps'),', ', print output.variance(alist, 'waitSteps'),', ', print output.sum(alist, 'waitSteps'),', ', print output.average(alist, 'departDelay'),', ', print output.variance(alist, 'departDelay'),', ', print output.sum(alist, 'departDelay'),', ', print averageTravelTimePlusDepartDelay(alist),', ', print varianceTravelTimePlusDepartDelay(alist),', ', print sumTravelTimePlusDepartDelay(alist)
def cut_routes(areaEdges, orig_net, options, busStopEdges=None):
    # Generator (Python 2 syntax): for every vehicle whose route touches the
    # area given by 'areaEdges', trims the route to the contained part,
    # reconstructs a departure time for the new first edge and yields
    # (newDepart, vehicle). Prints summary statistics at the end.
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    for routeFile in options.routeFiles:
        print "Parsing routes from %s" % routeFile
        for vehicle in parse(routeFile, "vehicle"):
            num_vehicles += 1
            edges = vehicle.route[0].edges.split()
            fromIndex = getFirstIndex(areaEdges, edges)
            if fromIndex is None:
                continue  # route does not touch the area
            # last index inside the area, found by scanning from the rear
            toIndex = len(edges) - 1 - getFirstIndex(areaEdges, reversed(edges))
            # check for minimum length
            if toIndex - fromIndex + 1 < options.min_length:
                too_short += 1
                continue
            # check for connectivity
            if missingEdges(areaEdges, edges[fromIndex:toIndex], missingEdgeOccurences):
                multiAffectedRoutes += 1
                if options.disconnected_action == "discard":
                    continue
            # compute new departure
            if vehicle.route[0].exitTimes is None:
                if orig_net is not None:
                    # extrapolate new departure using default speed
                    newDepart = float(vehicle.depart) + sum(
                        [(orig_net.getEdge(e).getLength() /
                          (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                         for e in edges[:fromIndex]])
                else:
                    print "Could not reconstruct new departure time for vehicle '%s'. Using old departure time." % vehicle.id
                    newDepart = float(vehicle.depart)
            else:
                # use the recorded per-edge exit times to find the time at
                # which the vehicle reached the first in-area edge
                exitTimes = vehicle.route[0].exitTimes.split()
                departTimes = [vehicle.depart] + exitTimes[:-1]
                teleportFactor = len(departTimes) / float(len(edges))
                teleportFactorSum += teleportFactor
                # assume teleports were spread evenly across the vehicles route
                newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                del vehicle.route[0].exitTimes
            remaining = edges[fromIndex:toIndex + 1]
            # keep only stops that are located on the remaining edges
            stops = []
            if vehicle.stop:
                for stop in vehicle.stop:
                    if stop.busStop:
                        if not busStopEdges:
                            print "No bus stop locations parsed, skipping bus stop '%s'." % stop.busStop
                            continue
                        if stop.busStop not in busStopEdges:
                            print "Skipping bus stop '%s', which could not be located." % stop.busStop
                            continue
                        if busStopEdges[stop.busStop] in remaining:
                            stops.append(stop)
                    elif stop.lane[:-2] in remaining:
                        stops.append(stop)
            vehicle.route[0].edges = " ".join(remaining)
            vehicle.stop = stops
            vehicle.depart = "%.2f" % newDepart
            yield newDepart, vehicle
            num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print "Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports)
    if too_short > 0:
        print "Discarded %s routes because they have less than %s edges" % (too_short, options.min_length)
    print "Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes
    printTop(missingEdgeOccurences)
def main(options):
    """Copy routes, vehicles, trips, flows and vTypes from the input file to
    the output file, shifting departure times (by a fixed offset or into an
    interval) for elements that pass the optional depart/arrival edge filter.

    Fix: the flow arrival-edge branch tested ``obj.hasAttribute('to') and
    obj.attr_from is not None`` (copy-paste from the depart branch), so a
    flow with 'to' but without 'from' fell through to the route lookup; it
    now tests ``obj.to`` itself.
    """
    # cache stand-alone routes
    routesDepart = {}  # first edge for each route
    routesArrival = {}  # last edge for each route

    with codecs.open(options.outfile, 'w', encoding='utf8') as out:
        out.write("<routes>\n")
        for route in parse(options.infile, "route"):
            if route.hasAttribute('id') and route.id is not None:
                edges = route.edges.split()
                routesDepart[route.id] = edges[0]
                routesArrival[route.id] = edges[-1]
            out.write(route.toXML('    '))
        for obj in parse(options.infile, ['vehicle', 'trip', 'flow', 'vType'],
                         heterogeneous=options.heterogeneous, warn=False):
            if obj.name == 'vType':
                # copy unchanged
                pass
            else:
                if options.modify_ids:
                    obj.id += options.name_suffix
                # compute depart-edge filter
                departEdge = None
                if options.depart_edges is not None:
                    # determine the departEdge of the current vehicle
                    if obj.name == 'trip':
                        departEdge = obj.attr_from
                    elif obj.name == 'vehicle':
                        if obj.hasAttribute('route') and obj.route is not None:
                            departEdge = routesDepart[obj.route]
                        else:
                            # route child element
                            departEdge = obj.route[0].edges.split()[0]
                    elif obj.name == 'flow':
                        if obj.hasAttribute('attr_from') and obj.attr_from is not None:
                            departEdge = obj.attr_from
                        elif obj.hasAttribute('route') and obj.route is not None:
                            departEdge = routesDepart[obj.route]
                        else:
                            # route child element
                            departEdge = obj.route[0].edges.split()[0]
                # compute arrival-edge filter
                arrivalEdge = None
                if options.arrival_edges is not None:
                    # determine the arrivalEdge of the current vehicle
                    if obj.name == 'trip':
                        arrivalEdge = obj.to
                    elif obj.name == 'vehicle':
                        if obj.hasAttribute('route') and obj.route is not None:
                            arrivalEdge = routesArrival[obj.route]
                        else:
                            # route child element
                            arrivalEdge = obj.route[0].edges.split()[-1]
                    elif obj.name == 'flow':
                        # check 'to' itself (the old code tested attr_from here)
                        if obj.hasAttribute('to') and obj.to is not None:
                            arrivalEdge = obj.to
                        elif obj.hasAttribute('route') and obj.route is not None:
                            arrivalEdge = routesArrival[obj.route]
                        else:
                            # route child element
                            arrivalEdge = obj.route[0].edges.split()[-1]
                # modify departure time
                if ((departEdge is None or departEdge in options.depart_edges) and
                        (arrivalEdge is None or arrivalEdge in options.arrival_edges)):
                    if options.offset is not None:
                        # shift by offset
                        if obj.name in ['trip', 'vehicle']:
                            obj.depart = str(intIfPossible(float(obj.depart) + options.offset))
                        else:
                            obj.begin = str(intIfPossible(float(obj.begin) + options.offset))
                            obj.end = str(intIfPossible(float(obj.end) + options.offset))
                    else:
                        # shift by interval
                        if obj.name in ['trip', 'vehicle']:
                            obj.depart = shiftInterval(obj.depart, options.interval)
                        else:
                            obj.begin = shiftInterval(obj.begin, options.interval)
                            obj.end = shiftInterval(obj.end, options.interval)
            out.write(obj.toXML('    '))
        out.write("</routes>\n")
def main(options):
    # Cuts routes (and, optionally, bus stops and TAZ definitions) down to
    # the part contained in options.network and writes a route file sorted
    # by departure time.
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])  # ids of the valid area
    if options.orig_net is not None:
        # the original (full) network is used to reconstruct departure times
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    print("Valid area contains %s edges" % len(edges))

    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route

    busStopEdges = {}  # busStop id -> edge on which it is located
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write(
            '<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
            'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        num_busstops = 0
        kept_busstops = 0
        num_taz = 0
        kept_taz = 0
        for busStop in parse(options.additional_input, ('busStop', 'trainStop')):
            num_busstops += 1
            edge = busStop.lane[:-2]  # strip the lane index to get the edge id
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                kept_busstops += 1
                if busStop.access:
                    # keep only access elements that point into the area
                    busStop.access = [acc for acc in busStop.access if acc.lane[:-2] in edges]
                busStops.write(busStop.toXML('    ').decode('utf8'))
        for taz in parse(options.additional_input, 'taz'):
            num_taz += 1
            # restrict the taz to the edges inside the area
            taz_edges = [e for e in taz.edges.split() if e in edges]
            if taz_edges:
                taz.edges = " ".join(taz_edges)
                if options.stops_output:
                    kept_taz += 1
                    busStops.write(taz.toXML('    '))
        if num_busstops > 0 and num_taz > 0:
            print("Kept %s of %s busStops and %s of %s tazs" % (
                kept_busstops, num_busstops, kept_taz, num_taz))
        elif num_busstops > 0:
            print("Kept %s of %s busStops" % (
                kept_busstops, num_busstops))
        elif num_taz > 0:
            print("Kept %s of %s tazs" % (
                kept_taz, num_taz))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # writes header, root element, all (sorted) elements and footer
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(
            ('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
             'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n'))
        num_routeRefs = 0
        num_vehicles = 0
        for _, v in vehicles:
            if v.name == 'route':
                num_routeRefs += 1
            else:
                num_vehicles += 1
            writer(f, v)
        f.write('</routes>\n')
        if num_routeRefs > 0:
            print("Wrote %s standalone-routes and %s vehicles" % (num_routeRefs, num_vehicles))
        else:
            print("Wrote %s %s" % (num_vehicles, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(
                cut_routes(edges, orig_net, options, busStopEdges), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main():
    """Score the implausibility of each route against the fastest route
    computed by duarouter, write restrictions/polygons for implausible
    routes and print statistics.

    Fix: the --ignore-routes filter compared the whole (score, id, info)
    tuple against a set of route-id strings, which never matched and so
    ignored nothing; it now filters on the route id.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)

    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri

    # air distance between the start of the first and the end of the last edge
    for rInfo in routeInfos.values():
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist

    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write('    </vehicle>\n')
            outf.write('</routes>\n')

    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])

    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # duarouter found no alternative: the route is its own shortest path
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overal implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        # weighted sum of detour measures plus penalties for very short routes
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        ignored = set([r.strip() for r in open(options.ignore_routes)])
        # filter on the route id (element [1]); comparing the whole tuple
        # against the id set would never match and thus ignore nothing
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(net, rID, colorgen(), 100, False, ri.edges, options.blur, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        # , ' '.join(ri.edges)))
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (
            ri.airDistRatio, ri.detourRatio, ri.detour, ri.shortest_path_distance, ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def parse_standalone_routes(file, into):
    """Register every stand-alone <route> element of *file* in the dict *into*, keyed by route id."""
    into.update((r.id, r) for r in parse(file, 'route'))
def main(options):
    """Copy options.infile to options.outfile while shifting departure times.

    Stand-alone routes (those with an id) are copied verbatim and their
    first/last edges cached, so vehicles/flows that reference a route by id
    can still be matched against the optional depart-edge/arrival-edge
    filters.  For each vehicle/trip/flow passing the filters, depart (or
    begin/end for flows) is shifted by options.offset if given, otherwise
    mapped via shiftInterval into options.interval.  vType elements are
    copied unchanged.
    """
    # cache stand-alone routes
    routesDepart = {}  # first edge for each route
    routesArrival = {}  # last edge for each route

    with codecs.open(options.outfile, 'w', encoding='utf8') as out:
        out.write("<routes>\n")
        for route in parse(options.infile, "route"):
            if route.hasAttribute('id') and route.id is not None:
                routesDepart[route.id] = route.edges.split()[0]
                routesArrival[route.id] = route.edges.split()[-1]
                out.write(route.toXML(' '))
        for obj in parse(options.infile, ['vehicle', 'trip', 'flow', 'vType'],
                         heterogeneous=options.heterogeneous, warn=False):
            if obj.name == 'vType':
                # copy
                pass
            else:
                if options.modify_ids:
                    obj.id += options.name_suffix
                # compute depart-edge filter
                departEdge = None
                if options.depart_edges is not None:
                    # determine the departEdge of the current vehicle
                    if obj.name == 'trip':
                        departEdge = obj.attr_from
                    elif obj.name == 'vehicle':
                        if obj.hasAttribute('route') and obj.route is not None:
                            departEdge = routesDepart[obj.route]
                        else:
                            # route child element
                            departEdge = obj.route[0].edges.split()[0]
                    elif obj.name == 'flow':
                        if obj.hasAttribute('attr_from') and obj.attr_from is not None:
                            departEdge = obj.attr_from
                        elif obj.hasAttribute('route') and obj.route is not None:
                            departEdge = routesDepart[obj.route]
                        else:
                            # route child element
                            departEdge = obj.route[0].edges.split()[0]
                # compute arrival-edge filter
                arrivalEdge = None
                if options.arrival_edges is not None:
                    # determine the arrivalEdge of the current vehicle
                    if obj.name == 'trip':
                        arrivalEdge = obj.to
                    elif obj.name == 'vehicle':
                        if obj.hasAttribute('route') and obj.route is not None:
                            arrivalEdge = routesArrival[obj.route]
                        else:
                            # route child element
                            arrivalEdge = obj.route[0].edges.split()[-1]
                    elif obj.name == 'flow':
                        # bugfix: test obj.to here (the original tested
                        # obj.attr_from, copy-pasted from the depart branch)
                        if obj.hasAttribute('to') and obj.to is not None:
                            arrivalEdge = obj.to
                        elif obj.hasAttribute('route') and obj.route is not None:
                            arrivalEdge = routesArrival[obj.route]
                        else:
                            # route child element
                            arrivalEdge = obj.route[0].edges.split()[-1]
                # modify departure time
                if ((departEdge is None or departEdge in options.depart_edges) and
                        (arrivalEdge is None or arrivalEdge in options.arrival_edges)):
                    if options.offset is not None:
                        # shift by offset
                        if obj.name in ['trip', 'vehicle']:
                            obj.depart = str(intIfPossible(
                                float(obj.depart) + options.offset))
                        else:
                            obj.begin = str(intIfPossible(
                                float(obj.begin) + options.offset))
                            obj.end = str(intIfPossible(
                                float(obj.end) + options.offset))
                    else:
                        # shift by interval
                        if obj.name in ['trip', 'vehicle']:
                            obj.depart = shiftInterval(
                                obj.depart, options.interval)
                        else:
                            obj.begin = shiftInterval(
                                obj.begin, options.interval)
                            obj.end = shiftInterval(obj.end, options.interval)
            out.write(obj.toXML(' '))
        out.write("</routes>\n")
def cut_trips(aEdges, options, validTaz):
    """Generator filtering <trip> and <person> elements down to the cut area.

    Yields (depart, object) pairs for trips whose from/to edges lie in
    *aEdges* and whose fromTaz/toTaz lie in *validTaz*.  For persons, only
    <personTrip> plan items are supported (walks, stops and rides are counted
    and dropped) and only persontrips that start AND end inside the area are
    kept.
    """
    areaEdges = set(aEdges)
    num_trips = 0
    num_returned = 0
    for routeFile in options.routeFiles:
        print("Parsing trips from %s" % routeFile)
        for trip in parse(routeFile, 'trip'):
            num_trips += 1
            # discard trips that start, end, or are TAZ-bound outside the area
            if trip.attr_from is not None and trip.attr_from not in areaEdges:
                continue
            if trip.to is not None and trip.to not in areaEdges:
                continue
            if trip.fromTaz is not None and trip.fromTaz not in validTaz:
                continue
            if trip.toTaz is not None and trip.toTaz not in validTaz:
                continue
            yield float(trip.depart), trip
            num_returned += 1
        print("Parsing persontrips from %s" % routeFile)
        # NOTE(review): these person counters are reset for every route file,
        # so the summary below only reflects the last file — confirm intent
        ignored_planitems = defaultdict(lambda: 0)
        num_persons = 0
        num_persontrips = 0
        from_ok = 0
        to_ok = 0
        for person in parse(routeFile, 'person'):
            num_persons += 1
            # plan items other than <personTrip> are unsupported: count, drop
            if person.walk is not None:
                ignored_planitems['walk'] += len(person.walk)
                del person.walk
            if person.stop is not None:
                ignored_planitems['stop'] += len(person.stop)
                del person.stop
            if person.ride is not None:
                ignored_planitems['ride'] += len(person.ride)
                del person.ride
            if person.personTrip is not None:
                kept_pt = []
                for pt in person.personTrip:
                    skip = False
                    if pt.attr_from in areaEdges:
                        from_ok += 1
                    else:
                        skip = True
                    if pt.to in areaEdges:
                        to_ok += 1
                    else:
                        skip = True
                    if skip:
                        continue
                    kept_pt.append(pt)
                    num_persontrips += 1
                if kept_pt:
                    person.personTrip = kept_pt
                    yield float(person.depart), person
    print("Parsed %s trips and kept %s" % (num_trips, num_returned))
    if num_persons > 0:
        # bugfix: this formerly printed num_trips instead of num_persons
        print("Parsed %s persons and kept %s persontrips" % (num_persons, num_persontrips))
        print("Discared %s person that departed in the area and %s persons that arrived in the area" % (
            from_ok, to_ok))
    if ignored_planitems:
        print("Ignored plan items:")
        for itemtype, count in ignored_planitems.items():
            print(" %s %ss" % (count, itemtype))
def cut_routes(areaEdges, orig_net, options, busStopEdges=None):
    """Generator restricting vehicle routes from options.routeFiles to *areaEdges*.

    Yields (newDepart, vehicle) for every vehicle whose route touches the
    area.  The route is trimmed to the span between the first and last
    in-area edge, the departure time is shifted accordingly (using recorded
    exitTimes when available, otherwise extrapolated from *orig_net* default
    speeds), and stops that no longer lie on the remaining edges are removed.

    Fixes applied: Python 2 print statements converted to print() calls;
    the connectivity check formerly sliced edges[fromIndex:toIndex], which
    excluded the last in-area edge (later revisions use lastIndex + 1).
    """
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for vehicle in parse(routeFile, 'vehicle'):
            num_vehicles += 1
            edges = vehicle.route[0].edges.split()
            fromIndex = getFirstIndex(areaEdges, edges)
            if fromIndex is None:
                continue  # route does not touch the area
            toIndex = len(edges) - 1 - \
                getFirstIndex(areaEdges, reversed(edges))
            # check for minimum length
            if toIndex - fromIndex + 1 < options.min_length:
                too_short += 1
                continue
            # check for connectivity (bugfix: include the edge at toIndex)
            if missingEdges(areaEdges, edges[fromIndex:toIndex + 1],
                            missingEdgeOccurences):
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    continue
            # compute new departure
            if vehicle.route[0].exitTimes is None:
                if orig_net is not None:
                    # extrapolate new departure using default speed
                    newDepart = (float(vehicle.depart) +
                                 sum([(orig_net.getEdge(e).getLength() /
                                       (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                      for e in edges[:fromIndex]]))
                else:
                    print("Could not reconstruct new departure time for vehicle '%s'. Using old departure time." %
                          vehicle.id)
                    newDepart = float(vehicle.depart)
            else:
                exitTimes = vehicle.route[0].exitTimes.split()
                departTimes = [vehicle.depart] + exitTimes[:-1]
                teleportFactor = len(departTimes) / float(len(edges))
                teleportFactorSum += teleportFactor
                # assume teleports were spread evenly across the vehicles route
                newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                del vehicle.route[0].exitTimes
            remaining = edges[fromIndex:toIndex + 1]
            # keep only stops still reachable on the trimmed route
            stops = []
            if vehicle.stop:
                for stop in vehicle.stop:
                    if stop.busStop:
                        if not busStopEdges:
                            print("No bus stop locations parsed, skipping bus stop '%s'." % stop.busStop)
                            continue
                        if stop.busStop not in busStopEdges:
                            print("Skipping bus stop '%s', which could not be located." % stop.busStop)
                            continue
                        if busStopEdges[stop.busStop] in remaining:
                            stops.append(stop)
                    elif stop.lane[:-2] in remaining:
                        stops.append(stop)
            vehicle.route[0].edges = " ".join(remaining)
            vehicle.stop = stops
            vehicle.depart = "%.2f" % newDepart
            yield newDepart, vehicle
            num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports))
    if too_short > 0:
        print("Discarded %s routes because they have less than %s edges" % (
            too_short, options.min_length))
    print("Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes)
    printTop(missingEdgeOccurences)
def writeTraveltimeMatrix(options):
    """Aggregate one tripinfo attribute into per-OD-pair statistics.

    The OD pair of each tripinfo is resolved in three steps: explicit
    fromTaz/toTaz on the trip/vehicle, then the TAZ pair of the originating
    flow (flow vehicles are named '<flowID>.<n>'), and finally the TAZ
    assignment of the depart/arrival edges.  Results go to options.output as
    XML, or to stdout otherwise.
    """
    vehicleOD = {}       # vehicleID : (fromTaz, toTaz)
    flowOD = {}          # flowID : (fromTaz, toTaz)
    knownFlows = set()
    edge2sinkTAZ = {}    # edgeID : TAZ
    edge2sourceTAZ = {}  # edgeID : TAZ
    # restrict parsing to the attributes we actually use
    wantedAttrs = defaultdict(lambda: ['id', 'fromTaz', 'toTaz'])
    for routeFile in options.routeFiles:
        for veh in parse(routeFile, ['trip', 'vehicle'], wantedAttrs):
            if veh.fromTaz and veh.toTaz:
                vehicleOD[veh.id] = (veh.fromTaz, veh.toTaz)
        for flow in parse(routeFile, 'flow', wantedAttrs):
            knownFlows.add(flow.id)
            if flow.fromTaz and flow.toTaz:
                flowOD[flow.id] = (flow.fromTaz, flow.toTaz)
    for tazFile in options.tazFiles:
        for taz in parse(tazFile, 'taz'):
            sourceEdges = []
            sinkEdges = []
            if taz.edges:
                sourceEdges = taz.edges.split()
                # NOTE(review): sinkEdges aliases the same list here, so edges
                # appended below via <tazSource>/<tazSink> children end up in
                # both roles — confirm this is intended
                sinkEdges = sourceEdges
            if taz.tazSource:
                for child in taz.tazSource:
                    sourceEdges.append(child.id)
            if taz.tazSink:
                for child in taz.tazSink:
                    sinkEdges.append(child.id)
            for edge in sourceEdges:
                if edge in edge2sourceTAZ:
                    print("edge %s s already assigned as source for taz %s. Reasignemnt to taz %s is not supported" % (
                        edge, edge2sourceTAZ[edge], taz.id))
                else:
                    edge2sourceTAZ[edge] = taz.id
            for edge in sinkEdges:
                if edge in edge2sinkTAZ:
                    print("edge %s s already assigned as sink for taz %s. Reasignemnt to taz %s is not supported" % (
                        edge, edge2sinkTAZ[edge], taz.id))
                else:
                    edge2sinkTAZ[edge] = taz.id
    odpairs = {}
    for trip in parse(options.tripinfoFile, 'tripinfo'):
        tripID = trip.id
        odpair = vehicleOD.get(tripID)
        if odpair is None and '.' in tripID:
            # maybe this vehicle was spawned by a flow: strip the run index
            flowID = tripID[:tripID.rfind('.')]
            if flowID in knownFlows:
                tripID = flowID
                odpair = flowOD.get(tripID)
        if odpair is None:
            # fall back to the TAZ of the depart/arrival edge (lane id minus
            # its '_<index>' suffix yields the edge id)
            fromEdge = trip.departLane[:trip.departLane.rfind('_')]
            toEdge = trip.arrivalLane[:trip.arrivalLane.rfind('_')]
            odpair = (edge2sourceTAZ.get(fromEdge, '?'), edge2sinkTAZ.get(toEdge, '?'))
        if odpair not in odpairs:
            odpairs[odpair] = Statistics(' '.join(odpair))
        odpairs[odpair].add(parseTime(getattr(trip, options.attribute)), tripID)
    if options.output:
        with open(options.output, 'w') as outf:
            outf.write('<tripinfosByTAZ attribute="%s">\n' % options.attribute)
            for (fromTaz, toTaz), stats in sorted(odpairs.items()):
                q1, median, q3 = stats.quartiles()
                outf.write(' <odInfo fromTaz="%s" toTaz="%s" count="%s" min="%s" minVeh="%s"' % (
                    fromTaz, toTaz, stats.count(), stats.min, stats.min_label))
                outf.write(' max="%s" maxVeh="%s" mean="%s" Q1="%s" median="%s" Q3="%s"/>\n' % (
                    stats.max, stats.max_label, stats.avg(), q1, median, q3))
            outf.write('</tripinfosByTAZ>\n')
    else:
        for (fromTaz, toTaz), stats in sorted(odpairs.items()):
            print(stats)
def cut_routes(aEdges, orig_net, options, busStopEdges=None):
    """Generator restricting vehicles (and referenced stand-alone routes) to the cut area.

    Yields (depart, object) pairs where object is either a trimmed vehicle or
    a trimmed stand-alone route (yielded with depart None).  Routes referenced
    by id are processed once; their depart shift (or the marker 'discard') is
    cached in standaloneRoutesDepart so later vehicles on the same route are
    handled cheaply.  Disconnected routes are split into multiple vehicle
    copies with ids '<id>_part<n>'.

    Fix applied: the cache lookup used "newDepart is 'discard'", an identity
    comparison against a string literal, which is implementation-dependent;
    it now uses equality.
    """
    areaEdges = set(aEdges)
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    if options.additional_input:
        parse_standalone_routes(options.additional_input, standaloneRoutes)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes)
    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for vehicle in parse(routeFile, 'vehicle'):
            num_vehicles += 1
            if type(vehicle.route) == list:
                # the route is a child element of the vehicle
                edges = vehicle.route[0].edges.split()
                routeRef = False
            else:
                # the route is referenced by id; consult the cache first
                newDepart = standaloneRoutesDepart.get(vehicle.route)
                if newDepart == 'discard':
                    # route was already checked and discarded
                    continue
                elif newDepart is not None:
                    # route was already treated; only shift the depart time
                    vehicle.depart = "%.2f" % (newDepart + float(vehicle.depart))
                    yield vehicle.depart, vehicle
                    continue
                else:
                    routeRef = standaloneRoutes[vehicle.route]
                    edges = routeRef.edges.split()
            firstIndex = getFirstIndex(areaEdges, edges)
            if firstIndex is None:
                continue  # route does not touch the area
            lastIndex = len(edges) - 1 - \
                getFirstIndex(areaEdges, reversed(edges))
            # check for connectivity: each (i, j) span of consecutive in-area
            # edges becomes one route part
            route_parts = [(firstIndex + i, firstIndex + j)
                           for (i, j) in missingEdges(areaEdges,
                                                      edges[firstIndex:(lastIndex + 1)],
                                                      missingEdgeOccurences)]
            if len(route_parts) > 1:
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    if routeRef:
                        standaloneRoutesDepart[vehicle.route] = 'discard'
                    continue
            # loop over different route parts
            for ix_part, ix_interval in enumerate(route_parts):
                fromIndex, toIndex = ix_interval
                # check for minimum length
                if toIndex - fromIndex + 1 < options.min_length:
                    too_short += 1
                    if routeRef:
                        standaloneRoutesDepart[vehicle.route] = 'discard'
                    continue
                # compute new departure
                if routeRef or vehicle.route[0].exitTimes is None:
                    if orig_net is not None:
                        # extrapolate new departure using default speed
                        newDepart = (float(vehicle.depart) +
                                     sum([(orig_net.getEdge(e).getLength() /
                                           (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                          for e in edges[:fromIndex]]))
                    else:
                        print(
                            "Could not reconstruct new departure time for vehicle '%s'. Using old departure time." %
                            vehicle.id)
                        newDepart = float(vehicle.depart)
                else:
                    exitTimes = vehicle.route[0].exitTimes.split()
                    departTimes = [vehicle.depart] + exitTimes[:-1]
                    teleportFactor = len(departTimes) / float(len(edges))
                    teleportFactorSum += teleportFactor
                    # assume teleports were spread evenly across the vehicles route
                    newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                    del vehicle.route[0].exitTimes
                departShift = None
                if routeRef:
                    departShift = newDepart - float(vehicle.depart)
                    standaloneRoutesDepart[vehicle.route] = departShift
                remaining = edges[fromIndex:toIndex + 1]
                stops = cut_stops(vehicle, busStopEdges, remaining)
                if routeRef:
                    routeRef.stop = cut_stops(routeRef, busStopEdges, remaining,
                                              departShift, options.defaultStopDuration)
                    routeRef.edges = " ".join(remaining)
                    yield None, routeRef
                else:
                    vehicle.route[0].edges = " ".join(remaining)
                vehicle.stop = stops
                vehicle.depart = "%.2f" % newDepart
                if len(route_parts) > 1:
                    # return copies of the vehicle for each route part
                    yield_veh = copy.deepcopy(vehicle)
                    yield_veh.id = vehicle.id + "_part" + str(ix_part)
                    yield newDepart, yield_veh
                else:
                    yield newDepart, vehicle
                num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports))
    if too_short > 0:
        print("Discarded %s routes because they have less than %s edges" % (too_short, options.min_length))
    print("Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes)
    printTop(missingEdgeOccurences)
def main():
    """Score every route for implausibility and report/visualize the outliers.

    Runs duarouter on the input routes to obtain the fastest alternative for
    each vehicle, derives detour/detour-ratio/air-distance-ratio measures,
    combines them into an implausibility score, and writes restriction files,
    polygons for sumo-gui, and a tab-separated report of routes whose score
    exceeds options.threshold.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)

    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri

    for rInfo in routeInfos.values():
        # air-line distance between start of first and end of last edge
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist

    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write(' <vehicle id="%s" depart="0">\n' % rID)
                outf.write(' <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write(' </vehicle>\n')
            outf.write('</routes>\n')

    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])
    # the .alt file lists the original route plus the computed fastest one
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert (routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(
                net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overal implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        # weighted combination of all implausibility measures
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignoref:
            ignored = set([r.strip() for r in ignoref])
        # bugfix: implausible holds (score, id, info) tuples; the old code
        # tested "r not in ignored" which never matched an id string
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        # , ' '.join(ri.edges)))
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (
            ri.airDistRatio, ri.detourRatio, ri.detour,
            ri.shortest_path_distance, ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def writeTraveltimeMatrix(options):
    """Aggregate one tripinfo attribute into per-OD-pair statistics.

    The OD pair of each tripinfo is resolved in three steps: explicit
    fromTaz/toTaz on the trip/vehicle, then the TAZ pair of the originating
    flow (flow vehicles are named '<flowID>.<n>'), and finally the TAZ
    assignment of the depart/arrival edges.  Results go to options.output as
    XML, or to stdout otherwise.
    """
    vehicleOD = {}       # vehicleID : (fromTaz, toTaz)
    flowOD = {}          # flowID : (fromTaz, toTaz)
    knownFlows = set()
    edge2sinkTAZ = {}    # edgeID : TAZ
    edge2sourceTAZ = {}  # edgeID : TAZ
    # restrict parsing to the attributes we actually use
    wantedAttrs = defaultdict(lambda: ['id', 'fromTaz', 'toTaz'])
    for routeFile in options.routeFiles:
        for veh in parse(routeFile, ['trip', 'vehicle'], wantedAttrs):
            if veh.fromTaz and veh.toTaz:
                vehicleOD[veh.id] = (veh.fromTaz, veh.toTaz)
        for flow in parse(routeFile, 'flow', wantedAttrs):
            knownFlows.add(flow.id)
            if flow.fromTaz and flow.toTaz:
                flowOD[flow.id] = (flow.fromTaz, flow.toTaz)
    for tazFile in options.tazFiles:
        for taz in parse(tazFile, 'taz'):
            sourceEdges = []
            sinkEdges = []
            if taz.edges:
                sourceEdges = taz.edges.split()
                # NOTE(review): sinkEdges aliases the same list here, so edges
                # appended below via <tazSource>/<tazSink> children end up in
                # both roles — confirm this is intended
                sinkEdges = sourceEdges
            if taz.tazSource:
                for child in taz.tazSource:
                    sourceEdges.append(child.id)
            if taz.tazSink:
                for child in taz.tazSink:
                    sinkEdges.append(child.id)
            for edge in sourceEdges:
                if edge in edge2sourceTAZ:
                    print(
                        "edge %s s already assigned as source for taz %s. Reasignemnt to taz %s is not supported" % (
                            edge, edge2sourceTAZ[edge], taz.id))
                else:
                    edge2sourceTAZ[edge] = taz.id
            for edge in sinkEdges:
                if edge in edge2sinkTAZ:
                    print(
                        "edge %s s already assigned as sink for taz %s. Reasignemnt to taz %s is not supported" % (
                            edge, edge2sinkTAZ[edge], taz.id))
                else:
                    edge2sinkTAZ[edge] = taz.id
    odpairs = {}
    for trip in parse(options.tripinfoFile, 'tripinfo'):
        tripID = trip.id
        odpair = vehicleOD.get(tripID)
        if odpair is None and '.' in tripID:
            # maybe this vehicle was spawned by a flow: strip the run index
            flowID = tripID[:tripID.rfind('.')]
            if flowID in knownFlows:
                tripID = flowID
                odpair = flowOD.get(tripID)
        if odpair is None:
            # fall back to the TAZ of the depart/arrival edge (lane id minus
            # its '_<index>' suffix yields the edge id)
            fromEdge = trip.departLane[:trip.departLane.rfind('_')]
            toEdge = trip.arrivalLane[:trip.arrivalLane.rfind('_')]
            odpair = (edge2sourceTAZ.get(fromEdge, '?'), edge2sinkTAZ.get(toEdge, '?'))
        if odpair not in odpairs:
            odpairs[odpair] = Statistics(' '.join(odpair))
        odpairs[odpair].add(parseTime(getattr(trip, options.attribute)), tripID)
    if options.output:
        with open(options.output, 'w') as outf:
            outf.write('<tripinfosByTAZ attribute="%s">\n' % options.attribute)
            for (fromTaz, toTaz), stats in sorted(odpairs.items()):
                q1, median, q3 = stats.quartiles()
                outf.write(' <odInfo fromTaz="%s" toTaz="%s" count="%s" min="%s" minVeh="%s"' % (
                    fromTaz, toTaz, stats.count(), stats.min, stats.min_label))
                outf.write(' max="%s" maxVeh="%s" mean="%s" Q1="%s" median="%s" Q3="%s"/>\n' % (
                    stats.max, stats.max_label, stats.avg(), q1, median, q3))
            outf.write('</tripinfosByTAZ>\n')
    else:
        for (fromTaz, toTaz), stats in sorted(odpairs.items()):
            print(stats)
def cut_routes(aEdges, orig_net, options, busStopEdges=None):
    """Generator restricting vehicles (and referenced stand-alone routes) to the cut area.

    Yields (depart, object) pairs where object is either a trimmed vehicle or
    a trimmed stand-alone route (yielded with depart None).  Routes referenced
    by id are processed once; their depart shift (or the marker 'discard') is
    cached in standaloneRoutesDepart so later vehicles on the same route are
    handled cheaply.  Disconnected routes are split into multiple vehicle
    copies with ids '<id>_part<n>'.  Parts shorter than options.min_length
    edges or options.min_air_dist air-line distance are dropped.

    Fix applied: the cache lookup used "newDepart is 'discard'", an identity
    comparison against a string literal, which is implementation-dependent;
    it now uses equality.
    """
    areaEdges = set(aEdges)
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    too_short_airdist = 0
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    if options.additional_input:
        parse_standalone_routes(options.additional_input, standaloneRoutes)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes)
    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for vehicle in parse(routeFile, 'vehicle'):
            num_vehicles += 1
            if type(vehicle.route) == list:
                # the route is a child element of the vehicle
                edges = vehicle.route[0].edges.split()
                routeRef = False
            else:
                # the route is referenced by id; consult the cache first
                newDepart = standaloneRoutesDepart.get(vehicle.route)
                if newDepart == 'discard':
                    # route was already checked and discarded
                    continue
                elif newDepart is not None:
                    # route was already treated; only shift the depart time
                    vehicle.depart = "%.2f" % (newDepart + float(vehicle.depart))
                    yield vehicle.depart, vehicle
                    continue
                else:
                    routeRef = standaloneRoutes[vehicle.route]
                    edges = routeRef.edges.split()
            firstIndex = getFirstIndex(areaEdges, edges)
            if firstIndex is None:
                continue  # route does not touch the area
            lastIndex = len(edges) - 1 - \
                getFirstIndex(areaEdges, reversed(edges))
            # check for connectivity: each (i, j) span of consecutive in-area
            # edges becomes one route part
            route_parts = [(firstIndex + i, firstIndex + j)
                           for (i, j) in missingEdges(areaEdges,
                                                      edges[firstIndex:(lastIndex + 1)],
                                                      missingEdgeOccurences)]
            if len(route_parts) > 1:
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    if routeRef:
                        standaloneRoutesDepart[vehicle.route] = 'discard'
                    continue
            # loop over different route parts
            for ix_part, ix_interval in enumerate(route_parts):
                fromIndex, toIndex = ix_interval
                # check for minimum length
                if toIndex - fromIndex + 1 < options.min_length:
                    too_short += 1
                    if routeRef:
                        standaloneRoutesDepart[vehicle.route] = 'discard'
                    continue
                if options.min_air_dist > 0:
                    # also enforce a minimum air-line distance for the kept part
                    fromPos = orig_net.getEdge(edges[fromIndex]).getFromNode().getCoord()
                    toPos = orig_net.getEdge(edges[toIndex]).getToNode().getCoord()
                    if sumolib.miscutils.euclidean(fromPos, toPos) < options.min_air_dist:
                        too_short_airdist += 1
                        if routeRef:
                            standaloneRoutesDepart[vehicle.route] = 'discard'
                        continue
                # compute new departure
                if routeRef or vehicle.route[0].exitTimes is None:
                    if orig_net is not None:
                        # extrapolate new departure using default speed
                        newDepart = (float(vehicle.depart) +
                                     sum([(orig_net.getEdge(e).getLength() /
                                           (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                          for e in edges[:fromIndex]]))
                    else:
                        print(
                            "Could not reconstruct new departure time for vehicle '%s'. Using old departure time." %
                            vehicle.id)
                        newDepart = float(vehicle.depart)
                else:
                    exitTimes = vehicle.route[0].exitTimes.split()
                    departTimes = [vehicle.depart] + exitTimes[:-1]
                    teleportFactor = len(departTimes) / float(len(edges))
                    teleportFactorSum += teleportFactor
                    # assume teleports were spread evenly across the vehicles route
                    newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                    del vehicle.route[0].exitTimes
                departShift = None
                if routeRef:
                    departShift = newDepart - float(vehicle.depart)
                    standaloneRoutesDepart[vehicle.route] = departShift
                remaining = edges[fromIndex:toIndex + 1]
                stops = cut_stops(vehicle, busStopEdges, remaining)
                if routeRef:
                    routeRef.stop = cut_stops(routeRef, busStopEdges, remaining,
                                              departShift, options.defaultStopDuration)
                    routeRef.edges = " ".join(remaining)
                    yield None, routeRef
                else:
                    vehicle.route[0].edges = " ".join(remaining)
                vehicle.stop = stops
                vehicle.depart = "%.2f" % newDepart
                if len(route_parts) > 1:
                    # return copies of the vehicle for each route part
                    yield_veh = copy.deepcopy(vehicle)
                    yield_veh.id = vehicle.id + "_part" + str(ix_part)
                    yield newDepart, yield_veh
                else:
                    yield newDepart, vehicle
                num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports))
    if too_short > 0:
        print("Discarded %s routes because they have less than %s edges" % (too_short, options.min_length))
    if too_short_airdist > 0:
        print("Discarded %s routes because the air-line distance between start and end is less than %s" % (
            too_short_airdist, options.min_air_dist))
    print("Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes)
    printTop(missingEdgeOccurences)
def cut_trips(aEdges, options, validTaz):
    """Generator filtering <trip> and <person> elements down to the cut area.

    Yields (depart, object) pairs for trips whose from/to edges lie in
    *aEdges* and whose fromTaz/toTaz lie in *validTaz*.  For persons, only
    <personTrip> plan items are supported (walks, stops and rides are counted
    and dropped) and only persontrips that start AND end inside the area are
    kept.
    """
    areaEdges = set(aEdges)
    num_trips = 0
    num_returned = 0
    for routeFile in options.routeFiles:
        print("Parsing trips from %s" % routeFile)
        for trip in parse(routeFile, 'trip'):
            num_trips += 1
            # discard trips that start, end, or are TAZ-bound outside the area
            if trip.attr_from is not None and trip.attr_from not in areaEdges:
                continue
            if trip.to is not None and trip.to not in areaEdges:
                continue
            if trip.fromTaz is not None and trip.fromTaz not in validTaz:
                continue
            if trip.toTaz is not None and trip.toTaz not in validTaz:
                continue
            yield float(trip.depart), trip
            num_returned += 1
        print("Parsing persontrips from %s" % routeFile)
        # NOTE(review): these person counters are reset for every route file,
        # so the summary below only reflects the last file — confirm intent
        ignored_planitems = defaultdict(lambda: 0)
        num_persons = 0
        num_persontrips = 0
        from_ok = 0
        to_ok = 0
        for person in parse(routeFile, 'person'):
            num_persons += 1
            # plan items other than <personTrip> are unsupported: count, drop
            if person.walk is not None:
                ignored_planitems['walk'] += len(person.walk)
                del person.walk
            if person.stop is not None:
                ignored_planitems['stop'] += len(person.stop)
                del person.stop
            if person.ride is not None:
                ignored_planitems['ride'] += len(person.ride)
                del person.ride
            if person.personTrip is not None:
                kept_pt = []
                for pt in person.personTrip:
                    skip = False
                    if pt.attr_from in areaEdges:
                        from_ok += 1
                    else:
                        skip = True
                    if pt.to in areaEdges:
                        to_ok += 1
                    else:
                        skip = True
                    if skip:
                        continue
                    kept_pt.append(pt)
                    num_persontrips += 1
                if kept_pt:
                    person.personTrip = kept_pt
                    yield float(person.depart), person
    print("Parsed %s trips and kept %s" % (num_trips, num_returned))
    if num_persons > 0:
        # bugfix: this formerly printed num_trips instead of num_persons
        print("Parsed %s persons and kept %s persontrips" % (num_persons, num_persontrips))
        print("Discared %s person that departed in the area and %s persons that arrived in the area" % (
            from_ok, to_ok))
    if ignored_planitems:
        print("Ignored plan items:")
        for itemtype, count in ignored_planitems.items():
            print(" %s %ss" % (count, itemtype))
def cut_routes(aEdges, orig_net, options, busStopEdges=None):
    """Yield (newDepart, vehicle) pairs for vehicles whose route touches the area.

    Routes are trimmed to the spans lying inside *aEdges*; a route that
    leaves and re-enters the area is split into several parts (vehicle ids
    get a '_part<n>' suffix) unless options.disconnected_action == 'discard'.
    The new departure is taken from recorded exitTimes when present,
    otherwise extrapolated from *orig_net* edge lengths and default speeds.
    Stops that no longer lie on the trimmed route are removed.
    """
    areaEdges = set(aEdges)
    num_vehicles = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for vehicle in parse(routeFile, 'vehicle'):
            num_vehicles += 1
            edges = vehicle.route[0].edges.split()
            # index of the first / last route edge inside the area
            firstIndex = getFirstIndex(areaEdges, edges)
            if firstIndex is None:
                continue  # route does not touch the area
            lastIndex = len(edges) - 1 - \
                getFirstIndex(areaEdges, reversed(edges))
            # check for connectivity: each (i, j) span returned by
            # missingEdges becomes one contiguous in-area route part
            route_parts = [(firstIndex + i, firstIndex + j)
                           for (i, j) in missingEdges(areaEdges,
                                                      edges[firstIndex:(lastIndex + 1)],
                                                      missingEdgeOccurences)]
            # print("areaEdges: %s"%str(areaEdges))
            # print("routeEdges: %s"%str(edges))
            # print("firstIndex = %d"%firstIndex)
            # print("route_parts = %s"%str(route_parts))
            if len(route_parts) > 1:
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    continue
            # loop over different route parts
            for ix_part, ix_interval in enumerate(route_parts):
                fromIndex, toIndex = ix_interval
                # print("(fromIndex,toIndex) = (%d,%d)"%(fromIndex,toIndex))
                # check for minimum length
                if toIndex - fromIndex + 1 < options.min_length:
                    too_short += 1
                    continue
                # compute new departure
                if vehicle.route[0].exitTimes is None:
                    if orig_net is not None:
                        # extrapolate new departure using default speed
                        newDepart = (float(vehicle.depart) +
                                     sum([(orig_net.getEdge(e).getLength() /
                                           (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                          for e in edges[:fromIndex]]))
                    else:
                        print(
                            "Could not reconstruct new departure time for vehicle '%s'. Using old departure time."
                            % vehicle.id)
                        newDepart = float(vehicle.depart)
                else:
                    exitTimes = vehicle.route[0].exitTimes.split()
                    departTimes = [vehicle.depart] + exitTimes[:-1]
                    # factor < 1 indicates teleports (fewer recorded times
                    # than edges)
                    teleportFactor = len(departTimes) / float(len(edges))
                    teleportFactorSum += teleportFactor
                    # assume teleports were spread evenly across the vehicles route
                    newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                    del vehicle.route[0].exitTimes
                remaining = edges[fromIndex:toIndex + 1]
                # keep only stops still reachable on the trimmed route
                stops = []
                if vehicle.stop:
                    for stop in vehicle.stop:
                        if stop.busStop:
                            if not busStopEdges:
                                print(
                                    "No bus stop locations parsed, skipping bus stop '%s'."
                                    % stop.busStop)
                                continue
                            if stop.busStop not in busStopEdges:
                                print(
                                    "Skipping bus stop '%s', which could not be located."
                                    % stop.busStop)
                                continue
                            if busStopEdges[stop.busStop] in remaining:
                                stops.append(stop)
                        # assumes lane ids end in '_<index>' with a
                        # single-digit index — TODO confirm for >9 lanes
                        elif stop.lane[:-2] in remaining:
                            stops.append(stop)
                vehicle.route[0].edges = " ".join(remaining)
                vehicle.stop = stops
                vehicle.depart = "%.2f" % newDepart
                if len(route_parts) > 1:
                    # return copies of the vehicle for each route part
                    yield_veh = copy.deepcopy(vehicle)
                    yield_veh.id = vehicle.id + "_part" + str(ix_part)
                    yield newDepart, yield_veh
                else:
                    yield newDepart, vehicle
                num_returned += 1
    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles and kept %s routes%s" % (num_vehicles, num_returned, teleports))
    if too_short > 0:
        print("Discarded %s routes because they have less than %s edges" % (too_short, options.min_length))
    print("Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes)
    printTop(missingEdgeOccurences)