def main():
    """Compare per-vehicle route lengths between two route files.

    Prints difference statistics and optionally writes a histogram file
    and a full per-vehicle listing.
    """
    options = get_options()
    net = readNet(options.network)
    # NOTE: the original also built an unused set of all edge ids; removed.
    lengths1 = {}
    lengths2 = {}
    lengthDiffStats = Statistics(
        "route length difference", histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile1, 'vehicle'):
        lengths1[vehicle.id] = getRouteLength(net, vehicle)
    for vehicle in parse(options.routeFile2, 'vehicle'):
        lengths2[vehicle.id] = getRouteLength(net, vehicle)
        lengthDiffStats.add(
            lengths2[vehicle.id] - lengths1[vehicle.id], vehicle.id)
    # bug fix: Python 2 print statement replaced with the print() function
    print(lengthDiffStats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in lengthDiffStats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            # sort by difference so the largest deviations are easy to find
            differences = sorted(
                [(lengths2[id] - lengths1[id], id) for id in lengths1.keys()])
            for diff, id in differences:
                f.write("%s %s\n" % (diff, id))
def main():
    """Compare per-vehicle route lengths between two route files.

    Prints difference statistics and optionally writes a histogram file
    and a full per-vehicle listing.
    """
    options = get_options()
    net = readNet(options.network)
    # NOTE: the original also built an unused set of all edge ids; removed.
    lengths1 = {}
    lengths2 = {}
    lengthDiffStats = Statistics("route length difference",
                                 histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile1, 'vehicle'):
        lengths1[vehicle.id] = getRouteLength(net, vehicle)
    for vehicle in parse(options.routeFile2, 'vehicle'):
        lengths2[vehicle.id] = getRouteLength(net, vehicle)
        lengthDiffStats.add(lengths2[vehicle.id] - lengths1[vehicle.id], vehicle.id)
    print(lengthDiffStats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for binStart, binCount in lengthDiffStats.histogram():
                f.write("%s %s\n" % (binStart, binCount))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            # renamed loop variable to avoid shadowing the builtin 'id'
            differences = sorted([(lengths2[vehID] - lengths1[vehID], vehID)
                                  for vehID in lengths1.keys()])
            for diff, vehID in differences:
                f.write("%s %s\n" % (diff, vehID))
def main():
    """Collect statistics over one attribute of all elements of a given
    type in a data file, reporting missing and unparseable values."""
    options = get_options()
    vals = defaultdict(list)
    stats = Statistics("%s %ss" % (options.element, options.attribute),
                       histogram=options.binwidth > 0, scale=options.binwidth)
    missingAttr = set()
    invalidType = set()

    if options.fast:
        # fast path: only the two required attributes are extracted
        def readElements():
            for element in parse_fast(options.datafile, options.element,
                                      [options.idAttr, options.attribute]):
                yield getattr(element, options.idAttr), getattr(element, options.attribute)
    else:
        # generic path: tolerate elements lacking either attribute
        def readElements():
            for element in parse(options.datafile, options.element, heterogeneous=True):
                elementID = (element.getAttribute(options.idAttr)
                             if element.hasAttribute(options.idAttr) else None)
                stringVal = (element.getAttribute(options.attribute)
                             if element.hasAttribute(options.attribute) else None)
                yield elementID, stringVal

    for elementID, stringVal in readElements():
        if stringVal is None:
            missingAttr.add(elementID)
        else:
            try:
                val = sumolib.miscutils.parseTime(stringVal)
                vals[elementID].append(val)
                stats.add(val, elementID)
            except Exception:
                invalidType.add(stringVal)

    print(stats.toString(options.precision))
    if missingAttr:
        print("%s elements did not provide attribute '%s' Example ids: '%s'" %
              (len(missingAttr), options.attribute, "', '".join(sorted(missingAttr)[:10])))
    if invalidType:
        print(("%s distinct values of attribute '%s' could not be interpreted " +
               "as numerical value or time.\nExample values: '%s'") %
              (len(invalidType), options.attribute, "', '".join(sorted(invalidType)[:10])))
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for binStart, binCount in stats.histogram():
                f.write("%s %s\n" % (binStart, binCount))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            for elementID, data in sorted(vals.items()):
                for x in data:
                    f.write(setPrecision("%.2f %s\n", options.precision) % (x, elementID))
    if options.xml_output is not None:
        with open(options.xml_output, 'w') as f:
            sumolib.writeXMLHeader(f, "$Id$", "attributeStats")  # noqa
            f.write(stats.toXML(options.precision))
            f.write('</attributeStats>\n')
def filterLog(log="data/sumo_log.txt", statsOut="data/stats.txt", statsIn="stats.scenario",
              tripinfos="data/tripinfos.xml"):
    """Condense a sumo log into a single statistics line and write it to statsOut.

    Copies the previous stats file first; if the new line equals the last
    copied line, nothing further is appended.
    """
    collisions = 0
    timeout = 0
    simEnd = -1
    # bug fix: the original leaked the log file handle (for line in open(log))
    with open(log) as logIn:
        for line in logIn:
            if "collision" in line:
                collisions += 1
            if "waited too long" in line:
                timeout += 1
            if line.startswith("Simulation ended at time: "):
                simEnd = line.split()[-1]
    if os.path.exists(tripinfos):
        durationStats = Statistics(' Traveltimes')
        for trip in parse_fast(tripinfos, 'tripinfo', ['id', 'duration']):
            durationStats.add(float(trip.duration), trip.id)
        # strip quotes so the line stays parseable by the texttest comparator
        durationStats = str(durationStats).replace('"', '')
    else:
        durationStats = ''
    statLine = "Collisions: %s Timeouts: %s End: %s%s" % (collisions, timeout, simEnd, durationStats)
    with open(statsOut, 'w') as o:
        # locate the previous stats file relative to the texttest sandbox
        rootLength = len(os.environ["TEXTTEST_SANDBOX_ROOT"]) + 1
        testNameStart = os.environ["TEXTTEST_SANDBOX"].find("/", rootLength) + 1
        oldStats = os.path.join(os.environ["TEXTTEST_ROOT"],
                                os.environ["TEXTTEST_SANDBOX"][testNameStart:], statsIn)
        if os.path.exists(oldStats):
            with open(oldStats) as oldIn:
                for line in oldIn:
                    o.write(line)
                    if line.strip() == statLine.strip():
                        # stats unchanged: the 'with' blocks close both files
                        # (original called o.close() redundantly inside 'with')
                        return
        sumoVersion = subprocess.check_output(get_app('sumo', 'SUMO_BINARY') + " -V",
                                              shell=True).splitlines()[0]
        print("%s %s\n%s" % (datetime.now(), sumoVersion, statLine), file=o)
def main():
    """Collect statistics over one numerical attribute of tripinfo-like elements.

    Prints summary statistics and optionally writes a histogram file and a
    full per-element listing sorted by value.
    """
    options = get_options()
    # bug fix: removed dead code — attribute_retriever was set to None, then
    # shadowed by a def that returned None and was never called
    vals = {}
    stats = Statistics("%s %ss" % (options.element, options.attribute),
                       histogram=True, scale=options.binwidth)
    for tripinfo in parse(options.tripinfos, options.element):
        val = float(tripinfo.getAttribute(options.attribute))
        vals[tripinfo.id] = val
        stats.add(val, tripinfo.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            data = [(v, k) for k, v in vals.items()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def main():
    """Collect statistics over one attribute of tripinfo-like elements,
    keeping every occurrence per element id.

    Values are parsed as plain floats first, falling back to SUMO time
    syntax (e.g. "1:00:00") when that fails.
    """
    options = get_options()
    # bug fix: removed dead code — attribute_retriever was set to None, then
    # shadowed by a def that returned None and was never called
    vals = defaultdict(list)
    stats = Statistics("%s %ss" % (options.element, options.attribute),
                       histogram=True, scale=options.binwidth)
    for tripinfo in parse(options.tripinfos, options.element):
        try:
            val = float(tripinfo.getAttribute(options.attribute))
        # bug fix: bare 'except:' also caught SystemExit/KeyboardInterrupt
        except Exception:
            val = sumolib.miscutils.parseTime(tripinfo.getAttribute(options.attribute))
        vals[tripinfo.id].append(val)
        stats.add(val, tripinfo.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            for id, data in vals.items():
                for x in data:
                    f.write("%s %s\n" % (x, id))
def main():
    """Compute how often each route is used by the given emitter flows and
    optionally list routes whose usage is at or below a threshold."""
    options = get_options()
    routeUsage = defaultdict(lambda: 0)
    for flow in parse(options.emitters, 'flow'):
        num = int(flow.number)
        if flow.route is None:
            # flow references a route distribution: split the vehicle count
            # according to the normalized probabilities
            dist = flow.routeDistribution[0]
            # bug fix: the original iterated a map() object while calling
            # sum() on the same object inside the comprehension — under
            # Python 3 this exhausts the iterator and yields wrong results.
            # Materialize the list and hoist the total instead.
            probs = [float(p) for p in dist.probabilities.split()]
            total = sum(probs)
            probs = [p / total for p in probs]
            for rID, p in zip(dist.routes.split(), probs):
                routeUsage[rID] += p * num
        else:
            routeUsage[flow.route] += num
    usage = Statistics("routeUsage")
    for rID, count in routeUsage.items():
        usage.add(count, rID)
    print(usage)
    if options.unused_output is not None:
        with open(options.unused_output, 'w') as outf:
            # bug fix: removed stray 'usage.add(rID, count)' which re-added
            # every route to the already-printed statistics with the
            # arguments swapped
            for rID, count in routeUsage.items():
                if count <= options.threshold:
                    outf.write("%s\n" % rID)
def main():
    """Count departs and arrivals per edge over routes, walks and trips and
    write the counts as edgedata XML."""
    options = parse_args()
    departCounts = defaultdict(lambda: 0)
    arrivalCounts = defaultdict(lambda: 0)
    for route in parse_fast(options.routefile, 'route', ['edges']):
        edges = route.edges.split()
        if options.subpart is not None and not hasSubpart(edges, options.subpart):
            continue
        departCounts[edges[0]] += 1
        arrivalCounts[edges[-1]] += 1
    for walk in parse_fast(options.routefile, 'walk', ['edges']):
        edges = walk.edges.split()
        if options.subpart is not None and not hasSubpart(edges, options.subpart):
            continue
        departCounts[edges[0]] += 1
        arrivalCounts[edges[-1]] += 1
    # warn about potentially missing edges
    for trip in parse_fast(options.routefile, 'trip', ['id', 'fromTaz', 'toTaz']):
        if options.subpart is not None:
            # bug fix: file objects have no .print() method; use write()
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[trip.fromTaz] += 1
        arrivalCounts[trip.toTaz] += 1
    for walk in parse_fast(options.routefile, 'walk', ['from', 'to']):
        if options.subpart is not None:
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[walk.attr_from] += 1
        arrivalCounts[walk.to] += 1
    departStats = Statistics("departEdges")
    arrivalStats = Statistics("arrivalEdges")
    for e in departCounts.keys():
        departStats.add(departCounts[e], e)
    for e in arrivalCounts.keys():
        arrivalStats.add(arrivalCounts[e], e)
    print(departStats)
    print(arrivalStats)
    with open(options.outfile, 'w') as outf:
        outf.write("<edgedata>\n")
        outf.write(' <interval begin="0" end="10000" id="routeStats">\n')
        allEdges = set(departCounts.keys())
        allEdges.update(arrivalCounts.keys())
        for e in sorted(list(allEdges)):
            outf.write(' <edge id="%s" departed="%s" arrived="%s" delta="%s"/>\n' %
                       (e, departCounts[e], arrivalCounts[e],
                        arrivalCounts[e] - departCounts[e]))
        outf.write(" </interval>\n")
        outf.write("</edgedata>\n")
def parse_dualog(dualog, limit):
    """Parse a duaIterate sumo log for per-step insertion/teleport counts.

    Returns a pair: a table (header row plus one row of
    [inserted, running, waiting, teleports, loaded] per simulation step)
    and, per step, a dict counting teleports by edge.
    Parsing stops once more than 'limit' steps were read.
    """
    print("Parsing %s" % dualog)
    teleStats = Statistics('Teleports')
    header = ['#Inserted', 'Running', 'Waiting', 'Teleports', 'Loaded']
    step_values = []  # list of lists
    step_counts = []  # list of edge teleport counters
    reInserted = re.compile(r"Inserted: (\d*)")
    reLoaded = re.compile(r"Loaded: (\d*)")
    reRunning = re.compile(r"Running: (\d*)")
    reWaiting = re.compile(r"Waiting: (\d*)")
    reFrom = re.compile("from '([^']*)'")  # mesosim
    teleports = 0
    inserted = None
    loaded = None
    running = None
    waiting = None
    haveMicrosim = None  # detected lazily from the first teleport warning
    counts = defaultdict(lambda: 0)
    with open(dualog) as dualogIn:
        for line in dualogIn:
            try:
                if "Warning: Teleporting vehicle" in line:
                    if haveMicrosim is None:
                        # microsim warnings name a lane, mesosim an edge;
                        # switch the extraction pattern accordingly
                        if "lane='" in line:
                            haveMicrosim = True
                            reFrom = re.compile("lane='([^']*)'")
                        else:
                            haveMicrosim = False
                    teleports += 1
                    edge = reFrom.search(line).group(1)
                    if ':' in edge:  # mesosim output
                        edge = edge.split(':')[0]
                    counts[edge] += 1
                elif "Inserted:" in line:
                    inserted = reInserted.search(line).group(1)
                    if "Loaded:" in line:  # optional output
                        loaded = reLoaded.search(line).group(1)
                    else:
                        loaded = inserted
                elif "Running:" in line:
                    running = reRunning.search(line).group(1)
                elif "Waiting:" in line:
                    # "Waiting:" closes one simulation step: flush accumulators
                    iteration = len(step_values)
                    if iteration > limit:
                        break
                    waiting = reWaiting.search(line).group(1)
                    teleStats.add(teleports, iteration)
                    step_values.append(
                        [inserted, running, waiting, teleports, loaded])
                    teleports = 0
                    step_counts.append(counts)
                    counts = defaultdict(lambda: 0)
            except Exception:
                sys.exit("error when parsing line '%s'" % line)
    print(" parsed %s steps" % len(step_values))
    print(teleStats)
    return [header] + step_values, step_counts
def _parse_vehicle_info(routes):
    """Collect duration/distance statistics for all vehicles and persons in
    a route file.

    Returns a list of tuples: the underscore-split element id followed by
    brace-formatted duration and length strings (vehicle rows use a 3-field
    format, person rows a 1- or 9-field format depending on whether rides
    occurred).  Background traffic and triggered departures are skipped.
    """
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    stats = []
    for v in output.parse(routes, ('vehicle', 'person')):
        if not v.id.endswith(BACKGROUND_TRAFFIC_SUFFIX) and v.depart != "triggered":
            duration = float(v.arrival) - float(v.depart)
            length = float(v.routeLength) if v.routeLength else 0
            sumoTime.add(duration, v.id)
            sumoDist.add(length, v.id)
            if v.name == "vehicle":
                stats.append(tuple(v.id.split('_')) +
                             ("{0,0,%s}" % duration, "{0,0,%s}" % length))
            else:
                # person plan: split walking into before/after the first ride
                # (index 0/1) and account waiting between rides as transfers
                walkLength = [0, 0]
                walkDuration = [0, 0]
                rideLength = 0
                rideEnd = float(v.depart)
                idx = 0  # 0 until the first ride was seen, then 1
                initWait = 0
                transfers = 0
                transferTime = 0
                for stage in v.getChildList():
                    if stage.name == "walk":
                        walkLength[idx] += float(stage.routeLength)
                        walkDuration[idx] = float(
                            stage.exitTimes.split()[-1]) - rideEnd
                    elif stage.name == "ride":
                        if idx == 0:
                            idx = 1
                            initWait = float(stage.depart) - float(
                                v.depart) - walkDuration[0]
                        else:
                            transfers += 1
                            transferTime += float(stage.depart) - rideEnd
                        rideEnd = float(stage.ended)
                        # walks between two rides count towards ride length
                        rideLength += float(stage.routeLength) + walkLength[1]
                        walkLength[1] = 0  # reset from intermediate walks
                if idx == 0:
                    # walk-only plan
                    stats.append(tuple(v.id.split('_')) +
                                 ("{%s}" % duration, "{%s}" % walkLength[0]))
                else:
                    dur = (duration - sum(walkDuration) - initWait,
                           walkDuration[0], initWait, walkDuration[1], transferTime)
                    length = (rideLength, walkLength[0], walkLength[1], transfers)
                    stats.append(tuple(v.id.split('_')) +
                                 ("{0,0,0,0,%s,%s,%s,%s,%s}" % dur,
                                  "{0,0,0,0,%s,%s,0,%s,%s}" % length))
    print("Parsed results for %s vehicles and persons" % len(stats))
    print(sumoTime)
    print(sumoDist)
    return stats
def parse_dualog(dualog, limit):
    """Parse a duaIterate sumo log for per-step insertion/teleport counts.

    Returns a pair: a table (header row plus one row of
    [inserted, running, waiting, teleports, loaded] per simulation step)
    and, per step, a dict counting teleports by edge.
    Parsing stops once more than 'limit' steps were read.
    """
    print("Parsing %s" % dualog)
    teleStats = Statistics('Teleports')
    header = ['#Inserted', 'Running', 'Waiting', 'Teleports', 'Loaded']
    step_values = []  # list of lists
    step_counts = []  # list of edge teleport counters
    # bug fix: \d in a non-raw string is an invalid escape sequence
    # (SyntaxWarning, an error as of Python 3.12) — use raw strings
    reInserted = re.compile(r"Inserted: (\d*)")
    reLoaded = re.compile(r"Loaded: (\d*)")
    reRunning = re.compile(r"Running: (\d*)")
    reWaiting = re.compile(r"Waiting: (\d*)")
    reFrom = re.compile("from '([^']*)'")  # mesosim
    teleports = 0
    inserted = None
    loaded = None
    running = None
    waiting = None
    haveMicrosim = None  # detected lazily from the first teleport warning
    counts = defaultdict(lambda: 0)
    # bug fix: the original leaked the file handle (for line in open(dualog))
    with open(dualog) as dualogIn:
        for line in dualogIn:
            try:
                if "Warning: Teleporting vehicle" in line:
                    if haveMicrosim is None:
                        # microsim warnings name a lane, mesosim an edge
                        if "lane='" in line:
                            haveMicrosim = True
                            reFrom = re.compile("lane='([^']*)'")
                        else:
                            haveMicrosim = False
                    teleports += 1
                    edge = reFrom.search(line).group(1)
                    if ':' in edge:  # mesosim output
                        edge = edge.split(':')[0]
                    counts[edge] += 1
                elif "Inserted:" in line:
                    inserted = reInserted.search(line).group(1)
                    if "Loaded:" in line:  # optional output
                        loaded = reLoaded.search(line).group(1)
                    else:
                        loaded = inserted
                elif "Running:" in line:
                    running = reRunning.search(line).group(1)
                elif "Waiting:" in line:
                    # "Waiting:" closes one simulation step: flush accumulators
                    iteration = len(step_values)
                    if iteration > limit:
                        break
                    waiting = reWaiting.search(line).group(1)
                    teleStats.add(teleports, iteration)
                    step_values.append(
                        [inserted, running, waiting, teleports, loaded])
                    teleports = 0
                    step_counts.append(counts)
                    counts = defaultdict(lambda: 0)
            except Exception:
                sys.exit("error when parsing line '%s'" % line)
    print(" parsed %s steps" % len(step_values))
    print(teleStats)
    return [header] + step_values, step_counts
def accelStats(netstate):
    """Collect acceleration statistics (per-step speed deltas per vehicle)
    from a netstate dump and print them."""
    lastSpeed = {}
    stats = Statistics(
        "Accelerations", histogram=True, printMin=True, scale=0.2)
    for vehicle in parse_fast(netstate, 'vehicle', ['id', 'speed']):
        speed = float(vehicle.speed)
        # a vehicle's first sighting yields acceleration 0
        prevSpeed = lastSpeed.get(vehicle.id, speed)
        stats.add(speed - prevSpeed, (vehicle.id, vehicle.speed))
        lastSpeed[vehicle.id] = speed
    # bug fix: Python 2 print statement replaced with the print() function
    print(stats)
def main():
    """Print statistics for a route attribute (length, depart or numEdges)
    over one route file, or the per-vehicle difference of that attribute
    between two route files."""
    options = get_options()
    net = None
    # fix: PEP 8 (E731) — assigned lambdas replaced with def statements
    if options.attribute == "length":
        net = readNet(options.network)

        def attribute_retriever(vehicle):
            return sum([net.getEdge(e).getLength()
                        for e in vehicle.route[0].edges.split()])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return float(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)
    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics("route %ss" % options.attribute,
                           histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        length = attribute_retriever(vehicle)
        if options.routeFile2 is None:
            stats.add(length, vehicle.id)
        lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare route lengths between two files
        stats = Statistics("route %s difference" % options.attribute,
                           histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            lengths2[vehicle.id] = attribute_retriever(vehicle)
            stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys()]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def main():
    """Print statistics for a route attribute (length, depart or numEdges)
    over one route file, or the per-vehicle difference of that attribute
    between two route files."""
    options = get_options()
    net = None
    attribute_retriever = None
    if options.attribute == "length":
        net = readNet(options.network)

        def attribute_retriever(vehicle):
            edgeIDs = vehicle.route[0].edges.split()
            return sum([net.getEdge(e).getLength() for e in edgeIDs])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return float(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)
    singleFile = options.routeFile2 is None
    values1 = {}
    values2 = {}
    if singleFile:
        # write statistics on a single route file
        stats = Statistics("route %ss" % options.attribute,
                           histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        value = attribute_retriever(vehicle)
        if singleFile:
            stats.add(value, vehicle.id)
        values1[vehicle.id] = value
    if not singleFile:
        # compare route lengths between two files
        stats = Statistics("route %s difference" % options.attribute,
                           histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            values2[vehicle.id] = attribute_retriever(vehicle)
            stats.add(values2[vehicle.id] - values1[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for binStart, binCount in stats.histogram():
                f.write("%s %s\n" % (binStart, binCount))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if singleFile:
                rows = [(v, k) for k, v in values1.items()]
            else:
                rows = [(values2[vehID] - values1[vehID], vehID)
                        for vehID in values1.keys()]
            for value, vehID in sorted(rows):
                f.write("%s %s\n" % (value, vehID))
def main():
    """Count departs and arrivals per edge over routes, walks and trips and
    write the counts as edgedata XML."""
    options = parse_args()
    departCounts = defaultdict(lambda: 0)
    arrivalCounts = defaultdict(lambda: 0)
    for route in parse_fast(options.routefile, 'route', ['edges']):
        edges = route.edges.split()
        if options.subpart is not None and not hasSubpart(edges, options.subpart):
            continue
        departCounts[edges[0]] += 1
        arrivalCounts[edges[-1]] += 1
    for walk in parse_fast(options.routefile, 'walk', ['edges']):
        edges = walk.edges.split()
        if options.subpart is not None and not hasSubpart(edges, options.subpart):
            continue
        departCounts[edges[0]] += 1
        arrivalCounts[edges[-1]] += 1
    # warn about potentially missing edges
    for trip in parse_fast(options.routefile, 'trip', ['id', 'fromTaz', 'toTaz']):
        if options.subpart is not None:
            # bug fix: file objects have no .print() method; use write()
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[trip.fromTaz] += 1
        arrivalCounts[trip.toTaz] += 1
    for walk in parse_fast(options.routefile, 'walk', ['from', 'to']):
        if options.subpart is not None:
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[walk.attr_from] += 1
        arrivalCounts[walk.to] += 1
    departStats = Statistics("departEdges")
    arrivalStats = Statistics("arrivalEdges")
    for e in departCounts.keys():
        departStats.add(departCounts[e], e)
    for e in arrivalCounts.keys():
        arrivalStats.add(arrivalCounts[e], e)
    print(departStats)
    print(arrivalStats)
    with open(options.outfile, 'w') as outf:
        outf.write("<edgedata>\n")
        outf.write(' <interval begin="0" end="10000" id="routeStats">\n')
        allEdges = set(departCounts.keys())
        allEdges.update(arrivalCounts.keys())
        for e in sorted(list(allEdges)):
            outf.write(' <edge id="%s" departed="%s" arrived="%s" delta="%s"/>\n' %
                       (e, departCounts[e], arrivalCounts[e],
                        arrivalCounts[e] - departCounts[e]))
        outf.write(" </interval>\n")
        outf.write("</edgedata>\n")
def parse_dualog(dualog, limit):
    """Parse a sumo log for per-step emission/teleport counts.

    Returns a pair: a table (header row plus one row of
    [emitted, running, waiting, teleports, loaded] per simulation step)
    and, per step, a dict counting teleports by edge.
    Parsing stops once more than 'limit' steps were read.
    """
    # bug fix: Python 2 print statements replaced with the print() function
    print("Parsing %s" % dualog)
    teleStats = Statistics('Teleports')
    header = ['#Emitted', 'Running', 'Waiting', 'Teleports', 'Loaded']
    step_values = []  # list of lists
    step_counts = []  # list of edge teleport counters
    # bug fix: \d in a non-raw string is an invalid escape sequence
    # (SyntaxWarning, an error as of Python 3.12) — use raw strings
    reEmitted = re.compile(r"Emitted: (\d*)")
    reLoaded = re.compile(r"Loaded: (\d*)")
    reRunning = re.compile(r"Running: (\d*)")
    reWaiting = re.compile(r"Waiting: (\d*)")
    reFrom = re.compile("from '([^']*)'")
    teleports = 0
    emitted = None
    loaded = None
    running = None
    waiting = None
    counts = defaultdict(lambda: 0)
    # bug fix: the original leaked the file handle (for line in open(dualog))
    with open(dualog) as dualogIn:
        for line in dualogIn:
            try:
                if "Warning: Teleporting vehicle" in line:
                    teleports += 1
                    edge = reFrom.search(line).group(1)
                    if ':' in edge:  # mesosim output
                        edge = edge.split(':')[0]
                    counts[edge] += 1
                elif "Emitted:" in line:
                    emitted = reEmitted.search(line).group(1)
                    if "Loaded:" in line:  # optional output
                        loaded = reLoaded.search(line).group(1)
                    else:
                        loaded = emitted
                elif "Running:" in line:
                    running = reRunning.search(line).group(1)
                elif "Waiting:" in line:
                    # "Waiting:" closes one simulation step: flush accumulators
                    iteration = len(step_values)
                    if iteration > limit:
                        break
                    waiting = reWaiting.search(line).group(1)
                    teleStats.add(teleports, iteration)
                    step_values.append(
                        [emitted, running, waiting, teleports, loaded])
                    teleports = 0
                    step_counts.append(counts)
                    counts = defaultdict(lambda: 0)
            # bug fix: bare 'except:' also caught SystemExit/KeyboardInterrupt
            except Exception:
                sys.exit("error when parsing line '%s'" % line)
    print(" parsed %s steps" % len(step_values))
    print(teleStats)
    return [header] + step_values, step_counts
def _parse_vehicle_info(routes):
    """Collect duration and distance statistics for all vehicles and persons
    in a route file, skipping background traffic and triggered departures.

    Returns a list of tuples: the underscore-split element id followed by
    brace-formatted duration and length strings.
    """
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    stats = []
    for v in output.parse(routes, ('vehicle', 'person')):
        # guard clause instead of nesting the whole body in an if
        if v.id.endswith(BACKGROUND_TRAFFIC_SUFFIX) or v.depart == "triggered":
            continue
        duration = float(v.arrival) - float(v.depart)
        length = float(v.routeLength) if v.routeLength else 0
        sumoTime.add(duration, v.id)
        sumoDist.add(length, v.id)
        stats.append(tuple(v.id.split('_')) +
                     ("{0,0,%s}" % duration, "{0,0,%s}" % length))
    print("Parsed results for %s vehicles and persons" % len(stats))
    print(sumoTime)
    print(sumoDist)
    return stats
def upload_all_pairs(conn, tables, start, end, vType, real_routes, rep_routes, net, taz_list, startIdx=0):
    """Aggregate O-D pair statistics from real and representative routes and
    insert them into the two temp DB tables.

    Returns the next free entry index (startIdx + number of inserted rows).
    """
    stats = _parse_vehicle_info_taz(real_routes, start, end, vType)
    stats.extend(_get_all_pair_stats(rep_routes, net))
    # sort so that all entries of one (source, dest) pair are adjacent
    stats.sort()
    min_samples = 5
    last = None
    values = []
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    real = 0
    remain = set([(o, d) for o in taz_list for d in taz_list])
    for source, dest, faked, duration, dist in stats:
        if (source, dest) != last:
            # flush the previous pair's aggregates and reset the accumulators
            if last is not None:
                values.append(_createValueTuple(
                    last, vType, end, real, sumoTime, sumoDist))
                remain.discard(last)
            sumoTime = Statistics("SUMO durations")
            sumoDist = Statistics("SUMO distances")
            real = 0
            last = (source, dest)
        # real trips always count; faked (representative) entries only fill
        # up until min_samples is reached
        if not faked or sumoTime.count() < min_samples:
            if not faked:
                real += 1
            sumoTime.add(duration)
            sumoDist.add(dist)
    if last is not None:
        # flush the final pair
        values.append(_createValueTuple(
            last, vType, end, real, sumoTime, sumoDist))
        remain.discard(last)
    if remain:
        # O-D pairs with no data at all still get (dummy) rows
        print("inserting dummy data for %s unconnected O-D relations" % len(remain))
        for o, d in remain:
            values.append(_createValueTuple((o, d), vType, end))
    cursor = conn.cursor()
    # insert values
    odValues = []
    entryValues = []
    for idx, v in enumerate(values):
        # NOTE(review): values are interpolated as Python tuple reprs, not
        # bound parameters — acceptable only because all data is generated
        # locally, never from untrusted input
        odValues.append(str(v[:4] + (startIdx + idx,)))
        entryValues.append(str(v[4:] + (startIdx + idx, "{car}")))
    odQuery = """INSERT INTO temp.%s (taz_id_start, taz_id_end, sumo_type,
        interval_end, entry_id) VALUES %s""" % (tables[0], ','.join(odValues))
    cursor.execute(odQuery)
    insertQuery = """INSERT INTO temp.%s (realtrip_count, representative_count,
        travel_time_sec, travel_time_stddev, distance_real, distance_stddev,
        entry_id, used_modes) VALUES %s""" % (tables[1], ','.join(entryValues))
    cursor.execute(insertQuery)
    conn.commit()
    return startIdx + len(values)
def _get_all_pair_stats(roualt_file, net):
    """Parse a duarouter .rou.alt.xml output for travel times and compute
    each route's length from the network.

    The file is supposed to contain only vehicles of a single vType.
    Yields (fromTaz, toTaz, 1, duration, distance) per vehicle.
    """
    sumoTime = Statistics("SUMO durations")
    sumoDist = Statistics("SUMO distances")
    for vehicle in output.parse(roualt_file, 'vehicle'):
        best = vehicle.routeDistribution[0].route[0]
        duration = float(best.cost)
        distance = sum([net.getEdge(e).getLength() for e in best.edges.split()])
        sumoTime.add(duration, vehicle.id)
        sumoDist.add(distance, vehicle.id)
        # progress indicator for very large files
        if sumoDist.count() % 10000 == 0:
            print("parsed %s taz representatives" % sumoDist.count())
        fromTaz, toTaz = _parseTaz(vehicle)
        yield fromTaz, toTaz, 1, duration, distance
    print(sumoTime)
    print(sumoDist)
def main(tag, attr, *xmlfiles):
    """Box-plot the distribution of one element attribute across several
    XML files (one box per file) and print the statistics."""
    columns = []
    for fname in xmlfiles:
        fileStats = Statistics('%s %s' % (tag, attr))
        for element in parse(fname, tag):
            fileStats.add(float(element.getAttribute(attr)), element.id)
        print(fileStats)
        columns.append(fileStats.values)
    # import lazily so the statistics still print without matplotlib
    try:
        import matplotlib.pyplot as plt
    except Exception as e:
        sys.exit(e)
    plt.figure()
    plt.xticks(range(len(xmlfiles)), xmlfiles)
    plt.ylabel("%s %s" % (tag, attr))
    plt.boxplot(columns)
    plt.show()
def main():
    """Print route-length statistics for one route file, or the per-vehicle
    length difference when a second route file is given."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    firstLengths = {}
    secondLengths = {}
    singleFile = options.routeFile2 is None
    if singleFile:
        # write statistics on a single route file
        lengthStats = Statistics(
            "route lengths", histogram=True, scale=options.binwidth)
    for vehicle in parse(options.routeFile, 'vehicle'):
        routeLen = getRouteLength(net, vehicle)
        if singleFile:
            lengthStats.add(routeLen, vehicle.id)
        firstLengths[vehicle.id] = routeLen
    if not singleFile:
        # compare route lengths between two files
        lengthStats = Statistics(
            "route length difference", histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            secondLengths[vehicle.id] = getRouteLength(net, vehicle)
            lengthStats.add(
                secondLengths[vehicle.id] - firstLengths[vehicle.id], vehicle.id)
    print(lengthStats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for binStart, binCount in lengthStats.histogram():
                f.write("%s %s\n" % (binStart, binCount))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if singleFile:
                rows = [(v, k) for k, v in firstLengths.items()]
            else:
                rows = [(secondLengths[vehID] - firstLengths[vehID], vehID)
                        for vehID in firstLengths.keys()]
            for value, vehID in sorted(rows):
                f.write("%s %s\n" % (value, vehID))
def main():
    """Compute route usage from emitter flows, report usage statistics and
    optionally list unused routes and routes exceeding their restrictions."""
    options = get_options()
    routes = defaultdict(list)
    if options.routes is not None:
        for route in parse(options.routes, 'route'):
            routes[route.edges].append(route.id)
    restrictions = {}
    if options.restrictionfile is not None:
        # each line: "<count> <edge list>"; bug fix: close the file handle
        with open(options.restrictionfile) as f:
            for line in f:
                count, edges = line.strip().split(None, 1)
                for rID in routes[edges]:
                    restrictions[rID] = int(count)
    routeUsage = defaultdict(int)
    for flow in parse(options.emitters, 'flow'):
        num = int(flow.number)
        if flow.route is None:
            # flow references a route distribution: split the vehicle count
            # according to the normalized probabilities
            dist = flow.routeDistribution[0]
            # bug fix: the original iterated a map() object while calling
            # sum() on the same object inside the comprehension — under
            # Python 3 this exhausts the iterator and yields wrong results.
            # Materialize the list and hoist the total instead.
            probs = [float(p) for p in dist.probabilities.split()]
            total = sum(probs)
            probs = [p / total for p in probs]
            for rID, p in zip(dist.routes.split(), probs):
                routeUsage[rID] += p * num
        else:
            routeUsage[flow.route] += num
    usage = Statistics("routeUsage")
    restrictUsage = Statistics("restrictedRouteUsage")
    for rID, count in routeUsage.items():
        usage.add(count, rID)
        if rID in restrictions:
            restrictUsage.add(count, rID)
    print(usage)
    print(restrictUsage, "total:", sum(restrictUsage.values))
    if options.unused_output is not None:
        with open(options.unused_output, 'w') as outf:
            for rID, count in routeUsage.items():
                if count <= options.threshold:
                    outf.write("%s\n" % rID)
                if rID in restrictions and count > restrictions[rID]:
                    outf.write("%s %s %s\n" % (rID, count, restrictions[rID]))
def main():
    """Print route-length statistics for one route file, or the per-vehicle
    length difference when a second route file is given."""
    options = get_options()
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    firstLengths = {}
    secondLengths = {}
    for vehicle in parse(options.routeFile, 'vehicle'):
        firstLengths[vehicle.id] = getRouteLength(net, vehicle)
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics(
            "route lengths", histogram=True, scale=options.binwidth)
        for vehID, routeLen in firstLengths.items():
            stats.add(routeLen, vehID)
    else:
        # compare route lengths between two files
        stats = Statistics(
            "route length difference", histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2, 'vehicle'):
            secondLengths[vehicle.id] = getRouteLength(net, vehicle)
            stats.add(
                secondLengths[vehicle.id] - firstLengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for binStart, binCount in stats.histogram():
                f.write("%s %s\n" % (binStart, binCount))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                rows = [(v, k) for k, v in firstLengths.items()]
            else:
                rows = [(secondLengths[vehID] - firstLengths[vehID], vehID)
                        for vehID in firstLengths.keys()]
            for value, vehID in sorted(rows):
                f.write("%s %s\n" % (value, vehID))
def write_persondiff(options):
    """Write per-person plan differences between two personinfo files as XML
    and print duration statistics for both files and their difference."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength", "waitingTime"]
    attr_conversions = dict([(a, parseTime) for a in attrs])
    persons_orig = OrderedDict(
        [(p.id, p) for p in parse(options.orig, 'personinfo',
                                  attr_conversions=attr_conversions)])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    statAttrs = ["duration", "walkTimeLoss", "rideWait", "walks", "accesses", "rides", "stops"]
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for person in parse(options.new, 'personinfo', attr_conversions=attr_conversions):
            origPerson = persons_orig.get(person.id)
            if origPerson is None:
                # person only exists in the new file
                f.write(' <personinfo id="%s" comment="new"/>\n' % person.id)
                continue
            newStats = plan_stats(person)
            oldStats = plan_stats(origPerson)
            deltas = [a - b for a, b in zip(newStats, oldStats)]
            durations.add(newStats[0], person.id)
            origDurations.add(oldStats[0], person.id)
            durationDiffs.add(newStats[0] - oldStats[0], person.id)
            diffAttrs = ''.join([' %sDiff="%s"' % (a, x)
                                 for a, x in zip(statAttrs, deltas)])
            f.write(' <personinfo id="%s"%s/>\n' % (person.id, diffAttrs))
            del persons_orig[person.id]
        # whatever remains only existed in the original file
        for id in persons_orig.keys():
            f.write(' <personinfo id="%s" comment="missing"/>\n' % id)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def writeInterval(outf, options, departCounts, arrivalCounts, intermediateCounts,
                  begin=0, end="1000000", prefix=""):
    """Write one <interval> of per-edge depart/arrival (and optionally
    intermediate) counts to outf and print the corresponding statistics.

    The three count dicts are cleared afterwards so the caller can reuse
    them for the next interval.
    """
    departStats = Statistics(prefix + "departEdges")
    arrivalStats = Statistics(prefix + "arrivalEdges")
    intermediateStats = Statistics(prefix + "intermediateEdges")
    for e in sorted(departCounts.keys()):
        departStats.add(departCounts[e], e)
    for e in sorted(arrivalCounts.keys()):
        arrivalStats.add(arrivalCounts[e], e)
    print(departStats)
    print(arrivalStats)
    if options.intermediate:
        for e in sorted(intermediateCounts.keys()):
            intermediateStats.add(intermediateCounts[e], e)
        print(intermediateStats)
    outf.write(' <interval begin="%s" end="%s" id="routeStats">\n' % (begin, end))
    allEdges = set(departCounts.keys())
    allEdges.update(arrivalCounts.keys())
    if options.intermediate:
        allEdges.update(intermediateCounts.keys())
    for e in sorted(allEdges):
        intermediate = ' intermediate="%s"' % intermediateCounts[e] if options.intermediate else ''
        # NOTE: accessing intermediateCounts[e] in the condition inserts a 0
        # entry for missing keys (defaultdict) — harmless since the dicts are
        # cleared below, but keep the evaluation order as-is
        if (departCounts[e] > options.min_count or
                arrivalCounts[e] > options.min_count or
                intermediateCounts[e] > 0):
            outf.write(' <edge id="%s" departed="%s" arrived="%s" delta="%s"%s/>\n' %
                       (e, departCounts[e], arrivalCounts[e],
                        arrivalCounts[e] - departCounts[e], intermediate))
    outf.write(" </interval>\n")
    # reset for the next interval
    departCounts.clear()
    arrivalCounts.clear()
    intermediateCounts.clear()
def write_diff(options):
    """Write per-vehicle tripinfo differences between two files as XML and
    print duration statistics for both files and their difference."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime works just fine for floats
    attr_conversions = dict([(a, parseTime) for a in attrs])
    vehicles_orig = OrderedDict(
        [(v.id, v) for v in parse(options.orig, 'tripinfo',
                                  attr_conversions=attr_conversions)])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(options.new, 'tripinfo', attr_conversions=attr_conversions):
            vOrig = vehicles_orig.get(v.id)
            if vOrig is None:
                # vehicle only exists in the new file
                f.write(' <vehicle id="%s" comment="new"/>\n' % v.id)
                continue
            deltas = [v.getAttribute(a) - vOrig.getAttribute(a) for a in attrs]
            durations.add(v.duration, v.id)
            origDurations.add(vOrig.duration, v.id)
            durationDiffs.add(v.duration - vOrig.duration, v.id)
            diffAttrs = ''.join([' %sDiff="%s"' % (a, x)
                                 for a, x in zip(attrs, deltas)])
            f.write(' <vehicle id="%s"%s/>\n' % (v.id, diffAttrs))
            del vehicles_orig[v.id]
        # whatever remains only existed in the original file
        for id in vehicles_orig.keys():
            f.write(' <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Count depart/arrival/intermediate edges for all requested route-file elements
    and write the counts as edgedata XML."""
    options = parse_args()
    departCounts = defaultdict(int)
    arrivalCounts = defaultdict(int)
    intermediateCounts = defaultdict(int)

    def countEdges(edgeIDs):
        # first edge departs, last edge arrives, every edge counts as passed
        departCounts[edgeIDs[0]] += 1
        arrivalCounts[edgeIDs[-1]] += 1
        for edgeID in edgeIDs:
            intermediateCounts[edgeID] += 1

    for element in options.elements:
        for item in parse_fast(options.routefile, element, ['edges']):
            edgeIDs = item.edges.split()
            if hasSubpart(edgeIDs, options.subparts):
                countEdges(edgeIDs)
    # warn about potentially missing edges
    fromAttr, toAttr = ('fromTaz', 'toTaz') if options.taz else ('from', 'to')
    if 'trip' in options.elements:
        for trip in parse_fast(options.routefile, 'trip', ['id', fromAttr, toAttr]):
            if options.subparts:
                sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
                break
            departCounts[trip[1]] += 1
            arrivalCounts[trip[2]] += 1
    if 'walk' in options.elements:
        for walk in parse_fast(options.routefile, 'walk', ['from', 'to']):
            if options.subparts:
                sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
                break
            departCounts[walk.attr_from] += 1
            arrivalCounts[walk.to] += 1
    departStats = Statistics("departEdges")
    arrivalStats = Statistics("arrivalEdges")
    intermediateStats = Statistics("intermediateEdges")
    for edgeID, num in sorted(departCounts.items()):
        departStats.add(num, edgeID)
    for edgeID, num in sorted(arrivalCounts.items()):
        arrivalStats.add(num, edgeID)
    print(departStats)
    print(arrivalStats)
    if options.intermediate:
        for edgeID, num in sorted(intermediateCounts.items()):
            intermediateStats.add(num, edgeID)
        print(intermediateStats)
    with open(options.outfile, 'w') as outf:
        outf.write("<edgedata>\n")
        outf.write('    <interval begin="0" end="10000" id="routeStats">\n')
        allEdges = set(departCounts) | set(arrivalCounts)
        if options.intermediate:
            allEdges.update(intermediateCounts)
        for edgeID in sorted(allEdges):
            extra = ' intermediate="%s"' % intermediateCounts[edgeID] if options.intermediate else ''
            outf.write('        <edge id="%s" departed="%s" arrived="%s" delta="%s"%s/>\n' % (
                edgeID, departCounts[edgeID], arrivalCounts[edgeID],
                arrivalCounts[edgeID] - departCounts[edgeID], extra))
        outf.write("    </interval>\n")
        outf.write("</edgedata>\n")
def main(tripinfos, lengthThreshold=0.1):
    """Compute walkfactor statistics (average speed / maxSpeed) from a tripinfo file.

    Very short walks and zero-duration walks are collected separately;
    unfinished walks (negative arrival) are only counted.
    """
    lengthThreshold = float(lengthThreshold)
    stats = Statistics('walkfactor')
    statsZeroDuration = Statistics('length of zero-duration walks')
    statsShort = Statistics('duration of short walks (length <= %s)' % lengthThreshold)
    numUnfinished = 0
    for personinfo in parse(tripinfos, 'personinfo'):
        if not personinfo.hasChild('walk'):
            continue
        for index, walk in enumerate(personinfo.walk):
            # a leading '-' in the arrival time marks an unfinished walk
            if walk.arrival[0] == '-':
                numUnfinished += 1
                continue
            walkID = "%s.%s" % (personinfo.id, index)
            duration = float(walk.duration)
            length = float(walk.routeLength)
            if duration <= 0:
                statsZeroDuration.add(length, walkID)
            elif length <= lengthThreshold:
                statsShort.add(duration, walkID)
            else:
                stats.add((length / duration) / float(walk.maxSpeed), walkID)
    print(stats)
    if statsZeroDuration.count() > 0:
        print(statsZeroDuration)
    if statsShort.count() > 0:
        print(statsShort)
    if numUnfinished > 0:
        print("unfinished walks: %s" % numUnfinished)
def write_diff(options):
    """Diff two tripinfo files and print duration Statistics, optionally as histograms."""
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime accepts plain float strings as well
    conversions = {a: parseTime for a in attrs}
    origVehicles = OrderedDict(
        (v.id, v) for v in parse(options.orig, 'tripinfo', attr_conversions=conversions))
    origDurations = Statistics('original durations', histogram=options.useHist, scale=options.histScale)
    durations = Statistics('new durations', histogram=options.useHist, scale=options.histScale)
    durationDiffs = Statistics('duration differences', histogram=options.useHist, scale=options.histScale)
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for veh in parse(options.new, 'tripinfo', attr_conversions=conversions):
            if veh.id not in origVehicles:
                f.write('    <vehicle id="%s" comment="new"/>\n' % veh.id)
                continue
            # pop removes the match so the remainder can be reported as missing
            old = origVehicles.pop(veh.id)
            durations.add(veh.duration, veh.id)
            origDurations.add(old.duration, veh.id)
            durationDiffs.add(veh.duration - old.duration, veh.id)
            diffAttrs = ''.join(
                ' %sDiff="%s"' % (a, veh.getAttribute(a) - old.getAttribute(a)) for a in attrs)
            f.write('    <vehicle id="%s"%s/>\n' % (veh.id, diffAttrs))
        for vehID in origVehicles:
            f.write('    <vehicle id="%s" comment="missing"/>\n' % vehID)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out):
    """Diff two tripinfo files; values are parsed as float on demand (no attr_conversions)."""
    origVehicles = OrderedDict((v.id, v) for v in parse(orig, 'tripinfo'))
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<tripDiffs>\n")
        for veh in parse(new, 'tripinfo'):
            old = origVehicles.get(veh.id)
            if old is None:
                f.write('    <vehicle id="%s" comment="new"/>\n' % veh.id)
                continue
            dDepart = float(veh.depart) - float(old.depart)
            dArrival = float(veh.arrival) - float(old.arrival)
            dTimeLoss = float(veh.timeLoss) - float(old.timeLoss)
            dDuration = float(veh.duration) - float(old.duration)
            dRouteLength = float(veh.routeLength) - float(old.routeLength)
            durations.add(float(veh.duration), veh.id)
            origDurations.add(float(old.duration), veh.id)
            durationDiffs.add(dDuration, veh.id)
            f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s"'
                    ' durationDiff="%s" routeLengthDiff="%s"/>\n' % (
                        veh.id, dDepart, dArrival, dTimeLoss, dDuration, dRouteLength))
            del origVehicles[veh.id]
        for vehID in origVehicles:
            f.write('    <vehicle id="%s" comment="missing"/>\n' % vehID)
        f.write("</tripDiffs>\n")
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(options):
    """Diff two tripinfo files, optionally restricted to ids containing options.filterIDs.

    Writes per-vehicle attribute differences to options.output and prints
    duration Statistics (optionally as histograms) plus new/missing counts.
    """
    attrs = ["depart", "arrival", "timeLoss", "duration", "routeLength"]
    # parseTime works just fine for floats
    attr_conversions = dict([(a, parseTime) for a in attrs])
    vehicles_orig = OrderedDict([(v.id, v) for v in parse(
        options.orig, 'tripinfo', attr_conversions=attr_conversions)])
    descr = ""
    if options.filterIDs:
        descr = " (%s)" % options.filterIDs
    origDurations = Statistics('original durations%s' % descr,
                               histogram=options.useHist, scale=options.histScale)
    durations = Statistics('new durations%s' % descr,
                           histogram=options.useHist, scale=options.histScale)
    durationDiffs = Statistics('duration differences%s new-old' % descr,
                               histogram=options.useHist, scale=options.histScale)
    numNew = 0
    numMissing = 0
    with open(options.output, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(options.new, 'tripinfo', attr_conversions=attr_conversions):
            if options.filterIDs and options.filterIDs not in v.id:
                # filtered out: drop any matching original entry as well.
                # pop with a default avoids the KeyError that 'del' raised for
                # filtered vehicles that only exist in the new file
                vehicles_orig.pop(v.id, None)
                continue
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                diffs = [v.getAttribute(a) - vOrig.getAttribute(a) for a in attrs]
                durations.add(v.duration, v.id)
                origDurations.add(vOrig.duration, v.id)
                durationDiffs.add(v.duration - vOrig.duration, v.id)
                diffAttrs = ''.join([' %sDiff="%s"' % (a, x) for a, x in zip(attrs, diffs)])
                f.write('    <vehicle id="%s"%s/>\n' % (v.id, diffAttrs))
                del vehicles_orig[v.id]
            else:
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
                numNew += 1
        # NOTE(review): original-file vehicles that fail the id filter but never
        # appear in the new file are still reported as missing — confirm intent
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
            numMissing += 1
        f.write("</tripDiffs>\n")
    if numMissing > 0:
        print("missing: %s" % numMissing)
    if numNew > 0:
        print("new: %s" % numNew)
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out, earliest_out=None):
    """Diff two route files with exitTimes and write per-vehicle time differences.

    Also tracks, per difference value, the earliest time at which it occurred
    (written to earliest_out if given). Requires route input with 'exitTimes'.
    """
    attr_conversions = {"depart": parseTime, "arrival": parseTime}
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, 'vehicle',
                                                   attr_conversions=attr_conversions)])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<routeDiff>\n")
        for v in parse(new, 'vehicle', attr_conversions=attr_conversions):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = v.depart - vOrig.depart
                arrivalDiff = v.arrival - vOrig.arrival
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # materialize as lists: map() returns a single-pass iterator in
                # Python 3, so origExitTimes would be exhausted by the first zip
                # and the update_earliest loop below would never run
                exitTimes = list(map(parseTime, v.route[0].exitTimes.split()))
                origExitTimes = list(map(parseTime, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(v.arrival - v.depart, v.id)
                origDurations.add(vOrig.arrival - vOrig.depart, v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(earliest_diffs, departDiff, vOrig.depart, v.id + ' (depart)')
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n' % (
                    v.id, departDiff, arrivalDiff, ' '.join(map(str, exitTimesDiff))))
                del vehicles_orig[v.id]
            else:
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out, earliest_out=None):
    """Diff two route files (with exitTimes) and record the earliest occurrence of each diff.

    Vehicles only in the new file are marked "new", vehicles only in the
    original are marked "missing". Duration Statistics are printed at the end.
    """
    attr_conversions = {"depart": parseTime, "arrival": parseTime}
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, 'vehicle',
                                                   attr_conversions=attr_conversions)])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<routeDiff>\n")
        for v in parse(new, 'vehicle', attr_conversions=attr_conversions):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = v.depart - vOrig.depart
                arrivalDiff = v.arrival - vOrig.arrival
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # list() is required: a bare map() iterator is exhausted after
                # the exitTimesDiff computation, leaving the second zip over
                # origExitTimes empty and skipping all update_earliest calls
                exitTimes = list(map(parseTime, v.route[0].exitTimes.split()))
                origExitTimes = list(map(parseTime, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(v.arrival - v.depart, v.id)
                origDurations.add(vOrig.arrival - vOrig.depart, v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(earliest_diffs, departDiff, vOrig.depart, v.id + ' (depart)')
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n' % (
                    v.id, departDiff, arrivalDiff, ' '.join(map(str, exitTimesDiff))))
                del vehicles_orig[v.id]
            else:
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out, earliest_out=None):
    """Diff two route files with exitTimes; times are parsed with float().

    Writes per-vehicle depart/arrival/exit-time differences and optionally the
    earliest occurrence of each difference value to earliest_out.
    """
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, "vehicle")])
    origDurations = Statistics("original durations")
    durations = Statistics("new durations")
    durationDiffs = Statistics("duration differences")
    with open(out, "w") as f:
        f.write("<routeDiff>\n")
        for v in parse(new, "vehicle"):
            if v.id in vehicles_orig:
                vOrig = vehicles_orig[v.id]
                departDiff = float(v.depart) - float(vOrig.depart)
                arrivalDiff = float(v.arrival) - float(vOrig.arrival)
                if v.route[0].exitTimes is None:
                    sys.exit("Error: Need route input with 'exitTimes'\n")
                # must be lists: in Python 3 map() yields a one-shot iterator,
                # so origExitTimes would already be exhausted when the
                # update_earliest loop below tries to zip over it again
                exitTimes = list(map(float, v.route[0].exitTimes.split()))
                origExitTimes = list(map(float, vOrig.route[0].exitTimes.split()))
                exitTimesDiff = [e - eOrig for e, eOrig in zip(exitTimes, origExitTimes)]
                durations.add(float(v.arrival) - float(v.depart), v.id)
                origDurations.add(float(vOrig.arrival) - float(vOrig.depart), v.id)
                durationDiffs.add(arrivalDiff - departDiff, v.id)
                update_earliest(earliest_diffs, departDiff, vOrig.depart, v.id + " (depart)")
                for diff, timestamp in zip(exitTimesDiff, origExitTimes):
                    update_earliest(earliest_diffs, diff, timestamp, v.id)
                f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" exitTimesDiff="%s"/>\n' % (
                    v.id, departDiff, arrivalDiff, " ".join(map(str, exitTimesDiff))))
                del vehicles_orig[v.id]
            else:
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
        for id in vehicles_orig.keys():
            f.write('    <vehicle id="%s" comment="missing"/>\n' % id)
        f.write("</routeDiff>\n")
    if earliest_out is not None:
        with open(earliest_out, "w") as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out, earliest_out=None):
    """Compare two tripinfo files by vehicle id and write per-attribute differences.

    Vehicles present only in `new` are marked "new", vehicles present only in
    `orig` are marked "missing". Duration Statistics are printed at the end.
    """
    # NOTE(review): earliest_diffs is declared but never populated in this
    # variant, so an earliest_out file is always written empty — confirm intent
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    origVehicles = dict((v.id, v) for v in parse(orig, 'tripinfo'))
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<tripDiffs>\n")
        for veh in parse(new, 'tripinfo'):
            old = origVehicles.get(veh.id)
            if old is None:
                f.write('    <vehicle id="%s" comment="new"/>\n' % veh.id)
                continue
            deltas = tuple(float(getattr(veh, a)) - float(getattr(old, a))
                           for a in ("depart", "arrival", "timeLoss", "duration", "routeLength"))
            durations.add(float(veh.duration), veh.id)
            origDurations.add(float(old.duration), veh.id)
            durationDiffs.add(deltas[3], veh.id)
            f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s"'
                    ' durationDiff="%s" routeLengthDiff="%s"/>\n' % ((veh.id,) + deltas))
            del origVehicles[veh.id]
        for vehID in origVehicles:
            f.write('    <vehicle id="%s" comment="missing"/>\n' % vehID)
        f.write("</tripDiffs>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def write_diff(orig, new, out, earliest_out=None):
    """Write per-vehicle tripinfo differences between an original and a new file."""
    # earliest_diffs receives no entries in this variant; an earliest_out file
    # is created empty if requested
    earliest_diffs = defaultdict(lambda: (uMax, None))  # diff -> (time, veh)
    vehicles_orig = dict([(v.id, v) for v in parse(orig, 'tripinfo')])
    origDurations = Statistics('original durations')
    durations = Statistics('new durations')
    durationDiffs = Statistics('duration differences')
    with open(out, 'w') as f:
        f.write("<tripDiffs>\n")
        for v in parse(new, 'tripinfo'):
            vOrig = vehicles_orig.pop(v.id, None)
            if vOrig is not None:
                departDiff = float(v.depart) - float(vOrig.depart)
                arrivalDiff = float(v.arrival) - float(vOrig.arrival)
                timeLossDiff = float(v.timeLoss) - float(vOrig.timeLoss)
                durationDiff = float(v.duration) - float(vOrig.duration)
                routeLengthDiff = float(v.routeLength) - float(vOrig.routeLength)
                durations.add(float(v.duration), v.id)
                origDurations.add(float(vOrig.duration), v.id)
                durationDiffs.add(durationDiff, v.id)
                f.write('    <vehicle id="%s" departDiff="%s" arrivalDiff="%s" timeLossDiff="%s"'
                        ' durationDiff="%s" routeLengthDiff="%s"/>\n' % (
                            v.id, departDiff, arrivalDiff, timeLossDiff,
                            durationDiff, routeLengthDiff))
            else:
                f.write('    <vehicle id="%s" comment="new"/>\n' % v.id)
        for remaining in vehicles_orig:
            f.write('    <vehicle id="%s" comment="missing"/>\n' % remaining)
        f.write("</tripDiffs>\n")
    if earliest_out is not None:
        with open(earliest_out, 'w') as f:
            for diff in reversed(sorted(earliest_diffs.keys())):
                f.write("%s, %s\n" % (diff, earliest_diffs[diff]))
    print(origDurations)
    print(durations)
    print(durationDiffs)
def main():
    """Count depart/arrival/intermediate edges of routes and walks and write edgedata XML."""
    options = parse_args()
    departCounts = defaultdict(int)
    arrivalCounts = defaultdict(int)
    intermediateCounts = defaultdict(int)

    def countEdges(edgeIDs):
        # first edge is the departure edge, last one the arrival edge
        departCounts[edgeIDs[0]] += 1
        arrivalCounts[edgeIDs[-1]] += 1
        for edgeID in edgeIDs:
            intermediateCounts[edgeID] += 1

    for elem in ('route', 'walk'):
        for item in parse_fast(options.routefile, elem, ['edges']):
            edgeIDs = item.edges.split()
            if hasSubpart(edgeIDs, options.subparts):
                countEdges(edgeIDs)
    # warn about potentially missing edges
    fromAttr, toAttr = ('fromTaz', 'toTaz') if options.taz else ('from', 'to')
    for trip in parse_fast(options.routefile, 'trip', ['id', fromAttr, toAttr]):
        if options.subparts:
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[trip[1]] += 1
        arrivalCounts[trip[2]] += 1
    for walk in parse_fast(options.routefile, 'walk', ['from', 'to']):
        if options.subparts:
            sys.stderr.write("Warning: Ignoring trips when using --subpart\n")
            break
        departCounts[walk.attr_from] += 1
        arrivalCounts[walk.to] += 1
    departStats = Statistics("departEdges")
    arrivalStats = Statistics("arrivalEdges")
    intermediateStats = Statistics("intermediateEdges")
    for edgeID, num in sorted(departCounts.items()):
        departStats.add(num, edgeID)
    for edgeID, num in sorted(arrivalCounts.items()):
        arrivalStats.add(num, edgeID)
    print(departStats)
    print(arrivalStats)
    if options.intermediate:
        for edgeID, num in sorted(intermediateCounts.items()):
            intermediateStats.add(num, edgeID)
        print(intermediateStats)
    with open(options.outfile, 'w') as outf:
        outf.write("<edgedata>\n")
        outf.write('    <interval begin="0" end="10000" id="routeStats">\n')
        allEdges = set(departCounts) | set(arrivalCounts)
        if options.intermediate:
            allEdges.update(intermediateCounts)
        for edgeID in sorted(allEdges):
            extra = ' intermediate="%s"' % intermediateCounts[edgeID] if options.intermediate else ''
            outf.write('        <edge id="%s" departed="%s" arrived="%s" delta="%s"%s/>\n' % (
                edgeID, departCounts[edgeID], arrivalCounts[edgeID],
                arrivalCounts[edgeID] - departCounts[edgeID], extra))
        outf.write("    </interval>\n")
        outf.write("</edgedata>\n")
def main():
    """Score route implausibility by comparing each route against a duarouter reroute.

    Implausibility combines air-distance ratio, detour and detour ratio; routes
    above options.threshold are reported, optionally written as restrictions
    and visualized as polygons.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)
    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri
    for rInfo in routeInfos.values():
        # straight-line distance between start of first and end of last edge
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist
    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write('    </vehicle>\n')
            outf.write('</routes>\n')
    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # no alternative found: the original route is already shortest
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(
                net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts
    implausible = []
    allRoutesStats = Statistics("overal implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)
    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))
    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignoreFile:  # close the file deterministically
            ignored = set([r.strip() for r in ignoreFile])
        # compare the route id (r[1]) against the ignore set; the tuple itself
        # ('r not in ignored') can never match a set of id strings
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))
    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')
    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        # , ' '.join(ri.edges)))
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (ri.airDistRatio, ri.detourRatio, ri.detour,
                                                          ri.shortest_path_distance, ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def main():
    """Collect Statistics on a vehicle attribute from one route file, or the
    per-vehicle difference of that attribute between two route files."""
    options = get_options()
    net = None
    attribute_retriever = None
    if options.attribute == "length":
        net = sumolib.net.readNet(options.network)

        def attribute_retriever(vehicle):
            # route length summed over the network edges
            return sum([net.getEdge(e).getLength() for e in vehicle.route[0].edges.split()])
    elif options.attribute == "depart":
        def attribute_retriever(vehicle):
            return parseTime(vehicle.depart)
    elif options.attribute == "numEdges":
        def attribute_retriever(vehicle):
            return len(vehicle.route[0].edges.split())
    elif options.attribute == "duration":
        def attribute_retriever(vehicle):
            return parseTime(vehicle.arrival) - parseTime(vehicle.depart)
    elif options.attribute == "routeLength":
        def attribute_retriever(vehicle):
            return float(vehicle.routeLength)
    elif options.attribute == "speed":
        def attribute_retriever(vehicle):
            return float(vehicle.routeLength) / (parseTime(vehicle.arrival) - parseTime(vehicle.depart))
    elif options.attribute == "speedKmh":
        def attribute_retriever(vehicle):
            return 3.6 * float(vehicle.routeLength) / (parseTime(vehicle.arrival) - parseTime(vehicle.depart))
    else:
        sys.exit("Invalid value '%s' for option --attribute" % options.attribute)
    lengths = {}
    lengths2 = {}
    if options.routeFile2 is None:
        # write statistics on a single route file
        stats = Statistics("route %ss" % options.attribute, histogram=True, scale=options.binwidth)
    if options.fast:
        def parse(routes):
            return sumolib.xml.parse_fast(routes, 'vehicle', ['id', 'depart', 'arrival', 'routeLength'])
    else:
        def parse(routes):
            return sumolib.xml.parse(routes, 'vehicle')
    for vehicle in parse(options.routeFile):
        if vehicle.routeLength is None or float(vehicle.routeLength) >= options.minlength:
            length = attribute_retriever(vehicle)
            if options.routeFile2 is None:
                stats.add(length, vehicle.id)
            lengths[vehicle.id] = length
    if options.routeFile2 is not None:
        # compare attribute values between two files
        stats = Statistics("route %s difference" % options.attribute, histogram=True, scale=options.binwidth)
        for vehicle in parse(options.routeFile2):
            if vehicle.routeLength is None or float(vehicle.routeLength) >= options.minlength:
                if vehicle.id not in lengths:
                    # previously a KeyError crash: vehicle only present in routeFile2
                    sys.stderr.write("Warning: vehicle '%s' is only present in the second"
                                     " route file. Skipping comparison.\n" % vehicle.id)
                    continue
                lengths2[vehicle.id] = attribute_retriever(vehicle)
                stats.add(lengths2[vehicle.id] - lengths[vehicle.id], vehicle.id)
    print(stats)
    if options.hist_output is not None:
        with open(options.hist_output, 'w') as f:
            for bin, count in stats.histogram():
                f.write("%s %s\n" % (bin, count))
    if options.full_output is not None:
        with open(options.full_output, 'w') as f:
            if options.routeFile2 is None:
                data = [(v, k) for k, v in lengths.items()]
            else:
                # only vehicles present in both files have a defined difference
                # (indexing lengths2 unconditionally raised KeyError before)
                data = [(lengths2[id] - lengths[id], id)
                        for id in lengths.keys() if id in lengths2]
            for val, id in sorted(data):
                f.write("%s %s\n" % (val, id))
def main():
    """Score route implausibility against a duarouter reroute (legacy variant).

    Combines air-distance ratio, detour and detour ratio into an
    implausibility score; routes above options.threshold are reported,
    optionally written as restrictions and visualized as polygons.
    """
    DUAROUTER = sumolib.checkBinary('duarouter')
    options = get_options()
    net = readNet(options.network)
    routeInfos = {}  # id-> RouteInfo
    if options.standalone:
        for route in parse(options.routeFile, 'route'):
            ri = RouteInfo()
            ri.edges = route.edges.split()
            routeInfos[route.id] = ri
    else:
        for vehicle in parse(options.routeFile, 'vehicle'):
            ri = RouteInfo()
            ri.edges = vehicle.route[0].edges.split()
            routeInfos[vehicle.id] = ri
    for rInfo in routeInfos.values():
        # straight-line distance between start of first and end of last edge
        rInfo.airDist = euclidean(
            net.getEdge(rInfo.edges[0]).getShape()[0],
            net.getEdge(rInfo.edges[-1]).getShape()[-1])
        rInfo.length = getRouteLength(net, rInfo.edges)
        rInfo.airDistRatio = rInfo.length / rInfo.airDist
    duarouterInput = options.routeFile
    if options.standalone:
        # generate suitable input file for duarouter
        duarouterInput += ".vehRoutes.xml"
        with open(duarouterInput, 'w') as outf:
            outf.write('<routes>\n')
            for rID, rInfo in routeInfos.items():
                outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                outf.write('    </vehicle>\n')
            outf.write('</routes>\n')
    duarouterOutput = options.routeFile + '.rerouted.rou.xml'
    duarouterAltOutput = options.routeFile + '.rerouted.rou.alt.xml'
    subprocess.call([DUAROUTER, '-n', options.network, '-r', duarouterInput,
                     '-o', duarouterOutput, '--no-step-log'])
    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # no alternative: original route is already the shortest path
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert(routeAlts[0].edges.split() == routeInfos[vehicle.id].edges)
            routeInfos[vehicle.id].shortest_path_distance = getRouteLength(net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts
    implausible = []
    allRoutesStats = Statistics("overal implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)
    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))
    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignoreFile:  # close the file deterministically
            ignored = set([r.strip() for r in ignoreFile])
        # filter by route id (r[1]); comparing the whole (score, id, info)
        # tuple against a set of id strings never removed anything
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))
    # generate polygons
    polyOutput = options.routeFile + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(net, rID, colorgen(), 100, False, ri.edges, options.blur, outf, score)
        outf.write('</additional>\n')
    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist)\n')
    for score, rID, ri in sorted(implausible):
        # , ' '.join(ri.edges)))
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (ri.airDistRatio, ri.detourRatio, ri.detour,
                                                          ri.shortest_path_distance, ri.airDist)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def main():
    """Score all routes in the input files for implausibility and report offenders.

    Reads routes (standalone <route> elements or vehicle-embedded routes),
    computes air-distance/loop metrics, reroutes everything with duarouter to
    obtain the shortest alternative, combines the ratios into an implausibility
    score, and writes restrictions, XML, polygon, and stdout reports as
    configured via the parsed options. No return value; output is files/stdout.
    """
    options = get_options()
    if options.verbose:
        print("parsing network from", options.network)
    net = readNet(options.network, withInternal=True)
    read = 0
    routeInfos = {}  # id -> RouteInfo
    skipped = set()  # ids of routes that were too short to analyze
    for routeFile in options.routeFiles:
        if options.verbose:
            print("parsing routes from", routeFile)
        idx = 0
        if options.standalone:
            # input contains bare <route> elements with their own ids
            for idx, route in enumerate(parse(routeFile, 'route')):
                if options.verbose and idx > 0 and idx % 100000 == 0:
                    print(idx, "routes read")
                addOrSkip(routeInfos, skipped, route.id, route, options.min_edges)
        else:
            if options.heterogeneous:
                # slower, attribute-flexible parser
                for idx, vehicle in enumerate(parse(routeFile, 'vehicle')):
                    if options.verbose and idx > 0 and idx % 100000 == 0:
                        print(idx, "vehicles read")
                    addOrSkip(routeInfos, skipped, vehicle.id, vehicle.route[0], options.min_edges)
            else:
                # fast parser yields one (vehicle, route) pair per route child;
                # only the first route of each vehicle is kept
                prev = (None, None)
                for vehicle, route in parse_fast_nested(routeFile, 'vehicle', 'id', 'route', 'edges'):
                    if prev[0] != vehicle.id:
                        if options.verbose and idx > 0 and idx % 500000 == 0:
                            print(idx, "vehicles read")
                        if prev[0] is not None:
                            addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
                        prev = (vehicle.id, route)
                        idx += 1
                # flush the last buffered vehicle
                if prev[0] is not None:
                    addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
        read += idx
    if options.verbose:
        print(read, "routes read", len(skipped), "short routes skipped")

    if options.verbose:
        print("calculating air distance and checking loops")
    for idx, ri in enumerate(routeInfos.values()):
        if options.verbose and idx > 0 and idx % 100000 == 0:
            print(idx, "routes checked")
        calcDistAndLoops(ri, net, options)

    prefix = os.path.commonprefix(options.routeFiles)
    duarouterOutput = prefix + '.rerouted.rou.xml'
    duarouterAltOutput = prefix + '.rerouted.rou.alt.xml'
    if os.path.exists(duarouterAltOutput) and options.reuse_routing:
        if options.verbose:
            print("reusing old duarouter file", duarouterAltOutput)
    else:
        if options.standalone:
            duarouterInput = prefix
            # generate suitable input file for duarouter
            duarouterInput += ".vehRoutes.xml"
            with open(duarouterInput, 'w') as outf:
                outf.write('<routes>\n')
                for rID, rInfo in routeInfos.items():
                    outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                    outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                    outf.write('    </vehicle>\n')
                outf.write('</routes>\n')
        else:
            duarouterInput = ",".join(options.routeFiles)
        command = [sumolib.checkBinary('duarouter'), '-n', options.network,
                   '-r', duarouterInput, '-o', duarouterOutput,
                   '--no-step-log', '--routing-threads', str(options.threads),
                   '--routing-algorithm', 'astar', '--aggregate-warnings', '1']
        if options.verbose:
            command += ["-v"]
        if options.verbose:
            print("calling duarouter:", " ".join(command))
        subprocess.call(command)

    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        if vehicle.id in skipped:
            continue
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # duarouter found no better alternative: the route is optimal
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            # sanity check: alternative 0 must be the original route
            assert routeAlts[0].edges.split() == routeInfos[vehicle.id].edges
            routeInfos[vehicle.id].shortest_path_distance = sumolib.route.getLength(
                net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    # the "fastest" alternative being costlier hints at a
                    # vType mismatch between input and rerouting
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route "
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        # weighted sum of detour metrics plus penalties for very short routes
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold or ri.edgeLoop or ri.nodeLoop:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    # restrict only origin and destination edges
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    # write xml output
    if options.xmlOutput is not None:
        with open(options.xmlOutput, 'w') as outf:
            sumolib.writeXMLHeader(outf, "$Id$", options=options)  # noqa
            outf.write('<implausibleRoutes>\n')
            for score, rID, ri in sorted(implausible):
                edges = " ".join(ri.edges)
                outf.write('    <route id="%s" edges="%s" score="%s"/>\n' % (
                    rID, edges, score))
            outf.write('</implausibleRoutes>\n')

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignoreFile:
            ignored = set([r.strip() for r in ignoreFile])
        # fix: entries of 'implausible' are (score, id, info) tuples, so the
        # route id (r[1]) must be tested against the ignore set, not the tuple
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = prefix + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    # tab-separated report of every implausible route and its raw metrics
    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist, edgeLoop, nodeLoop)\n')
    for score, rID, ri in sorted(implausible):
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (
            ri.airDistRatio, ri.detourRatio, ri.detour,
            ri.shortest_path_distance, ri.airDist, ri.edgeLoop, ri.nodeLoop)))
    print(allRoutesStats)
    print(implausibleRoutesStats)