def readEdgeData(edgeDataFile, begin, end, detReader, flowout):
    """Aggregate edge flows from *edgeDataFile* for the time window [begin, end).

    Flows (departed + entered counts) of data intervals that only partially
    overlap the window are scaled by the overlapping fraction. If *flowout* is
    given, one "detector;time;flow;speed" line per detector group is appended
    to that file.

    Returns a defaultdict mapping edge id -> accumulated (scaled) flow.
    """
    edgeFlow = defaultdict(lambda: 0)
    for interval in parse(edgeDataFile, "interval",
                          attr_conversions={"begin": float, "end": float}):
        if DEBUG:
            print("reading intervals for begin=%s end=%s (current interval begin=%s end=%s)" % (
                begin, end, interval.begin, interval.end))
        if interval.begin < end and interval.end > begin:
            # if read interval is partly outside comparison interval we must scale demand
            validInterval = interval.end - interval.begin
            if interval.begin < begin:
                validInterval -= begin - interval.begin
            if interval.end > end:
                validInterval -= interval.end - end
            scale = validInterval / (interval.end - interval.begin)
            # guard against intervals without any <edge> children
            # (the original iterated over None and raised TypeError)
            if interval.edge is None:
                continue
            # store data; open the flowout file at most once per interval and
            # always close it, even if a detector lookup raises
            f = open(flowout, 'a') if flowout else None
            try:
                for edge in interval.edge:
                    flow = (int(edge.departed) + int(edge.entered)) * scale
                    edgeFlow[edge.id] += flow
                    if f is not None:
                        for group in detReader.getEdgeDetGroups(edge.id):
                            f.write(";".join(map(str, [group.ids[0], interval.begin / 60,
                                                       flow, edge.speed])) + "\n")
            finally:
                if f is not None:
                    f.close()
            if DEBUG:
                print(" validInterval=%s scale=%s" % (validInterval, scale))
    return edgeFlow
def parse_standalone_routes(file, into, typesMap, heterogeneous):
    """Read vTypes and standalone routes from *file*.

    Named vTypes go into *typesMap*, named routes into *into*; elements
    without an id are skipped.
    """
    for element in parse(file, ('vType', 'route'), heterogeneous=heterogeneous):
        if element.id is None:
            continue
        target = typesMap if element.name == 'vType' else into
        target[element.id] = element
def elements():
    """Yield (elementID, attributeValue) pairs from options.datafile.

    Either entry of the pair is None when the corresponding attribute is
    missing on the parsed element.
    """
    def _attr(node, attrName):
        # return the attribute value, or None when the element lacks it
        return node.getAttribute(attrName) if node.hasAttribute(attrName) else None

    for node in parse(options.datafile, options.element, heterogeneous=True):
        yield _attr(node, options.idAttr), _attr(node, options.attribute)
def main(options): intervals = defaultdict(dict) # time -> (type -> stats) for trip in parse(options.tripinfoFile, 'tripinfo'): typeStats = intervals[getAggregatedTime(options, trip)] if trip.vType not in typeStats: typeStats[trip.vType] = Statistics(trip.vType) typeStats[trip.vType].add(parseTime(getattr(trip, options.attribute)), trip.id) for person in parse(options.tripinfoFile, 'personinfo'): for stage in person.getChildList(): if stage.hasAttribute(options.attribute): typeStats = intervals[getAggregatedTime(options, stage)] if stage.name not in typeStats: typeStats[stage.name] = Statistics(stage.name) typeStats[stage.name].add(parseTime(getattr(stage, options.attribute)), person.id) if options.output: with open(options.output, 'w') as outf: sumolib.writeXMLHeader(outf, "$Id$", "tripinfosByType", options=options) # noqa for time in sorted(intervals.keys()): typeStats = intervals[time] if time is not None: outf.write(' <interval begin="%s" end="%s">\n' % (time, time + options.interval)) for vType, stats in sorted(typeStats.items()): q1, median, q3 = stats.quartiles() outf.write(' <typeInfo vType="%s" count="%s" min="%s" minVeh="%s"' % (vType, stats.count(), stats.min, stats.min_label)) outf.write(' max="%s" maxVeh="%s" mean="%s" Q1="%s" median="%s" Q3="%s"/>\n' % (stats.max, stats.max_label, stats.avg(), q1, median, q3)) if time is not None: outf.write(' </interval>\n') outf.write('</tripinfosByType>\n') else: for time in sorted(intervals.keys()): typeStats = intervals[time] if time is not None: print("Interval: [%s, %s[" % (time, time + options.interval)) for vType, stats in sorted(typeStats.items()): print(stats)
def parseTransitions(options, sgIndex, phases):
    """Parse phase transitions from the OCIT file.

    Returns a dict mapping (transitionID, fromPhase, toPhase) to a list of
    per-second signal state tables (one table entry per link index).
    """
    transitions = {}  # id -> [sumoState1, sumoState2, ...]
    transitionList = list(parse(options.ocitfile, TRANSITION_LIST))[0]
    index2groups = getIndex2Groups(sgIndex)
    for transition in transitionList.getChild(TRANSITION):
        ID = textNode(transition, SHORTID)
        switching = transition.getChild(SWITCHING)[0]
        # a transition lasts at least one second
        duration = max(1, int(textNode(switching, SWITCHING_DURATION)))
        if not transition.hasChild(SWITCH_FROM):
            # if no from phase was given, we cannot add a transition
            continue
        fromPhase = textNode(transition, SWITCH_FROM)
        toPhase = textNode(transition, SWITCH_TO)
        if not fromPhase in phases or not toPhase in phases:
            # both endpoint phases must be known
            continue
        # start with the (reduced) state of the source phase for every second
        initialState = reduceComplexState(options, index2groups, sgIndex, ID, phases, fromPhase)
        transitionStates = [deepcopy(initialState) for t in range(duration)]
        for sg in switching.getChild(SWITCHING_ELEMENT):
            groupID = textNode(sg, SIGNALGROUP)
            if sg.hasChild(SWITCHING_INITIAL):
                initial = SIGNALSTATE_SUMOSTATE[textNode(sg, SWITCHING_INITIAL)]
            else:
                # fall back to the group's state in the source phase
                signalIndex = sgIndex[groupID]
                if len(signalIndex) > 0:
                    initial = phases[fromPhase][signalIndex[0]][0]
                # NOTE(review): if signalIndex is empty here, 'initial' is
                # unbound (or stale from the previous group) in the code below
                # - confirm every switching element has either an initial state
                # or at least one link index
            if extraInfo(options, ID, groupID):
                print(ID, groupID, "init", initial, textNode(sg, SWITCHING_INITIAL), sgIndex[groupID])
            # fill the entire transition with the group's initial state ...
            for time in range(duration):
                for index in sgIndex[groupID]:
                    gIndex = index2groups[index].index(groupID)
                    transitionStates[time][index][gIndex] = initial
            # ... then overwrite from each switch time to the end
            for switchObject in sg.getChild(SWITCHING_TIME):
                switchTime = int(textNode(switchObject, SWITCHING_TIME2))
                groupState = SIGNALSTATE_SUMOSTATE[textNode(switchObject, PHASE_SIGNAL)]
                if extraInfo(options, ID, groupID):
                    print(ID, groupID, "t=%s" % switchTime, groupState,
                          textNode(switchObject, PHASE_SIGNAL), sgIndex[groupID])
                for time in range(switchTime, duration):
                    for index in sgIndex[groupID]:
                        gIndex = index2groups[index].index(groupID)
                        transitionStates[time][index][gIndex] = groupState
        transitions[(ID, fromPhase, toPhase)] = transitionStates
        if extraInfo(options, ID, None):
            print(ID)
            print("\n".join(map(str,transitionStates)))
    return transitions
def elements():
    """Yield (elementID, attributeValue) pairs from all options.datafiles.

    When an element has no id attribute, the fallback id is str(None) for a
    single input file and the file name otherwise; a missing value attribute
    yields None.
    """
    singleFile = len(options.datafiles) == 1
    for datafile in options.datafiles:
        fallbackID = str(None) if singleFile else datafile
        for node in parse(datafile, options.element, heterogeneous=True):
            elementID = (node.getAttribute(options.idAttr)
                         if node.hasAttribute(options.idAttr) else fallbackID)
            stringVal = (node.getAttribute(options.attribute)
                         if node.hasAttribute(options.attribute) else None)
            yield elementID, stringVal
def main(options):
    """Compare detector flows against edgeData flows, interval by interval.

    Works in seconds internally ('time') but reports interval bounds in
    minutes (intervalBeginM/intervalEndM), matching the detector flow file.
    """
    if options.output is None:
        options.outfile = sys.stdout
    else:
        options.outfile = open(options.output, 'w')
    if options.flowout:
        # write the CSV header for the per-detector flow output
        with open(options.flowout, 'w') as f:
            f.write("Detector;Time;qPKW;vPKW\n")
        # derive begin/end (in minutes) from the edgeData file itself
        options.begin = None
        for interval in parse(options.edgeDataFile, "interval",
                              attr_conversions={"begin": float, "end": float}):
            if options.begin is None:
                options.begin = interval.begin / 60
            options.end = interval.end / 60

    detReader = detector.DetectorReader(options.detfile, LaneMap())
    time = options.begin * 60  # current interval start in seconds
    haveDetFlows = True
    # iterate over aggregation intervals; without an explicit end, stop when
    # the flow file runs out of data
    while ((options.end is None and haveDetFlows) or
           (options.end is not None and time < options.end * 60)):
        intervalBeginM = time / 60
        intervalEndM = intervalBeginM + options.interval
        if options.end is not None:
            intervalEndM = min(intervalEndM, options.end)
        if options.flowfile:
            if options.verbose:
                print("Reading flows")
            haveDetFlows = detReader.readFlows(options.flowfile, flow=options.flowcol, time="Time",
                                               timeVal=intervalBeginM, timeMax=intervalEndM)
        if options.verbose:
            print("Reading edgeData")
        edgeFlow = readEdgeData(options.edgeDataFile, time, intervalEndM * 60,
                                detReader, options.flowout)
        if haveDetFlows and options.flowfile:
            printFlows(options, edgeFlow, detReader)
            calcStatistics(options, intervalBeginM, edgeFlow, detReader)
            detReader.clearFlows()
        time += options.interval * 60
    options.outfile.close()
def readEdgeData(edgeDataFile, begin, end, detReader, flowout):
    """Aggregate edge flows from *edgeDataFile* for the time window [begin, end).

    Flows (departed + entered counts) of data intervals that only partially
    overlap the window are scaled by the overlapping fraction. If *flowout* is
    given, one "detector;time;flow;speed" line per detector group is appended
    to that file.

    Returns a defaultdict mapping edge id -> accumulated (scaled) flow.
    """
    edgeFlow = defaultdict(lambda: 0)
    for interval in parse(edgeDataFile, "interval",
                          attr_conversions={"begin": parseTime, "end": parseTime}):
        if DEBUG:
            print("reading intervals for begin=%s end=%s (current interval begin=%s end=%s)" % (
                begin, end, interval.begin, interval.end))
        if interval.begin < end and interval.end > begin:
            # if read interval is partly outside comparison interval we must scale demand
            validInterval = interval.end - interval.begin
            if interval.begin < begin:
                validInterval -= begin - interval.begin
            if interval.end > end:
                validInterval -= interval.end - end
            scale = validInterval / (interval.end - interval.begin)
            # skip intervals without <edge> children BEFORE opening flowout:
            # the original opened the file first and leaked the handle on 'continue'
            if interval.edge is None:
                continue
            # store data; ensure the append handle is always closed
            f = open(flowout, 'a') if flowout else None
            try:
                for edge in interval.edge:
                    flow = (int(edge.departed) + int(edge.entered)) * scale
                    edgeFlow[edge.id] += flow
                    if f is not None:
                        for group in detReader.getEdgeDetGroups(edge.id):
                            f.write(";".join(map(str, [group.ids[0], interval.begin / 60,
                                                       flow, edge.speed])) + "\n")
            finally:
                if f is not None:
                    f.close()
            if DEBUG:
                print(" validInterval=%s scale=%s" % (validInterval, scale))
    return edgeFlow
def parsePhaseStates(ocitfile, cycle, sgIndex, maxIndex):
    """Parse the phases from the OCIT file.

    Only phases whose id occurs in *cycle* are considered. Returns a dict
    mapping phase id -> state list with one entry per link index; each entry
    holds one state character per signal group controlling that index.
    """
    index2groups = getIndex2Groups(sgIndex)
    # default state: 'O' (off) for every group of every controlled link index
    defaultState = [['O'] * max(1, len(index2groups[i])) for i in range(maxIndex + 1)]
    phases = {}  # phase id -> state list
    phaseList = list(parse(ocitfile, PHASELIST))[0]
    for phase in phaseList.getChild(PHASE):
        ID = textNode(phase, SHORTID)
        if ID not in cycle:
            # only phases that take part in the given cycle are relevant
            continue
        stateList = deepcopy(defaultState)
        for sg in phase.getChild(PHASE_ELEMENT):
            groupID = textNode(sg, SIGNALGROUP)
            groupState = SIGNALSTATE_SUMOSTATE[textNode(sg, PHASE_SIGNAL)]
            # write the group's state into every link index it controls
            for index in sgIndex[groupID]:
                gIndex = index2groups[index].index(groupID)
                stateList[index][gIndex] = groupState
        phases[ID] = stateList
    return phases
def main(options):
    """Compare detector flows against edgeData flows, interval by interval.

    Works entirely in minutes (intervalBeginM/intervalEndM) and converts to
    seconds only when calling readEdgeData.
    """
    if options.output is None:
        options.outfile = sys.stdout
    else:
        options.outfile = open(options.output, 'w')
    if options.flowout:
        # write the CSV header for the per-detector flow output
        with open(options.flowout, 'w') as f:
            f.write("Detector;Time;qPKW;vPKW\n")
        # derive begin/end (in minutes) from the edgeData file itself
        options.begin = None
        for interval in parse(options.edgeDataFile, "interval",
                              attr_conversions={"begin": float, "end": float}):
            if options.begin is None:
                options.begin = interval.begin / 60
            options.end = interval.end / 60

    detReader = detector.DetectorReader(options.detfile, LaneMap())
    intervalBeginM = options.begin
    haveDetFlows = True
    # iterate over aggregation intervals; without an explicit end, stop when
    # the flow file runs out of data
    while ((options.end is None and haveDetFlows) or
           (options.end is not None and intervalBeginM < options.end)):
        intervalEndM = intervalBeginM + options.interval
        if options.end is not None:
            intervalEndM = min(intervalEndM, options.end)
        if options.flowfile:
            if options.verbose:
                print("Reading flows")
            haveDetFlows = detReader.readFlows(options.flowfile, flow=options.flowcol, time="Time",
                                               timeVal=intervalBeginM, timeMax=intervalEndM)
        if options.verbose:
            print("Reading edgeData")
        edgeFlow = readEdgeData(options.edgeDataFile, intervalBeginM * 60, intervalEndM * 60,
                                detReader, options.flowout)
        if haveDetFlows and options.flowfile:
            printFlows(options, edgeFlow, detReader)
            calcStatistics(options, intervalBeginM, edgeFlow, detReader)
            detReader.clearFlows()
        intervalBeginM += options.interval
    options.outfile.close()
def parseSignalGroups(ocitfile, nodeIndex):
    """Parse signal groups from the OCIT file.

    Link indices are read from the group's comment field (separated by
    INDEX_SEPARATOR); groups without a parseable comment are skipped
    (best effort). The partial-node index of a group persists for the
    following groups until another PARTIAL_NODE child overrides it.

    Returns (sgIndex, yellowDur, redYellowDur, maxIndex, partialNodeSG).
    """
    partialNodeSG = defaultdict(list)  # partial node index -> [signal group ids]
    sgIndex = defaultdict(list)  # signal group id -> [linkIndex1, linkIndex2, ...]
    yellowDur = defaultdict(lambda : 0)  # link index -> duration
    redYellowDur = defaultdict(lambda : 0)  # link index -> duration
    maxIndex = -1
    sgList = list(parse(ocitfile, SIGNALGROUPS))[0]
    for sg in sgList.getChild(SIGNALGROUP):
        ID = textNode(sg, SHORTID)
        nodeIndex = int(textNode(sg, PARTIAL_NODE)) if sg.hasChild(PARTIAL_NODE) else nodeIndex
        try:
            comments = sg.getChild(COMMENTS)[0]
            comment = textNode(comments, COMMENT)
            if comment is not None:
                indices = list(map(int, comment.split(INDEX_SEPARATOR)))
                sgIndex[ID] = indices
                maxIndex = max(maxIndex, *indices)
                # read yellow / redyellow duration (not for blinkers)
                if not isBlinker(ID):
                    getSignalDuration(sg, SIGNAL_ACTIVATE, indices, redYellowDur, ID)
                    getSignalDuration(sg, SIGNAL_DEACTIVATE, indices, yellowDur, ID)
                partialNodeSG[nodeIndex].append(ID)
        except Exception:
            # best effort: ignore groups whose comment does not encode link
            # indices (was a bare 'except:' which also swallowed SystemExit
            # and KeyboardInterrupt)
            pass
    return sgIndex, yellowDur, redYellowDur, maxIndex, partialNodeSG
from __future__ import absolute_import from __future__ import print_function import os import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import sumolib.net # noqa from sumolib.xml import parse # noqa if len(sys.argv) < 2: print("Usage: " + sys.argv[0] + " <NET> <STOPS>", file=sys.stderr) sys.exit() print("Reading net...") net = sumolib.net.readNet(sys.argv[1]) stops = sys.argv[2] print("Writing output...") with open('pois.add.xml', 'w') as f: f.write('<?xml version="1.0"?>\n') f.write('<additional>\n') for stop in parse(stops, 'busStop'): lane = net.getLane(stop.lane) pos = (float(stop.startPos) + float(stop.endPos)) / 2 xypos = sumolib.geomhelper.positionAtShapeOffset(lane.getShape(), pos) lon, lat = net.convertXY2LonLat(xypos[0], xypos[1]) f.write( ' <poi id="%s" type="%s" color="1,0,0" layer="100" lon="%s" lat="%s"/>\n' % (stop.id, stop.name, lon, lat)) f.write('</additional>\n')
def main(options):
    """Compare scheduled stop departures (route file 'until' attributes)
    against the simulated departures (stopinfo output) and report stops that
    are missing, unexpected, or served out of order."""
    def formatVehCode(code):
        # code is (plannedDepart, vehID); render as "veh (plan=time)"
        time, veh = code
        if options.hrTime:
            time = humanReadableTime(time)
        return "%s (plan=%s)" % (veh, time)

    # stop (stoppingPlaceID or (lane, pos)) -> [(depart1, veh1), (depart2, veh2), ...]
    expected_departs = defaultdict(list)
    actual_departs = defaultdict(dict)
    ignored_stops = 0
    parsed_stops = 0
    # collect the planned departures from the route file
    for vehicle in parse(options.routeFile, ['vehicle', 'trip'], heterogeneous=True):
        if vehicle.stop is not None:
            for stop in vehicle.stop:
                if stop.hasAttribute("until"):
                    if stop.hasAttribute("busStop"):
                        stopCode = stop.busStop
                    else:
                        stopCode = "%s,%s" % (stop.lane, stop.endPos)
                    expected_departs[stopCode].append((parseTime(stop.until), vehicle.id))
                    parsed_stops += 1
                else:
                    # stops without 'until' have no planned departure to compare
                    ignored_stops += 1
    print("Parsed %s expected stops at %s locations" % (parsed_stops, len(expected_departs)))
    if ignored_stops > 0:
        sys.stderr.write("Ignored %s stops without 'until' attribute\n" % ignored_stops)

    # collect the simulated departures from the stopinfo output
    output_stops = 0
    for stop in parse(options.stopFile, "stopinfo", heterogeneous=True):
        if stop.hasAttribute("busStop"):
            stopCode = stop.busStop
        else:
            stopCode = (stop.lane, stop.endPos)
        ended = parseTime(stop.ended)
        # reconstruct the planned departure from the actual end minus delay so
        # it can be matched against the (until, vehID) keys built above
        until = ended - parseTime(stop.delay)
        actual_departs[stopCode][(until, stop.id)] = ended
        output_stops += 1
    print("Parsed %s actual stops at %s locations" % (output_stops, len(actual_departs)))

    missing = defaultdict(list)
    for stopCode in sorted(expected_departs.keys()):
        vehicles = expected_departs[stopCode]
        if stopCode in actual_departs:
            actual_vehicles = actual_departs[stopCode]
            # restrict the comparison to stops present in both files
            comparable_expected = []
            comparable_actual = []
            for vehCode in vehicles:
                if vehCode in actual_vehicles:
                    comparable_expected.append(vehCode)
                    comparable_actual.append((actual_vehicles[vehCode], vehCode))  # (ended, (until, vehID))
                else:
                    missing[stopCode].append(vehCode)
            comparable_expected.sort()
            comparable_actual.sort()
            num_unexpected = len(actual_vehicles) - len(comparable_actual)
            if num_unexpected > 0:
                print("Found %s unexpected stops at %s" % (num_unexpected, stopCode))
            # after sorting, discard the 'ended' attribute and only keep vehCode
            comparable_actual2 = [v[1] for v in comparable_actual]
            # find and remove duplicate
            tmp = []
            for vehCode in comparable_expected:
                if len(tmp) != 0:
                    if vehCode != tmp[-1]:
                        tmp.append(vehCode)
                    else:
                        if options.verbose:
                            print("Found duplicate stop at %s for vehicle %s" % (stopCode, vehCode))
                        comparable_actual2.remove(vehCode)
                else:
                    tmp.append(vehCode)
            comparable_expected = tmp
            if options.verbose:
                actual = [(v[0], v[1][1]) for v in comparable_actual]
                print("stop:", stopCode)
                print(" expected:", comparable_expected)
                print(" actual:", actual)
            # report order deviations: expected position i vs actual position j
            for i, vehCode in enumerate(comparable_expected):
                j = comparable_actual2.index(vehCode)
                indexInStops = ""
                if options.verbose:
                    indexInStops = " Index in stops: ex=%s act=%s" % (i, j)
                if i < j:
                    print("At %s vehicle %s comes after %s.%s" % (
                        stopCode, formatVehCode(vehCode),
                        ','.join(map(formatVehCode, comparable_actual2[i:j])), indexInStops))
                elif j < i:
                    print("At %s vehicle %s comes before %s.%s" % (
                        stopCode, formatVehCode(vehCode),
                        ','.join(map(formatVehCode, comparable_actual2[j:i])), indexInStops))
                if i != j:
                    # swap to avoid duplicate out-of-order warnings
                    tmp = comparable_actual2[i]
                    comparable_actual2[i] = comparable_actual2[j]
                    comparable_actual2[j] = tmp
        else:
            # no simulated stop at this location at all
            missing[stopCode] = vehicles

    print("Simulation missed %s stops at %s locations" % (sum(map(len, missing.values())), len(missing)))
a.pop(index + 1) a.pop(index) print("key", key) if not os.path.isdir(key): os.makedirs(key) os.chdir(key) subprocess.call(a) TTT = Statistics("TTT") ATT = Statistics("ATT") values = [] for name in glob.glob('*/stats.xml'): # print(name) for stats in parse(name, 'statistics'): vStats = stats.vehicleTripStatistics[0] vehs = stats.vehicles[0] ttt = float(vStats.totalTravelTime) + float(vStats.totalDepartDelay) att = ttt / int(vehs.loaded) TTT.add(ttt, name) ATT.add(att, name) values.append( (att, ttt, vStats.totalTravelTime, vStats.totalDepartDelay, vStats.duration, vStats.departDelay, name)) with open('results.txt', 'w') as f: f.write('#' '\t'.join(['att', 'cttt', 'ttt', 'tdd', 'tt', 'dd', 'file']) + '\n') for dat in values:
from __future__ import print_function import os import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import sumolib.net # noqa from sumolib.xml import parse # noqa if len(sys.argv) < 2: print("Usage: " + sys.argv[0] + " <NET> <STOPS>", file=sys.stderr) sys.exit() print("Reading net...") net = sumolib.net.readNet(sys.argv[1]) stops = sys.argv[2] print("Writing output...") with open('pois.add.xml', 'w') as f: f.write('<?xml version="1.0"?>\n') f.write('<additional>\n') for stop in parse(stops, 'busStop'): lane = net.getLane(stop.lane) pos = (float(stop.startPos) + float(stop.endPos)) / 2 xypos = sumolib.geomhelper.positionAtShapeOffset(lane.getShape(), pos) lon, lat = net.convertXY2LonLat(xypos[0], xypos[1]) f.write(' <poi id="%s" type="%s" color="1,0,0" layer="100" lon="%s" lat="%s"/>\n' % ( stop.id, stop.name, lon, lat)) f.write('</additional>\n')
def main(options):
    """Join scheduled stops (route file) with simulated stops (stopinfo file)
    on (vehID, tripId, stopID) and compute/print the configured statistic,
    optionally grouped and written as XML."""
    nan = float("nan")

    # schema of the schedule records built from the route file
    columns = [
        'vehID',
        'tripId',  # tripId of current stop or set by earlier stop
        'stopID',  # busStop id or lane,pos
        'priorStop',  # busStop id or lane,pos
        'arrival',  # route-input
        'until',  # route-input
        'started',  # route-input
        'ended',  # route-input
    ]
    # schema of the simulation records built from the stopinfo file
    columns2 = columns[:3] + [
        'sim_started',  # stop-output
        'sim_ended',  # stop-input
    ]

    stops = []
    tripIds = dict()  # vehID -> lastTripId
    priorStops = dict()  # vehID -> lastStopID
    for vehicle in parse(options.routeFile, ['vehicle', 'trip'], heterogeneous=True,
                         attr_conversions=ATTR_CONVERSIONS):
        if vehicle.stop is not None:
            for stop in vehicle.stop:
                vehID = vehicle.id
                # a tripId set by an earlier stop carries over to later stops
                tripId = stop.getAttributeSecure("tripId", tripIds.get(vehID))
                tripIds[vehID] = tripId
                stopID = getStopID(stop)
                priorStop = priorStops.get(vehID)
                priorStops[vehID] = stopID
                stops.append((vehID, tripId, stopID, priorStop,
                              stop.getAttributeSecure("arrival", nan),
                              stop.getAttributeSecure("until", nan),
                              stop.getAttributeSecure("started", nan),
                              stop.getAttributeSecure("ended", nan)))
    print("Parsed %s stops" % len(stops))

    simStops = []
    tripIds = dict()  # vehID -> lastTripId
    priorStops = dict()  # vehID -> lastStopID
    for stop in parse(options.stopFile, "stopinfo", heterogeneous=True,
                      attr_conversions=ATTR_CONVERSIONS):
        vehID = stop.id
        tripId = stop.getAttributeSecure("tripId", tripIds.get(vehID))
        tripIds[vehID] = tripId
        stopID = getStopID(stop)
        priorStop = priorStops.get(vehID)
        priorStops[vehID] = stopID
        simStops.append((vehID, tripId, stopID,  # priorStop,
                         stop.getAttributeSecure("started", nan),
                         stop.getAttributeSecure("ended", nan)))
    print("Parsed %s stopinfos" % len(simStops))

    dfSchedule = pd.DataFrame.from_records(stops, columns=columns)
    dfSim = pd.DataFrame.from_records(simStops, columns=columns2)
    # merge on common columns vehID, tripId, stopID
    df = pd.merge(dfSchedule, dfSim,
                  on=columns[:3],
                  # how="outer",
                  how="inner",
                  )
    print("Found %s matches" % len(df))
    if options.verbose:
        # print(dfSchedule)
        # print(dfSim)
        print(df)

    if options.output:
        outf = open(options.output, 'w')
        sumolib.writeXMLHeader(outf, "$Id$", "scheduleStats")  # noqa
    description, fun = STATS[options.sType]
    useHist = options.histogram is not None
    useGHist = options.gHistogram is not None
    if options.groupBy:
        # per-group statistics plus an aggregate over the group values
        numGroups = 0
        stats = []
        gs = Statistics("%s %s grouped by [%s]" % (options.gType, description, ','.join(options.groupBy)),
                        abs=True, histogram=useGHist, scale=options.gHistogram)
        for name, group in df.groupby(options.groupBy):
            numGroups += 1
            s = Statistics("%s:%s" % (description, name), abs=True, histogram=useHist,
                           scale=options.histogram)
            group.apply(fun, axis=1, args=(s, ))
            gVal = GROUPSTATS[options.gType](s)
            gs.add(gVal, name)
            stats.append((gVal, s))
        stats.sort(key=lambda x: x[0])
        for gVal, s in stats:
            print(s.toString(precision=options.precision, histStyle=2))
            if options.output:
                outf.write(s.toXML(precision=options.precision))
        print()
        print(gs.toString(precision=options.precision, histStyle=2))
        if options.output:
            outf.write(gs.toXML(precision=options.precision))
    else:
        # single statistic over all matched stops
        s = Statistics(description, abs=True, histogram=useHist, scale=options.histogram)
        df.apply(fun, axis=1, args=(s, ))
        print(s.toString(precision=options.precision, histStyle=2))
        if options.output:
            outf.write(s.toXML(precision=options.precision))
    if options.output:
        outf.write("</scheduleStats>\n")
        outf.close()
def cut_routes(aEdges, orig_net, options, busStopEdges=None):
    """Cut vehicle/person routes down to the part inside the given edge set
    and yield (depart, object) pairs for time-sorted output.

    aEdges -- edges of the reduced network
    orig_net -- the original (full) network or None; used to extrapolate
                departure times and measure air-line distances
    options -- parsed command line options
    busStopEdges -- maps stopping place ids to their edge (for cutting stops)

    Vehicle types are yielded first (with key None). A route that enters the
    area several times may be split into multiple "<id>_part<n>" copies.
    """
    areaEdges = set(aEdges)
    num_vehicles = 0
    num_persons = 0
    num_flows = 0
    num_returned = 0
    missingEdgeOccurences = defaultdict(lambda: 0)
    # routes which enter the sub-scenario multiple times
    multiAffectedRoutes = 0
    teleportFactorSum = 0.0
    too_short = 0
    too_short_airdist = 0
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    vehicleTypes = {}
    if options.additional_input:
        parse_standalone_routes(options.additional_input, standaloneRoutes, vehicleTypes)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes, vehicleTypes)
    for typeId, t in sorted(vehicleTypes.items()):
        yield None, t

    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for moving in parse(routeFile, ('vehicle', 'person', 'flow')):
            if moving.name == 'vehicle':
                num_vehicles += 1
                # was: type(moving.route) == list; isinstance is the idiomatic check
                if isinstance(moving.route, list):
                    # the route is a child element of the vehicle
                    old_route = moving.route[0]
                    edges = old_route.edges.split()
                    routeRef = False
                else:
                    # the vehicle references a standalone route by id
                    newDepart = standaloneRoutesDepart.get(moving.route)
                    # was: "newDepart is 'discard'" - identity comparison with a
                    # str literal is not guaranteed to hold (SyntaxWarning on
                    # Python >= 3.8); equality is the correct test
                    if newDepart == 'discard':
                        # route was already checked and discarded
                        continue
                    elif newDepart is not None:
                        # route was already treated; only shift the departure
                        moving.depart = "%.2f" % (newDepart + float(moving.depart))
                        yield moving.depart, moving
                        continue
                    else:
                        routeRef = standaloneRoutes[moving.route]
                        edges = routeRef.edges.split()
            elif moving.name == 'person':
                num_persons += 1
                if moving.walk:
                    old_route = moving.walk[0]
                    edges = old_route.edges.split()
                    routeRef = False
            else:
                num_flows += 1
            firstIndex = getFirstIndex(areaEdges, edges)
            if firstIndex is None:
                continue  # route does not touch the area
            lastIndex = len(edges) - 1 - getFirstIndex(areaEdges, reversed(edges))
            # check for connectivity
            route_parts = [(firstIndex + i, firstIndex + j)
                           for (i, j) in missingEdges(areaEdges, edges[firstIndex:(lastIndex + 1)],
                                                      missingEdgeOccurences)]
            if len(route_parts) > 1:
                multiAffectedRoutes += 1
                if options.disconnected_action == 'discard':
                    if routeRef:
                        standaloneRoutesDepart[moving.route] = 'discard'
                    continue
            # loop over different route parts
            for ix_part, ix_interval in enumerate(route_parts):
                fromIndex, toIndex = ix_interval
                # check for minimum length
                if toIndex - fromIndex + 1 < options.min_length:
                    too_short += 1
                    if routeRef:
                        standaloneRoutesDepart[moving.route] = 'discard'
                    continue
                if options.min_air_dist > 0:
                    fromPos = orig_net.getEdge(edges[fromIndex]).getFromNode().getCoord()
                    toPos = orig_net.getEdge(edges[toIndex]).getToNode().getCoord()
                    if sumolib.miscutils.euclidean(fromPos, toPos) < options.min_air_dist:
                        too_short_airdist += 1
                        if routeRef:
                            standaloneRoutesDepart[moving.route] = 'discard'
                        continue
                # compute new departure
                if routeRef or old_route.exitTimes is None:
                    if orig_net is not None:
                        # extrapolate new departure using default speed
                        newDepart = (float(moving.depart) +
                                     sum([(orig_net.getEdge(e).getLength() /
                                           (orig_net.getEdge(e).getSpeed() * options.speed_factor))
                                          for e in edges[:fromIndex]]))
                    else:
                        print("Could not reconstruct new departure time for %s '%s'. Using old departure time."
                              % (moving.name, moving.id))
                        newDepart = float(moving.depart)
                else:
                    exitTimes = old_route.exitTimes.split()
                    departTimes = [moving.depart] + exitTimes[:-1]
                    teleportFactor = len(departTimes) / float(len(edges))
                    teleportFactorSum += teleportFactor
                    # assume teleports were spread evenly across the vehicles route
                    newDepart = float(departTimes[int(fromIndex * teleportFactor)])
                    del old_route.exitTimes
                departShift = None
                if routeRef:
                    departShift = newDepart - float(moving.depart)
                    standaloneRoutesDepart[moving.route] = departShift
                remaining = edges[fromIndex:toIndex + 1]
                stops = cut_stops(moving, busStopEdges, remaining)
                if routeRef:
                    routeRef.stop = cut_stops(routeRef, busStopEdges, remaining, departShift,
                                              options.defaultStopDuration)
                    routeRef.edges = " ".join(remaining)
                    yield None, routeRef
                else:
                    old_route.edges = " ".join(remaining)
                moving.stop = stops
                moving.depart = "%.2f" % newDepart
                if len(route_parts) > 1:
                    # return copies of the vehicle for each route part
                    yield_mov = copy.deepcopy(moving)
                    yield_mov.id = moving.id + "_part" + str(ix_part)
                    yield newDepart, yield_mov
                else:
                    yield newDepart, moving
                num_returned += 1

    if teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (1 - teleportFactorSum / num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles, %s persons, %s flows and kept %s routes%s" %
          (num_vehicles, num_persons, num_flows, num_returned, teleports))
    if too_short > 0:
        print("Discarded %s routes because they have less than %s edges" %
              (too_short, options.min_length))
    if too_short_airdist > 0:
        print("Discarded %s routes because the air-line distance between start and end is less than %s" %
              (too_short_airdist, options.min_air_dist))
    print("Number of disconnected routes: %s. Most frequent missing edges:" % multiAffectedRoutes)
    printTop(missingEdgeOccurences)
def cut_routes(aEdges, orig_net, options, busStopEdges=None, startEndEdgeMap=None):
    """Cut vehicle/person/flow routes down to the part inside the given edge
    set and yield (depart, object) pairs for time-sorted output.

    aEdges -- edges of the reduced network
    orig_net -- the original (full) network or None
    options -- parsed command line options
    busStopEdges -- maps stopping place ids to their edge
                    (NOTE(review): defaults to None but .get() is called on it
                    unconditionally - confirm callers always pass a dict)
    startEndEdgeMap -- maps ride line ids to (firstEdge, lastEdge) inside the area

    Vehicle types are yielded first (with key -1). Routes entering the area
    several times may be split into multiple "<id>_part<n>" copies.
    """
    areaEdges = set(aEdges)
    stats = Statistics()
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    vehicleTypes = {}
    if options.additional_input:
        parse_standalone_routes(options.additional_input, standaloneRoutes, vehicleTypes,
                                options.heterogeneous)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes, vehicleTypes, options.heterogeneous)
    for _, t in sorted(vehicleTypes.items()):
        yield -1, t

    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for moving in parse(routeFile, (u'vehicle', u'person', u'flow'),
                            {u"walk": (u"edges", u"busStop")},
                            heterogeneous=options.heterogeneous):
            old_route = None
            if moving.name == 'person':
                stats.num_persons += 1
                oldDepart = moving.depart
                newDepart = None
                remaining = set()
                newPlan = []
                # cut each plan item; drop items that fall outside the area
                for planItem in moving.getChildList():
                    if planItem.name == "walk":
                        disco = "keep" if options.disconnected_action == "keep.walk" else options.disconnected_action
                        routeParts = _cutEdgeList(areaEdges, oldDepart, None,
                                                  planItem.edges.split(), orig_net, options,
                                                  stats, disco)
                        if busStopEdges.get(planItem.busStop) not in areaEdges:
                            # destination stop lies outside the cut area
                            planItem.busStop = None
                        walkEdges = []
                        for depart, edges in routeParts:
                            if newDepart is None:
                                newDepart = depart
                            walkEdges += edges
                        if walkEdges:
                            remaining.update(walkEdges)
                            planItem.edges = " ".join(walkEdges)
                            newPlan.append(planItem)
                    elif planItem.name == "ride":
                        keep = True
                        if busStopEdges.get(planItem.busStop) not in areaEdges:
                            if planItem.lines in startEndEdgeMap:
                                # redirect the ride to the last in-area edge of the line
                                planItem.busStop = None
                                planItem.setAttribute("to", startEndEdgeMap[planItem.lines][1])
                            else:
                                keep = False
                        if planItem.attr_from and planItem.attr_from not in areaEdges:
                            if planItem.lines in startEndEdgeMap:
                                planItem.attr_from = startEndEdgeMap[planItem.lines][0]
                            else:
                                keep = False
                        if newDepart is None:
                            newDepart = float(planItem.depart)
                        # use the intended vehicle as the line reference
                        planItem.lines = planItem.intended
                        if keep:
                            newPlan.append(planItem)
                        if planItem.to:
                            # an explicit destination ends the plan
                            break
                    else:
                        newPlan.append(planItem)
                moving.setChildList(newPlan)
                cut_stops(moving, busStopEdges, remaining)
                if not moving.getChildList():
                    # nothing of the plan remains inside the area
                    continue
                moving.depart = "%.2f" % newDepart
                yield newDepart, moving
            else:
                if moving.name == 'vehicle':
                    stats.num_vehicles += 1
                    oldDepart = moving.depart
                else:
                    stats.num_flows += 1
                    oldDepart = moving.begin
                if moving.routeDistribution is not None:
                    # replace the distribution by its last route alternative
                    old_route = moving.addChild(
                        "route", {"edges": moving.routeDistribution[0].route[-1].edges})
                    moving.removeChild(moving.routeDistribution[0])
                    routeRef = None
                elif isinstance(moving.route, list):
                    # the route is a child element
                    old_route = moving.route[0]
                    routeRef = None
                else:
                    # the vehicle/flow references a standalone route by id
                    newDepart = standaloneRoutesDepart.get(moving.route)
                    if newDepart == 'discard':
                        # route was already checked and discarded
                        continue
                    elif newDepart is not None:
                        # route was already treated; only shift depart/begin/end
                        if moving.name == 'vehicle':
                            newDepart += float(moving.depart)
                            moving.depart = "%.2f" % newDepart
                        else:
                            moving.end = "%.2f" % (newDepart + float(moving.end))
                            newDepart += float(moving.begin)
                            moving.begin = "%.2f" % newDepart
                        yield newDepart, moving
                        continue
                    else:
                        old_route = routeRef = standaloneRoutes[moving.route]
                if options.discard_exit_times:
                    old_route.exitTimes = None
                routeParts = _cutEdgeList(areaEdges, oldDepart, old_route.exitTimes,
                                          old_route.edges.split(), orig_net, options, stats,
                                          options.disconnected_action)
                if routeParts and old_route.exitTimes is None and orig_net is None:
                    # without exit times or a network, the depart cannot be shifted
                    print("Could not reconstruct new departure time for %s '%s'. Using old departure time."
                          % (moving.name, moving.id))
                old_route.exitTimes = None
                if routeRef and not routeParts:
                    standaloneRoutesDepart[moving.route] = 'discard'
                for ix_part, (newDepart, remaining) in enumerate(routeParts):
                    departShift = None
                    if routeRef:
                        departShift = newDepart - float(oldDepart)
                        standaloneRoutesDepart[moving.route] = departShift
                    cut_stops(moving, busStopEdges, remaining)
                    if routeRef:
                        cut_stops(routeRef, busStopEdges, remaining, departShift,
                                  options.defaultStopDuration)
                        routeRef.edges = " ".join(remaining)
                        yield -1, routeRef
                    else:
                        old_route.edges = " ".join(remaining)
                    if moving.name == 'vehicle':
                        moving.depart = "%.2f" % newDepart
                    else:
                        moving.begin = "%.2f" % newDepart
                        moving.end = "%.2f" % (newDepart - float(oldDepart) + float(moving.end))
                    if len(routeParts) > 1:
                        # return copies of the vehicle for each route part
                        yield_mov = copy.deepcopy(moving)
                        yield_mov.id = moving.id + "_part" + str(ix_part)
                        yield newDepart, yield_mov
                    else:
                        yield newDepart, moving

    if stats.teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (1 - stats.teleportFactorSum / stats.num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles, %s persons, %s flows and kept %s routes%s" %
          (stats.num_vehicles, stats.num_persons, stats.num_flows, stats.num_returned, teleports))
    if stats.too_short > 0:
        msg = "Discarded %s routes because they have less than %s edges" % (
            stats.too_short, options.min_length)
        if options.min_air_dist > 0:
            msg += " or the air-line distance between start and end is less than %s" % options.min_air_dist
        print(msg)
    print("Number of disconnected routes: %s." % stats.multiAffectedRoutes)
    if options.missing_edges > 0:
        print("Most frequent missing edges:")
        counts = sorted([(v, k) for k, v in stats.missingEdgeOccurences.items()], reverse=True)
        for count, edge in itertools.islice(counts, options.missing_edges):
            print(count, edge)
def parseSignalPrograms(options, sgIndex, redYellowDur, yellowDur):
    """Build SUMO signal programs from the OCIT program list.

    Fixes applied: removed leftover debug `import pdb` and the commented-out
    `pdb.set_trace()` breakpoint; `not x in d` -> `x not in d`; hoisted the
    loop-invariant index2groups/majorIndex computation out of the state loop.

    :param options: parsed command line options (ocitfile, noGrouping,
        minorIndex, majorIndex, verbose)
    :param sgIndex: signal group id -> list of link indices
    :param redYellowDur: red-yellow duration per link index and per group id
    :param yellowDur: yellow duration per link index and per group id
    :return: dict programID -> list of (duration, state) tuples
    """
    programs = {}
    maxSgIndex = max(val for l in sgIndex.values() for val in l)
    # invariant over all programs and states
    index2groups = getIndex2Groups(sgIndex)
    majorIndex = options.majorIndex if options.majorIndex is not None else []
    programsList = list(parse(options.ocitfile, PROGRAMLIST))[0]
    for program in programsList.getChild(PROGRAM):
        programID = textNode(program, SHORTID)
        cycleTime = int(textNode(program.getChild(PROGRAM_HEAD)[0], CYCLE_TIME))
        # generate an allread program as starting point
        sigProgram = [[[] for _ in range(maxSgIndex + 1)] for _ in range(cycleTime)]
        if program.hasChild(PROGRAM_ROW):
            for element in program.getChild(PROGRAM_ROW):
                sgID = textNode(element, SIGNALGROUP)
                if sgID not in sgIndex:
                    continue
                switches = []
                permState = None
                # signal state used outside the green phase (last non-green switch state)
                stdClosed = 'r'
                if element.hasChild(SWITCHING_TIME):
                    for switch in element.getChild(SWITCHING_TIME):
                        state = SIGNALSTATE_SUMOSTATE[textNode(switch, PHASE_SIGNAL)]
                        t = int(textNode(switch, SWITCHING_TIME2))
                        switches.append((t, state))
                        if state != 'G':
                            stdClosed = state
                elif element.hasChild(PERM_STATE):
                    permState = SIGNALSTATE_SUMOSTATE[textNode(element, PERM_STATE)]
                else:
                    raise Exception('Neither switches nor permanent signal state were given. ' +
                                    'Cannot interpret signal state for sgIndex %s (programID %s)' %
                                    (sgID, programID))
                sgIndices = sgIndex[sgID]
                start_index = 0
                assign = ''
                if len(switches) < 1:
                    # no switches: the permanent state holds for the whole cycle
                    for sgIdx in sgIndices:
                        for i in range(0, cycleTime):
                            sigProgram[i][sgIdx].append(permState)
                else:
                    for k, (t, state) in enumerate(switches):
                        onset = False
                        if state == 'G':
                            # switching TO green: fill the gap with the closed state
                            assign = stdClosed
                            onset = True
                        else:
                            # switching AWAY from green: the gap before t was green
                            assign = 'G'
                        for sgIdx in sgIndices:
                            for i in range(start_index, t):
                                sigProgram[i][sgIdx].append(assign)
                            if onset:
                                # red-yellow transition before green start (wraps around the cycle)
                                dur = min(redYellowDur[sgIdx], redYellowDur[sgID])
                                for i in range(t - dur, t):
                                    if i < 0:
                                        i += cycleTime
                                    if i >= cycleTime:
                                        i %= cycleTime
                                    sigProgram[i][sgIdx] = ['u']
                            else:
                                # yellow transition after green end
                                dur = min(yellowDur[sgIdx], yellowDur[sgID])
                                for i in range(t, t + dur):
                                    if i >= cycleTime:
                                        i %= cycleTime
                                    sigProgram[i][sgIdx] = ['y']
                        start_index = t + dur if not onset else t
                        if k >= len(switches) - 1:
                            # after the last switch the final state holds until cycle end
                            for sgIdx in sgIndices:
                                for i in range(start_index, cycleTime):
                                    sigProgram[i][sgIdx].append(state)
        if not options.noGrouping:
            # merge consecutive seconds with identical states into (duration, state)
            grouped = [(k, list(g)) for k, g in groupby(sigProgram)]
            timedProgram = [(len(g), k) for k, g in grouped]
        else:
            timedProgram = [(1, g) for g in sigProgram]
        for i, (t, state) in enumerate(timedProgram):
            # links that never received a state are switched off
            state = [val if len(val) > 0 else ['o'] for val in state]
            state = normalizeStates(state, None, index2groups, options.minorIndex, majorIndex)
            timedProgram[i] = (t, state)
        programs[programID] = timedProgram
        if options.verbose:
            print('Created program %s' % programID)
            print('\n'.join(str(p) for p in timedProgram))
    return programs
def parse_standalone_routes(file, into, typesMap):
    """Read vTypes into typesMap and standalone routes into `into`, keyed by id."""
    for element in parse(file, ('vType', 'route')):
        target = typesMap if element.name == 'vType' else into
        target[element.id] = element
def parse_standalone_routes(file, into):
    """Store every standalone route from the given file in `into`, keyed by id."""
    for standalone in parse(file, 'route'):
        into[standalone.id] = standalone
def parsePhaseIDs(ocitfile):
    """Return the sorted short IDs of all phases found in the OCIT file."""
    phaseList = list(parse(ocitfile, PHASELIST))[0]
    ids = [textNode(phase, SHORTID) for phase in phaseList.getChild(PHASE)]
    ids.sort()
    return ids
def cut_routes(aEdges, orig_net, options, busStopEdges=None):
    """Cut persons, vehicles and flows from options.routeFiles down to the
    network area given by aEdges.

    Generator yielding (depart, xmlObject) pairs so the caller can sort by
    departure time; vTypes and standalone routes are yielded with depart -1.

    Fixes applied: `newDepart is 'discard'` -> `==` (identity comparison with a
    str literal is unreliable); removed leftover debug print of plan items;
    flow end time now keeps the original end offset (consistent with the
    route-reuse branch below); guard against formatting newDepart=None when a
    person plan contains no walk.
    """
    areaEdges = set(aEdges)
    stats = Statistics()
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    vehicleTypes = {}
    if options.additional_input:
        parse_standalone_routes(options.additional_input, standaloneRoutes, vehicleTypes,
                                options.heterogeneous)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes, vehicleTypes, options.heterogeneous)
    for _, t in sorted(vehicleTypes.items()):
        yield -1, t

    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for moving in parse(routeFile, ('vehicle', 'person', 'flow'),
                            heterogeneous=options.heterogeneous):
            old_route = None
            if moving.name == 'person':
                stats.num_persons += 1
                oldDepart = moving.depart
                newDepart = None
                remaining = set()
                newPlan = []
                for planItem in moving.getChildList():
                    if planItem.name == "walk":
                        # each disconnected part of the walk becomes its own walk item
                        routeParts = cutEdgeList(areaEdges, oldDepart, None,
                                                 planItem.edges.split(), orig_net, options, stats)
                        for depart, edges in routeParts:
                            if newDepart is None:
                                newDepart = depart
                            walk = copy.deepcopy(planItem)
                            walk.edges = " ".join(edges)
                            newPlan.append(walk)
                            remaining.update(edges)
                    else:
                        newPlan.append(planItem)
                cut_stops(moving, busStopEdges, remaining)
                if not newPlan:
                    continue
                if newDepart is None:
                    # plan contained no (cuttable) walk; keep the original departure
                    newDepart = float(oldDepart)
                moving.depart = "%.2f" % newDepart
                moving.setChildList(newPlan)
                yield newDepart, moving
            else:
                if moving.name == 'vehicle':
                    stats.num_vehicles += 1
                    oldDepart = moving.depart
                else:
                    stats.num_flows += 1
                    oldDepart = moving.begin
                if isinstance(moving.route, list):
                    # route given as child element
                    old_route = moving.route[0]
                    routeRef = None
                else:
                    # route given by reference to a standalone route
                    newDepart = standaloneRoutesDepart.get(moving.route)
                    if newDepart == 'discard':
                        # route was already checked and discarded
                        continue
                    elif newDepart is not None:
                        # route was already treated; only shift the times
                        if moving.name == 'vehicle':
                            newDepart += float(moving.depart)
                            moving.depart = "%.2f" % newDepart
                        else:
                            moving.end = "%.2f" % (newDepart + float(moving.end))
                            newDepart += float(moving.begin)
                            moving.begin = "%.2f" % newDepart
                        yield newDepart, moving
                        continue
                    else:
                        old_route = routeRef = standaloneRoutes[moving.route]
                routeParts = cutEdgeList(areaEdges, oldDepart, old_route.exitTimes,
                                         old_route.edges.split(), orig_net, options, stats)
                if routeParts and old_route.exitTimes is None and orig_net is None:
                    print("Could not reconstruct new departure time for %s '%s'. Using old departure time." %
                          (moving.name, moving.id))
                old_route.exitTimes = None
                if routeRef and not routeParts:
                    standaloneRoutesDepart[moving.route] = 'discard'
                for ix_part, (newDepart, remaining) in enumerate(routeParts):
                    departShift = None
                    if routeRef:
                        departShift = newDepart - float(oldDepart)
                        standaloneRoutesDepart[moving.route] = departShift
                    cut_stops(moving, busStopEdges, remaining)
                    if routeRef:
                        cut_stops(routeRef, busStopEdges, remaining, departShift,
                                  options.defaultStopDuration)
                        routeRef.edges = " ".join(remaining)
                        yield -1, routeRef
                    else:
                        old_route.edges = " ".join(remaining)
                    if moving.name == 'flow':
                        moving.begin = "%.2f" % newDepart
                        # shift the end by the same amount as the begin
                        moving.end = "%.2f" % (newDepart - float(oldDepart) + float(moving.end))
                    else:
                        moving.depart = "%.2f" % newDepart
                    if len(routeParts) > 1:
                        # return copies of the vehicle for each route part
                        yield_mov = copy.deepcopy(moving)
                        yield_mov.id = moving.id + "_part" + str(ix_part)
                        yield newDepart, yield_mov
                    else:
                        yield newDepart, moving

    if stats.teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - stats.teleportFactorSum / stats.num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles, %s persons, %s flows and kept %s routes%s" %
          (stats.num_vehicles, stats.num_persons, stats.num_flows, stats.num_returned, teleports))
    if stats.too_short > 0:
        msg = "Discarded %s routes because they have less than %s edges" % (
            stats.too_short, options.min_length)
        if options.min_air_dist > 0:
            msg += " or the air-line distance between start and end is less than %s" % options.min_air_dist
        print(msg)
    print("Number of disconnected routes: %s. Most frequent missing edges:" % stats.multiAffectedRoutes)
    printTop(stats.missingEdgeOccurences)
def cut_routes(aEdges, orig_net, options, busStopEdges=None, ptRoutes=None, oldPTRoutes=None, collectPT=False):
    """Cut persons, vehicles and flows from options.routeFiles down to the
    sub-network given by aEdges.

    Generator yielding (depart, xmlObject) pairs so that the caller can sort
    the output by departure time; vTypes and standalone routes are yielded
    with depart -1 so they come first.

    busStopEdges: stop id -> edge id, used to drop stops outside the area
    ptRoutes/oldPTRoutes: public-transport line -> edge list in the cut /
        original network, used to repair ride items (presumably filled by a
        prior PT pass -- see caller)
    collectPT: when True, record the original PT route per line into
        oldPTRoutes while cutting
    """
    areaEdges = set(aEdges)
    stats = Statistics()
    standaloneRoutes = {}  # routeID -> routeObject
    standaloneRoutesDepart = {}  # routeID -> time or 'discard' or None
    vehicleTypes = {}
    if options.additional_input:
        for addFile in options.additional_input.split(","):
            parse_standalone_routes(addFile, standaloneRoutes, vehicleTypes)
    for routeFile in options.routeFiles:
        parse_standalone_routes(routeFile, standaloneRoutes, vehicleTypes)
    for _, t in sorted(vehicleTypes.items()):
        yield -1, t

    for routeFile in options.routeFiles:
        print("Parsing routes from %s" % routeFile)
        for moving in parse(routeFile, (u'vehicle', u'person', u'flow'),
                            {u"walk": (u"edges", u"busStop", u"trainStop")}):
            # periodic progress report
            if options.verbose and stats.total() > 0 and stats.total() % 100000 == 0:
                print("%s items read" % stats.total())
            old_route = None
            if moving.name == 'person':
                stats.num_persons += 1
                oldDepart = moving.depart
                newDepart = None
                remaining = set()
                newPlan = []
                # track whether the plan was cut before/after the current item
                isDiscoBefore = True
                isDiscoAfter = False
                for planItem in moving.getChildList():
                    if planItem.name == "walk":
                        disco = "keep" if options.disconnected_action == "keep.walk" else options.disconnected_action
                        routeParts = _cutEdgeList(areaEdges, oldDepart, None,
                                                  planItem.edges.split(), orig_net, options,
                                                  stats, disco)
                        if routeParts is None:
                            # the walk itself is disconnected and the disconnected_action says not to keep the person
                            newPlan = []
                            break
                        # concatenate all kept walk parts into a single walk
                        walkEdges = []
                        for depart, edges in routeParts:
                            if newDepart is None:
                                newDepart = depart
                            walkEdges += edges
                        if walkEdges:
                            if walkEdges[-1] != planItem.edges.split()[-1]:
                                # walk was cut at the end; its stop no longer applies
                                planItem.busStop = None
                                planItem.trainStop = None
                                isDiscoAfter = True
                            if walkEdges[0] != planItem.edges.split()[0]:
                                isDiscoBefore = True
                            remaining.update(walkEdges)
                            planItem.edges = " ".join(walkEdges)
                            if planItem.busStop and busStopEdges.get(planItem.busStop) not in areaEdges:
                                planItem.busStop = None
                                isDiscoAfter = True
                            if planItem.trainStop and busStopEdges.get(planItem.trainStop) not in areaEdges:
                                planItem.trainStop = None
                                isDiscoAfter = True
                        else:
                            planItem = None
                    elif planItem.name == "ride":
                        # "busStop" / "trainStop" overrides "to"
                        toEdge = busStopEdges.get(planItem.busStop) if planItem.busStop else planItem.to
                        if planItem.trainStop:
                            toEdge = busStopEdges.get(planItem.trainStop)
                        try:
                            if toEdge not in areaEdges:
                                if planItem.lines in ptRoutes:
                                    ptRoute = ptRoutes[planItem.lines]
                                    oldPTRoute = oldPTRoutes[planItem.lines]
                                    # test whether ride ends before new network
                                    if oldPTRoute.index(toEdge) < oldPTRoute.index(ptRoute[0]):
                                        planItem = None
                                    else:
                                        # ride leaves the area; end it at the cut PT route's last edge
                                        planItem.busStop = None
                                        planItem.trainStop = None
                                        planItem.setAttribute("to", ptRoute[-1])
                                        isDiscoAfter = True
                                else:
                                    planItem = None
                            if planItem is not None:
                                if planItem.attr_from and planItem.attr_from not in areaEdges:
                                    if planItem.lines in ptRoutes:
                                        ptRoute = ptRoutes[planItem.lines]
                                        oldPTRoute = oldPTRoutes[planItem.lines]
                                        # test whether ride starts after new network
                                        if oldPTRoute.index(planItem.attr_from) > oldPTRoute.index(ptRoute[-1]):
                                            planItem = None
                                        else:
                                            # ride enters the area; start it at the cut PT route's first edge
                                            planItem.setAttribute("from", ptRoute[0])
                                            isDiscoBefore = True
                                    else:
                                        planItem = None
                                elif planItem.attr_from is None and len(newPlan) == 0:
                                    # first plan item without origin: anchor it on the PT route
                                    if planItem.lines in ptRoutes:
                                        planItem.setAttribute("from", ptRoutes[planItem.lines][0])
                                    else:
                                        planItem = None
                        except ValueError as e:
                            # .index() failed: edge not on the recorded PT route
                            print("Error handling ride in '%s'" % moving.id, e)
                            planItem = None
                        if planItem is not None and newDepart is None and planItem.depart is not None:
                            newDepart = parseTime(planItem.depart)
                            planItem.lines = planItem.intended
                    if planItem is None:
                        isDiscoAfter = True
                    else:
                        newPlan.append(planItem)
                    if len(newPlan) > 1 and isDiscoBefore and options.disconnected_action == "discard":
                        newPlan = []
                        break
                    isDiscoBefore = isDiscoAfter
                    isDiscoAfter = False
                moving.setChildList(newPlan)
                cut_stops(moving, busStopEdges, remaining)
                if not moving.getChildList():
                    continue
                if newDepart is None:
                    newDepart = parseTime(moving.depart)
                if newPlan[0].name == "ride" and newPlan[0].lines == newPlan[0].intended:
                    # person starts with a ride on a concrete vehicle: departure is triggered by it
                    moving.depart = "triggered"
                else:
                    moving.depart = "%.2f" % newDepart
                yield newDepart, moving
            else:
                if moving.name == 'vehicle':
                    stats.num_vehicles += 1
                    oldDepart = parseTime(moving.depart)
                else:
                    stats.num_flows += 1
                    oldDepart = parseTime(moving.begin)
                if moving.routeDistribution is not None:
                    # keep only the last route of the distribution
                    old_route = moving.addChild(
                        "route", {"edges": moving.routeDistribution[0].route[-1].edges})
                    moving.removeChild(moving.routeDistribution[0])
                    routeRef = None
                elif isinstance(moving.route, list):
                    old_route = moving.route[0]
                    routeRef = None
                else:
                    # route given by reference to a standalone route
                    newDepart = standaloneRoutesDepart.get(moving.route)
                    if newDepart == 'discard':
                        # route was already checked and discarded
                        continue
                    elif newDepart is not None:
                        # route was already treated; only shift the times
                        if moving.name == 'vehicle':
                            newDepart += oldDepart
                            moving.depart = "%.2f" % newDepart
                        else:
                            if moving.end:
                                moving.end = "%.2f" % (newDepart + parseTime(moving.end))
                            newDepart += oldDepart
                            moving.begin = "%.2f" % newDepart
                        if collectPT and moving.line:
                            oldPTRoutes[moving.line] = standaloneRoutes[moving.route].edges.split()
                        yield newDepart, moving
                        continue
                    else:
                        old_route = routeRef = standaloneRoutes[moving.route]
                if options.discard_exit_times:
                    old_route.exitTimes = None
                if collectPT and moving.line:
                    oldPTRoutes[moving.line] = old_route.edges.split()
                routeParts = _cutEdgeList(areaEdges, oldDepart, old_route.exitTimes,
                                          old_route.edges.split(), orig_net, options, stats,
                                          options.disconnected_action,
                                          moving.departEdge, moving.arrivalEdge)
                if options.verbose and routeParts and old_route.exitTimes is None and orig_net is None:
                    print("Could not reconstruct new departure time for %s '%s'. Using old departure time." %
                          (moving.name, moving.id))
                old_route.exitTimes = None
                if routeRef and not routeParts:
                    standaloneRoutesDepart[moving.route] = 'discard'
                for ix_part, (newDepart, remaining) in enumerate(routeParts or []):
                    departShift = cut_stops(moving, busStopEdges, remaining)
                    if routeRef:
                        departShift = cut_stops(routeRef, busStopEdges, remaining,
                                                newDepart - oldDepart,
                                                options.defaultStopDuration, True)
                        standaloneRoutesDepart[moving.route] = departShift
                        newDepart = oldDepart + departShift
                        routeRef.edges = " ".join(remaining)
                        yield -1, routeRef
                    else:
                        newDepart = max(newDepart, departShift)
                        old_route.edges = " ".join(remaining)
                    if moving.name == 'vehicle':
                        moving.depart = "%.2f" % newDepart
                    else:
                        moving.begin = "%.2f" % newDepart
                        if moving.end:
                            moving.end = "%.2f" % (newDepart - oldDepart + parseTime(moving.end))
                    moving.departEdge = None  # the cut already removed the unused edges
                    moving.arrivalEdge = None  # the cut already removed the unused edges
                    if len(routeParts) > 1:
                        # return copies of the vehicle for each route part
                        yield_mov = copy.deepcopy(moving)
                        yield_mov.id = moving.id + "_part" + str(ix_part)
                        yield newDepart, yield_mov
                    else:
                        yield newDepart, moving

    if stats.teleportFactorSum > 0:
        teleports = " (avg teleportFactor %s)" % (
            1 - stats.teleportFactorSum / stats.num_returned)
    else:
        teleports = ""
    print("Parsed %s vehicles, %s persons, %s flows and kept %s routes%s" %
          (stats.num_vehicles, stats.num_persons, stats.num_flows, stats.num_returned, teleports))
    if stats.too_short > 0:
        msg = "Discarded %s routes because they have less than %s edges" % (
            stats.too_short, options.min_length)
        if options.min_air_dist > 0:
            msg += " or the air-line distance between start and end is less than %s" % options.min_air_dist
        print(msg)
    print("Number of disconnected routes: %s." % stats.multiAffectedRoutes)
    if options.missing_edges > 0:
        print("Most frequent missing edges:")
        counts = sorted([(v, k) for k, v in stats.missingEdgeOccurences.items()], reverse=True)
        for count, edge in itertools.islice(counts, options.missing_edges):
            print(count, edge)
def main(options):
    """Cut routes/trips to the area of options.network and write the result.

    Also filters busStops, trainStops and TAZ from options.additional_input
    to options.stops_output when requested.
    """
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    print("Valid area contains %s edges" % len(edges))

    if options.trips:
        output_type = 'trips'
        writer = write_trip
    else:
        output_type = 'routes'
        writer = write_route

    busStopEdges = {}  # stop id -> edge id (lane id minus "_<index>" suffix)
    if options.stops_output:
        busStops = codecs.open(options.stops_output, 'w', encoding='utf8')
        busStops.write('<additional xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
                       'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/additional_file.xsd">\n')
    if options.additional_input:
        num_busstops = 0
        kept_busstops = 0
        num_taz = 0
        kept_taz = 0
        for busStop in parse(options.additional_input, ('busStop', 'trainStop')):
            num_busstops += 1
            # strip the lane index to get the edge id
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                kept_busstops += 1
                if busStop.access:
                    # drop access points whose lane is outside the area
                    busStop.access = [acc for acc in busStop.access if acc.lane[:-2] in edges]
                # NOTE(review): .decode('utf8') assumes toXML returns bytes
                # (Python-2-era sumolib) -- confirm against the sumolib in use
                busStops.write(busStop.toXML(' ').decode('utf8'))
        for taz in parse(options.additional_input, 'taz'):
            num_taz += 1
            taz_edges = [e for e in taz.edges.split() if e in edges]
            if taz_edges:
                taz.edges = " ".join(taz_edges)
                if options.stops_output:
                    kept_taz += 1
                    busStops.write(taz.toXML(' '))
        if num_busstops > 0 and num_taz > 0:
            print("Kept %s of %s busStops and %s of %s tazs" % (
                kept_busstops, num_busstops, kept_taz, num_taz))
        elif num_busstops > 0:
            print("Kept %s of %s busStops" % (kept_busstops, num_busstops))
        elif num_taz > 0:
            print("Kept %s of %s tazs" % (kept_taz, num_taz))
    if options.stops_output:
        busStops.write('</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # write all (depart, object) pairs as a routes file and report counts
        f.write('<!-- generated with %s for %s from %s -->\n' %
                (os.path.basename(__file__), options.network, options.routeFiles))
        f.write(('<routes xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' +
                 'xsi:noNamespaceSchemaLocation="http://sumo.dlr.de/xsd/routes_file.xsd">\n'))
        num_routeRefs = 0
        num_vehicles = 0
        for _, v in vehicles:
            if v.name == 'route':
                num_routeRefs += 1
            else:
                num_vehicles += 1
            writer(f, v)
        f.write('</routes>\n')
        if num_routeRefs > 0:
            print("Wrote %s standalone-routes and %s vehicles" % (num_routeRefs, num_vehicles))
        else:
            print("Wrote %s %s" % (num_vehicles, output_type))

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with codecs.open(tmpname, 'w', encoding='utf8') as f:
            write_to_file(cut_routes(edges, orig_net, options, busStopEdges), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges))
        routes.sort(key=lambda v: v[0])
        with codecs.open(options.output, 'w', encoding='utf8') as f:
            write_to_file(routes, f)
def main():
    """Detect implausible routes by comparing each route against the fastest
    alternative computed by duarouter and write reports/polygons.

    Fixes applied: the ignore filter compared whole (score, id, info) tuples
    against a set of route-ID strings and therefore never removed anything --
    now compares the route id; the ignore file is closed via a context
    manager; bare assert instead of parenthesized call-style assert.
    """
    options = get_options()
    if options.verbose:
        print("parsing network from", options.network)
    net = readNet(options.network, withInternal=True)
    read = 0
    routeInfos = {}  # id-> RouteInfo
    skipped = set()
    for routeFile in options.routeFiles:
        if options.verbose:
            print("parsing routes from", routeFile)
        idx = 0
        if options.standalone:
            for idx, route in enumerate(parse(routeFile, 'route')):
                if options.verbose and idx > 0 and idx % 100000 == 0:
                    print(idx, "routes read")
                addOrSkip(routeInfos, skipped, route.id, route, options.min_edges)
        else:
            if options.heterogeneous:
                for idx, vehicle in enumerate(parse(routeFile, 'vehicle')):
                    if options.verbose and idx > 0 and idx % 100000 == 0:
                        print(idx, "vehicles read")
                    addOrSkip(routeInfos, skipped, vehicle.id, vehicle.route[0], options.min_edges)
            else:
                # fast path: only register each vehicle once (first route seen)
                prev = (None, None)
                for vehicle, route in parse_fast_nested(routeFile, 'vehicle', 'id', 'route', 'edges'):
                    if prev[0] != vehicle.id:
                        if options.verbose and idx > 0 and idx % 500000 == 0:
                            print(idx, "vehicles read")
                        if prev[0] is not None:
                            addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
                        prev = (vehicle.id, route)
                        idx += 1
                if prev[0] is not None:
                    addOrSkip(routeInfos, skipped, prev[0], prev[1], options.min_edges)
        read += idx
    if options.verbose:
        print(read, "routes read", len(skipped), "short routes skipped")

    if options.verbose:
        print("calculating air distance and checking loops")
    for idx, ri in enumerate(routeInfos.values()):
        if options.verbose and idx > 0 and idx % 100000 == 0:
            print(idx, "routes checked")
        calcDistAndLoops(ri, net, options)

    prefix = os.path.commonprefix(options.routeFiles)
    duarouterOutput = prefix + '.rerouted.rou.xml'
    duarouterAltOutput = prefix + '.rerouted.rou.alt.xml'
    if os.path.exists(duarouterAltOutput) and options.reuse_routing:
        if options.verbose:
            print("reusing old duarouter file", duarouterAltOutput)
    else:
        if options.standalone:
            duarouterInput = prefix
            # generate suitable input file for duarouter
            duarouterInput += ".vehRoutes.xml"
            with open(duarouterInput, 'w') as outf:
                outf.write('<routes>\n')
                for rID, rInfo in routeInfos.items():
                    outf.write('    <vehicle id="%s" depart="0">\n' % rID)
                    outf.write('        <route edges="%s"/>\n' % ' '.join(rInfo.edges))
                    outf.write('    </vehicle>\n')
                outf.write('</routes>\n')
        else:
            duarouterInput = ",".join(options.routeFiles)

        command = [sumolib.checkBinary('duarouter'), '-n', options.network,
                   '-r', duarouterInput, '-o', duarouterOutput, '--no-step-log',
                   '--routing-threads', str(options.threads),
                   '--routing-algorithm', 'astar', '--aggregate-warnings', '1']
        if options.verbose:
            command += ["-v"]
        if options.verbose:
            print("calling duarouter:", " ".join(command))
        subprocess.call(command)

    for vehicle in parse(duarouterAltOutput, 'vehicle'):
        if vehicle.id in skipped:
            continue
        routeAlts = vehicle.routeDistribution[0].route
        if len(routeAlts) == 1:
            # no faster alternative was found
            routeInfos[vehicle.id].detour = 0
            routeInfos[vehicle.id].detourRatio = 1
            routeInfos[vehicle.id].shortest_path_distance = routeInfos[vehicle.id].length
        else:
            oldCosts = float(routeAlts[0].cost)
            newCosts = float(routeAlts[1].cost)
            assert routeAlts[0].edges.split() == routeInfos[vehicle.id].edges
            routeInfos[vehicle.id].shortest_path_distance = sumolib.route.getLength(
                net, routeAlts[1].edges.split())
            if oldCosts <= newCosts:
                routeInfos[vehicle.id].detour = 0
                routeInfos[vehicle.id].detourRatio = 1
                if oldCosts < newCosts:
                    sys.stderr.write(("Warning: fastest route for '%s' is slower than original route " +
                                      "(old=%s, new=%s). Check vehicle types\n") % (
                        vehicle.id, oldCosts, newCosts))
            else:
                routeInfos[vehicle.id].detour = oldCosts - newCosts
                routeInfos[vehicle.id].detourRatio = oldCosts / newCosts

    implausible = []
    allRoutesStats = Statistics("overall implausiblity")
    implausibleRoutesStats = Statistics("implausiblity above threshold")
    for rID in sorted(routeInfos.keys()):
        ri = routeInfos[rID]
        # weighted sum of implausibility indicators
        ri.implausibility = (options.airdist_ratio_factor * ri.airDistRatio +
                             options.detour_factor * ri.detour +
                             options.detour_ratio_factor * ri.detourRatio +
                             max(0, options.min_dist / ri.shortest_path_distance - 1) +
                             max(0, options.min_air_dist / ri.airDist - 1))
        allRoutesStats.add(ri.implausibility, rID)
        if ri.implausibility > options.threshold or ri.edgeLoop or ri.nodeLoop:
            implausible.append((ri.implausibility, rID, ri))
            implausibleRoutesStats.add(ri.implausibility, rID)

    # generate restrictions
    if options.restrictions_output is not None:
        with open(options.restrictions_output, 'w') as outf:
            for score, rID, ri in sorted(implausible):
                edges = ri.edges
                if options.odrestrictions and len(edges) > 2:
                    edges = [edges[0], edges[-1]]
                outf.write("0 %s\n" % " ".join(edges))

    # write xml output
    if options.xmlOutput is not None:
        with open(options.xmlOutput, 'w') as outf:
            sumolib.writeXMLHeader(outf, "$Id$", options=options)  # noqa
            outf.write('<implausibleRoutes>\n')
            for score, rID, ri in sorted(implausible):
                edges = " ".join(ri.edges)
                outf.write('    <route id="%s" edges="%s" score="%s"/>\n' % (
                    rID, edges, score))
            outf.write('</implausibleRoutes>\n')

    if options.ignore_routes is not None:
        numImplausible = len(implausible)
        with open(options.ignore_routes) as ignf:
            ignored = set([r.strip() for r in ignf])
        # compare the route id (element 1 of the tuple), not the whole tuple
        implausible = [r for r in implausible if r[1] not in ignored]
        print("Loaded %s routes to ignore. Reducing implausible from %s to %s" % (
            len(ignored), numImplausible, len(implausible)))

    # generate polygons
    polyOutput = prefix + '.implausible.add.xml'
    colorgen = Colorgen(("random", 1, 1))
    with open(polyOutput, 'w') as outf:
        outf.write('<additional>\n')
        for score, rID, ri in sorted(implausible):
            generate_poly(options, net, rID, colorgen(), ri.edges, outf, score)
        outf.write('</additional>\n')

    sys.stdout.write('score\troute\t(airDistRatio, detourRatio, detour, shortestDist, airDist, edgeLoop, nodeLoop)\n')
    for score, rID, ri in sorted(implausible):
        sys.stdout.write('%.7f\t%s\t%s\n' % (score, rID, (
            ri.airDistRatio, ri.detourRatio, ri.detour,
            ri.shortest_path_distance, ri.airDist, ri.edgeLoop, ri.nodeLoop)))
    print(allRoutesStats)
    print(implausibleRoutesStats)
def main(options):
    """Cut routes/trips to the area of options.network and write the result.

    Optionally pre-processes a public-transport file (options.pt_input) to
    build a line -> (first, last) edge map that is passed to cut_routes for
    repairing person rides.
    """
    net = readNet(options.network)
    edges = set([e.getID() for e in net.getEdges()])
    if options.orig_net is not None:
        orig_net = readNet(options.orig_net)
    else:
        orig_net = None
    print("Valid area contains %s edges" % len(edges))

    if options.trips:
        writer = write_trip
    else:
        writer = write_route

    busStopEdges = {}  # stop id -> edge id (lane id minus "_<index>" suffix)
    if options.stops_output:
        busStops = io.open(options.stops_output, 'w', encoding="utf8")
        writeHeader(busStops, os.path.basename(__file__), 'additional')
    if options.additional_input:
        num_busstops = 0
        kept_busstops = 0
        num_taz = 0
        kept_taz = 0
        for busStop in parse(options.additional_input, ('busStop', 'trainStop')):
            num_busstops += 1
            # strip the lane index to get the edge id
            edge = busStop.lane[:-2]
            busStopEdges[busStop.id] = edge
            if options.stops_output and edge in edges:
                kept_busstops += 1
                if busStop.access:
                    # drop access points whose lane is outside the area
                    busStop.access = [acc for acc in busStop.access if acc.lane[:-2] in edges]
                busStops.write(busStop.toXML(u' '))
        for taz in parse(options.additional_input, 'taz'):
            num_taz += 1
            taz_edges = [e for e in taz.edges.split() if e in edges]
            if taz_edges:
                taz.edges = " ".join(taz_edges)
                if options.stops_output:
                    kept_taz += 1
                    busStops.write(taz.toXML(u' '))
        if num_busstops > 0 and num_taz > 0:
            print("Kept %s of %s busStops and %s of %s tazs" % (
                kept_busstops, num_busstops, kept_taz, num_taz))
        elif num_busstops > 0:
            print("Kept %s of %s busStops" % (kept_busstops, num_busstops))
        elif num_taz > 0:
            print("Kept %s of %s tazs" % (kept_taz, num_taz))
    if options.stops_output:
        busStops.write(u'</additional>\n')
        busStops.close()

    def write_to_file(vehicles, f):
        # write all (depart, object) pairs as a routes file and report counts
        writeHeader(f, os.path.basename(__file__), 'routes')
        numRefs = defaultdict(int)
        for _, v in vehicles:
            if options.trips and v.name == "vehicle":
                numRefs["trip"] += 1
            else:
                numRefs[v.name] += 1
            if v.name == "vType":
                f.write(v.toXML(u' '))
            else:
                writer(f, v)
        f.write(u'</routes>\n')
        if numRefs:
            print("Wrote", ", ".join(["%s %ss" % (k[1], k[0]) for k in sorted(numRefs.items())]))
        else:
            print("Wrote nothing")

    startEndEdgeMap = {}  # PT line -> (first edge, last edge) of the cut route
    if options.pt_input:
        # temporarily cut only the PT file to collect line -> edge info
        allRouteFiles = options.routeFiles
        options.routeFiles = [options.pt_input]
        startEndRouteEdge = {}
        with io.open(options.pt_output if options.pt_output else options.pt_input + ".cut",
                     'w', encoding="utf8") as f:
            writeHeader(f, os.path.basename(__file__), 'routes')
            for _, v in cut_routes(edges, orig_net, options, busStopEdges):
                f.write(v.toXML(u' '))
                if v.name == "route":
                    routeEdges = v.edges.split()
                    startEndRouteEdge[v.id] = (routeEdges[0], routeEdges[-1])
                elif isinstance(v.route, list):
                    routeEdges = v.route[0].edges.split()
                    startEndEdgeMap[v.line] = (routeEdges[0], routeEdges[-1])
                elif v.route is not None:
                    startEndEdgeMap[v.line] = startEndRouteEdge[v.route]
            f.write(u'</routes>\n')
        options.routeFiles = allRouteFiles

    if options.big:
        # write output unsorted
        tmpname = options.output + ".unsorted"
        with io.open(tmpname, 'w', encoding="utf8") as f:
            write_to_file(cut_routes(edges, orig_net, options, busStopEdges, startEndEdgeMap), f)
        # sort out of memory
        sort_routes.main([tmpname, '--big', '--outfile', options.output])
    else:
        routes = list(cut_routes(edges, orig_net, options, busStopEdges, startEndEdgeMap))
        routes.sort(key=lambda v: v[0])
        with io.open(options.output, 'w', encoding="utf8") as f:
            write_to_file(routes, f)
def main(options):
    """Match scheduled stops (route file) against simulated stops (stopinfo
    file) and print the merged table plus the departure-delay summary."""
    nan = float("nan")
    scheduleColumns = [
        'vehID',
        'tripId',     # tripId of current stop or set by earlier stop
        'stopID',     # busStop id or lane,pos
        'priorStop',  # busStop id or lane,pos
        'arrival',    # route-input
        'until',      # route-input
    ]
    simColumns = scheduleColumns[:3] + [
        'started',  # stop-output
        'ended',    # stop-input
    ]

    scheduleRecords = []
    lastTripId = dict()   # vehID -> lastTripId
    lastStop = dict()     # vehID -> lastStopID
    for vehicle in parse(options.routeFile, ['vehicle', 'trip'], heterogeneous=True,
                         attr_conversions=ATTR_CONVERSIONS):
        if vehicle.stop is None:
            continue
        for stop in vehicle.stop:
            vehID = vehicle.id
            # a stop without tripId inherits the one from the previous stop
            tripId = stop.getAttributeSecure("tripId", lastTripId.get(vehID))
            lastTripId[vehID] = tripId
            stopID = getStopID(stop)
            previous = lastStop.get(vehID)
            lastStop[vehID] = stopID
            scheduleRecords.append((vehID, tripId, stopID, previous,
                                    stop.getAttributeSecure("arrival", nan),
                                    stop.getAttributeSecure("until", nan)))
    print("Parsed %s stops" % len(scheduleRecords))

    simRecords = []
    lastTripId = dict()
    lastStop = dict()
    for stop in parse(options.stopFile, "stopinfo", heterogeneous=True,
                      attr_conversions=ATTR_CONVERSIONS):
        vehID = stop.id
        tripId = stop.getAttributeSecure("tripId", lastTripId.get(vehID))
        lastTripId[vehID] = tripId
        stopID = getStopID(stop)
        lastStop[vehID] = stopID
        simRecords.append((vehID, tripId, stopID,
                           stop.getAttributeSecure("started", nan),
                           stop.getAttributeSecure("ended", nan)))
    print("Parsed %s stopinfos" % len(simRecords))

    dfSchedule = pd.DataFrame.from_records(scheduleRecords, columns=scheduleColumns)
    dfSim = pd.DataFrame.from_records(simRecords, columns=simColumns)
    # merge on common columns vehID, tripId, stopID
    merged = pd.merge(dfSchedule, dfSim, on=scheduleColumns[:3], how="outer")
    print("Found %s matches" % len(merged))
    if options.verbose:
        print(merged)
    print(departDelay(merged))