def main(options):
    """Build a SUMO public-transport schedule from GTFS input.

    Two modes of operation:
      * with options.osm_routes: map the GTFS data onto PT routes imported
        from OSM (delegated to the gtfs2osm helpers);
      * without: generate FCD traces from the GTFS data (gtfs2fcd), map them
        onto the network and write additional/route output files.
    """
    if options.verbose:
        print('Loading net')
    net = sumolib.net.readNet(options.network)

    if options.osm_routes:
        # Import PT from GTFS and OSM routes
        if not options.bbox:
            # No bounding box given: derive one from the network extent,
            # converted to lon/lat as expected by the GTFS/OSM helpers.
            BBoxXY = net.getBBoxXY()
            BBoxLonLat = (net.convertXY2LonLat(BBoxXY[0][0], BBoxXY[0][1]),
                          net.convertXY2LonLat(BBoxXY[1][0], BBoxXY[1][1]))
            options.bbox = (BBoxLonLat[0][0], BBoxLonLat[0][1],
                            BBoxLonLat[1][0], BBoxLonLat[1][1])
        else:
            # bbox given on the command line as "lon,lat,lon,lat"
            options.bbox = [float(coord) for coord in options.bbox.split(",")]

        gtfsZip = zipfile.ZipFile(sumolib.open(options.gtfs, False))
        (routes, trips_on_day, shapes,
         stops, stop_times) = gtfs2osm.import_gtfs(options, gtfsZip)
        (gtfs_data, trip_list,
         filtered_stops,
         shapes, shapes_dict) = gtfs2osm.filter_gtfs(options, routes,
                                                     trips_on_day, shapes,
                                                     stops, stop_times)
        osm_routes = gtfs2osm.import_osm(options, net)
        (mapped_routes, mapped_stops,
         missing_stops, missing_lines) = gtfs2osm.map_gtfs_osm(
            options, net, osm_routes, gtfs_data, shapes, shapes_dict, filtered_stops)
        gtfs2osm.write_gtfs_osm_outputs(options, mapped_routes, mapped_stops,
                                        missing_stops, missing_lines,
                                        gtfs_data, trip_list, shapes_dict, net)
    else:
        # Import PT from GTFS only
        if not options.skip_fcd:
            gtfs2fcd.main(options)
        edgeMap, typedNets = splitNet(options)
        if os.path.exists(options.mapperlib):
            if not options.skip_map:
                mapFCD(options, typedNets)
            routes = defaultdict(list)
            for datPath in glob.glob(os.path.join(options.map_output, "*.dat")):
                # FIX: close the mapper output files (the original iterated a
                # bare open() and leaked the file handles).
                with open(datPath) as datFile:
                    for line in datFile:
                        # tab-separated columns:
                        # time, edge, speed, coverage, id, minute_of_week
                        fields = line.split('\t')[:6]
                        routes[fields[4]].append(fields[1])
        else:
            if options.mapperlib != "tracemapper":
                print("Warning! No mapping library found, falling back to tracemapper.")
            routes = traceMap(options, typedNets)

        if options.poly_output:
            generate_polygons(net, routes, options.poly_output)

        with open(options.additional_output, 'w', encoding="utf8") as rout:
            sumolib.xml.writeHeader(rout, os.path.basename(__file__), "additional")
            stops = map_stops(options, net, routes, rout)
            for vehID, edges in routes.items():
                if edges:
                    rout.write(' <route id="%s" edges="%s">\n' %
                               (vehID, " ".join([edgeMap[e] for e in edges])))
                    offset = None
                    for stop in stops[vehID]:
                        if offset is None:
                            # stop times are written relative to the first stop
                            offset = stop[1]
                        rout.write(' <stop busStop="%s" duration="%s" until="%s"/>\n' %
                                   (stop[0], options.duration, stop[1] - offset))
                    rout.write(' </route>\n')
                else:
                    print("Warning! Empty route", vehID)
            rout.write('</additional>\n')
        filter_trips(options, routes, stops, options.route_output,
                     options.begin, options.end)
def main(options):
    """Convert a GTFS feed into per-mode SUMO FCD traces and trip files.

    Reads the GTFS tables straight from the zip, selects the trips running on
    options.date (calendar plus calendar_dates exceptions), merges stop, trip
    and route information, and writes one <fcd-export> file and one <routes>
    trip file per transport mode into options.fcd.  Optionally exports gpsdat
    CSVs via traceExporter and a vType definition file.
    """
    if options.verbose:
        print('Loading GTFS data "%s"' % options.gtfs)
    gtfsZip = zipfile.ZipFile(sumolib.open(options.gtfs, False))
    routes = pd.read_csv(gtfsZip.open('routes.txt'), dtype=str)
    stops = pd.read_csv(gtfsZip.open('stops.txt'), dtype=str)
    # arrival/departure times are converted to seconds since midnight on read
    stop_times = pd.read_csv(gtfsZip.open('stop_times.txt'),
                             converters={'trip_id': str, 'arrival_time': time2sec,
                                         'departure_time': time2sec,
                                         'stop_id': str, 'stop_sequence': int})
    trips = pd.read_csv(gtfsZip.open('trips.txt'), dtype=str)
    calendar = pd.read_csv(gtfsZip.open('calendar.txt'), dtype=str)
    calendar_dates = pd.read_csv(gtfsZip.open('calendar_dates.txt'), dtype=str)
    # currently not used:
    # agency = pd.read_csv(gtfsZip.open('agency.txt'), dtype=str)
    # transfers = pd.read_csv(gtfsZip.open('transfers.txt'), dtype=str)

    # Merging the tables: determine which services (and hence trips) run on
    # the requested day, honouring calendar_dates exceptions
    # (exception_type 2 = removed, 1 = added).
    weekday = 'monday tuesday wednesday thursday friday saturday sunday'.split()[
        datetime.datetime.strptime(options.date, "%Y%m%d").weekday()]
    removed = calendar_dates[(calendar_dates.date == options.date) &
                             (calendar_dates.exception_type == '2')]
    services = calendar[(calendar.start_date <= options.date) &
                        (calendar.end_date >= options.date) &
                        (calendar[weekday] == '1') &
                        (~calendar.service_id.isin(removed.service_id))]
    added = calendar_dates[(calendar_dates.date == options.date) &
                           (calendar_dates.exception_type == '1')]
    trips_on_day = trips[trips.service_id.isin(services.service_id) |
                         trips.service_id.isin(added.service_id)]

    if 'fare_stops.txt' in gtfsZip.namelist():
        zones = pd.read_csv(gtfsZip.open('fare_stops.txt'), dtype=str)
        stops_merged = pd.merge(pd.merge(stops, stop_times, on='stop_id'),
                                zones, on='stop_id')
    else:
        # no fare information available: keep the columns but leave them empty
        stops_merged = pd.merge(stops, stop_times, on='stop_id')
        stops_merged['fare_zone'] = ''
        stops_merged['fare_token'] = ''
        stops_merged['start_char'] = ''
    trips_routes_merged = pd.merge(trips_on_day, routes, on='route_id')
    full_data_merged = pd.merge(stops_merged, trips_routes_merged, on='trip_id')[[
        'trip_id', 'route_id', 'route_short_name', 'route_type',
        'stop_id', 'stop_name', 'stop_lat', 'stop_lon', 'stop_sequence',
        'fare_zone', 'fare_token', 'start_char',
        'arrival_time', 'departure_time']].drop_duplicates()

    # GTFS route_type -> SUMO mode name
    gtfs_modes = {
        # modes according to https://developers.google.com/transit/gtfs/reference/#routestxt
        '0': 'tram',
        '1': 'subway',
        '2': 'rail',
        '3': 'bus',
        '4': 'ship',
        # '5': 'cableTram',
        # '6': 'aerialLift',
        # '7': 'funicular',
        # modes used in Berlin and MDV see
        # https://developers.google.com/transit/gtfs/reference/extended-route-types
        '100': 'rail',        # DB
        '109': 'light_rail',  # S-Bahn
        '400': 'subway',      # U-Bahn
        '1000': 'ship',       # ferry (Faehre)
        # additional modes used in Hamburg
        '402': 'subway',      # U-Bahn
        '1200': 'ship',       # ferry (Faehre)
        # modes used by hafas
        's': 'light_rail',
        'RE': 'rail',
        'RB': 'rail',
        'IXB': 'rail',        # tbd
        'ICE': 'rail_electric',
        'IC': 'rail_electric',
        'IRX': 'rail',        # tbd
        'EC': 'rail',
        'NJ': 'rail',         # tbd
        'RHI': 'rail',        # tbd
        'DPN': 'rail',        # tbd
        'SCH': 'rail',        # tbd
        'Bsv': 'rail',        # tbd
        'KAT': 'rail',        # tbd
        'AIR': 'rail',        # tbd
        'DPS': 'rail',        # tbd
        'lt': 'light_rail',   # tbd
        'BUS': 'bus',         # tbd
        'Str': 'tram',        # tbd
        'DPF': 'rail',        # tbd
    }
    # bus and tram modes from
    # https://developers.google.com/transit/gtfs/reference/extended-route-types
    for i in range(700, 717):
        gtfs_modes[str(i)] = 'bus'
    for i in range(900, 907):
        gtfs_modes[str(i)] = 'tram'

    fcdFile = {}
    tripFile = {}
    if not os.path.exists(options.fcd):
        os.makedirs(options.fcd)
    seenModes = set()
    modes = set(options.modes.split(",") if options.modes else gtfs_modes.values())
    for mode in modes:
        filePrefix = os.path.join(options.fcd, mode)
        fcdFile[mode] = io.open(filePrefix + '.fcd.xml', 'w', encoding="utf8")
        sumolib.writeXMLHeader(fcdFile[mode], "gtfs2fcd.py")
        fcdFile[mode].write(u'<fcd-export>\n')
        if options.verbose:
            print('Writing fcd file "%s"' % fcdFile[mode].name)
        # FIX: open the trip file with an explicit encoding; route/line names
        # may be non-ASCII and the original relied on the platform default.
        tripFile[mode] = io.open(filePrefix + '.rou.xml', 'w', encoding="utf8")
        tripFile[mode].write(u"<routes>\n")

    timeIndex = 0
    for _, trip_data in full_data_merged.groupby(['route_id']):
        seqs = {}
        for trip_id, data in trip_data.groupby(['trip_id']):
            stopSeq = []
            buf = u""
            offset = 0
            firstDep = None
            for __, d in data.sort_values(by=['stop_sequence']).iterrows():
                arrivalSec = d.arrival_time + timeIndex
                stopSeq.append(d.stop_id)
                departureSec = d.departure_time + timeIndex
                until = 0 if firstDep is None else departureSec - timeIndex - firstDep
                buf += ((u' <timestep time="%s"><vehicle id="%s" x="%s" y="%s" until="%s" ' +
                         u'name=%s fareZone="%s" fareSymbol="%s" startFare="%s" speed="20"/></timestep>\n') %
                        (arrivalSec - offset, trip_id, d.stop_lon, d.stop_lat, until,
                         sumolib.xml.quoteattr(d.stop_name), d.fare_zone, d.fare_token, d.start_char))
                if firstDep is None:
                    firstDep = departureSec - timeIndex
                offset += departureSec - arrivalSec
            # d is the last stop row of this trip; route_type is constant per trip
            mode = gtfs_modes[d.route_type]
            if mode in modes:
                s = tuple(stopSeq)
                if s not in seqs:
                    # the first trip with a given stop sequence defines the route
                    seqs[s] = trip_id
                    fcdFile[mode].write(buf)
                    timeIndex = arrivalSec
                tripFile[mode].write(
                    u' <vehicle id="%s" route="%s" type="%s" depart="%s" line="%s_%s"/>\n' %
                    (trip_id, seqs[s], mode, firstDep, d.route_short_name, seqs[s]))
                seenModes.add(mode)

    # NOTE(review): the closing/export loop runs only when options.gpsdat is
    # set; without it the fcd/trip files are never terminated or closed —
    # confirm this is the intended behavior.
    if options.gpsdat:
        if not os.path.exists(options.gpsdat):
            os.makedirs(options.gpsdat)
        for mode in modes:
            fcdFile[mode].write(u'</fcd-export>\n')
            fcdFile[mode].close()
            tripFile[mode].write(u"</routes>\n")
            tripFile[mode].close()
            if mode in seenModes:
                traceExporter.main([
                    '', '--base-date', '0', '-i', fcdFile[mode].name,
                    '--gpsdat-output', os.path.join(options.gpsdat, "gpsdat_%s.csv" % mode)])
            else:
                # nothing was written for this mode: drop the empty files again
                os.remove(fcdFile[mode].name)
                os.remove(tripFile[mode].name)

    if options.vtype_output:
        with io.open(options.vtype_output, 'w', encoding="utf8") as vout:
            sumolib.xml.writeHeader(vout, root="additional")
            for mode in sorted(seenModes):
                vout.write(u' <vType id="%s" vClass="%s"/>\n' %
                           (mode, "rail_urban" if mode in ("light_rail", "subway") else mode))
            vout.write(u'</additional>\n')
def main(options):
    """Convert GTFS data into per-mode SUMO FCD traces and trip files.

    Loads the GTFS feed via gtfs2osm.import_gtfs, merges stop, trip and route
    tables, then writes one <fcd-export> file and one <routes> trip file per
    transport mode into options.fcd.  Optionally exports gpsdat CSVs via
    traceExporter and a vType definition file.
    """
    gtfsZip = zipfile.ZipFile(sumolib.open(options.gtfs, False))
    routes, trips_on_day, shapes, stops, stop_times = gtfs2osm.import_gtfs(
        options, gtfsZip)

    # convert the GTFS time strings to seconds since midnight
    stop_times['arrival_time'] = stop_times['arrival_time'].map(time2sec)
    stop_times['departure_time'] = stop_times['departure_time'].map(time2sec)

    if 'fare_stops.txt' in gtfsZip.namelist():
        zones = pd.read_csv(gtfsZip.open('fare_stops.txt'), dtype=str)
        stops_merged = pd.merge(pd.merge(stops, stop_times, on='stop_id'),
                                zones, on='stop_id')
    else:
        # no fare information available: keep the columns but leave them empty
        stops_merged = pd.merge(stops, stop_times, on='stop_id')
        stops_merged['fare_zone'] = ''
        stops_merged['fare_token'] = ''
        stops_merged['start_char'] = ''

    trips_routes_merged = pd.merge(trips_on_day, routes, on='route_id')
    full_data_merged = pd.merge(stops_merged, trips_routes_merged, on='trip_id')[[
        'trip_id', 'route_id', 'route_short_name', 'route_type',
        'stop_id', 'stop_name', 'stop_lat', 'stop_lon', 'stop_sequence',
        'fare_zone', 'fare_token', 'start_char',
        'arrival_time', 'departure_time']].drop_duplicates()

    fcdFile = {}
    tripFile = {}
    if not os.path.exists(options.fcd):
        os.makedirs(options.fcd)
    seenModes = set()
    modes = set(options.modes.split(",") if options.modes
                else gtfs2osm.GTFS2OSM_MODES.values())
    for mode in modes:
        filePrefix = os.path.join(options.fcd, mode)
        fcdFile[mode] = io.open(filePrefix + '.fcd.xml', 'w', encoding="utf8")
        sumolib.writeXMLHeader(fcdFile[mode], "gtfs2fcd.py")
        fcdFile[mode].write(u'<fcd-export>\n')
        if options.verbose:
            print('Writing fcd file "%s"' % fcdFile[mode].name)
        # FIX: open the trip file with an explicit encoding; route/line names
        # may be non-ASCII and the original relied on the platform default.
        tripFile[mode] = io.open(filePrefix + '.rou.xml', 'w', encoding="utf8")
        tripFile[mode].write(u"<routes>\n")

    timeIndex = 0
    for _, trip_data in full_data_merged.groupby(['route_id']):
        seqs = {}
        for trip_id, data in trip_data.groupby(['trip_id']):
            stopSeq = []
            buf = u""
            offset = 0
            firstDep = None
            for __, d in data.sort_values(by=['stop_sequence']).iterrows():
                arrivalSec = d.arrival_time + timeIndex
                stopSeq.append(d.stop_id)
                departureSec = d.departure_time + timeIndex
                until = 0 if firstDep is None else departureSec - timeIndex - firstDep
                buf += ((u' <timestep time="%s"><vehicle id="%s" x="%s" y="%s" until="%s" ' +
                         u'name=%s fareZone="%s" fareSymbol="%s" startFare="%s" speed="20"/></timestep>\n') %
                        (arrivalSec - offset, trip_id, d.stop_lon, d.stop_lat, until,
                         sumolib.xml.quoteattr(d.stop_name), d.fare_zone, d.fare_token, d.start_char))
                if firstDep is None:
                    firstDep = departureSec - timeIndex
                offset += departureSec - arrivalSec
            # d is the last stop row of this trip; route_type is constant per trip
            mode = gtfs2osm.GTFS2OSM_MODES[d.route_type]
            if mode in modes:
                s = tuple(stopSeq)
                if s not in seqs:
                    # the first trip with a given stop sequence defines the route
                    seqs[s] = trip_id
                    fcdFile[mode].write(buf)
                    timeIndex = arrivalSec
                tripFile[mode].write(
                    u' <vehicle id="%s" route="%s" type="%s" depart="%s" line="%s_%s"/>\n' %
                    (trip_id, seqs[s], mode, firstDep, d.route_short_name, seqs[s]))
                seenModes.add(mode)

    # NOTE(review): the closing/export loop runs only when options.gpsdat is
    # set; without it the fcd/trip files are never terminated or closed —
    # confirm this is the intended behavior.
    if options.gpsdat:
        if not os.path.exists(options.gpsdat):
            os.makedirs(options.gpsdat)
        for mode in modes:
            fcdFile[mode].write(u'</fcd-export>\n')
            fcdFile[mode].close()
            tripFile[mode].write(u"</routes>\n")
            tripFile[mode].close()
            if mode in seenModes:
                traceExporter.main([
                    '', '--base-date', '0', '-i', fcdFile[mode].name,
                    '--gpsdat-output', os.path.join(options.gpsdat, "gpsdat_%s.csv" % mode)])
            else:
                # nothing was written for this mode: drop the empty files again
                os.remove(fcdFile[mode].name)
                os.remove(tripFile[mode].name)

    if options.vtype_output:
        with io.open(options.vtype_output, 'w', encoding="utf8") as vout:
            sumolib.xml.writeHeader(vout, root="additional")
            for mode in sorted(seenModes):
                vout.write(u' <vType id="%s" vClass="%s"/>\n' %
                           (mode, gtfs2osm.OSM2SUMO_MODES[mode]))
            vout.write(u'</additional>\n')