def _is_cached_city_usable(self, city):
    """Check whether the cached stations still exist in the OSM data
    and have not moved too far away.
    """
    city_cache_data = self.cache[city.name]
    for stoparea_id, cached_stoparea in city_cache_data['stops'].items():
        station_id = cached_stoparea['osm_type'][0] + str(
            cached_stoparea['osm_id']
        )
        city_station = city.elements.get(station_id)
        if not city_station or not Station.is_station(
            city_station, city.modes
        ):
            return False
        station_coords = el_center(city_station)
        cached_station_coords = tuple(
            cached_stoparea[coord] for coord in ('lon', 'lat')
        )
        displacement = distance(station_coords, cached_station_coords)
        if displacement > DISPLACEMENT_TOLERANCE:
            return False
    return True
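# The distance() helper used throughout these processors is defined
# elsewhere in the package and is not shown here. A minimal sketch of the
# behaviour the code above assumes: it takes two (lon, lat) pairs in
# degrees and returns the great-circle distance in metres. The name
# haversine_distance and the earth_radius value below are illustrative
# assumptions, not the project's actual API.
import math


def haversine_distance(p1, p2, earth_radius=6378137):
    # p1, p2 are (lon, lat) tuples in degrees
    lon1, lat1 = (math.radians(c) for c in p1)
    lon2, lat2 = (math.radians(c) for c in p2)
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = (
        math.sin(dlat / 2) ** 2
        + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    )
    return 2 * earth_radius * math.asin(math.sqrt(a))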
def process(cities, transfers, cache_path):
    """cities - list of City instances;
    transfers - list of sets of StopArea.id;
    cache_path - path to json-file with good cities cache or None.
    """

    def format_colour(c):
        return c[1:] if c else None

    def find_exits_for_platform(center, nodes):
        exits = []
        min_distance = None
        for n in nodes:
            d = distance(center, (n['lon'], n['lat']))
            if not min_distance:
                min_distance = d * 2 / 3
            elif d < min_distance:
                continue
            too_close = False
            for e in exits:
                d = distance((e['lon'], e['lat']), (n['lon'], n['lat']))
                if d < min_distance:
                    too_close = True
                    break
            if not too_close:
                exits.append(n)
        return exits

    cache = MapsmeCache(cache_path, cities)

    stop_areas = {}  # stoparea el_id -> StopArea instance
    stops = {}  # stoparea el_id -> stop jsonified data
    networks = []
    good_cities = [c for c in cities if c.is_good()]
    platform_nodes = {}
    cache.provide_stops_and_networks(stops, networks)

    for city in good_cities:
        network = {'network': city.name, 'routes': [], 'agency_id': city.id}
        cache.initialize_good_city(city.name, network)
        for route in city:
            routes = {
                'type': route.mode,
                'ref': route.ref,
                'name': route.name,
                'colour': format_colour(route.colour),
                'route_id': uid(route.id, 'r'),
                'itineraries': [],
            }
            if route.infill:
                routes['casing'] = routes['colour']
                routes['colour'] = format_colour(route.infill)
            for i, variant in enumerate(route):
                itin = []
                for stop in variant:
                    stop_areas[stop.stoparea.id] = stop.stoparea
                    cache.link_stop_with_city(stop.stoparea.id, city.name)
                    itin.append(
                        [
                            uid(stop.stoparea.id),
                            round(stop.distance / SPEED_ON_LINE),
                        ]
                    )
                    # Make exits from platform nodes
                    # if we don't have proper exits
                    if (
                        len(stop.stoparea.entrances)
                        + len(stop.stoparea.exits)
                        == 0
                    ):
                        for pl in stop.stoparea.platforms:
                            pl_el = city.elements[pl]
                            if pl_el['type'] == 'node':
                                pl_nodes = [pl_el]
                            elif pl_el['type'] == 'way':
                                pl_nodes = [
                                    city.elements.get('n{}'.format(n))
                                    for n in pl_el['nodes']
                                ]
                            else:
                                pl_nodes = []
                                for m in pl_el['members']:
                                    if m['type'] != 'way':
                                        continue
                                    way_id = '{}{}'.format(
                                        m['type'][0], m['ref']
                                    )
                                    if way_id not in city.elements:
                                        continue
                                    pl_nodes.extend(
                                        city.elements.get('n{}'.format(n))
                                        for n in city.elements[way_id][
                                            'nodes'
                                        ]
                                    )
                            pl_nodes = [n for n in pl_nodes if n]
                            platform_nodes[pl] = find_exits_for_platform(
                                stop.stoparea.centers[pl], pl_nodes
                            )

                routes['itineraries'].append(
                    {
                        'stops': itin,
                        'interval': round(
                            (variant.interval or DEFAULT_INTERVAL) * 60
                        ),
                    }
                )
            network['routes'].append(routes)
        networks.append(network)

    for stop_id, stop in stop_areas.items():
        st = {
            'name': stop.name,
            'int_name': stop.int_name,
            'lat': stop.center[1],
            'lon': stop.center[0],
            'osm_type': OSM_TYPES[stop.station.id[0]][1],
            'osm_id': int(stop.station.id[1:]),
            'id': uid(stop.id),
            'entrances': [],
            'exits': [],
        }
        for e_l, k in ((stop.entrances, 'entrances'), (stop.exits, 'exits')):
            for e in e_l:
                if e[0] == 'n':
                    st[k].append(
                        {
                            'osm_type': 'node',
                            'osm_id': int(e[1:]),
                            'lon': stop.centers[e][0],
                            'lat': stop.centers[e][1],
                            'distance': ENTRANCE_PENALTY
                            + round(
                                distance(stop.centers[e], stop.center)
                                / SPEED_TO_ENTRANCE
                            ),
                        }
                    )
        if len(stop.entrances) + len(stop.exits) == 0:
            if stop.platforms:
                for pl in stop.platforms:
                    for n in platform_nodes[pl]:
                        for k in ('entrances', 'exits'):
                            st[k].append(
                                {
                                    'osm_type': n['type'],
                                    'osm_id': n['id'],
                                    'lon': n['lon'],
                                    'lat': n['lat'],
                                    'distance': ENTRANCE_PENALTY
                                    + round(
                                        distance(
                                            (n['lon'], n['lat']),
                                            stop.center,
                                        )
                                        / SPEED_TO_ENTRANCE
                                    ),
                                }
                            )
            else:
                for k in ('entrances', 'exits'):
                    st[k].append(
                        {
                            'osm_type': OSM_TYPES[stop.station.id[0]][1],
                            'osm_id': int(stop.station.id[1:]),
                            'lon': stop.centers[stop.id][0],
                            'lat': stop.centers[stop.id][1],
                            'distance': 60,
                        }
                    )

        stops[stop_id] = st
        cache.add_stop(stop_id, st)

    # (stoparea1_uid, stoparea2_uid) -> time; uid1 < uid2
    pairwise_transfers = {}
    for t_set in transfers:
        t = list(t_set)
        for t_first in range(len(t) - 1):
            for t_second in range(t_first + 1, len(t)):
                stoparea1 = t[t_first]
                stoparea2 = t[t_second]
                if stoparea1.id in stops and stoparea2.id in stops:
                    uid1 = uid(stoparea1.id)
                    uid2 = uid(stoparea2.id)
                    uid1, uid2 = sorted([uid1, uid2])
                    transfer_time = TRANSFER_PENALTY + round(
                        distance(stoparea1.center, stoparea2.center)
                        / SPEED_ON_TRANSFER
                    )
                    pairwise_transfers[(uid1, uid2)] = transfer_time
                    cache.add_transfer(uid1, uid2, transfer_time)

    cache.provide_transfers(pairwise_transfers)
    cache.save()

    pairwise_transfers = [
        (stop1_uid, stop2_uid, transfer_time)
        for (stop1_uid, stop2_uid), transfer_time
        in pairwise_transfers.items()
    ]

    result = {
        'stops': list(stops.values()),
        'transfers': pairwise_transfers,
        'networks': networks,
    }
    return result
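# process() above returns a plain dict with 'stops', 'transfers' and
# 'networks' keys. A hypothetical driver showing how that result could be
# serialized; the real project wires this up in its own CLI, and
# write_mapsme_json is not part of the module's API.
import json


def write_mapsme_json(cities, transfers, out_path, cache_path=None):
    result = process(cities, transfers, cache_path)
    with open(out_path, 'w', encoding='utf-8') as f:
        json.dump(result, f, ensure_ascii=False, indent=1)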
def process(cities, transfers, filename, cache_path):
    """Generate all output and save to file.

    :param cities: List of City instances
    :param transfers: List of sets of StopArea.id
    :param filename: Path to file to save the result
    :param cache_path: Path to json-file with good cities cache or None.
    """
    # TODO: make universal cache for all processors,
    #       and apply the cache to GTFS

    # Keys correspond to GTFS file names
    gtfs_data = {key: [] for key in GTFS_COLUMNS.keys()}

    gtfs_data["calendar"].append(
        dict_to_row(
            {
                "service_id": "always",
                "monday": 1,
                "tuesday": 1,
                "wednesday": 1,
                "thursday": 1,
                "friday": 1,
                "saturday": 1,
                "sunday": 1,
                "start_date": "19700101",
                "end_date": "30000101",
            },
            "calendar",
        )
    )

    all_stops = {}  # stop (stop area center or station) el_id -> stop data
    good_cities = [c for c in cities if c.is_good]

    def add_stop_gtfs(route_stop, city):
        """Add a stop to all_stops.
        If it's not a station, also add the parent station
        if it has not been added yet.
        Return the GTFS stop_id.
        """
        # In case a StopArea is derived solely from a railway=station
        # object, we generate a GTFS platform (stop), a station and
        # sometimes an entrance from the same object, so use suffixes.
        station_id = f"{route_stop.stoparea.id}_st"
        platform_id = f"{route_stop.stoparea.id}_plt"

        if station_id not in all_stops:
            station_name = route_stop.stoparea.station.name
            station_center = round_coords(route_stop.stoparea.center)

            station_gtfs = {
                "stop_id": station_id,
                "stop_code": station_id,
                "stop_name": station_name,
                "stop_lat": station_center[1],
                "stop_lon": station_center[0],
                "location_type": 1,  # station in GTFS terms
            }
            all_stops[station_id] = station_gtfs

            platform_gtfs = {
                "stop_id": platform_id,
                "stop_code": platform_id,
                "stop_name": station_name,
                "stop_lat": station_center[1],
                "stop_lon": station_center[0],
                "location_type": 0,  # stop/platform in GTFS terms
                "parent_station": station_id,
            }
            all_stops[platform_id] = platform_gtfs

            osm_entrance_ids = (
                route_stop.stoparea.entrances | route_stop.stoparea.exits
            )
            if not osm_entrance_ids:
                entrance_id = f"{route_stop.stoparea.id}_egress"
                entrance_gtfs = {
                    "stop_id": entrance_id,
                    "stop_code": entrance_id,
                    "stop_name": station_name,
                    "stop_lat": station_center[1],
                    "stop_lon": station_center[0],
                    "location_type": 2,
                    "parent_station": station_id,
                }
                all_stops[entrance_id] = entrance_gtfs
            else:
                for osm_entrance_id in osm_entrance_ids:
                    entrance = city.elements[osm_entrance_id]
                    entrance_id = (
                        f"{osm_entrance_id}_{route_stop.stoparea.id}"
                    )
                    entrance_name = entrance["tags"].get("name")
                    if not entrance_name:
                        entrance_name = station_name
                        ref = entrance["tags"].get("ref")
                        if ref:
                            entrance_name += f" {ref}"
                    center = el_center(entrance)
                    center = round_coords(center)
                    entrance_gtfs = {
                        "stop_id": entrance_id,
                        "stop_code": entrance_id,
                        "stop_name": entrance_name,
                        "stop_lat": center[1],
                        "stop_lon": center[0],
                        "location_type": 2,
                        "parent_station": station_id,
                    }
                    all_stops[entrance_id] = entrance_gtfs

        return platform_id

    # agency, routes, trips, stop_times, frequencies, shapes
    for city in good_cities:
        agency = {"agency_id": city.id, "agency_name": city.name}
        gtfs_data["agency"].append(dict_to_row(agency, "agency"))

        for city_route in city:
            route = {
                "route_id": city_route.id,
                "agency_id": agency["agency_id"],
                "route_type": 12 if city_route.mode == "monorail" else 1,
                "route_short_name": city_route.ref,
                "route_long_name": city_route.name,
                "route_color": format_colour(city_route.colour),
            }
            gtfs_data["routes"].append(dict_to_row(route, "routes"))

            for variant in city_route:
                shape_id = variant.id[1:]  # truncate leading 'r'
                trip = {
                    "trip_id": variant.id,
                    "route_id": route["route_id"],
                    "service_id": "always",
                    "shape_id": shape_id,
                }
                gtfs_data["trips"].append(dict_to_row(trip, "trips"))

                tracks = variant.get_extended_tracks()
                tracks = variant.get_truncated_tracks(tracks)

                for i, (lon, lat) in enumerate(tracks):
                    lon, lat = round_coords((lon, lat))
                    gtfs_data["shapes"].append(
                        dict_to_row(
                            {
                                "shape_id": shape_id,
                                "trip_id": variant.id,
                                "shape_pt_lat": lat,
                                "shape_pt_lon": lon,
                                "shape_pt_sequence": i,
                            },
                            "shapes",
                        )
                    )

                start_time = variant.start_time or DEFAULT_TRIP_START_TIME
                end_time = variant.end_time or DEFAULT_TRIP_END_TIME
                if end_time <= start_time:
                    end_time = (end_time[0] + 24, end_time[1])
                start_time = f"{start_time[0]:02d}:{start_time[1]:02d}:00"
                end_time = f"{end_time[0]:02d}:{end_time[1]:02d}:00"

                gtfs_data["frequencies"].append(
                    dict_to_row(
                        {
                            "trip_id": variant.id,
                            "start_time": start_time,
                            "end_time": end_time,
                            "headway_secs": variant.interval
                            or DEFAULT_INTERVAL,
                        },
                        "frequencies",
                    )
                )

                for stop_sequence, route_stop in enumerate(variant):
                    gtfs_platform_id = add_stop_gtfs(route_stop, city)

                    gtfs_data["stop_times"].append(
                        dict_to_row(
                            {
                                "trip_id": variant.id,
                                "stop_sequence": stop_sequence,
                                "shape_dist_traveled": route_stop.distance,
                                "stop_id": gtfs_platform_id,
                            },
                            "stop_times",
                        )
                    )

    # stops
    gtfs_data["stops"].extend(
        dict_to_row(row, "stops") for row in all_stops.values()
    )

    # transfers
    for stoparea_set in transfers:
        for stoparea1 in stoparea_set:
            for stoparea2 in stoparea_set:
                if stoparea1.id < stoparea2.id:
                    transfer_time = TRANSFER_PENALTY + round(
                        distance(stoparea1.center, stoparea2.center)
                        / SPEED_ON_TRANSFER
                    )
                    for id1, id2 in (
                        (stoparea1.id, stoparea2.id),
                        (stoparea2.id, stoparea1.id),
                    ):
                        gtfs_data["transfers"].append(
                            dict_to_row(
                                {
                                    "from_stop_id": f"{id1}_st",
                                    "to_stop_id": f"{id2}_st",
                                    "transfer_type": 0,
                                    "min_transfer_time": transfer_time,
                                },
                                "transfers",
                            )
                        )

    make_gtfs(filename, gtfs_data)
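# dict_to_row() and make_gtfs() are defined elsewhere in this module and
# are not shown here. A sketch of the behaviour the code above assumes:
# dict_to_row() orders a record's values by the column list declared in
# GTFS_COLUMNS for the given table, and make_gtfs() writes each table as
# a CSV file inside a zip archive. The *_sketch names below are
# illustrative assumptions, not the module's actual implementation.
import csv
import io
import zipfile


def dict_to_row_sketch(record, table_name):
    # Missing keys become empty cells in the CSV output.
    return [record.get(column) for column in GTFS_COLUMNS[table_name]]


def make_gtfs_sketch(filename, gtfs_data):
    with zipfile.ZipFile(filename, 'w') as zf:
        for table_name, rows in gtfs_data.items():
            if not rows:
                continue
            buffer = io.StringIO()
            writer = csv.writer(buffer)
            writer.writerow(GTFS_COLUMNS[table_name])  # header row
            writer.writerows(rows)
            zf.writestr(f"{table_name}.txt", buffer.getvalue())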