def create_sections_geojson_dict(G, start_time_ut=None, end_time_ut=None):
    """Build a GeoJSON FeatureCollection (as a plain dict) of the stop-to-stop
    transit network sections of ``G``.

    Each edge of the combined stop-to-stop network becomes one LineString
    Feature whose properties are the edge's attributes plus ``from_stop_I`` /
    ``to_stop_I``. Features are ordered by ``ROUTE_TYPE_TO_ZORDER`` so that
    higher-zorder route types are appended (and typically drawn) last.

    Parameters
    ----------
    G : gtfspy.GTFS
    start_time_ut : int, optional
        Start of the time interval (Unix time) passed through to
        ``combined_stop_to_stop_transit_network``.
    end_time_ut : int, optional
        End of the time interval (Unix time).

    Returns
    -------
    dict
        A GeoJSON FeatureCollection dictionary.
    """
    multi_di_graph = combined_stop_to_stop_transit_network(G,
                                                           start_time_ut=start_time_ut,
                                                           end_time_ut=end_time_ut)
    stops = G.get_table("stops")
    # GeoJSON coordinate order is [lon, lat]
    stop_I_to_coords = {row.stop_I: [row.lon, row.lat] for row in stops.itertuples()}
    edges = list(multi_di_graph.edges(data=True))
    # Sort so that route types with a higher z-order come last in the feature list.
    edges.sort(key=lambda edge: ROUTE_TYPE_TO_ZORDER[edge[2]['route_type']])
    features = []
    for from_stop_I, to_stop_I, edge_data in edges:
        geometry = {
            "type": "LineString",
            'coordinates': [stop_I_to_coords[from_stop_I],
                            stop_I_to_coords[to_stop_I]]
        }
        # Copy the edge attributes so the graph's own attribute dicts are not
        # mutated as a side effect (the original code wrote back into them).
        properties = dict(edge_data)
        # Coerce keys/values to JSON-serializable builtins.
        properties['route_I_counts'] = {str(key): int(value)
                                        for key, value in edge_data['route_I_counts'].items()}
        properties['from_stop_I'] = int(from_stop_I)
        properties['to_stop_I'] = int(to_stop_I)
        features.append({
            "type": "Feature",
            "geometry": geometry,
            "properties": properties
        })
    return {"type": "FeatureCollection", "features": features}
def _write_stats(self):
    """Compute summary statistics of the daily extract and write them as a
    single-row ';'-separated CSV to ``self.stats_fname``.

    Statistics include stop/connection/link counts, total network length,
    average link length, vehicle kilometers, and the buffer/extract metadata
    taken from this pipeline object. The stats are validated with
    ``self.__verify_stats`` before writing.
    """
    G = GTFS(self.day_db_path)
    net = combined_stop_to_stop_transit_network(G)
    # Materialize once: we need both the count and an iteration pass.
    sections = list(net.edges(data=True))
    n_links = len(sections)
    section_lengths = []
    vehicle_kilometers_per_section = []
    for from_I, to_I, data in sections:
        section_lengths.append(data['d'])
        # 'd' is in meters; convert vehicle-meters to vehicle-kilometers.
        vehicle_kilometers_per_section.append(data['n_vehicles'] * data['d'] / 1000.)
    # Guard against an empty network so the average does not divide by zero.
    link_distance_avg_m = int(sum(section_lengths) / n_links) if n_links else 0
    stats = {"n_stops": len(G.stops(require_reference_in_stop_times=True)),
             "n_connections": len(G.get_transit_events()),
             "n_links": n_links,
             "network_length_m": sum(section_lengths),
             "link_distance_avg_m": link_distance_avg_m,
             "vehicle_kilometers": sum(vehicle_kilometers_per_section),
             "buffer_center_lat": self.lat,
             "buffer_center_lon": self.lon,
             "buffer_radius_km": self.buffer_distance,
             "extract_start_date": self.get_weekly_extract_start_date().strftime("%Y-%m-%d")
             }
    self.__verify_stats(stats)
    # One-row DataFrame: each stat becomes a column; columns written sorted
    # so the CSV layout is stable across runs.
    df = pandas.DataFrame.from_dict({key: [value] for key, value in stats.items()})
    df.to_csv(self.stats_fname, sep=";", columns=list(sorted(stats.keys())), index=False)
def write_combined_transit_stop_to_stop_network(gtfs, output_path, fmt=None):
    """Write the combined stop-to-stop transit network of ``gtfs`` to disk.

    Parameters
    ----------
    gtfs : gtfspy.GTFS
    output_path : str
    fmt: None, optional
        defaulting to "edg" and writing results as ".edg" files
        If "csv" csv files are produced instead
    """
    output_format = "edg" if fmt is None else fmt
    graph = combined_stop_to_stop_transit_network(gtfs)
    _write_stop_to_stop_network_edges(graph, output_path, fmt=output_format)
def test_combined_stop_to_stop_transit_network(self):
    """The combined network is a MultiDiGraph and every edge carries a route_type."""
    combined_net = networks.combined_stop_to_stop_transit_network(self.gtfs)
    self.assertIsInstance(combined_net, networkx.MultiDiGraph)
    for _u, _v, edge_attrs in combined_net.edges(data=True):
        self.assertIn("route_type", edge_attrs)
# NOTE(review): this fragment appears to start mid-scope (city_data,
# city_data_dict, cities, pickle_cache_file are defined earlier, likely
# inside a loop over cities) — indentation below is reconstructed; verify
# against the full file.
feeds = get_feeds_from_to_publish_tuple(city_data)
pipeline = ExtractPipeline(city_data, feeds)
try:
    # Open the daily extract database and summarize it into city_data_dict.
    day_G = GTFS(pipeline.day_db_path)
    trip_counts_per_day = day_G.get_trip_counts_per_day()
    print(trip_counts_per_day)
    assert len(trip_counts_per_day) <= 3
    # Pick the date with the maximum trip count as the "Extract date".
    city_data_dict["Extract date"] = str(trip_counts_per_day.loc[
        trip_counts_per_day['trip_counts'] == max(
            trip_counts_per_day['trip_counts'])].iloc[0]['date'])
    print(city_data_dict["Extract date"].replace(" 00:00:00", ""))
    city_data_dict["n_stops"] = len(
        day_G.stops(require_reference_in_stop_times=True))
    city_data_dict["n_connections"] = len(day_G.get_transit_events())
    # Number of edges in the combined stop-to-stop network.
    n_links = len(
        combined_stop_to_stop_transit_network(day_G).edges(data=True))
    city_data_dict["n_links"] = int(n_links)
except FileNotFoundError as e:
    # Best-effort: missing extract DB is recorded as "NaN", not raised.
    print("File " + pipeline.day_db_path + " was not found")
    city_data_dict["Extract date"] = "NaN"
cities.append(city_data_dict)
# Cache the accumulated city stats with the highest pickle protocol (-1).
pickle.dump(cities, open(pickle_cache_file, 'wb'), -1)


def spaces(x):
    # Format an integer with "\," (LaTeX thin space) as the thousands
    # separator, built by grouping the reversed digit string in threes.
    # NOTE(review): the enclosing try's except clause is beyond this view.
    try:
        num_as_str_reversed = str(int(x))[::-1]
        num_with_spaces = ',\\'.join(
            num_as_str_reversed[i:i + 3] for i in
            range(0, len(num_as_str_reversed), 3))
        return num_with_spaces[::-1]
# get elementary bus events (connections) taking place within a given time interval: all_events = networks.temporal_network(g, start_time_ut=start_ut, end_time_ut=end_ut) print("Number of elementary PT events during rush hour in Kuopio: ", len(all_events)) # get elementary bus events (connections) taking place within a given time interval: tram_events = networks.temporal_network(g, start_time_ut=start_ut, end_time_ut=end_ut, route_type=route_types.TRAM) assert (len(tram_events) == 0 ) # there should be no trams in our example city (Kuopio, Finland) # construct a networkx graph print("\nConstructing a combined stop_to_stop_network") graph = networks.combined_stop_to_stop_transit_network(g, start_time_ut=start_ut, end_time_ut=end_ut) print("Number of edges: ", len(graph.edges())) print("Number of nodes: ", len(graph.nodes())) print("Example edge: ", list(graph.edges(data=True))[0]) print("Example node: ", list(graph.nodes(data=True))[0]) ################################################# # See also other functions in gtfspy.networks ! # #################################################