import pandas as pd
import geopandas as gpd
from fiona.crs import from_epsg
# project-internal modules; exact import paths are assumed:
# import utils.geometry as geom_utils
# import utils.routing as rt
# import utils.networks as nw
# import utils.exposures as exps
# import utils.quiet_paths as qp

def get_short_quiet_paths(graph, from_latLon, to_latLon, logging=False):
    # note: relies on module-level edge_gdf, node_gdf, nts & db_costs
    from_xy = geom_utils.get_xy_from_lat_lon(from_latLon)
    to_xy = geom_utils.get_xy_from_lat_lon(to_latLon)
    # find origin and destination nodes from the closest edges
    orig_node = rt.get_nearest_node(graph, from_xy, edge_gdf, node_gdf, nts=nts, db_costs=db_costs, logging=logging)
    dest_node = rt.get_nearest_node(graph, to_xy, edge_gdf, node_gdf, nts=nts, db_costs=db_costs, logging=logging, orig_node=orig_node)
    # utils.print_duration(start_time, 'Origin & destination nodes set.')
    # start_time = time.time()
    # get shortest path
    path_list = []
    shortest_path = rt.get_shortest_path(graph, orig_node['node'], dest_node['node'], weight='length')
    path_geom_noises = nw.aggregate_path_geoms_attrs(graph, shortest_path, weight='length', noises=True)
    path_list.append({**path_geom_noises, **{'id': 'short_p', 'type': 'short', 'nt': 0}})
    # get quiet paths to list
    for nt in nts:
        noise_cost_attr = 'nc_' + str(nt)
        quiet_path = rt.get_shortest_path(graph, orig_node['node'], dest_node['node'], weight=noise_cost_attr)
        path_geom_noises = nw.aggregate_path_geoms_attrs(graph, quiet_path, weight=noise_cost_attr, noises=True)
        path_list.append({**path_geom_noises, **{'id': 'q_' + str(nt), 'type': 'quiet', 'nt': nt}})
    # remove linking edges of the origin / destination nodes
    nw.remove_new_node_and_link_edges(graph, orig_node)
    nw.remove_new_node_and_link_edges(graph, dest_node)
    # collect the shortest & quiet paths to a gdf
    paths_gdf = gpd.GeoDataFrame(path_list, crs=from_epsg(3879))
    paths_gdf = paths_gdf.drop_duplicates(subset=['type', 'total_length']).sort_values(
        by=['type', 'total_length'], ascending=[False, True])
    # add exposures to noise levels higher than the specified thresholds (dB)
    paths_gdf['th_noises'] = [exps.get_th_exposures(noises, [55, 60, 65, 70]) for noises in paths_gdf['noises']]
    # add percentages of cumulative distances of different noise levels
    paths_gdf['noise_pcts'] = paths_gdf.apply(lambda row: exps.get_noise_pcts(row['noises'], row['total_length']), axis=1)
    # add noise exposure index (same as noise cost with noise tolerance 1)
    paths_gdf['nei'] = [round(exps.get_noise_cost(noises=noises, db_costs=db_costs), 1) for noises in paths_gdf['noises']]
    paths_gdf['nei_norm'] = paths_gdf.apply(lambda row: exps.get_nei_norm(row.nei, row.total_length, db_costs), axis=1)
    return paths_gdf
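
# A minimal usage sketch (illustrative values; the real nts/db_costs come from the
# project's graph-build step, and edge_gdf/node_gdf must be set at module level):
#
#   nts = [0.1, 0.5, 1, 2]                                    # noise tolerance coefficients
#   db_costs = {50: 0.1, 55: 0.2, 60: 0.3, 65: 0.4, 70: 0.5}  # assumed cost per dB band
#   paths_gdf = get_short_quiet_paths(graph, (60.21, 24.95), (60.17, 24.94))
#   print(paths_gdf[['id', 'type', 'total_length', 'nei']])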

def set_graph_noise_costs(graph, edge_gdf, db_costs=None, nts=None):
    # calculate noise tolerance specific costs for the edges and update them to the graph
    edge_nc_gdf = edge_gdf.copy()
    for nt in nts:
        edge_nc_gdf['noise_cost'] = [
            exps.get_noise_cost(noises=noises, db_costs=db_costs, nt=nt)
            for noises in edge_nc_gdf['noises']
        ]
        edge_nc_gdf['tot_cost'] = edge_nc_gdf.apply(
            lambda row: round(row['length'] + row['noise_cost'], 2), axis=1)
        update_edge_costs_to_graph(edge_nc_gdf, graph, nt)
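
# A hedged sketch (an assumption, not the project's implementation) of what
# exps.get_noise_cost computes: noise exposure lengths (m) per dB band, weighted
# by db_costs and scaled by the noise tolerance coefficient nt. The 'nei'
# attribute used elsewhere equals this cost with nt=1.
def noise_cost_sketch(noises: dict, db_costs: dict, nt: float = 1.0) -> float:
    # noises maps dB band -> exposed length (m), db_costs maps dB band -> unit cost
    return round(sum(noises.get(db, 0) * cost for db, cost in db_costs.items()) * nt, 2)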

def get_short_quiet_paths(graph, from_latLon, to_latLon, edge_gdf, node_gdf, nts=[], db_costs={},
                          remove_geom_prop=False, only_short=False, logging=True):
    # get origin & destination coordinates in the projected CRS
    from_xy = geom_utils.get_xy_from_lat_lon(from_latLon)
    to_xy = geom_utils.get_xy_from_lat_lon(to_latLon)
    # find / create origin and destination nodes
    orig_node = get_nearest_node(graph, from_xy, edge_gdf, node_gdf, nts=nts, db_costs=db_costs)
    dest_node = get_nearest_node(graph, to_xy, edge_gdf, node_gdf, nts=nts, db_costs=db_costs, orig_node=orig_node)
    if orig_node is None:
        print('could not find origin node at', from_latLon)
        return None
    if dest_node is None:
        print('could not find destination node at', to_latLon)
        return None
    # get shortest path
    path_list = []
    shortest_path = get_shortest_path(graph, orig_node['node'], dest_node['node'], weight='length')
    if shortest_path is None:
        print('could not find shortest path')
        return None
    if only_short:
        return shortest_path
    path_geom_noises = nw.aggregate_path_geoms_attrs(graph, shortest_path, weight='length', noises=True)
    path_list.append({**path_geom_noises, **{'id': 'short_p', 'type': 'short', 'nt': 0}})
    # get quiet paths to list
    for nt in nts:
        noise_cost_attr = 'nc_' + str(nt)
        quiet_path = get_shortest_path(graph, orig_node['node'], dest_node['node'], weight=noise_cost_attr)
        path_geom_noises = nw.aggregate_path_geoms_attrs(graph, quiet_path, weight=noise_cost_attr, noises=True)
        path_list.append({**path_geom_noises, **{'id': 'q_' + str(nt), 'type': 'quiet', 'nt': nt}})
    # remove linking edges of the origin / destination nodes
    nw.remove_new_node_and_link_edges(graph, orig_node)
    nw.remove_new_node_and_link_edges(graph, dest_node)
    # collect the shortest & quiet paths to a gdf
    paths_gdf = gpd.GeoDataFrame(path_list, crs=from_epsg(3879))
    paths_gdf = paths_gdf.drop_duplicates(subset=['type', 'total_length']).sort_values(
        by=['type', 'total_length'], ascending=[False, True])
    # add exposures to noise levels higher than the specified thresholds (dB)
    paths_gdf['th_noises'] = [exps.get_th_exposures(noises, [55, 60, 65, 70]) for noises in paths_gdf['noises']]
    # add percentages of cumulative distances of different noise levels
    paths_gdf['noise_pcts'] = paths_gdf.apply(lambda row: exps.get_noise_pcts(row['noises'], row['total_length']), axis=1)
    # calculate mean noise level
    paths_gdf['mdB'] = paths_gdf.apply(lambda row: exps.get_mean_noise_level(row['noises'], row['total_length']), axis=1)
    # calculate noise exposure index (same as noise cost but without the noise tolerance coefficient)
    paths_gdf['nei'] = [round(exps.get_noise_cost(noises=noises, db_costs=db_costs), 1) for noises in paths_gdf['noises']]
    paths_gdf['nei_norm'] = paths_gdf.apply(lambda row: exps.get_nei_norm(row.nei, row.total_length, db_costs), axis=1)
    # gdf to dicts
    path_dicts = qp.get_geojson_from_q_path_gdf(paths_gdf)
    # group paths with nearly identical geometries
    unique_paths = qp.remove_duplicate_geom_paths(path_dicts, tolerance=30, remove_geom_prop=remove_geom_prop, logging=False)
    # calculate exposure differences to the shortest path
    path_comps = get_short_quiet_paths_comparison_for_dicts(unique_paths)
    # return the paths (GeoJSON features) with the shortest path & the linking offsets
    return {'paths': path_comps, 'shortest_path': shortest_path,
            'orig_offset': orig_node['offset'], 'dest_offset': dest_node['offset']}
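
# A minimal usage sketch of this routing-API variant (illustrative coordinates;
# graph, edge_gdf & node_gdf are assumed to come from the project's graph loader):
#
#   od_dict = get_short_quiet_paths(graph, (60.21, 24.95), (60.17, 24.94),
#                                   edge_gdf, node_gdf, nts=[0.5, 1, 2], db_costs=db_costs)
#   if od_dict is not None:
#       print(len(od_dict['paths']), 'paths,', 'orig offset:', od_dict['orig_offset'])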

def get_edge_noise_cost_attrs(nts, db_costs, edge_d, link_geom):
    cost_attrs = {}
    # estimate link noises based on the link length / edge length ratio and the edge noises
    cost_attrs['noises'] = interpolate_link_noises(link_geom, edge_d['geometry'], edge_d['noises'])
    # calculate noise tolerance specific noise costs
    for nt in nts:
        noise_cost = exps.get_noise_cost(noises=cost_attrs['noises'], db_costs=db_costs, nt=nt)
        cost_attrs['nc_' + str(nt)] = round(noise_cost + link_geom.length, 2)
    # validate that the summed noise exposure lengths match the link length
    noises_sum_len = exps.get_total_noises_len(cost_attrs['noises'])
    if (noises_sum_len - link_geom.length) > 0.1:
        print('link length mismatch:', noises_sum_len, link_geom.length)
    return cost_attrs
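
# A hedged sketch (an assumption, not necessarily the project's implementation) of
# the interpolation described above: scale each dB-band exposure length of the
# original edge by the link/edge length ratio, so a new link created by snapping an
# origin/destination onto an edge inherits a proportional share of the edge's noises.
def interpolate_link_noises_sketch(link_geom, edge_geom, edge_noises: dict) -> dict:
    ratio = link_geom.length / edge_geom.length if edge_geom.length > 0 else 0
    return {db: round(exp_len * ratio, 3) for db, exp_len in edge_noises.items()}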

#%% merge edge utils to edge gdf
print('edge utils rows:', len(edge_utils_df))
print('edge gdf rows:', len(edges_subset))
edge_utils_gdf = pd.merge(edges_subset, edge_utils_df, how='left', on='edge_id')
print('merged rows:', len(edge_utils_gdf))
# edge_utils_gdf.head()

#%% add noise indexes to edge utils gdf
edge_utils_gdf['mdB'] = edge_utils_gdf.apply(
    lambda row: exps.get_mean_noise_level(row['noises'], row['length']), axis=1)
edge_utils_gdf['nei'] = [
    round(exps.get_noise_cost(noises=noises, db_costs=db_costs), 1)
    for noises in edge_utils_gdf['noises']
]
edge_utils_gdf['nei_norm'] = edge_utils_gdf.apply(
    lambda row: exps.get_nei_norm(row.nei, row['length'], db_costs), axis=1)

#%% export edges with noise & util attributes to file
edge_utils_gdf_file = edge_utils_gdf.drop(columns=['uvkey', 'noises', 'edge_id'])
# edge_utils_gdf_file = edge_utils_gdf_file.query('util > 0')
edge_utils_gdf_file.to_file('outputs/YKR_commutes_output/edge_stats.gpkg',
                            layer=edges_out_file, driver='GPKG')
print('exported file:', edges_out_file)

#### READ & ANALYSE STREET STATS ####
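
#%% read the exported layer back for analysis (a sketch; layer name as exported above)
# edge_stats = gpd.read_file('outputs/YKR_commutes_output/edge_stats.gpkg', layer=edges_out_file)
# print(edge_stats[['mdB', 'nei', 'nei_norm']].describe())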