def get_edge_noises_df(edge_dicts):
    """Aggregate noise-exposure attributes per edge.

    Each edge geometry is cut at the boundaries of the (module-level)
    ``noise_polys`` polygons, noise attributes are joined to the resulting
    sub-lines, and the per-segment noises are summed back to whole edges
    keyed by 'uvkey'.
    """
    edges = nw.get_edge_gdf(edge_dicts, ['geometry', 'length', 'uvkey'])
    # One list of sub-lines per edge, produced by splitting the edge
    # geometry with the noise polygons.
    edges['split_lines'] = [
        geom_utils.get_split_lines_list(geom, noise_polys)
        for geom in edges['geometry']
    ]
    # Explode to one row per sub-line, keeping the parent edge's 'uvkey'.
    exploded = geom_utils.explode_lines_to_split_lines(edges, 'uvkey')
    # Join noise attributes to each sub-line, then aggregate back per edge.
    noisy_segments = exps.get_noise_attrs_to_split_lines(exploded, noise_polys)
    return exps.aggregate_line_noises(noisy_segments, 'uvkey')
def test_add_exposures_to_edges():
    """Noise exposures computed for a 5-edge subset of the Kumpula network
    are written back to the graph, match the expected per-level lengths,
    and sum (to 1 decimal) to the edge's total length.
    """
    graph_proj = files.get_network_kumpula()
    edge_gdf = nw.get_edge_gdf(
        graph_proj, attrs=['geometry', 'length', 'uvkey'], subset=5)
    # Cut each edge geometry with the noise polygons.
    edge_gdf['split_lines'] = [
        geom_utils.get_split_lines_list(line_geom, noise_polys)
        for line_geom in edge_gdf['geometry']
    ]
    split_lines = geom_utils.explode_lines_to_split_lines(edge_gdf, 'uvkey')
    split_line_noises = exps.get_noise_attrs_to_split_lines(
        split_lines, noise_polys)
    edge_noises = exps.aggregate_line_noises(split_line_noises, 'uvkey')
    nw.update_edge_noises_to_graph(edge_noises, graph_proj)
    # Re-read the graph's edges and inspect the first one.
    edge_d = nw.get_all_edge_dicts(graph_proj)[0]
    exp_len_sum = sum(edge_d['noises'].values())
    # Split assertions give clearer failure messages than the old combined
    # tuple comparison; semantics are identical.
    assert edge_d['noises'] == {65: 107.025, 70: 20.027}
    assert round(exp_len_sum, 1) == round(edge_d['length'], 1)
from shapely.geometry import Point
from multiprocessing import current_process, Pool
import utils.DT_API as DT_routing
import utils.DT_utils as DT_utils
import utils.geometry as geom_utils
import utils.times as times
import utils.files as files
import utils.utils as utils
import utils.commutes as commutes_utils
import utils.networks as nw
import ast
# NOTE(review): `pd` (pandas) is used below but never imported in this visible
# chunk — presumably imported elsewhere in the file; verify.

#%% read graph
# Load the full network graph with precomputed noise attributes, then extract
# edge and node GeoDataFrames from it.
graph = files.get_network_full_noise()
print('Graph of', graph.size(), 'edges read.')
edge_gdf = nw.get_edge_gdf(graph, attrs=['geometry', 'length', 'noises'])
node_gdf = nw.get_node_gdf(graph)
print('Network features extracted.')
edge_gdf = edge_gdf[['uvkey', 'geometry', 'noises']]
# Build spatial indexes up front for fast spatial queries later in the script.
edges_sind = edge_gdf.sindex
nodes_sind = node_gdf.sindex
print('Spatial index built.')

#%% read YKR work commute data
commutes = pd.read_csv('data_ykr/T06_tma_e_TOL2008_2016_hel.csv')
# The commented-out lines below document the one-off preprocessing that
# produced the CSV read above (int-casting the xy indexes, filtering to
# municipality 91 and sp == 0, and re-exporting).
# commutes['axyind'] = [int(xyind) for xyind in commutes['axyind']]
# commutes['txyind'] = [int(xyind) for xyind in commutes['txyind']]
# commutes = commutes.loc[commutes['akunta'] == 91]
# commutes = commutes.loc[commutes['sp'] == 0]
# commutes.to_csv('data_ykr/T06_tma_e_TOL2008_2016_hel.csv')
# NOTE(review): the statement below is truncated at this chunk's boundary — it
# continues past the visible source (presumably `axis=1)`); left as-is.
commutes['geom_home'] = commutes.apply(lambda row: Point(row['ax'], row['ay']),
# NOTE(review): this chunk opens mid-statement — the lines below supply the
# trailing arguments of a call whose beginning lies outside the visible source
# (it passes the graph plus a {uvkey: {geometry, length}} mapping).
    graph, {
        edge_d['uvkey']: {
            'geometry': edge_d['geometry'],
            'length': edge_d['length']
        }
    })
utils.print_duration(start_time, 'Missing edge geometries added.', round_n=1)

#%% 3.1 Remove unwalkable streets & tunnels from the graph [query graph for filtering]
print('Query unwalkable network...')
# Fetch the network of unwalkable ways within the AOI polygon and fill in any
# missing edge geometries on it.
graph_filt = nw.get_unwalkable_network(extent_poly_wgs=aoi_poly)
filt_edge_dicts = nw.get_all_edge_dicts(graph_filt, by_nodes=False)
nw.add_missing_edge_geometries(graph_filt, filt_edge_dicts)

#%% 3.2 Remove unwalkable streets & tunnels from the graph [prepare networks for comparison]
filt_edge_gdf = nw.get_edge_gdf(graph_filt, by_nodes=True)
# add osmid as string to unwalkable (filter) edge gdfs
filt_edge_gdf['osmid_str'] = [
    nw.osmid_to_string(osmid) for osmid in filt_edge_gdf['osmid']
]
print('Found', len(filt_edge_gdf), 'unwalkable edges within the extent.')
## save tunnel edge gdf to file
# Drop columns not needed in the export; errors='ignore' tolerates columns
# that are absent from this particular extract.
filt_edges_file = filt_edge_gdf.drop(
    ['oneway', 'access', 'osmid', 'uvkey', 'service', 'junction', 'lanes'],
    axis=1,
    errors='ignore')
filt_edges_filename = graph_name + '_tunnel_edges'
filt_edges_file.to_file('data/networks.gpkg',
                        layer=filt_edges_filename,
                        driver="GPKG")
print('exported', filt_edges_filename, 'to data/networks.gpkg')