# Example #1 (scraped-example marker; original header: "Пример #1", score 0)
def get_home_walk_gdf(axyind):
    """Build home-to-stop walks for one YKR home grid cell (axyind).

    Looks up the commute rows of the cell from the module-level
    ``home_groups``, computes home-to-work walk geometries, flags stop
    points lying outside the Helsinki extent and writes the result
    (minus geometry columns) to a per-cell CSV file.

    Returns the resulting DataFrame/GeoDataFrame of home stops, or None
    if no work destinations were found for the cell.
    """
    start_time = time.time()
    work_rows = home_groups.get_group(axyind)
    home_walks_g = commutes_utils.get_home_work_walks(axyind=axyind,
                                                      work_rows=work_rows,
                                                      districts=districts_gdf,
                                                      datetime=datetime,
                                                      walk_speed=walk_speed,
                                                      subset=False,
                                                      logging=True,
                                                      graph=graph,
                                                      edge_gdf=edge_gdf,
                                                      node_gdf=node_gdf)
    # None signals "no work destinations for this cell" — skip it.
    # (Original code wrapped this in a redundant isinstance check and
    # compared with ==; identity comparison is the correct idiom.)
    if home_walks_g is None:
        print('No work destinations found for:', axyind, 'skipping...')
        return None
    error = commutes_utils.validate_home_stops(home_walks_g)
    if error is not None:
        print(error)
    # add column that tells if the stop geometry is outside of the extent of Helsinki
    home_walks_g['outside_hel'] = [
        outside_hel_extent(geom) for geom in home_walks_g['DT_dest_Point']
    ]
    # drop shapely geometry columns that do not serialize cleanly to CSV
    home_walks_g_to_file = home_walks_g.drop(
        columns=['DT_geom', 'DT_dest_Point'])
    home_walks_g_to_file.to_csv(
        'outputs/YKR_commutes_output/home_stops/axyind_' + str(axyind) +
        '.csv')
    utils.print_duration(
        start_time,
        str(len(home_walks_g)) + ' home stops got for: ' + str(axyind) + '.\n')
    return home_walks_g
import utils.quiet_paths as qp
import utils.exposures as exps
import utils.utils as utils

#%% 1. Set graph extent, name and output folder
graph_name = 'hel-v3'  # name used when saving the constructed graph
# graph_name = 'kumpula-v3'
out_dir = 'graphs'  # output folder for saved graph files
# aoi_poly = files.get_koskela_kumpula_box()
# area of interest: Helsinki polygon in WGS84, buffered by 1000 m
aoi_poly = files.get_hel_poly(WGS84=True, buffer_m=1000)

#%% 2.1 Get undirected projected graph
print('\nGraph to construct:', graph_name)
start_time = time.time()
# build the walkable street network covering the area of interest
graph = nw.get_walkable_network(extent_poly_wgs=aoi_poly)
utils.print_duration(start_time, 'Graph acquired.', round_n=1)

#%% 2.2 Delete unnecessary edge attributes and get edges as dictionaries
# keep only the edge attributes needed downstream to shrink the graph
nw.delete_unused_edge_attrs(
    graph, save_attrs=['uvkey', 'length', 'geometry', 'noises', 'osmid'])
# by_nodes=False: iterate edges directly rather than via node adjacency
edge_dicts = nw.get_all_edge_dicts(graph, attrs=['geometry'], by_nodes=False)
print('Got all edge dicts:', len(edge_dicts))

#%% 2.3 Add missing edge geometries to graph
start_time = time.time()


def get_edge_geoms(edge_dict):
    """Return missing geometry for one edge dict, using the module-level graph."""
    return nw.get_missing_edge_geometries(graph, edge_dict)

start_time = time.time()
nts = qp.get_noise_tolerances()  # noise tolerance values used for quiet-path routing
db_costs = qp.get_db_costs()  # per-dB cost weights for noise exposure
# graph = files.get_network_full_noise()
# smaller Kumpula network with noise attributes — presumably the test network; confirm
graph = files.get_network_kumpula_noise()
print('Graph of', graph.size(), 'edges read.')
edge_gdf = nw.get_edge_gdf(graph, attrs=['geometry', 'length', 'noises'])
node_gdf = nw.get_node_gdf(graph)
print('Network features extracted.')
# attach noise-based edge costs (one cost attribute per noise tolerance)
nw.set_graph_noise_costs(graph, edge_gdf, db_costs=db_costs, nts=nts)
# keep only the columns needed for nearest-edge lookups during routing
edge_gdf = edge_gdf[['uvkey', 'geometry', 'noises']]
print('Noise costs set.')
edges_sind = edge_gdf.sindex  # building sindex here precomputes the spatial indexes
nodes_sind = node_gdf.sindex
print('Spatial index built.')
utils.print_duration(start_time, 'Network initialized.')

def get_od_path_stats(graph, od_dict, logging=False):
    """Summarize shortest vs. quiet path statistics for one OD pair.

    Parameters:
        graph: network graph, passed through to get_short_quiet_paths.
        od_dict: dict with 'orig_latLon' and 'dest_latLon' entries.
        logging: passed through to get_short_quiet_paths.

    Returns a dict with path counts, summed path lengths (rounded to one
    decimal) and the total length of aggregated noise exposures over all
    returned paths.
    """
    paths = get_short_quiet_paths(graph, od_dict['orig_latLon'], od_dict['dest_latLon'], logging=logging)
    # Use descriptive locals instead of the original `sp` / `qp` — `qp`
    # shadowed the module alias `utils.quiet_paths as qp` imported above.
    short_paths = paths[paths['type'] == 'short']
    quiet_paths = paths[paths['type'] == 'quiet']
    # aggregate noise exposures over every path (short + quiet)
    all_noises = exps.aggregate_exposures(list(paths['noises']))
    noise_total_len = round(exps.get_total_noises_len(all_noises), 1)
    stats = {
        'sp_count': len(short_paths),
        'qp_count': len(quiet_paths),
        'sp_len': round(short_paths['total_length'].sum(), 1),
        'qp_len_sum': round(quiet_paths['total_length'].sum(), 1),
        'noise_total_len': noise_total_len
    }
    return stats

#%% read test OD pairs
# Example #4 (scraped-example marker; original header: "Пример #4", score 0)
def get_short_quiet_paths(from_lat, from_lon, to_lat, to_lon):
    """Route the shortest path and one quiet path per noise tolerance between two points.

    Coordinates are WGS84 lat/lon (accepted as str or float). Relies on
    module-level state: graph, edge_gdf, node_gdf, nts (noise tolerances)
    and db_costs (per-dB cost weights).

    Returns a Flask JSON response (jsonify): either the short/quiet path
    comparison dicts, or an {'error': ...} payload when origin, destination
    or a path cannot be found.

    NOTE(review): get_od_path_stats above calls this with a different
    signature (graph, orig_latLon, dest_latLon, logging=...) — the two
    come from different examples; confirm against the real caller.
    """
    start_time = time.time()
    from_latLon = {'lat': float(from_lat), 'lon': float(from_lon)}
    to_latLon = {'lat': float(to_lat), 'lon': float(to_lon)}
    print('from:', from_latLon)
    print('to:', to_latLon)
    # project lat/lon to the xy coordinate system used by the graph
    from_xy = geom_utils.get_xy_from_lat_lon(from_latLon)
    to_xy = geom_utils.get_xy_from_lat_lon(to_latLon)
    # find/create origin and destination nodes
    orig_node = rt.get_nearest_node(graph,
                                    from_xy,
                                    edge_gdf,
                                    node_gdf,
                                    nts=nts,
                                    db_costs=db_costs)
    dest_node = rt.get_nearest_node(graph,
                                    to_xy,
                                    edge_gdf,
                                    node_gdf,
                                    nts=nts,
                                    db_costs=db_costs,
                                    orig_node=orig_node)
    if (orig_node is None):
        print('could not find origin node at', from_latLon)
        return jsonify({'error': 'Origin not found'})
    if (dest_node is None):
        print('could not find destination node at', to_latLon)
        return jsonify({'error': 'Destination not found'})
    utils.print_duration(start_time, 'Origin & destination nodes set.')
    # get shortest path
    start_time = time.time()
    path_list = []
    shortest_path = rt.get_shortest_path(graph,
                                         orig_node['node'],
                                         dest_node['node'],
                                         weight='length')
    if (shortest_path is None):
        return jsonify({'error': 'Could not find paths'})
    path_geom_noises = nw.aggregate_path_geoms_attrs(graph,
                                                     shortest_path,
                                                     weight='length',
                                                     noises=True)
    path_list.append({
        **path_geom_noises,
        **{
            'id': 'short_p',
            'type': 'short',
            'nt': 0
        }
    })
    # get quiet paths to list: one routing pass per noise tolerance,
    # each using the corresponding noise-cost edge attribute as weight
    for nt in nts:
        noise_cost_attr = 'nc_' + str(nt)
        shortest_path = rt.get_shortest_path(graph,
                                             orig_node['node'],
                                             dest_node['node'],
                                             weight=noise_cost_attr)
        path_geom_noises = nw.aggregate_path_geoms_attrs(
            graph, shortest_path, weight=noise_cost_attr, noises=True)
        path_list.append({
            **path_geom_noises,
            **{
                'id': 'q_' + str(nt),
                'type': 'quiet',
                'nt': nt
            }
        })
    utils.print_duration(start_time, 'Routing done.')
    start_time = time.time()
    # remove linking edges of the origin / destination nodes
    # (undo the temporary graph mutation made by get_nearest_node)
    nw.remove_new_node_and_link_edges(graph, orig_node)
    nw.remove_new_node_and_link_edges(graph, dest_node)
    # collect quiet paths to gdf (EPSG:3879 — projected CRS for Helsinki)
    paths_gdf = gpd.GeoDataFrame(path_list, crs=from_epsg(3879))
    # drop duplicate routes; sort so the short path comes first, then
    # quiet paths by increasing length
    paths_gdf = paths_gdf.drop_duplicates(
        subset=['type', 'total_length']).sort_values(
            by=['type', 'total_length'], ascending=[False, True])
    # add exposures to noise levels higher than specified threshods (dBs)
    paths_gdf['th_noises'] = [
        exps.get_th_exposures(noises, [55, 60, 65, 70])
        for noises in paths_gdf['noises']
    ]
    # add percentages of cumulative distances of different noise levels
    paths_gdf['noise_pcts'] = paths_gdf.apply(
        lambda row: exps.get_noise_pcts(row['noises'], row['total_length']),
        axis=1)
    # calculate mean noise level
    paths_gdf['mdB'] = paths_gdf.apply(lambda row: exps.get_mean_noise_level(
        row['noises'], row['total_length']),
                                       axis=1)
    # calculate noise exposure index (same as noise cost but without noise tolerance coefficient)
    paths_gdf['nei'] = [
        round(exps.get_noise_cost(noises=noises, db_costs=db_costs), 1)
        for noises in paths_gdf['noises']
    ]
    paths_gdf['nei_norm'] = paths_gdf.apply(
        lambda row: exps.get_nei_norm(row.nei, row.total_length, db_costs),
        axis=1)
    # gdf to dicts
    path_dicts = qp.get_geojson_from_q_path_gdf(paths_gdf)
    # group paths with nearly identical geometries
    unique_paths = qp.remove_duplicate_geom_paths(path_dicts,
                                                  tolerance=30,
                                                  logging=False)
    # calculate exposure differences to shortest path
    path_comps = rt.get_short_quiet_paths_comparison_for_dicts(unique_paths)
    # return paths as GeoJSON (FeatureCollection)
    utils.print_duration(start_time, 'Processed paths.')
    return jsonify(path_comps)