def test_network_saving_loading():

    # save graph as shapefile and geopackage
    G = ox.graph_from_place(place1, network_type="drive")
    ox.save_graph_shapefile(G)
    ox.save_graph_geopackage(G)

    # save/load graph as graphml file
    ox.save_graphml(G, gephi=True)
    ox.save_graphml(G, gephi=False)
    filepath = os.path.join(ox.settings.data_folder, "graph.graphml")
    G = ox.load_graphml(filepath, node_type=str)

    # test osm xml output
    default_all_oneway = ox.settings.all_oneway
    ox.settings.all_oneway = True
    G = ox.graph_from_point(location_point, dist=500, network_type="drive")
    ox.save_graph_xml(G, merge_edges=False)

    # test osm xml output merge edges
    ox.save_graph_xml(G, merge_edges=True, edge_tag_aggs=[("length", "sum")])

    # test osm xml output from gdfs
    nodes, edges = ox.graph_to_gdfs(G)
    ox.save_graph_xml([nodes, edges])

    # test ordered nodes from way
    df = pd.DataFrame({"u": [54, 2, 5, 3, 10, 19, 20],
                       "v": [76, 3, 8, 10, 5, 20, 15]})
    ordered_nodes = ox.io._get_unique_nodes_ordered_from_way(df)
    assert ordered_nodes == [2, 3, 10, 5, 8]

    ox.settings.all_oneway = default_all_oneway
def getChangchunTreet():
    treet = ox.graph_from_place(
        ['南关区,长春,中国', '朝阳区,长春,中国', '二道区,长春,中国', '绿园区,长春,中国', '宽城区,长春,中国'],
        network_type='drive')
    treet = ox.project_graph(treet)
    ox.save_graph_shapefile(treet, filename='test2')
    ox.plot_graph(treet)
def test_network_saving_loading():

    # save graph as shapefile and geopackage
    G = ox.graph_from_place(place1, network_type="drive")
    ox.save_graph_shapefile(G)
    ox.save_graph_geopackage(G)

    # save/load graph as graphml file
    ox.save_graphml(G, gephi=True)
    ox.save_graphml(G, gephi=False)
    filepath = os.path.join(ox.settings.data_folder, "graph.graphml")
    G2 = ox.load_graphml(filepath)

    # verify everything in G is equivalent in G2
    for (n1, d1), (n2, d2) in zip(G.nodes(data=True), G2.nodes(data=True)):
        assert n1 == n2
        assert d1 == d2
    for (u1, v1, k1, d1), (u2, v2, k2, d2) in zip(
        G.edges(keys=True, data=True), G2.edges(keys=True, data=True)
    ):
        assert u1 == u2
        assert v1 == v2
        assert k1 == k2
        assert tuple(d1.keys()) == tuple(d2.keys())
        assert tuple(d1.values()) == tuple(d2.values())
    for (k1, v1), (k2, v2) in zip(G.graph.items(), G2.graph.items()):
        assert k1 == k2
        assert v1 == v2
    assert tuple(G.graph["streets_per_node"].keys()) == tuple(
        G2.graph["streets_per_node"].keys())
    assert tuple(G.graph["streets_per_node"].values()) == tuple(
        G2.graph["streets_per_node"].values())

    # test custom data types
    nd = {"osmid": str}
    ed = {"length": str, "osmid": float}
    G2 = ox.load_graphml(filepath, node_dtypes=nd, edge_dtypes=ed)

    # test osm xml output
    default_all_oneway = ox.settings.all_oneway
    ox.settings.all_oneway = True
    G = ox.graph_from_point(location_point, dist=500, network_type="drive")
    ox.save_graph_xml(G, merge_edges=False)

    # test osm xml output merge edges
    ox.save_graph_xml(G, merge_edges=True, edge_tag_aggs=[("length", "sum")])

    # test osm xml output from gdfs
    nodes, edges = ox.graph_to_gdfs(G)
    ox.save_graph_xml([nodes, edges])

    # test ordered nodes from way
    df = pd.DataFrame({"u": [54, 2, 5, 3, 10, 19, 20],
                       "v": [76, 3, 8, 10, 5, 20, 15]})
    ordered_nodes = ox.io._get_unique_nodes_ordered_from_way(df)
    assert ordered_nodes == [2, 3, 10, 5, 8]

    ox.settings.all_oneway = default_all_oneway
def test_network_saving_loading():

    # save/load graph as shapefile and graphml file
    G = ox.graph_from_place('Piedmont, California, USA')
    G_projected = ox.project_graph(G)
    ox.save_graph_shapefile(G_projected)
    ox.save_graphml(G_projected)
    ox.save_graphml(G_projected, filename='gephi.graphml', gephi=True)
    G2 = ox.load_graphml('graph.graphml')

    # convert graph to node/edge GeoDataFrames and back again
    gdf_edges = ox.graph_to_gdfs(G, nodes=False, edges=True, fill_edge_geometry=False)
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(G, nodes=True, edges=True,
                                            node_geometry=True, fill_edge_geometry=True)
    G3 = ox.gdfs_to_graph(gdf_nodes, gdf_edges)

    # find graph nodes nearest to some set of points
    X = gdf_nodes['x'].head()
    Y = gdf_nodes['y'].head()
    nn1 = ox.get_nearest_nodes(G, X, Y)
    nn2 = ox.get_nearest_nodes(G, X, Y, method='kdtree')
    nn3 = ox.get_nearest_nodes(G, X, Y, method='balltree')
def test_network_saving_loading():

    # save/load graph as shapefile and graphml file
    G = ox.graph_from_place('Piedmont, California, USA')
    G_projected = ox.project_graph(G)
    ox.save_graph_shapefile(G_projected)
    ox.save_graphml(G_projected)
    ox.save_graphml(G_projected, filename='gephi.graphml', gephi=True)
    G2 = ox.load_graphml('graph.graphml')
    G3 = ox.load_graphml('graph.graphml', node_type=str)

    # convert graph to node/edge GeoDataFrames and back again
    gdf_edges = ox.graph_to_gdfs(G, nodes=False, edges=True, fill_edge_geometry=False)
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(G, nodes=True, edges=True,
                                            node_geometry=True, fill_edge_geometry=True)
    G4 = ox.gdfs_to_graph(gdf_nodes, gdf_edges)

    # find graph nodes nearest to some set of points
    X = gdf_nodes['x'].head()
    Y = gdf_nodes['y'].head()
    nn1 = ox.get_nearest_nodes(G, X, Y)
    nn2 = ox.get_nearest_nodes(G, X, Y, method='kdtree')
    nn3 = ox.get_nearest_nodes(G, X, Y, method='balltree')

    # find graph edges nearest to some set of points
    ne1 = ox.get_nearest_edges(G, X, Y)
    ne2 = ox.get_nearest_edges(G, X, Y, method='kdtree')
    ne3 = ox.get_nearest_edges(G, X, Y, method='kdtree', dist=50)
def get_data(G, district_name):
    # print(nx.info(G))
    print(f'File Path = {PATH}')

    # save as graphml
    path = f'{PATH}/{district_name}.xml'
    ox.save_graphml(G, path)

    # save as .osm
    path = f'{PATH}/{district_name}.osm'
    ox.config(all_oneway=True)
    if not os.path.exists(path):
        ox.save_graph_xml(G, filepath=path)

    # save as folium html
    path = f'{PATH}/{district_name}_folium.html'
    if not os.path.exists(path):
        map_folium = ox.folium.plot_graph_folium(G)
        map_folium.save(path)

    # save as SVG
    path = f'{PATH}/{district_name}_image.svg'
    fig, ax = ox.plot_graph(G, show=False, save=True, close=True, filepath=path)

    # save graph as a shapefile and .csv
    path = f'{PATH}/{district_name}_shape'
    ox.save_graph_shapefile(G, filepath=path)
    make_adjacency_matrix(district_name)
    clean_csv(district_name)
    make_adjacency_required_matrix(district_name)
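# A minimal usage sketch for get_data() above (not part of the original source).
# It assumes the module-level PATH constant and the helper functions
# (make_adjacency_matrix, clean_csv, make_adjacency_required_matrix) are defined elsewhere;
# the place name is a hypothetical example.
import osmnx as ox

G = ox.graph_from_place('Gangnam-gu, Seoul, South Korea', network_type='drive')
get_data(G, 'gangnam')  # writes graphml, .osm, folium HTML, SVG, and shapefile outputs under PATH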
def test_graph_save_load():

    # save graph as shapefile and geopackage
    G = ox.graph_from_place(place1, network_type="drive")
    ox.save_graph_shapefile(G)
    ox.save_graph_geopackage(G, directed=False)

    # save/load geopackage and convert graph to/from node/edge GeoDataFrames
    fp = ".temp/data/graph-dir.gpkg"
    ox.save_graph_geopackage(G, filepath=fp, directed=True)
    gdf_nodes1 = gpd.read_file(fp, layer="nodes").set_index("osmid")
    gdf_edges1 = gpd.read_file(fp, layer="edges").set_index(["u", "v", "key"])
    G2 = ox.graph_from_gdfs(gdf_nodes1, gdf_edges1)
    G2 = ox.graph_from_gdfs(gdf_nodes1, gdf_edges1, graph_attrs=G.graph)
    gdf_nodes2, gdf_edges2 = ox.graph_to_gdfs(G2)
    assert set(gdf_nodes1.index) == set(gdf_nodes2.index) == set(G.nodes) == set(G2.nodes)
    assert set(gdf_edges1.index) == set(gdf_edges2.index) == set(G.edges) == set(G2.edges)

    # create random boolean graph/node/edge attributes
    attr_name = "test_bool"
    G.graph[attr_name] = False
    bools = np.random.randint(0, 2, len(G.nodes))
    node_attrs = {n: bool(b) for n, b in zip(G.nodes, bools)}
    nx.set_node_attributes(G, node_attrs, attr_name)
    bools = np.random.randint(0, 2, len(G.edges))
    edge_attrs = {n: bool(b) for n, b in zip(G.edges, bools)}
    nx.set_edge_attributes(G, edge_attrs, attr_name)

    # save/load graph as graphml file
    ox.save_graphml(G, gephi=True)
    ox.save_graphml(G, gephi=False)
    filepath = Path(ox.settings.data_folder) / "graph.graphml"
    G2 = ox.load_graphml(
        filepath,
        graph_dtypes={attr_name: ox.io._convert_bool_string},
        node_dtypes={attr_name: ox.io._convert_bool_string},
        edge_dtypes={attr_name: ox.io._convert_bool_string},
    )

    # verify everything in G is equivalent in G2
    assert tuple(G.graph.keys()) == tuple(G2.graph.keys())
    assert tuple(G.graph.values()) == tuple(G2.graph.values())
    z = zip(G.nodes(data=True), G2.nodes(data=True))
    for (n1, d1), (n2, d2) in z:
        assert n1 == n2
        assert tuple(d1.keys()) == tuple(d2.keys())
        assert tuple(d1.values()) == tuple(d2.values())
    z = zip(G.edges(keys=True, data=True), G2.edges(keys=True, data=True))
    for (u1, v1, k1, d1), (u2, v2, k2, d2) in z:
        assert u1 == u2
        assert v1 == v2
        assert k1 == k2
        assert tuple(d1.keys()) == tuple(d2.keys())
        assert tuple(d1.values()) == tuple(d2.values())

    # test custom data types
    nd = {"osmid": str}
    ed = {"length": str, "osmid": float}
    G2 = ox.load_graphml(filepath, node_dtypes=nd, edge_dtypes=ed)
def wkt_to_graph(wkt_list, im_file, conf, out_graph_file):
    min_subgraph_length_pix = 300
    verbose = False
    super_verbose = False
    make_plots = False
    save_shapefiles = False
    pickle_protocol = 4

    if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
        return None

    try:
        G = wkt_to_G(wkt_list,
                     im_file=im_file,
                     min_subgraph_length_pix=min_subgraph_length_pix,
                     verbose=super_verbose)
        if len(G.nodes()) == 0:
            return None
    except Exception as e:
        print('Exception in wkt_to_G: {}, {}'.format(str(e), out_graph_file))
        return None

    # print a node
    node = list(G.nodes())[-1]
    if verbose:
        print(node, 'random node props:', G.nodes[node])

    # print an edge
    edge_tmp = list(G.edges())[-1]
    if verbose:
        print(edge_tmp, "random edge props:",
              G.edges([edge_tmp[0], edge_tmp[1]]))  # G.edge[edge_tmp[0]][edge_tmp[1]]

    nx.write_gpickle(G, out_graph_file, protocol=pickle_protocol)

    # save shapefile as well?
    if save_shapefiles:
        # note: image_id and graph_dir are expected to come from the enclosing scope
        ox.save_graph_shapefile(G, filename=image_id.split('.')[0],
                                folder=graph_dir, encoding='utf-8')

    # plot, if desired
    if make_plots:
        outfile_plot = 'debug_ox.png'
        if verbose:
            print("Plotting graph...")
            print("outfile_plot:", outfile_plot)
        ox.plot_graph(G, fig_height=9, fig_width=9,
                      # save=True, filename=outfile_plot, margin=0.01)
                      )
        # plt.tight_layout()
        plt.savefig(outfile_plot, dpi=400)
def test_network_saving_loading():
    G = ox.graph_from_place('Piedmont, California, USA')
    G_projected = ox.project_graph(G)
    ox.save_graph_shapefile(G_projected)
    ox.save_graphml(G_projected)
    G2 = ox.load_graphml('graph.graphml')
    gdf_edges = ox.graph_to_gdfs(G, nodes=False, edges=True, fill_edge_geometry=False)
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(G, nodes=True, edges=True,
                                            node_geometry=True, fill_edge_geometry=True)
    G3 = ox.gdfs_to_graph(gdf_nodes, gdf_edges)
def exportBuildingCoordinates(point, name):
    # point must be a tuple (lat, long)
    buildings = ox.buildings_from_point(point=point, distance=1800)
    centroidList = buildings.centroid
    placeCoordCSV = open(name + "_LatLongs.csv", "w")
    for i in centroidList:
        placeCoordCSV.write(str(i.y) + ',' + str(i.x) + '\n')
    placeCoordCSV.close()
    network = ox.graph_from_point(point, distance=1800)
    ox.save_graphml(network, filename=name + '.graphml', folder='network')
    ox.save_graph_shapefile(network, filename=name, folder='network')
def __init__(self): start_time = time.time() G = ox.graph_from_place("Florianópolis, Brazil", network_type="drive") #fig, ax = ox.plot_graph(G) ox.save_graphml(G, filepath=arquivo_graphml) ox.save_graph_shapefile(G, filepath=diretorio_shape_network) end_time = time.time() print("Tempo de execucao = %s segundos." % (end_time - start_time))
def test_network_saving_loading():
    with httmock.HTTMock(get_mock_response_content('overpass-response-7.json.gz')):
        G = ox.graph_from_place('Piedmont, California, USA')
    G_projected = ox.project_graph(G)
    ox.save_graph_shapefile(G_projected)
    ox.save_graphml(G_projected)
    G2 = ox.load_graphml('graph.graphml')
    gdf_edges = ox.graph_to_gdfs(G, nodes=False, edges=True, fill_edge_geometry=False)
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(G, nodes=True, edges=True,
                                            node_geometry=True, fill_edge_geometry=True)
    G3 = ox.gdfs_to_graph(gdf_nodes, gdf_edges)
def GetRoads(city, ntype='all_private'):
    """
    Load road network data (shapefile) locally. If not available, download
    the data through OpenStreetMap Nominatim API first.

    Parameters
    ----------
    city : string
        The name of the city (or place) of interest.
    ntype : string
        The type of street network to get.
        {'walk', 'bike', 'drive', 'drive_service', 'all', 'all_private'}

    Returns
    -------
    G_nodes : geopandas.geodataframe.GeoDataFrame
        The GeoDataFrame of the nodes of the city road network.
    G_edges : geopandas.geodataframe.GeoDataFrame
        The GeoDataFrame of the edges of the city road network.

    Notes
    -----
    If data download is unsuccessful, check query results on the Nominatim
    web page and see if available results exist.
    """
    # load road network from local data
    try:
        G_nodes = gpd.read_file("data/" + city + "/nodes/nodes.shp")
        G_edges = gpd.read_file("data/" + city + "/edges/edges.shp")
        print("Existing local data of " + city + " is loaded as shapefiles\n")
    # download from OpenStreetMap if local data is not available
    except:
        # try different query results
        print("Trying to download the network of " + city + " through OSM Nominatim\n")
        n = 1
        while n <= 5:
            try:
                G = ox.graph_from_place(query=city, network_type=ntype, which_result=n)
                break
            except ValueError:
                n += 1
        ox.save_graph_shapefile(G, filename=city, folder=None, encoding='utf-8')
        G_nodes = gpd.read_file("data/" + city + "/nodes/nodes.shp")
        G_edges = gpd.read_file("data/" + city + "/edges/edges.shp")
        print("Data of " + city + " is downloaded, saved, and loaded as shapefiles\n")
    return G_nodes, G_edges
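# A hypothetical call to GetRoads() above (illustration only, not in the original source);
# the place name is an arbitrary example.
nodes_gdf, edges_gdf = GetRoads('Cambridge, Massachusetts, USA', ntype='drive')
print(len(nodes_gdf), 'nodes and', len(edges_gdf), 'edges loaded')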
def __init__(self):
    start_time = time.time()

    # get a graph for some city
    G = ox.graph_from_place("Florianópolis, Brazil", network_type="drive")
    # fig, ax = ox.plot_graph(G)

    # save graph to disk as geopackage (for GIS) or graphml file (for gephi etc)
    # ox.save_graph_geopackage(G, filepath='./data/mynetwork.gpkg')
    ox.save_graphml(G, filepath=arquivo_graphml)
    ox.save_graph_shapefile(G, filepath=diretorio_shape_network)

    end_time = time.time()
    print("Execution time = %s seconds." % (end_time - start_time))
def infer_travel_time(params):
    '''Get an estimate of the average speed and travel time of each edge in
    the graph from the mask and conversion dictionary.
    For each edge, get the geometry in pixel coords.
    For each point, get the nearest neighbors in the mask and infer the
    local speed.'''

    G_, mask, conv_dict, min_z, dx, dy, \
        percentile, \
        max_speed_band, use_weighted_mean, \
        variable_edge_speed, \
        verbose, \
        out_file, pickle_protocol, \
        save_shapefiles, im_root, graph_dir_out \
        = params

    mph_to_mps = 0.44704  # miles per hour to meters per second

    for i, (u, v, edge_data) in enumerate(G_.edges(data=True)):
        tot_hours, mean_speed_mph, length_miles = \
            get_edge_time_properties(mask, edge_data, conv_dict,
                                     min_z=min_z, dx=dx, dy=dy,
                                     percentile=percentile,
                                     max_speed_band=max_speed_band,
                                     use_weighted_mean=use_weighted_mean,
                                     variable_edge_speed=variable_edge_speed,
                                     verbose=verbose)
        # update edges
        edge_data['Travel Time (h)'] = tot_hours
        edge_data['inferred_speed_mph'] = np.round(mean_speed_mph, 2)
        edge_data['length_miles'] = length_miles
        edge_data['inferred_speed_mps'] = np.round(mean_speed_mph * mph_to_mps, 2)
        edge_data['travel_time_s'] = np.round(3600. * tot_hours, 3)

    G = G_.to_undirected()

    # save graph
    nx.write_gpickle(G, out_file, protocol=pickle_protocol)

    # save shapefile as well?
    if save_shapefiles:
        G_out = G
        ox.save_graph_shapefile(G_out, filename=im_root, folder=graph_dir_out,
                                encoding='utf-8')

    return G_
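# Hypothetical illustration (not in the original source): infer_travel_time() above takes a
# single packed tuple, which suggests it is meant to be mapped over a worker pool. All names
# below (jobs, G_i, mask_i, out_file_i, im_root_i) are assumptions for the sketch; the tuple
# order matches the unpacking at the top of infer_travel_time().
from multiprocessing import Pool

params_list = [
    (G_i, mask_i, conv_dict, 128, 4, 4, 90, 7, True, False,
     False, out_file_i, 4, True, im_root_i, graph_dir_out)
    for G_i, mask_i, out_file_i, im_root_i in jobs  # 'jobs' assembled elsewhere
]
with Pool(4) as pool:
    graphs = pool.map(infer_travel_time, params_list)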
def get_street_network(query_dict, output_folder, area_name):
    G = ox.graph_from_place(query_dict, network_type='drive_service')
    G_projected = ox.project_graph(G)
    ox.save_graph_shapefile(G_projected, filename=os.path.join(output_folder, area_name))
    try:
        shutil.rmtree(os.path.join(output_folder, area_name))
    except OSError:
        pass
    shutil.move(os.path.join('data', area_name), os.path.join(output_folder))
    shutil.rmtree('data')
    ox.save_graphml(G, filename=area_name + '.graphml',
                    folder=os.path.join(output_folder, area_name))
    return G
def test_graph_save_load():

    # save graph as shapefile and geopackage
    G = ox.graph_from_place(place1, network_type="drive")
    ox.save_graph_shapefile(G)
    ox.save_graph_geopackage(G, directed=False)

    # save/load geopackage and convert graph to/from node/edge GeoDataFrames
    fp = ".temp/data/graph-dir.gpkg"
    ox.save_graph_geopackage(G, filepath=fp, directed=True)
    gdf_nodes1 = gpd.read_file(fp, layer="nodes").set_index("osmid")
    gdf_edges1 = gpd.read_file(fp, layer="edges").set_index(["u", "v", "key"])
    G2 = ox.graph_from_gdfs(gdf_nodes1, gdf_edges1, graph_attrs=G.graph)
    gdf_nodes2, gdf_edges2 = ox.graph_to_gdfs(G2)
    assert set(gdf_nodes1.index) == set(gdf_nodes2.index) == set(G.nodes) == set(G2.nodes)
    assert set(gdf_edges1.index) == set(gdf_edges2.index) == set(G.edges) == set(G2.edges)

    # save/load graph as graphml file
    ox.save_graphml(G, gephi=True)
    ox.save_graphml(G, gephi=False)
    filepath = Path(ox.settings.data_folder) / "graph.graphml"
    G2 = ox.load_graphml(filepath)

    # verify everything in G is equivalent in G2
    for (n1, d1), (n2, d2) in zip(G.nodes(data=True), G2.nodes(data=True)):
        assert n1 == n2
        assert d1 == d2
    for (u1, v1, k1, d1), (u2, v2, k2, d2) in zip(
        G.edges(keys=True, data=True), G2.edges(keys=True, data=True)
    ):
        assert u1 == u2
        assert v1 == v2
        assert k1 == k2
        assert tuple(d1.keys()) == tuple(d2.keys())
        assert tuple(d1.values()) == tuple(d2.values())
    for (k1, v1), (k2, v2) in zip(G.graph.items(), G2.graph.items()):
        assert k1 == k2
        assert v1 == v2

    # test custom data types
    nd = {"osmid": str}
    ed = {"length": str, "osmid": float}
    G2 = ox.load_graphml(filepath, node_dtypes=nd, edge_dtypes=ed)
def simple_get_roads(city):
    """
    Use osmnx to get a simplified version of open street maps for the city.
    Writes osm_nodes and osm_ways shapefiles to MAP_FP.

    Args:
        city
    Returns:
        None, but creates the following shape files:
            osm_ways.shp - the simplified road network
            osm_nodes.shp - the intersections and dead ends
        And creates the following directory:
            all_nodes - containing edges and nodes directories
                for the unsimplified road network
    """
    G1 = ox.graph_from_place(city, network_type='drive', simplify=False)
    G = ox.simplify_graph(G1)

    # Label endpoints
    streets_per_node = ox.count_streets_per_node(G)
    for node, count in list(streets_per_node.items()):
        if count <= 1:
            G.nodes()[node]['dead_end'] = True

    # osmnx creates a directory for the nodes and edges
    # Store all nodes, since they can be other features
    ox.save_graph_shapefile(G1, filename='all_nodes', folder=MAP_FP)

    # Store simplified network
    ox.save_graph_shapefile(G, filename='temp', folder=MAP_FP)

    # Copy and remove temp directory
    tempdir = os.path.join(MAP_FP, 'temp')
    for filename in os.listdir(os.path.join(tempdir, 'edges')):
        name, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'edges', filename),
                    os.path.join(MAP_FP, 'osm_ways.' + extension))
    for filename in os.listdir(os.path.join(tempdir, 'nodes')):
        name, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'nodes', filename),
                    os.path.join(MAP_FP, 'osm_nodes.' + extension))
    shutil.rmtree(tempdir)
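# Hypothetical usage of simple_get_roads() above (not in the original source);
# it assumes the module-level MAP_FP output directory has already been configured,
# and the city name is an arbitrary example.
simple_get_roads('Somerville, Massachusetts, USA')
# afterwards MAP_FP should contain osm_ways.shp, osm_nodes.shp, and an all_nodes/ directory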
# Create place boundary shapefiles from OpenStreetMap
Bracciano_shp = ox.gdf_from_place('Bracciano, Italy')
ox.save_gdf_shapefile(Bracciano_shp)

# using NetworkX to calculate the shortest path between two random nodes
route = nx.shortest_path(B, np.random.choice(B.nodes), np.random.choice(B.nodes))
ox.plot_graph_route(B, route, fig_height=10, fig_width=10)

# save street network as GraphML file
B_projected = ox.project_graph(B)
ox.save_graphml(B_projected, filename='network_Bracciano_6km_epgs4326.graphml')

# save street network as ESRI shapefile (includes NODES and EDGES)
ox.save_graph_shapefile(B_projected, filename='networkBracciano-shape')

# street network from bounding box
# G = ox.graph_from_bbox(42.2511, 41.3860, 11.6586, 13.4578, network_type='drive_service')
# G = ox.graph_from_address('Rome, Italy', distance=60000, network_type='drive')
G = ox.graph_from_address('Rome, Italy', distance=6000, network_type='drive')
Bracciano = ox.graph_from_address('Bracciano, Italy', distance=6000, network_type='drive')
# Bracciano = ox.graph_from_address('Bracciano, Italy', distance=6000)
# G_projected = ox.project_graph(G)
ox.plot_graph(Bracciano)
# ox.save_graphml(G, filename='networkRM_Provincia_60km_epgs4326.graphml')
def simple_get_roads(config):
    """
    Use osmnx to get a simplified version of open street maps for the city.
    Writes osm_nodes and osm_ways shapefiles to MAP_FP.

    Args:
        config
    Returns:
        None, but creates the following shape files:
            osm_ways.shp - the simplified road network
            osm_nodes.shp - the intersections and dead ends
        And creates the following directory:
            all_nodes - containing edges and nodes directories
                for the unsimplified road network
    """
    # confirm if a polygon is available for this city, which determines which
    # graph function is appropriate
    print("searching nominatim for " + str(config['city']) + " polygon")
    polygon_pos = find_osm_polygon(config['city'])

    if (polygon_pos is not None):
        print("city polygon found in OpenStreetMaps at position " +
              str(polygon_pos) + ", building graph of roads within " +
              "specified bounds")
        G1 = ox.graph_from_place(config['city'], network_type='drive',
                                 simplify=False, which_result=polygon_pos)
    else:
        # City & lat+lng+radius required from config to graph from point
        if ('city' not in list(config.keys()) or config['city'] is None):
            sys.exit('city is required in config file')
        if ('city_latitude' not in list(config.keys()) or
                config['city_latitude'] is None):
            sys.exit('city_latitude is required in config file')
        if ('city_longitude' not in list(config.keys()) or
                config['city_longitude'] is None):
            sys.exit('city_longitude is required in config file')
        if ('city_radius' not in list(config.keys()) or
                config['city_radius'] is None):
            sys.exit('city_radius is required in config file')

        print("no city polygon found in OpenStreetMaps, building graph of " +
              "roads within " + str(config['city_radius']) + "km of city " +
              str(config['city_latitude']) + " / " +
              str(config['city_longitude']))
        G1 = ox.graph_from_point(
            (config['city_latitude'], config['city_longitude']),
            distance=config['city_radius'] * 1000,
            network_type='drive', simplify=False)

    G = ox.simplify_graph(G1)

    # Label endpoints
    streets_per_node = ox.count_streets_per_node(G)
    for node, count in list(streets_per_node.items()):
        if count <= 1:
            G.nodes()[node]['dead_end'] = True

    # osmnx creates a directory for the nodes and edges
    # Store all nodes, since they can be other features
    ox.save_graph_shapefile(G1, filename='all_nodes', folder=MAP_FP)

    # Store simplified network
    ox.save_graph_shapefile(G, filename='temp', folder=MAP_FP)

    # Copy and remove temp directory
    tempdir = os.path.join(MAP_FP, 'temp')
    for filename in os.listdir(os.path.join(tempdir, 'edges')):
        name, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'edges', filename),
                    os.path.join(MAP_FP, 'osm_ways.' + extension))
    for filename in os.listdir(os.path.join(tempdir, 'nodes')):
        name, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'nodes', filename),
                    os.path.join(MAP_FP, 'osm_nodes.' + extension))
    shutil.rmtree(tempdir)
place_names = ['Ho Chi Minh City, Vietnam',
               #'Beijing, China',
               #'Jakarta, Indonesia',
               'London, UK',
               'Los Angeles, California, USA',
               'Manila, Philippines',
               #'Mexico City, Mexico',
               'New Delhi, India',
               'Sao Paulo, Brazil',
               'New York, New York, USA',
               'Seoul',
               'Singapore',
               #'Tokyo, Japan',
               #'Nairobi, Kenya',
               #'Bangalore, India'
               ]

# In this for-loop, we save all the shapefiles for the valid cities.
for city in place_names:
    city_admin_20kmbuff = ox.gdf_from_place(city, gdf_name='global_cities',
                                            buffer_dist=20000)
    fig, ax = ox.plot_shape(city_admin_20kmbuff)
    ox.save_gdf_shapefile(city_admin_20kmbuff, filename=city)

# In this for-loop, we save all the street networks for the valid cities.
for city in place_names:
    grid = ox.graph_from_place(city, network_type='drive', retain_all=True)
    grid_projected = ox.project_graph(grid)
    ox.save_graph_shapefile(grid_projected, filename=city + '_grid')
    ox.plot_graph(grid_projected)
#indir = "data/vector/city_boundaries/" for city in cities: name = os.path.basename(city).split('.')[0] place = gpd.read_file(city) place_simple = place.unary_union # disolving boundaries based on attributes # Use retain_all if you want to keep all disconnected subgraphs (e.g. when your places aren't adjacent) G = ox.graph_from_polygon(place_simple, network_type='drive', retain_all=True) G_projected = ox.project_graph(G) # save the shapefile to disk #name = os.path.basename("beijing.shp")).split(".")[0] # make better place_names ox.save_graph_shapefile(G_projected, filename=name) area = ox.project_gdf(place).unary_union.area stats = ox.basic_stats(G, area=area) # save to file: def ensure_dir(file_path): directory = os.path.dirname(file_path) if not os.path.exists(directory): os.makedirs(directory) path = os.path.join('data/vector/city_networks/', name) ensure_dir(path) with open(path + '_stats.json', 'wb') as f: json.dump(stats, f)
def add_travel_time_dir(graph_dir, mask_dir, conv_dict, graph_dir_out,
                        min_z=128, dx=4, dy=4,
                        percentile=90,
                        use_totband=True, use_weighted_mean=True,
                        variable_edge_speed=False,
                        mask_prefix='',
                        save_shapefiles=True,
                        verbose=False):
    '''Update graph properties to include travel time for entire directory'''

    pickle_protocol = 4  # 4 is most recent, python 2.7 can't read 4

    # logger1.info("Updating graph properties to include travel time")
    # logger1.info("  Writing to: " + str(graph_dir_out))
    os.makedirs(graph_dir_out, exist_ok=True)

    image_names = sorted([z for z in os.listdir(mask_dir) if z.endswith('.tif')])
    for i, image_name in enumerate(image_names):
        im_root = image_name.split('.')[0]
        image_id = 'AOI' + im_root.split('AOI')[-1]
        if len(mask_prefix) > 0:
            im_root = im_root.split(mask_prefix)[-1]
        out_file = os.path.join(graph_dir_out, image_id + '.gpickle')

        # if (i % 1) == 0:
        #     logger1.info("\n" + str(i+1) + " / " + str(len(image_names))
        #                  + " " + image_name + " " + im_root)
        mask_path = os.path.join(mask_dir, image_name)
        graph_path = os.path.join(graph_dir, image_id + '.gpickle')

        if not os.path.exists(graph_path):
            # logger1.info("  ", i, "DNE, skipping: " + str(graph_path))
            return
            # continue

        # if verbose:
        #     logger1.info("mask_path: " + mask_path)
        #     logger1.info("graph_path: " + graph_path)

        mask = skimage.io.imread(mask_path)
        G_raw = nx.read_gpickle(graph_path)

        # see if it's empty
        if len(G_raw.nodes()) == 0:
            nx.write_gpickle(G_raw, out_file, protocol=pickle_protocol)
            continue

        G = infer_travel_time(G_raw, mask, conv_dict,
                              min_z=min_z, dx=dx, dy=dy,
                              percentile=percentile,
                              use_totband=use_totband,
                              use_weighted_mean=use_weighted_mean,
                              variable_edge_speed=variable_edge_speed,
                              verbose=verbose)
        G = G.to_undirected()

        # save graph
        # logger1.info("Saving graph to directory: " + graph_dir)
        # out_file = os.path.join(graph_dir_out, image_name.split('.')[0] + '.gpickle')
        nx.write_gpickle(G, out_file, protocol=pickle_protocol)

        # save shapefile as well?
        if save_shapefiles:
            G_out = G  # ox.simplify_graph(G.to_directed())
            if len(G_out.edges()) == 0:
                continue
            # logger1.info("Saving shapefile to directory: {}".format(graph_dir_out))
            ox.save_graph_shapefile(G_out, filename=image_id, folder=graph_dir_out,
                                    encoding='utf-8')
            # ox.save_graph_shapefile(G_out, filename=im_root, folder=graph_dir_out,
            #                         encoding='utf-8')
        # out_file2 = os.path.join(graph_dir, image_id.split('.')[0] + '.graphml')
        # ox.save_graphml(G, image_id.split('.')[0] + '.graphml', folder=graph_dir)

    return
bike - get all streets and paths that cyclists can use
all - download all non-private OSM streets and paths
all_private - download all OSM streets and paths, including private-access ones
'''

import osmnx as ox
import geopandas as gpd

# get the streets network from the name of a place
# place = 'Piedmont, California, USA'
# G = ox.graph_from_place(place, network_type='drive')
# ox.save_graph_shapefile(G, filepath='Output/piedmont')

# get the streets network from a bounding box
Area = gpd.read_file('Input/Namajavira_4326.shp')
minx, miny, maxx, maxy = Area.geometry.total_bounds
G = ox.graph_from_bbox(miny, maxy, minx, maxx, network_type='drive')
ox.save_graph_shapefile(G, filepath='Output/streets')

############ Requests from query ###########
import requests
import json

overpass_url = "http://overpass-api.de/api/interpreter"
overpass_query = """
[out:json];
area["ISO3166-1"="DE"][admin_level=2];
(node["amenity"="biergarten"](area);
 way["amenity"="biergarten"](area);
 rel["amenity"="biergarten"](area);
);
out center;
"""
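# A short follow-up sketch (not in the original source): sending the Overpass QL query
# defined above to the Overpass API endpoint and decoding the JSON response with requests.
response = requests.get(overpass_url, params={'data': overpass_query})
data = response.json()
print(len(data['elements']), 'biergarten features returned')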
for place in places:
    name = (place.replace(",", "").replace(" ", ""))  # make better place_names
    print('working on: ', name)

    # make a geodataframe of the street network (outline) from openstreetmap place names
    # use retain_all if you want to keep all disconnected subgraphs
    # (e.g. when your places aren't adjacent)
    G = ox.graph_from_place(place, network_type='drive', retain_all=True)
    G = ox.project_graph(G)

    # make a geodataframe of the shape (outline) from openstreetmap place names
    gdf = ox.gdf_from_place(place)
    gdf = ox.project_gdf(gdf)
    ox.save_graph_shapefile(G, filename=name)

    print(name, ' has crs:')
    gdf.to_crs({'init': 'epsg:3395'})  # Confirm big step of projection change

    # calculate basic stats for the shape
    # TODO adjust this to calculate stats based on neighborhoods
    stats = ox.basic_stats(G, area=gdf['geometry'].area[0])
    print('area', gdf['area'][0] / 10**6, 'sqkm')

    # save to file:
    def ensure_dir(file_path):
        directory = os.path.dirname(file_path)
        if not os.path.exists(directory):
            os.makedirs(directory)
ox.__version__

## Area by lat/long points listed below:
north, south, east, west = 40.0680, 40.0135, -111.7046, -111.7771

'''
place = 'Payson, Utah, USA'
gdf = ox.gdf_from_place(place)
#gdf.loc[0, 'geometry']
ox.save_gdf_shapefile(gdf, filename='place-shape2', folder='data')'''

print('hi')
B = ox.graph_from_bbox(north, south, east, west, network_type='drive')
ox.save_graph_shapefile(B, filename='the-place', folder='data')
print('hi hi')

'''B = ox.graph_from_bbox(north, south, east, west, network_type='drive')
gdf_nodes, gdf_edges = ox.graph_to_gdfs(
    B, nodes=True, edges=True, node_geometry=True, fill_edge_geometry=True)
ox.save_gdf_shapefile(gdf_nodes, filename='the_places_nodes', folder='data')
ox.save_gdf_shapefile(gdf_edges, filename='the_places_edges', folder='data')
'''

'''
north, south, east, west = 40.0680, 40.0135, -111.7046, -111.7771
gdf = ox.gdf_from_bbox(north, south, east, west, network_type='drive')
def main():
    global logger1

    # min_subgraph_length_pix = 300
    min_spur_length_m = 0.001  # default = 5
    local = False  # True
    verbose = True
    super_verbose = False
    make_plots = False  # True
    save_shapefiles = True  # False
    pickle_protocol = 4  # 4 is most recent, python 2.7 can't read 4

    # local
    if local:
        albu_path = '/Users/avanetten/Documents/cosmiq/apls/albu_inference_mod'
        path_images = '/Users/avanetten/Documents/cosmiq/spacenet/data/spacenetv2/AOI_2_Vegas_Test/400m/RGB-PanSharpen'
        res_root_dir = os.path.join(albu_path, 'results/2m_4fold_512_30e_d0.2_g0.2_AOI_2_Vegas_Test')
        csv_file = os.path.join(res_root_dir, 'wkt_submission.csv')
        graph_dir = os.path.join(res_root_dir, 'graphs')
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        # os.makedirs(graph_dir, exist_ok=True)
        try:
            os.makedirs(graph_dir)
        except:
            pass

    # deployed on dev box
    else:
        parser = argparse.ArgumentParser()
        parser.add_argument('config_path')
        args = parser.parse_args()
        with open(args.config_path, 'r') as f:
            cfg = json.load(f)
        config = Config(**cfg)

        # output files
        res_root_dir = os.path.join(config.path_results_root, config.test_results_dir)
        path_images = os.path.join(config.path_data_root, config.test_data_refined_dir)
        csv_file = os.path.join(res_root_dir, config.wkt_submission)
        graph_dir = os.path.join(res_root_dir, config.graph_dir)
        log_file = os.path.join(res_root_dir, 'wkt_to_G.log')
        os.makedirs(graph_dir, exist_ok=True)

        min_subgraph_length_pix = config.min_subgraph_length_pix
        min_spur_length_m = config.min_spur_length_m

    console, logger1 = make_logger.make_logger(log_file, logger_name='log')

    # ###########################################################################
    # # https://docs.python.org/3/howto/logging-cookbook.html#logging-to-multiple-destinations
    # # set up logging to file - see previous section for more details
    # logging.basicConfig(level=logging.DEBUG,
    #                     format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
    #                     datefmt='%m-%d %H:%M',
    #                     filename=log_file,
    #                     filemode='w')
    # # define a Handler which writes INFO messages or higher to the sys.stderr
    # console = logging.StreamHandler()
    # console.setLevel(logging.INFO)
    # # set a format which is simpler for console use
    # formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    # #formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    # # tell the handler to use this format
    # console.setFormatter(formatter)
    # # add the handler to the root logger
    # logging.getLogger('').addHandler(console)
    # logger1 = logging.getLogger('log')
    # logger1.info("log file: {x}".format(x=log_file))
    # ###########################################################################

    # csv_file = os.path.join(res_root_dir, 'merged_wkt_list.csv')
    # graph_dir = os.path.join(res_root_dir, 'graphs')
    # # os.makedirs(graph_dir, exist_ok=True)
    # try:
    #     os.makedirs(graph_dir)
    # except:
    #     pass

    # read in wkt list
    logger1.info("df_wkt at: {}".format(csv_file))
    # print("df_wkt at:", csv_file)
    df_wkt = pd.read_csv(csv_file)  # columns=['ImageId', 'WKT_Pix']

    # iterate through image ids and create graphs
    t0 = time.time()
    image_ids = np.sort(np.unique(df_wkt['ImageId']))
    print("image_ids:", image_ids)
    print("len image_ids:", len(image_ids))

    for i, image_id in enumerate(image_ids):
        # if image_id != 'AOI_2_Vegas_img586':
        #     continue
        out_file = os.path.join(graph_dir, image_id.split('.')[0] + '.gpickle')
        logger1.info("\n{x} / {y}, {z}".format(x=i+1, y=len(image_ids), z=image_id))
        # print("\n")
        # print(i, "/", len(image_ids), image_id)

        # for geo referencing, im_file should be the raw image
        if config.num_channels == 3:
            im_file = os.path.join(path_images, 'RGB-PanSharpen_' + image_id + '.tif')
        else:
            im_file = os.path.join(path_images, 'MUL-PanSharpen_' + image_id + '.tif')
        # im_file = os.path.join(path_images, image_id)
        if not os.path.exists(im_file):
            im_file = os.path.join(path_images, image_id + '.tif')

        # filter
        df_filt = df_wkt['WKT_Pix'][df_wkt['ImageId'] == image_id]
        wkt_list = df_filt.values
        # wkt_list = [z[1] for z in df_filt_vals]

        # print a few values
        logger1.info("\n{x} / {y}, num linestrings: {z}".format(
            x=i+1, y=len(image_ids), z=len(wkt_list)))
        # print("\n", i, "/", len(image_ids), "num linestrings:", len(wkt_list))
        if verbose:
            print("image_file:", im_file)
            print("  wkt_list[:2]", wkt_list[:2])

        if (len(wkt_list) == 0) or (wkt_list[0] == 'LINESTRING EMPTY'):
            G = nx.MultiDiGraph()
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue

        # create graph
        t1 = time.time()
        G = wkt_to_G(wkt_list, im_file=im_file,
                     min_subgraph_length_pix=min_subgraph_length_pix,
                     min_spur_length_m=min_spur_length_m,
                     verbose=super_verbose)
        t2 = time.time()
        if verbose:
            logger1.info("Time to create graph: {} seconds".format(t2 - t1))
            # print("Time to create graph:", t2 - t1, "seconds")

        if len(G.nodes()) == 0:
            nx.write_gpickle(G, out_file, protocol=pickle_protocol)
            continue

        # print a node
        node = list(G.nodes())[-1]
        print(node, "random node props:", G.nodes[node])
        # print an edge
        edge_tmp = list(G.edges())[-1]
        # print(edge_tmp, "random edge props:", G.edges([edge_tmp[0], edge_tmp[1]]))  # G.edge[edge_tmp[0]][edge_tmp[1]]
        print(edge_tmp, "random edge props:", G.get_edge_data(edge_tmp[0], edge_tmp[1]))

        # save graph
        logger1.info("Saving graph to directory: {}".format(graph_dir))
        # print("Saving graph to directory:", graph_dir)
        nx.write_gpickle(G, out_file, protocol=pickle_protocol)

        # save shapefile as well?
        if save_shapefiles:
            logger1.info("Saving shapefile to directory: {}".format(graph_dir))
            try:
                ox.save_graph_shapefile(G, filename=image_id.split('.')[0],
                                        folder=graph_dir, encoding='utf-8')
            except:
                print("Cannot save shapefile...")
            # out_file2 = os.path.join(graph_dir, image_id.split('.')[0] + '.graphml')
            # ox.save_graphml(G, image_id.split('.')[0] + '.graphml', folder=graph_dir)

        # plot, if desired
        if make_plots:
            print("Plotting graph...")
            outfile_plot = os.path.join(graph_dir, image_id)
            print("outfile_plot:", outfile_plot)
            ox.plot_graph(G, fig_height=9, fig_width=9,
                          # save=True, filename=outfile_plot, margin=0.01)
                          )
            # plt.tight_layout()
            plt.savefig(outfile_plot, dpi=400)

        # if i > 30:
        #     break

    tf = time.time()
    logger1.info("Time to run wkt_to_G.py: {} seconds".format(tf - t0))
def RadRoads(city, ntype='all_private', limit=5):
    """
    In a given city (or geographical area) in OSM:
    find the straightest and curviest roads by name;
    find the shortest and longest roads by name.

    Parameters
    ----------
    city : string
        The name of the city (or place) of interest.
    ntype : string
        The type of street network to get.
        {'walk', 'bike', 'drive', 'drive_service', 'all', 'all_private'}
    limit : integer
        Number of top records to be listed in each category.

    Returns
    -------
    1. name and length of the shortest roads and a dataframe of a top list
    2. name and length of the longest roads and a dataframe of a top list
    3. name and sinuosity info of the straightest road and a dataframe of a top list
    4. name and sinuosity info of the curviest road and a dataframe of a top list
    5. network graph plot of the given city with top roads marked with colors
    """
    # load data
    # load road network from local data
    try:
        G_nodes = gpd.read_file("data/" + city + "/nodes/nodes.shp")
        G_edges = gpd.read_file("data/" + city + "/edges/edges.shp")
        print("Existing local data of " + city + " is loaded as shapefiles\n")
    # download from OpenStreetMap if local data is not available
    except:
        # try different query results
        print("Trying to download the network of " + city + " through OSM Nominatim\n")
        n = 1
        while n <= 5:
            try:
                G = ox.graph_from_place(query=city, network_type=ntype, which_result=n)
                break
            except ValueError:
                n += 1
        ox.save_graph_shapefile(G, filename=city, folder=None, encoding='utf-8')
        G_nodes = gpd.read_file("data/" + city + "/nodes/nodes.shp")
        G_edges = gpd.read_file("data/" + city + "/edges/edges.shp")
        print("Data of " + city + " is downloaded, saved, and loaded as shapefiles\n")

    ##################################################################
    # NOTICE: 'length' specified with method call may cause problems #
    ##################################################################

    # combine road segments and aggregate the total lengths
    G_edges['length'] = G_edges['length'].astype('float')
    dict_v = {'length': 'sum', 'highway': 'first', 'oneway': 'first'}
    table = G_edges.groupby('name').agg(dict_v).reset_index()
    # remove messy segments w/o names
    table = table[table['name'] != '']
    table.dropna(how='any', inplace=True)

    ### LENGTH ###
    # calculate shortest and longest roads
    short = table.sort_values(by='length', ascending=True).head(limit)
    long = table.sort_values(by='length', ascending=False).head(limit)

    # extract road names
    roads = list(G_edges['name'].unique())
    roads.remove('')  ### remove messy segments without names ###

    rnames = []
    dist_d = []
    dist_l = []
    sinuosity = []

    ### SINUOSITY ###
    # calculate sinuosity for each road
    # create a dataframe containing all the segments of a road for each road
    for i, r in enumerate(roads):
        df_road = G_edges[G_edges['name'] == roads[i]]
        # list all the nodes
        road_nodes = list(df_road['from'].values) + list(df_road['to'].values)
        # count all the nodes
        tdict = dict(Counter(road_nodes))
        tdf = pd.DataFrame(list(tdict.items()), columns=['node', 'count'])
        # select nodes that only occur once (terminals of a road)
        tdf_sub = tdf[tdf['count'] == 1]
        if len(tdf_sub) != 2:
            continue  ### skip roads with more than two terminal nodes for now ###
        else:
            # extract coordinates of the two terminal nodes from the city nodes graph
            G_nodes_term = G_nodes[list(
                map(lambda n: n in list(tdf_sub['node'].values), list(G_nodes['osmid'])))]
            coord1 = list(G_nodes_term.iloc[0, :]['geometry'].coords)[0]
            coord2 = list(G_nodes_term.iloc[1, :]['geometry'].coords)[0]
            p1 = coord1[1], coord1[0]
            p2 = coord2[1], coord2[0]
            # calculate shortest Distance between two nodes
            d_d = vincenty(p1, p2).meters
            # calculate actual route Length
            d_l = pd.to_numeric(df_road['length'], errors='coerce').sum()
            # calculate sinuosity
            sinu = d_l / d_d
            # append all values to lists
            rnames.append(r)        # road name
            dist_d.append(d_d)      # shortest Distance
            dist_l.append(d_l)      # actual Length
            sinuosity.append(sinu)  # sinuosity

    # create a dataframe with sinuosity data
    df_sinu = pd.DataFrame({
        'name': rnames,
        'distance': dist_d,
        'length': dist_l,
        'sinuosity': sinuosity
    })

    # calculate straightest and curviest roads
    straight = df_sinu.sort_values('sinuosity', ascending=True).head(limit)
    curve = df_sinu.sort_values('sinuosity', ascending=False).head(limit)
    straight_0 = straight.iloc[0]
    curve_0 = curve.iloc[0]

    # print out output
    print('Shortest road: {:s} ({:.2f} meters)\n'.format(
        short.iloc[0]['name'], short.iloc[0]['length']))
    print(short, "\n")
    print('Longest road: {:s} ({:.2f} meters)\n'.format(
        long.iloc[0]['name'], long.iloc[0]['length']))
    print(long, "\n")
    print('Straightest road: {:s}\nroad dist.: {:.2f}\nshortest dist.: {:.2f}\nsinuosity: {:.5f}\n'
          .format(straight.iloc[0]['name'], straight.iloc[0]['length'],
                  straight.iloc[0]['distance'], straight.iloc[0]['sinuosity']))
    print(straight, "\n")
    print('Curviest road: {:s}\nroad dist.: {:.2f}\nshortest dist.: {:.2f}\nsinuosity: {:.5f}\n'
          .format(curve.iloc[0]['name'], curve.iloc[0]['length'],
                  curve.iloc[0]['distance'], curve.iloc[0]['sinuosity']))
    print(curve, "\n")

    # plot the graph of the area
    fig, ax = pl.subplots(figsize=(10, 10))
    G_edges.plot(color='silver', ax=ax)
    G_edges[G_edges['name'] == straight.iloc[0]['name']].plot(
        color='limegreen', ax=ax, label='straightest')
    G_edges[G_edges['name'] == curve.iloc[0]['name']].plot(
        color='gold', ax=ax, label='curviest')
    G_edges[G_edges['name'] == short.iloc[0]['name']].plot(
        color='steelblue', ax=ax, label='shortest')
    G_edges[G_edges['name'] == long.iloc[0]['name']].plot(
        color='indianred', ax=ax, label='longest')
    pl.legend(fontsize='medium')
    pl.show()
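# A hypothetical call to RadRoads() above (illustration only; not in the original source,
# and the place name is an arbitrary example).
RadRoads('Davis, California, USA', ntype='drive', limit=5)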
def download_osm_networks(
    output_dir, polygon=None, bbox=None, data_crs=None,
    net_types=["drive", "walk", "bike"], pickle_save=False, suffix="", overwrite=False
):
    """
    Download an OpenStreetMap network within the area defined by a polygon
    feature class or a bounding box.

    Args:
        output_dir (str): Path, Path to output directory. Each modal network (specified by
            `net_types`) is saved to this directory within an eponymous folder as a shape
            file. If `pickle_save` is True, pickled graph objects are also stored in this
            directory in the appropriate subfolders.
        polygon (str): Path, default=None; Path to study area polygon(s) shapefile. If
            provided, the polygon features define the area from which to fetch OSM features
            and `bbox` is ignored. See module notes for performance and suggestions on usage.
        bbox (dict): default=None; A dictionary with keys 'south', 'west', 'north', and
            'east' of EPSG:4326-style coordinates, defining a bounding box for the area from
            which to fetch OSM features. Only required when `study_area_polygon_path` is not
            provided. See module notes for performance and suggestions on usage.
        data_crs (int): integer value representing an EPSG code
        net_types (list): [String,...], default=["drive", "walk", "bike"]
            A list containing any or all of "drive", "walk", or "bike", specifying the
            desired OSM network features to be downloaded.
        pickle_save (bool): default=False; If True, the downloaded OSM networks are saved as
            python `networkx` objects using the `pickle` module. See module notes for usage.
        suffix (str): default=""; Downloaded datasets may optionally be stored in folders
            with a suffix appended, differentiating networks by date, for example.
        overwrite (bool): if set to True, delete the existing copy of the network(s)

    Returns:
        G (dict): A dictionary of networkx graph objects. Keys are mode names based on
            `net_types`; values are graph objects.
    """
    # Validation of inputs
    # TODO: separate polygon and bbox validation
    bounding_box = validate_inputs(
        study_area_poly=polygon, bbox=bbox, data_crs=data_crs
    )
    # - ensure Network types are valid and formatted correctly
    net_types = validate_network_types(network_types=net_types)
    output_dir = validate_directory(output_dir)

    # Fetch network features
    mode_nets = {}
    for net_type in net_types:
        print("")
        net_folder = f"{net_type}_{suffix}"
        print(f"OSMnx '{net_type.upper()}' network extraction")
        print("-- extracting a composed network by bounding box...")
        g = ox.graph_from_bbox(
            north=bounding_box["north"],
            south=bounding_box["south"],
            east=bounding_box["east"],
            west=bounding_box["west"],
            network_type=net_type,
            retain_all=True,
        )
        if net_type in ["walk", "bike"]:
            g = dl_help.trim_components(graph=g)

        # Pickle if requested
        if pickle_save:
            print("-- saving the composed network as pickle")
            out_f = os.path.join(output_dir, net_folder, "osmnx_composed_net.p")
            with open(out_f, "wb") as pickle_file:
                pickle.dump(g, pickle_file)
            print("---- saved to: {}".format(out_f))

        # 2. Saving as shapefile
        print("-- saving network shapefile...")
        out_f = os.path.join(output_dir, net_folder)
        check_overwrite_path(output=out_f, overwrite=overwrite)
        ox.save_graph_shapefile(G=g, filepath=out_f)  # need to change this directory
        print("---- saved to: " + out_f)

        # 3. Add the final graph to the dictionary of networks
        mode_nets[net_type] = g

    return mode_nets
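# A hypothetical call to download_osm_networks() above (illustration only; the bounding box
# values, output path, and suffix are arbitrary examples, not from the original source).
nets = download_osm_networks(
    output_dir="osm_downloads",
    bbox={"south": 25.70, "west": -80.35, "north": 25.90, "east": -80.10},
    data_crs=4326,
    net_types=["drive"],
    pickle_save=False,
    suffix="2020_01",
    overwrite=True,
)
drive_graph = nets["drive"]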
def infer_travel_time(params):
    '''Get an estimate of the average speed and travel time of each edge in
    the graph from the mask and conversion dictionary.
    For each edge, get the geometry in pixel coords.
    For each point, get the nearest neighbors in the mask and infer the
    local speed.'''

    G_, mask, conv_dict, min_z, dx, dy, \
        percentile, \
        max_speed_band, use_weighted_mean, \
        variable_edge_speed, \
        verbose, \
        out_file, \
        save_shapefiles, im_root, graph_dir_out \
        = params

    print("im_root:", im_root)
    mph_to_mps = 0.44704  # miles per hour to meters per second
    pickle_protocol = 4

    for i, (u, v, edge_data) in enumerate(G_.edges(data=True)):
        if verbose:  # (i % 100) == 0:
            logger1.info("\n" + str(i) + " " + str(u) + " " + str(v) + " "
                         + str(edge_data))
        if (i % 1000) == 0:
            logger1.info(str(i) + " / " + str(len(G_.edges())) + " edges")

        tot_hours, mean_speed_mph, length_miles = \
            get_edge_time_properties(mask, edge_data, conv_dict,
                                     min_z=min_z, dx=dx, dy=dy,
                                     percentile=percentile,
                                     max_speed_band=max_speed_band,
                                     use_weighted_mean=use_weighted_mean,
                                     variable_edge_speed=variable_edge_speed,
                                     verbose=verbose)
        # update edges
        edge_data['Travel Time (h)'] = tot_hours
        edge_data['inferred_speed_mph'] = np.round(mean_speed_mph, 2)
        edge_data['length_miles'] = length_miles
        edge_data['inferred_speed_mps'] = np.round(mean_speed_mph * mph_to_mps, 2)
        edge_data['travel_time_s'] = np.round(3600. * tot_hours, 3)
        # edge_data['travel_time'] = np.round(3600. * tot_hours, 3)

    G = G_.to_undirected()

    # save graph
    # logger1.info("Saving graph to directory: " + graph_dir)
    # out_file = os.path.join(graph_dir_out, image_name.split('.')[0] + '.gpickle')
    nx.write_gpickle(G, out_file, protocol=pickle_protocol)

    # save shapefile as well?
    if save_shapefiles:
        # print("save shapefiles")
        # print("current crs:", G.graph['crs'])
        G_out = G
        logger1.info("Saving shapefile to directory: {}".format(graph_dir_out))
        ox.save_graph_shapefile(G_out, filename=im_root, folder=graph_dir_out,
                                encoding='utf-8')

    return G_
def simple_get_roads(config, mapfp):
    """
    Use osmnx to get a simplified version of open street maps for the city.
    Writes osm_nodes and osm_ways shapefiles to mapfp.

    Args:
        config object
    Returns:
        None
        This function creates the following files:
            features.geojson - traffic signals, crosswalks and intersections
            osm_ways.shp - the simplified road network
            osm_nodes.shp - the intersections and dead ends
    """
    ox.settings.useful_tags_path.append('cycleway')

    G1 = get_graph(config)
    G = ox.simplify_graph(G1)

    # Label endpoints
    streets_per_node = ox.count_streets_per_node(G)
    for node, count in list(streets_per_node.items()):
        if count <= 1:
            G.nodes()[node]['dead_end'] = True
            G1.nodes()[node]['dead_end'] = True

    # osmnx creates a directory for the nodes and edges
    # Store all nodes, since they can be other features
    # Get relevant node features out of the unsimplified graph
    nodes, data = zip(*G1.nodes(data=True))
    gdf_nodes = geopandas.GeoDataFrame(list(data), index=nodes)
    node_feats = gdf_nodes[gdf_nodes['highway'].isin(['crossing', 'traffic_signals'])]
    intersections = gdf_nodes[gdf_nodes['dead_end'] == True]

    names = {'traffic_signals': 'signal', 'crossing': 'crosswalk'}
    features = []
    for _, row in node_feats.iterrows():
        features.append(
            geojson.Feature(
                geometry=geojson.Point((row['x'], row['y'])),
                id=row['osmid'],
                properties={'feature': names[row['highway']]},
            ))
    for _, row in intersections.iterrows():
        features.append(
            geojson.Feature(
                geometry=geojson.Point((row['x'], row['y'])),
                id=row['osmid'],
                properties={'feature': 'intersection'},
            ))
    features = geojson.FeatureCollection(features)
    with open(os.path.join(mapfp, 'features.geojson'), "w") as f:
        json.dump(features, f)

    # Store simplified network
    ox.save_graph_shapefile(G, filename='temp', folder=mapfp)

    # Copy and remove temp directory
    tempdir = os.path.join(mapfp, 'temp')
    for filename in os.listdir(os.path.join(tempdir, 'edges')):
        _, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'edges', filename),
                    os.path.join(mapfp, 'osm_ways.' + extension))
    for filename in os.listdir(os.path.join(tempdir, 'nodes')):
        _, extension = filename.split('.')
        shutil.move(os.path.join(tempdir, 'nodes', filename),
                    os.path.join(mapfp, 'osm_nodes.' + extension))
    shutil.rmtree(tempdir)
# impute edge (driving) speeds and calculate edge traversal times
G = ox.add_edge_speeds(G)
G = ox.add_edge_travel_times(G)

# you can convert MultiDiGraph to/from geopandas GeoDataFrames
gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)
G = ox.graph_from_gdfs(gdf_nodes, gdf_edges, graph_attrs=G.graph)

# convert MultiDiGraph to DiGraph to use nx.betweenness_centrality function
# choose between parallel edges by minimizing travel_time attribute value
D = ox.utils_graph.get_digraph(G, weight="travel_time")

# calculate node betweenness centrality, weighted by travel time
bc = nx.betweenness_centrality(D, weight="travel_time", normalized=True)
nx.set_node_attributes(G, values=bc, name="bc")

# plot the graph, coloring nodes by betweenness centrality
nc = ox.plot.get_node_colors_by_attr(G, "bc", cmap="plasma")
fig, ax = ox.plot_graph(G, bgcolor="k", node_color=nc, node_size=50,
                        edge_linewidth=2, edge_color="#333333")

# save graph to shapefile, geopackage, or graphml
ox.save_graph_shapefile(G, filepath="./graph_shapefile/")
ox.save_graph_geopackage(G, filepath="./graph.gpkg")
ox.save_graphml(G, filepath="./graph.graphml")
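# A short follow-up sketch (not in the original source): the graphml file saved above can be
# loaded back into an equivalent MultiDiGraph with osmnx's load_graphml.
G_loaded = ox.load_graphml("./graph.graphml")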
import networkx as nx
import osmnx as ox
import requests
import matplotlib.cm as cm
import matplotlib.colors as colors

# Enable cache for storing json data and enable console output for debugging
ox.config(use_cache=True, log_console=False)
# ox.__version__

# get a graph for some city
G = ox.graph_from_place('Winter Haven, Florida, USA', network_type='all_private')
fig, ax = ox.plot_graph(G, node_size=10, node_color='#66cc66')
ox.save_graph_shapefile(G, filename='wh.jpg')

width = fig.dpi
height = fig.dpi
print("Height is " + str(height) + " Width is " + str(width))

# Determine the area in square meters
G_proj = ox.project_graph(G)
nodes_proj = ox.graph_to_gdfs(G_proj, edges=False)
area_meters = nodes_proj.unary_union.convex_hull.area
print("Area = " + str(area_meters))
def add_travel_time_img(image_name, pickle_protocol, graph_dir, mask_dir,
                        conv_dict, graph_dir_out,
                        min_z=128, dx=4, dy=4,
                        percentile=90,
                        use_totband=True, use_weighted_mean=True,
                        variable_edge_speed=False,
                        mask_prefix='',
                        save_shapefiles=True,
                        verbose=False):
    im_root = image_name.split('.')[0]
    if len(mask_prefix) > 0:
        im_root = im_root.split(mask_prefix)[-1]
    out_file = os.path.join(graph_dir_out, im_root + '.gpickle')

    # if (i % 1) == 0:
    #     logger1.info("\n" + str(1) + " / " + str(228) + " " + image_name + " " + im_root)
    mask_path = os.path.join(mask_dir, image_name)
    graph_path = os.path.join(graph_dir, im_root + '.gpickle')

    if not os.path.exists(graph_path):
        # logger1.info("  ", i, "DNE, skipping: " + str(graph_path))
        return
        # continue
    if verbose:
        logger1.info("mask_path: " + mask_path)
        logger1.info("graph_path: " + graph_path)

    mask = skimage.io.imread(mask_path)
    G_raw = nx.read_gpickle(graph_path)

    # see if it's empty
    if len(G_raw.nodes()) == 0:
        nx.write_gpickle(G_raw, out_file, protocol=pickle_protocol)
        return

    G = infer_travel_time(G_raw, mask, conv_dict,
                          min_z=min_z, dx=dx, dy=dy,
                          percentile=percentile,
                          use_totband=use_totband,
                          use_weighted_mean=use_weighted_mean,
                          variable_edge_speed=variable_edge_speed,
                          verbose=verbose)
    G = G.to_undirected()

    # save graph
    # logger1.info("Saving graph to directory: " + graph_dir)
    # out_file = os.path.join(graph_dir_out, image_name.split('.')[0] + '.gpickle')
    nx.write_gpickle(G, out_file, protocol=pickle_protocol)

    # save shapefile as well?
    if save_shapefiles:
        G_out = G  # ox.simplify_graph(G.to_directed())
        logger1.info("Saving shapefile to directory: {}".format(graph_dir_out))
        ox.save_graph_shapefile(G_out, filename=im_root, folder=graph_dir_out,
                                encoding='utf-8')