import geopandas as gpd


def draw_polygon_by_bbox(bbox=[113.93306, 22.57437, 113.9383, 22.58037]):
    # extract the roads of interest
    from shapely.geometry import LineString
    coords = [bbox[:2], [bbox[0], bbox[3]], bbox[2:], [bbox[2], bbox[1]], bbox[:2]]
    area = gpd.GeoDataFrame([{'name': 'presentation area', 'geometry': LineString(coords)}])
    return area
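# Hypothetical usage of draw_polygon_by_bbox(): plot the bbox outline as a
# reference frame. The CRS assignment and plotting calls are assumptions, not
# part of the original snippet.
import matplotlib.pyplot as plt

area = draw_polygon_by_bbox()      # default bbox from the signature
area = area.set_crs(epsg=4326)     # assumed: the coordinates are WGS84 lon/lat
ax = area.plot(color='red', linewidth=1)
ax.set_title('presentation area')
plt.show()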
import os

import alphashape
import geopandas as gpd
import numpy as np
import pandas as pd

# Relies on module-level globals: dataset, location, patterns, lats, lons,
# thresh_dir, sortTime and getIndexLevel.


def generateStream():
    times = []
    dataChannel = {}
    dataWeather = {}
    days = ['extracts_{}'.format(location)]  # ['8','9','10','11','12','13','14']
    points = {}
    for c in patterns.keys():
        points[c] = []
    categories = []
    boundaries = []
    levels = []
    # lats = lats.to_numpy()
    # lons = lons.to_numpy()
    # for w in weathers
    for subdir in days:
        print(subdir)
        for rootDay, subdirDays, fileDays in os.walk("{}/{}/".format(dataset, subdir)):
            subdirDays = sorted(subdirDays, key=sortTime)
            for subdirDay in subdirDays:
                print(subdirDay)
                for c in patterns.keys():
                    for level in ["low", "normal", "high"]:
                        try:
                            dataChannel[c] = pd.read_csv(
                                "{}/{}/{}/{}{}_{}_data.csv".format(
                                    dataset, subdir, subdirDay, location, c, subdirDay),
                                header=None).to_numpy()
                            indexs = getIndexLevel(dataChannel[c], c, level)  # np.where(dataChannel[c] > thresholds[c][patterns[c]])
                            if indexs[0].size == 0:
                                continue
                            latsByPattern = lats[indexs]
                            lonsByPattern = lons[indexs]
                            points = np.dstack((lonsByPattern, latsByPattern))
                            boundary = alphashape.alphashape(points[0], 50.0)
                        except Exception:
                            continue
                        times.append(subdirDay)
                        categories.append(c)
                        boundaries.append(boundary)
                        levels.append(level)
        df = pd.DataFrame({
            'time': times,
            'category': categories,
            'boundary': boundaries,
            'value': levels
        })
        gdf = gpd.GeoDataFrame(df, geometry='boundary')
        gdf.to_csv('{}/Satellite_Events_Stream_Standard_in_{}.csv'.format(thresh_dir, subdir),
                   index=False)
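# Stand-alone sketch of the boundary-extraction step used in generateStream():
# turn a cloud of (lon, lat) points into an alpha-shape polygon and store it in
# a GeoDataFrame. The sample coordinates are made up for illustration.
import alphashape
import geopandas as gpd
import numpy as np

lons = np.array([113.93, 113.94, 113.95, 113.94, 113.935])
lats = np.array([22.57, 22.58, 22.57, 22.565, 22.575])
pts = np.dstack((lons, lats))[0]            # shape (n, 2), as in the function above
boundary = alphashape.alphashape(pts, 0.0)  # alpha=0 falls back to the convex hull
gdf = gpd.GeoDataFrame({'category': ['demo'], 'value': ['low']}, geometry=[boundary])
print(gdf)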
def df(self):
    if self._df is None:
        region_upper = self.config.region.upper()
        df = pd.read_csv(
            file_path(
                self.config, SUMMARY,
                f'{region_upper}_polygons_solution_{self.config.band}.csv'))
        geometry = [shapely.wkt.loads(x) for x in df['PolygonWKT_Geo'].values]
        self._df = gpd.GeoDataFrame(df, crs='EPSG:4326', geometry=geometry)
        self._df['sq_ft'] = self._df.geometry.apply(area_in_square_feet)
    return self._df[self.data_filter(self._df)]
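# The df() method above calls an area_in_square_feet helper that is not shown.
# A minimal sketch under assumptions: input geometries are WGS84 lon/lat, and an
# equal-area projection (EPSG:6933) is acceptable for the measurement.
import pyproj
from shapely.ops import transform

_to_equal_area = pyproj.Transformer.from_crs(
    'EPSG:4326', 'EPSG:6933', always_xy=True).transform


def area_in_square_feet(geom):
    # project to a metric equal-area CRS, then convert m^2 to ft^2
    return transform(_to_equal_area, geom).area * 10.7639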
import json

import geopandas as gpd
import pandas as pd
from shapely.geometry import shape


def read_datastore(ckan, rid, rows=10000):
    records = []
    # is_geospatial = False
    has_more = True
    while has_more:
        result = ckan.action.datastore_search(id=rid, limit=rows, offset=len(records))
        records += result["records"]
        has_more = len(records) < result["total"]
    df = pd.DataFrame(records).drop("_id", axis=1)
    if "geometry" in df.columns:
        df["geometry"] = df["geometry"].apply(lambda x: shape(json.loads(x)))
        df = gpd.GeoDataFrame(df, crs="epsg:4326")
    return df, [x for x in result["fields"] if x["id"] != "_id"]
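# Hypothetical call to read_datastore(); the portal URL and resource id are
# placeholders, and ckanapi.RemoteCKAN is assumed as the client providing the
# .action.datastore_search call used above.
from ckanapi import RemoteCKAN

ckan = RemoteCKAN("https://example-open-data-portal.org")  # placeholder URL
df, fields = read_datastore(ckan, rid="<resource-id>")     # placeholder resource id
print(df.shape, [f["id"] for f in fields])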
buffered_nodes = gdf_nodes.buffer(tolerance).unary_union
if isinstance(buffered_nodes, Polygon):
    # if only a single node results, make it iterable so we can turn it into
    # a GeoSeries
    buffered_nodes = [buffered_nodes]

# get the centroids of the merged intersection polygons
unified_intersections = gpd.GeoSeries(list(buffered_nodes))
intersection_centroids = unified_intersections.centroid
# return intersection_centroids
# (end of osmnx.simplify.clean_intersections function)

# name the series
intersection_centroids.name = "centroid"

# joining the nodes to their buffers
gdf_buffers = gpd.GeoDataFrame(intersection_centroids, geometry=unified_intersections)
gdf_buffers.crs = gdf_nodes.crs  # for some reason the coordinate system gets lost
gdf_nodes_joined = gpd.sjoin(gdf_nodes, gdf_buffers, how="left", op="within")

# change the geometry of the nodes to the centroids
gdf_nodes_joined = gdf_nodes_joined.set_geometry("centroid")
gdf_nodes_joined = gdf_nodes_joined.drop(columns=["geometry"])
# gdf_nodes_joined.to_file(filename="test.shp")  # export the merged nodes as a shapefile
# (to verify a reasonable tolerance value is selected)

# now update the node ids on the edges
gdf_edges = ox.graph_to_gdfs(G, nodes=False)
# on the edges table: the to_node column is called "u"; the from_node column is "v"
# on the nodes table: old node_id is "osmid"; new node_id is "index_right"
# first join wrt the to_nodes
gdf_edges_joined = gdf_edges.merge(gdf_nodes_joined,
from shapely import geometry
import geopandas as gpd
import urllib
import matplotlib.pyplot as plt
# from main import *
from PIL import Image

route = gpd.read_file("../output/for_Presentation_留仙洞.geojson")
points = gpd.GeoDataFrame(route[['RID']].merge(DB_panos, on='RID'))
# points.query("DIR != 0").reset_index().to_file('../output/points_liuxiandong_presetation.geojson', driver="GeoJSON")
points.query("DIR != 0", inplace=True)
points.info()


def draw_polygon_by_bbox(bbox=[113.93306, 22.57437, 113.9383, 22.58037]):
    # extract the roads of interest
    from shapely.geometry import LineString
    coords = [bbox[:2], [bbox[0], bbox[3]], bbox[2:], [bbox[2], bbox[1]], bbox[:2]]
    area = gpd.GeoDataFrame([{'name': 'presentation area', 'geometry': LineString(coords)}])
    return area


def get_staticimage(id, heading, folder=pano_dir):
    file_name = f"{folder}/{id}.jpg"
    if os.path.exists(file_name):
plt.plot(matrix[0], matrix[1], marker='o')
for index, coord in enumerate(matrix[0]):
    plt.text(coord, matrix[1][index], str(index))

meters = 100
x_original_point = 41.4352
y_x_original_point = 34.5555
mx = x * meters + x_original_point
my = y * -meters + y_x_original_point
mxy = list(zip(mx, my))

picture_df = gpd.GeoDataFrame({"id": range(0, len(mxy))}, crs="EPSG:3857",
                              geometry=[Point(resu) for resu in mxy])
# picture_df['geometry'] = picture_df['geometry'].to_crs(epsg=4326)
picture_df.to_file("route.geojson", driver='GeoJSON', encoding='utf-8')

SERVICE = 'https://router.hereapi.com/v8/routes?apiKey=RUT4mbP2bY7ndeADMB8NZJyKfCPZMobqePrHQc6KgpQ&transportMode=pedestrian&return=polyline'

file = open('route.geojson')
data = geojson.load(file).copy()
file.close()
coords_list = [feature['geometry']['coordinates'] for feature in data['features']]
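# A sketch of how the SERVICE endpoint might be queried for one leg of the
# route: HERE's v8 routes API expects origin/destination as "lat,lng", while
# GeoJSON coordinates are [lon, lat]. The requests usage is an assumption, not
# part of the original snippet.
import requests

origin = coords_list[0]        # [lon, lat]
destination = coords_list[-1]  # [lon, lat]
url = "{}&origin={},{}&destination={},{}".format(
    SERVICE, origin[1], origin[0], destination[1], destination[0])
response = requests.get(url)
routes = response.json().get("routes", [])
print(len(routes), "route(s) returned")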
def clean_intersections_graph(G, tolerance=15, dead_ends=False):
    """
    Clean-up intersections comprising clusters of nodes by merging them and
    returning a modified graph.

    Divided roads are represented by separate centerline edges. The
    intersection of two divided roads thus creates 4 nodes, representing where
    each edge intersects a perpendicular edge. These 4 nodes represent a single
    intersection in the real world. This function cleans them up by buffering
    their points to an arbitrary distance, merging overlapping buffers, and
    taking their centroid. For best results, the tolerance argument should be
    adjusted to approximately match street design standards in the specific
    street network.

    Parameters
    ----------
    G : networkx multidigraph
    tolerance : float
        nodes within this distance (in graph's geometry's units) will be
        dissolved into a single intersection
    dead_ends : bool
        if False, discard dead-end nodes to return only street-intersection
        points

    Returns
    ----------
    Networkx graph with the new aggregated vertices and induced edges
    """
    # if dead_ends is False, discard dead-end nodes to only work with edge
    # intersections
    if not dead_ends:
        if 'streets_per_node' in G.graph:
            streets_per_node = G.graph['streets_per_node']
        else:
            streets_per_node = count_streets_per_node(G)
        dead_end_nodes = [node for node, count in streets_per_node.items() if count <= 1]
        G = G.copy()
        G.remove_nodes_from(dead_end_nodes)

    # create a GeoDataFrame of nodes, buffer to passed-in distance, merge
    # overlaps
    gdf_nodes, gdf_edges = graph_to_gdfs(G)

    # Buffer points by tolerance and union the overlapping ones
    buffered_nodes = gdf_nodes.buffer(tolerance).unary_union
    if isinstance(buffered_nodes, Polygon):
        # if only a single node results, make it iterable so we can turn it
        # into a GeoSeries
        buffered_nodes = [buffered_nodes]

    unified_intersections = gpd.GeoSeries(list(buffered_nodes))
    unified_gdf = gpd.GeoDataFrame(unified_intersections).rename(
        columns={0: 'geometry'}).set_geometry('geometry')
    unified_gdf.crs = gdf_nodes.crs

    ### Merge original nodes with the aggregated shapes
    intersections = gpd.sjoin(gdf_nodes, unified_gdf, how="right", op='intersects')
    intersections['geometry_str'] = intersections['geometry'].map(lambda x: str(x))
    intersections['new_osmid'] = intersections.groupby(
        'geometry_str')['index_left'].transform('min').astype(str)
    intersections['num_osmid_agg'] = intersections.groupby(
        'geometry_str')['index_left'].transform('count')

    ### Create temporary lookup with the agg osmid and the new one
    lookup = intersections[intersections['num_osmid_agg'] > 1][
        ['osmid', 'new_osmid', 'num_osmid_agg']]
    lookup = lookup.rename(columns={'osmid': 'old_osmid'})
    intersections = intersections[intersections['osmid'].astype(str) ==
                                  intersections['new_osmid']]
    intersections = intersections.set_index('index_left')

    ### Make everything else similar to original node df
    intersections = intersections[gdf_nodes.columns]
    intersections['geometry'] = intersections.geometry.centroid
    intersections['x'] = intersections.geometry.x
    intersections['y'] = intersections.geometry.y
    del intersections.index.name
    intersections.gdf_name = gdf_nodes.gdf_name

    # Replace aggregated osmid with the new ones
    # 3 cases - 1) none in lookup, 2) either u or v in lookup, 3) u and v in lookup
    # Ignore case 1. Append case 3 to case 2. Ignore distance but append linestring.
    # removed .astype(str) from merger after u and v
    agg_gdf_edges = pd.merge(
        gdf_edges.assign(u=gdf_edges.u),
        lookup.rename(columns={
            'new_osmid': 'new_osmid_u',
            'old_osmid': 'old_osmid_u'
        }),
        left_on='u',
        right_on='old_osmid_u',
        how='left')

    agg_gdf_edges = pd.merge(
        agg_gdf_edges.assign(v=agg_gdf_edges.v),
        lookup.rename(columns={
            'new_osmid': 'new_osmid_v',
            'old_osmid': 'old_osmid_v'
        }),
        left_on='v',
        right_on='old_osmid_v',
        how='left')

    # Remove all u-v edges that are between the nodes that are aggregated
    # together (case 3)
    agg_gdf_edges_c3 = agg_gdf_edges[(
        (agg_gdf_edges['new_osmid_v'].notnull()) &
        (agg_gdf_edges['new_osmid_u'].notnull()) &
        (agg_gdf_edges['new_osmid_u'] == agg_gdf_edges['new_osmid_v']))]
    agg_gdf_edges = agg_gdf_edges[~agg_gdf_edges.index.isin(agg_gdf_edges_c3.index)]

    # Create a self loop containing all the joint geometries of the aggregated
    # nodes where both u and v are aggregated. Set oneway to False to prevent
    # duplication if someone were to create bidirectional edges
    agg_gdf_edges_int = agg_gdf_edges_c3[~(
        (agg_gdf_edges_c3['new_osmid_u'] == agg_gdf_edges_c3['u']) |
        (agg_gdf_edges_c3['new_osmid_v'] == agg_gdf_edges_c3['v']))]
    agg_gdf_edges_int = agg_gdf_edges_int.dissolve(
        by=['new_osmid_u', 'new_osmid_v']).reset_index()
    agg_gdf_edges_int['u'] = agg_gdf_edges_int['new_osmid_u']
    agg_gdf_edges_int['v'] = agg_gdf_edges_int['new_osmid_v']
    agg_gdf_edges_int = agg_gdf_edges_int[gdf_edges.columns]
    agg_gdf_edges_int['oneway'] = False

    # Simplify by removing edges that do not involve the chosen agg point;
    # at least one of them must contain the new u or new v
    agg_gdf_edges_c3 = agg_gdf_edges_c3[
        (agg_gdf_edges_c3['new_osmid_u'] == agg_gdf_edges_c3['u']) |
        (agg_gdf_edges_c3['new_osmid_v'] == agg_gdf_edges_c3['v'])]
    agg_gdf_edges_c3 = agg_gdf_edges_c3[[
        'geometry', 'u', 'v', 'new_osmid_u', 'new_osmid_v'
    ]]
    agg_gdf_edges_c3.columns = [
        'old_geometry', 'old_u', 'old_v', 'new_osmid_u', 'new_osmid_v'
    ]

    # Merge back the linestring for case 2
    # Ignore u and v if they are on the merging / agg node
    # Copy over the linestring only on the old node
    subset_gdf = agg_gdf_edges_c3[
        agg_gdf_edges_c3['new_osmid_v'] != agg_gdf_edges_c3['old_v']]
    agg_gdf_edges = pd.merge(agg_gdf_edges,
                             subset_gdf[['old_geometry', 'old_v']],
                             how='left',
                             left_on='u',
                             right_on='old_v')

    geom = agg_gdf_edges[['geometry', 'old_geometry']].values.tolist()
    agg_gdf_edges['geometry'] = [
        linemerge([r[0], r[1]])
        if isinstance(r[1], (LineString, MultiLineString)) else r[0]
        for r in geom
    ]
    agg_gdf_edges.drop(['old_geometry', 'old_v'], axis=1, inplace=True)

    # If new osmid matches on u, merge in the existing u-v string
    # where u is the aggregated vertex and v is the old one to be removed
    subset_gdf = agg_gdf_edges_c3[
        agg_gdf_edges_c3['new_osmid_u'] != agg_gdf_edges_c3['old_u']]
    agg_gdf_edges = pd.merge(agg_gdf_edges,
                             subset_gdf[['old_geometry', 'old_u']],
                             how='left',
                             left_on='v',
                             right_on='old_u')

    geom = agg_gdf_edges[['geometry', 'old_geometry']].values.tolist()
    agg_gdf_edges['geometry'] = [
        linemerge([r[0], r[1]])
        if isinstance(r[1], (LineString, MultiLineString)) else r[0]
        for r in geom
    ]
    agg_gdf_edges.drop(['old_geometry', 'old_u'], axis=1, inplace=True)

    agg_gdf_edges['u'] = np.where(agg_gdf_edges['new_osmid_u'].notnull(),
                                  agg_gdf_edges['new_osmid_u'],
                                  agg_gdf_edges['u'])
    agg_gdf_edges['v'] = np.where(agg_gdf_edges['new_osmid_v'].notnull(),
                                  agg_gdf_edges['new_osmid_v'],
                                  agg_gdf_edges['v'])

    agg_gdf_edges = agg_gdf_edges[gdf_edges.columns]
    agg_gdf_edges = gpd.GeoDataFrame(pd.concat([agg_gdf_edges, agg_gdf_edges_int],
                                               ignore_index=True),
                                     crs=agg_gdf_edges.crs)
    agg_gdf_edges['u'] = agg_gdf_edges['u'].astype(np.int64)
    agg_gdf_edges['v'] = agg_gdf_edges['v'].astype(np.int64)

    return gdfs_to_graph(intersections, agg_gdf_edges)
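# Hypothetical usage of clean_intersections_graph(); assumes the OSMnx helpers
# used above (graph_to_gdfs, gdfs_to_graph, count_streets_per_node) are
# importable, and that the graph is projected so the tolerance is in metres.
import osmnx as ox

G = ox.graph_from_place("Piedmont, California, USA", network_type="drive")
G_proj = ox.project_graph(G)
G_clean = clean_intersections_graph(G_proj, tolerance=15, dead_ends=False)
print(len(G_proj.nodes()), "->", len(G_clean.nodes()), "nodes after merging")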