import geopandas as gpd
from shapely.geometry import Polygon


def get_bbox(polys, write=None, prj=None):
    '''
    Parameters
    - polys: GeoDataFrame or GeoSeries
    - write: output filename; if not given, the bounding box is not saved to file.
    - prj: CRS to assign to the bounding box; defaults to the CRS of polys.

    Returns
    - bbox: GeoDataFrame containing a single bounding-box polygon
    '''
    # total_bounds returns (minx, miny, maxx, maxy), i.e. (W, S, E, N)
    W, S, E, N = polys.total_bounds
    bbox = Polygon([(W, S), (W, N), (E, N), (E, S)])
    bbox = gpd.GeoDataFrame({'geometry': [bbox]})
    if prj:
        bbox.crs = prj
    else:
        bbox.crs = polys.crs
    if write:
        bbox.to_file(filename=write)
    return bbox
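# Hedged usage sketch (not part of the original snippet): assumes a
# hypothetical polygon layer 'parcels.shp' readable by geopandas.
polys = gpd.read_file('parcels.shp')
bbox = get_bbox(polys, write='parcels_bbox.shp')  # bbox inherits polys.crs
print(bbox.total_bounds)                          # same extent as polys.total_bounds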
import geopandas
from cartopy.io import shapereader
from shapely.geometry import Polygon
# nat_earth_resolution() and NE_CRS are module-level helpers (resolution string
# formatting and the default Natural Earth CRS, respectively).


def get_country_geometries(country_names=None, extent=None, resolution=10):
    """Return a GeoDataFrame with Natural Earth multipolygons of the specified
    countries, or the parts of those countries that lie within the specified
    extent. If no arguments are given, the whole Natural Earth dataset is
    returned.

    Take heed: WGS84 is assumed as the CRS unless the Natural Earth download
    utility from cartopy starts including the projection information. (They
    are saving a whopping 147 bytes by omitting it.) The same goes for UTF-8
    encoding.

    Parameters:
        country_names (list, optional): list of ISO3 country codes,
            e.g. ['ZWE', 'GBR', 'VNM', 'UZB']
        extent (tuple, optional): (min_lon, max_lon, min_lat, max_lat),
            assumed to be in the same CRS as the Natural Earth data.
        resolution (float, optional): 10, 50 or 110, i.e. the Natural Earth
            1:10m, 1:50m or 1:110m scale. Default: 10.

    Returns:
        GeoDataFrame
    """
    resolution = nat_earth_resolution(resolution)
    shp_file = shapereader.natural_earth(resolution=resolution,
                                         category='cultural',
                                         name='admin_0_countries')
    nat_earth = geopandas.read_file(shp_file, encoding='UTF-8')

    if not nat_earth.crs:
        nat_earth.crs = NE_CRS

    if country_names:
        if isinstance(country_names, str):
            country_names = [country_names]
        out = nat_earth[nat_earth.ISO_A3.isin(country_names)]

    elif extent:
        bbox = Polygon([
            (extent[0], extent[2]),
            (extent[0], extent[3]),
            (extent[1], extent[3]),
            (extent[1], extent[2])
        ])
        bbox = geopandas.GeoSeries(bbox)
        bbox.crs = nat_earth.crs
        bbox = geopandas.GeoDataFrame({'geometry': bbox})
        out = geopandas.overlay(nat_earth, bbox, how="intersection")

    else:
        out = nat_earth

    return out
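# Hedged usage sketch (not part of the original snippet): assumes the module
# defines nat_earth_resolution() and NE_CRS as used above, and that cartopy
# can download the Natural Earth data.
vietnam = get_country_geometries(country_names=['VNM'], resolution=110)
window = get_country_geometries(extent=(0.0, 20.0, 40.0, 60.0))  # lon/lat box
print(vietnam.ISO_A3.tolist(), len(window))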
import pickle

import geopandas
import matplotlib.pyplot
import pandas as pd
from shapely.geometry import Point, Polygon

# 'completed_rides' is assumed to be defined earlier in the script.
with open('completed.pkl', 'wb') as com_pickle:
    pickle.dump(completed_rides, com_pickle)

# Reload the pickled ride coordinates and turn them into a GeoSeries of Points.
with open('geo_pickle1.pkl', 'rb') as geo_pickle:
    rides = pickle.load(geo_pickle)
result = pd.concat(rides)
geo_result = geopandas.GeoSeries(result.apply(Point))

# Build a bounding-box polygon from the counties layer.
counties = geopandas.GeoSeries.from_file('counties.json')
counties_bounds = counties.total_bounds  # (minx, miny, maxx, maxy)
four_points = [(counties_bounds[0], counties_bounds[1]),
               (counties_bounds[0], counties_bounds[3]),
               (counties_bounds[2], counties_bounds[3]),
               (counties_bounds[2], counties_bounds[1])]
poly = Polygon(four_points)
# Note: a bare shapely Polygon carries no CRS (the original 'espg:4326'
# assignment was a typo and a no-op); the CRS belongs on the GeoSeries.
counties.crs = {'init': 'epsg:4326'}
geo_result.crs = {'init': 'epsg:4326'}

# Keep only the points inside the bounding box and plot them over the counties.
geo_result_1 = geo_result.where(geo_result.within(poly))
base = counties.plot(color='white', edgecolor='black')
geo_result_1.plot(ax=base, marker='o', color='red', markersize=1)
matplotlib.pyplot.show()
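# Hedged alternative (not part of the original snippet): GeoSeries.where()
# keeps the full index and fills non-matching rows with missing geometries,
# which geopandas may warn about when plotting. Boolean indexing drops those
# rows instead and plots only the points that fall inside the box.
geo_result_inside = geo_result[geo_result.within(poly)]
base = counties.plot(color='white', edgecolor='black')
geo_result_inside.plot(ax=base, marker='o', color='red', markersize=1)
matplotlib.pyplot.show()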
              'Multi-feature polygon detected. Only the first feature will be used to subset the GEDI data.')
        ROI = ROI.geometry[0]
    except:
        print('error: unable to read input geojson file or the file was not found')
        sys.exit(2)
else:
    # Bounding-box input: parse "ul_lat,ul_lon,lr_lat,lr_lon" into floats.
    ROI = ROI.replace("'", "")
    ROI = ROI.split(',')
    ROI = [float(r) for r in ROI]
    try:
        ROI = Polygon([(ROI[1], ROI[0]), (ROI[3], ROI[0]),
                       (ROI[3], ROI[2]), (ROI[1], ROI[2])])
        # Note: a shapely Polygon carries no CRS; it is set on the
        # GeoDataFrame below instead.
    except:
        print('error: unable to read input bounding box coordinates, the required format is: ul_lat,ul_lon,lr_lat,lr_lon')
        sys.exit(2)

# Keep the exact input geometry for the final clip to ROI
finalClip = gp.GeoDataFrame([1], geometry=[ROI], crs='EPSG:4326')

# Format and set input/working directory from user-defined arg
if args.dir[-1] != '/' and args.dir[-1] != '\\':
    inDir = args.dir.strip("'").strip('"') + os.sep
else:
    inDir = args.dir
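# Hedged sketch (not part of the original snippet): how 'finalClip' can be
# applied later to clip extracted GEDI shots to the exact ROI. 'gedi_gdf' is a
# hypothetical GeoDataFrame of shot locations in EPSG:4326.
gedi_clipped = gp.clip(gedi_gdf, finalClip)  # requires geopandas >= 0.7
print(len(gedi_gdf), '->', len(gedi_clipped), 'shots inside the ROI')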
# Assumed imports for this snippet: gpd (geopandas), Polygon (shapely),
# shapefile (pyshp), Proj/transform (pyproj), BBox/CRS (sentinelhub),
# and a configured `firebase` client.
def find_poly(user_analysis, Date_Ini, Date_Fin, shape_folder):
    if user_analysis == 'no':
        alldb = firebase.get('coordinatesUser/', None)
        pending = []
        for item in alldb.items():  # iterate over the user database
            usuario = item[0]
            for item_terreno in item[1].values():
                try:
                    if item_terreno['status'] == "Pendiente":
                        pending.append({"user": usuario,
                                        "terrain": item_terreno['uid'],
                                        "timestamp": item_terreno['timestamp'],
                                        "name": item_terreno['name']})
                except:
                    pass
        # Most recent pending request first
        pending = sorted(pending, key=lambda i: i['timestamp'], reverse=True)
        user_analysis = pending[0]['user'] + "/" + pending[0]['terrain']
        # name = pending[0]['name']

    '''
    if (Date_Ini == 'no'):
        request_date = firebase.get('coordinatesUser/'+user_analysis+'/timestamp', None)  # get the request date
        time_window = firebase.get('coordinatesUser/'+user_analysis+'/years', None)       # get the time window
        Date_Ini = time.strftime('%Y-%m-%d', time.gmtime((int(request_date)/1000) - (int(time_window))*31536000))
        Date_Fin = time.strftime('%Y-%m-%d', time.gmtime(int(request_date)/1000))
    '''

    result = firebase.get('/coordinatesUser/' + user_analysis + '/Coordenadas', None)
    name = firebase.get('coordinatesUser/' + user_analysis + '/name', None)  # get the plot (lote) name

    lote_aoi = Polygon(result)
    polygons = [Polygon(lote_aoi)]
    lote_aoi = gpd.GeoDataFrame(gpd.GeoSeries(poly for poly in polygons), columns=['geometry'])
    lote_aoi.crs = {'init': 'epsg:4326', 'no_defs': True}  # epsg:4326 is standard world coordinates

    # Convert to local projected coordinates (UTM zone 18N)
    lote_aoi_loc = lote_aoi.to_crs(32618)
    lote_aoi_loc["x"] = lote_aoi_loc.centroid.map(lambda p: p.x)
    lote_aoi_loc["y"] = lote_aoi_loc.centroid.map(lambda p: p.y)
    lote_aoi["x"] = float(lote_aoi.centroid.map(lambda p: p.x))  # single feature, so float() is safe
    lote_aoi["y"] = float(lote_aoi.centroid.map(lambda p: p.y))
    minx = float(lote_aoi_loc["x"]) - (767.5 * 10)  # 7675 m half-width box around the centroid
    maxx = float(lote_aoi_loc["x"]) + (767.5 * 10)
    miny = float(lote_aoi_loc["y"]) - (767.5 * 10)
    maxy = float(lote_aoi_loc["y"]) + (767.5 * 10)
    lote_aoi_loc["area"] = float(lote_aoi_loc.area)
    lote_aoi["area"] = lote_aoi_loc["area"]  # copy area from the local CRS (square metres)

    # Add the plot name and move it to the first column
    lote_aoi["name"] = name
    lote_aoi_loc["name"] = name
    cols = lote_aoi.columns.tolist()
    cols = cols[-1:] + cols[:-1]  # reorder column names
    lote_aoi = lote_aoi[cols]
    lote_aoi_loc = lote_aoi_loc[cols]

    # Write a shapefile for the bounding-box vertices
    analysis_area = user_analysis.split("/")[1]
    w = shapefile.Writer(shape_folder + analysis_area + '/big_box')
    w.field('name', 'C')
    w.poly([[[minx, miny], [minx, maxy], [maxx, maxy], [maxx, miny], [minx, miny]]])
    w.record('polygon')
    w.close()

    # Build the WGS84 bounding box from the same vertices, for cloud_finder
    inProj = Proj(init='epsg:32618')
    outProj = Proj(init='epsg:4326')
    x1, y1 = transform(inProj, outProj, minx, miny)
    x2, y2 = transform(inProj, outProj, maxx, maxy)
    bbox_coords_wgs84 = [x1, y2, x2, y1]
    bounding_box = BBox(bbox_coords_wgs84, crs=CRS.WGS84)

    return (lote_aoi, lote_aoi_loc, minx, maxx, miny, maxy, bounding_box,
            user_analysis, analysis_area, Date_Ini, Date_Fin)
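# Hedged note (not part of the original snippet): the Proj(init=...) /
# transform(...) pattern above is deprecated in pyproj >= 2. A sketch of the
# equivalent corner conversion with the modern Transformer API, assuming the
# same UTM 18N corners minx, miny, maxx, maxy:
from pyproj import Transformer

to_wgs84 = Transformer.from_crs("EPSG:32618", "EPSG:4326", always_xy=True)
x1, y1 = to_wgs84.transform(minx, miny)  # lower-left corner -> (lon, lat)
x2, y2 = to_wgs84.transform(maxx, maxy)  # upper-right corner -> (lon, lat)
bbox_coords_wgs84 = [x1, y2, x2, y1]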