def main(argv):
    """Go Main"""
    huc12 = argv[1]
    pgconn = psycopg2.connect(database='idep')
    df = read_postgis("""
        SELECT ST_Transform(geom, 4326) as geo from huc12
        where scenario = 0 and huc_12 = %s
    """, pgconn, params=(huc12, ), geom_col='geo')
    row = df.iloc[0]
    (fig, ax) = plt.subplots(1, 1, figsize=(10.24, 7.68),
                             subplot_kw=dict(projection=ShadedReliefESRI().crs))
    padding = 0.01
    for poly in row['geo']:
        bnds = poly.bounds
        ax.set_extent([bnds[0] - padding, bnds[2] + padding,
                       bnds[1] - padding, bnds[3] + padding])
        ax.add_geometries([poly], ccrs.PlateCarree(), edgecolor='k',
                          facecolor='None', lw=2)
    df = read_postgis("""
        SELECT ST_Transform(geom, 4326) as geo, fpath from flowpaths
        where scenario = 0 and huc_12 = %s
    """, pgconn, params=(huc12, ), geom_col='geo', index_col='fpath')
    for fpath, row in df.iterrows():
        points = row['geo'].xy
        ax.plot(points[0], points[1], color='k', lw=2.,
                transform=ccrs.PlateCarree())
    # ax.add_image(GoogleTiles(), 4)
    print(ax.get_extent())
    fig.savefig('test.png')
def main():
    """Go """
    pgconn = get_dbconn('coop', user='******')
    df = gpd.read_postgis("""
    WITH data as (
        SELECT station, year, avg((high+low)/2.) as avg_temp,
        sum(precip) as tot_precip, sum(snow) as tot_snow
        from alldata_ia where year >= 1998 and year < 2016
        and station != 'IA0000' and substr(station, 3, 1) != 'C'
        GROUP by station, year),
    agg as (
        SELECT * from data WHERE tot_snow > 0)
    select a.*, t.geom from agg a JOIN stations t on (a.station = t.id)
    WHERE t.network = 'IACLIMATE'
    """, pgconn, geom_col='geom', index_col=None)
    df['cycles'] = 0
    for station in tqdm.tqdm(df['station'].unique()):
        uri = ("http://iem.local/plotting/auto/plot/121/network:IACLIMATE::"
               "station:%s::thres1:30-32::dpi:100.csv") % (station, )
        req = requests.get(uri)
        # req.text is the decoded str; req.content (bytes) would break StringIO
        ldf = pd.read_csv(StringIO(req.text), index_col='year')
        ldf['total'] = ldf['30-32f'] + ldf['30-32s']
        for year in range(1998, 2016):
            val = ldf.loc[year]['total']
            df.loc[((df['year'] == year) &
                    (df['station'] == station)), 'cycles'] = val
    for year in range(1998, 2016):
        df2 = df[df['year'] == year]
        df2.to_file('iowaclimate_%s.shp' % (year, ))
        shutil.copyfile('/mesonet/data/gis/meta/4326.prj',
                        'iowaclimate_%s.prj' % (year, ))
def main():
    """Go Main"""
    pgconn = get_dbconn('postgis')
    df = read_postgis("""
        select geom, issue from bot_warnings where wfo = 'PUB'
    """, pgconn, geom_col='geom',
        crs={'init': 'epsg:4326', 'no_defs': True})
    bounds = df['geom'].total_bounds
    # bounds = [-102.90293903, 40.08745967, -97.75622311, 43.35172981]
    bbuf = 0.25
    mp = MapPlot(sector='custom',
                 west=bounds[0] - bbuf, south=bounds[1] - bbuf,
                 east=bounds[2] + bbuf, north=bounds[3] + bbuf,
                 continentalcolor='white',
                 title='Bot Issued Tornado Warnings [2008-2018] for PUB',
                 subtitle='%s warnings plotted' % (len(df.index), ))
    crs_new = ccrs.Mercator()
    crs = ccrs.PlateCarree()
    new_geometries = [crs_new.project_geometry(ii, src_crs=crs)
                      for ii in df['geom'].values]
    mp.draw_cwas()
    mp.ax.add_geometries(new_geometries, crs=crs_new,
                         edgecolor='r', facecolor='None', alpha=1.,
                         lw=0.5, zorder=10)
    mp.postprocess(filename='test.png')
def main():
    """Go Main"""
    pgconn = get_dbconn('postgis')
    df = read_postgis("""
        select geom, issue from sbw where wfo = 'PUB' and phenomena = 'TO'
        and significance = 'W' and status = 'NEW'
        and issue > '2007-10-01' and issue < '2019-01-01'
    """, pgconn, geom_col='geom',
        crs={'init': 'epsg:4326', 'no_defs': True})
    bounds = df['geom'].total_bounds
    # bounds = [-102.90293903, 40.08745967, -97.75622311, 43.35172981]
    bbuf = 0.25
    mp = MapPlot(
        sector='custom',
        west=bounds[0] - bbuf, south=bounds[1] - bbuf,
        east=bounds[2] + bbuf, north=bounds[3] + bbuf,
        continentalcolor='white',  # '#b3242c',
        title='NWS Pueblo Issued Tornado Warnings [2008-2018]',
        subtitle='%s warnings plotted' % (len(df.index), ))
    crs_new = ccrs.Mercator()
    crs = ccrs.PlateCarree()
    new_geometries = [crs_new.project_geometry(ii, src_crs=crs)
                      for ii in df['geom'].values]
    # mp.draw_cwas()
    mp.ax.add_geometries(new_geometries, crs=crs_new, lw=0.5,
                         edgecolor='red', facecolor='None', alpha=1,
                         zorder=5)
    mp.drawcounties()
    mp.postprocess(filename='test.png')
def main():
    """Go Main Go."""
    pgconn = get_dbconn('idep')
    df = read_postgis("""
        SELECT st_transform(geom, 4326) as geom, huc_12, fpath
        from flowpaths WHERE scenario = 0
    """, pgconn)
    df.to_file("flowpaths.shp")
def get_geodataframe(queryset, modification=None, crs=austria_mgd):
    query = queryset.query.sql_with_params()
    if modification:
        query = (modification, query[1])
    return geopandas.read_postgis(query[0], connection, geom_col='geometry',
                                  params=query[1], index_col='id', crs=crs)
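# --- Usage sketch (not from the source) for get_geodataframe() above. It
# expects a Django queryset: queryset.query.sql_with_params() is the Django
# ORM call that yields the (sql, params) pair handed to read_postgis(). The
# Parcel model and its field names below are hypothetical.
qs = Parcel.objects.filter(in_use=True)
gdf = get_geodataframe(qs)
# A hand-written SQL string can replace the compiled one while reusing the
# queryset's bound parameters (placeholder count must still match):
gdf = get_geodataframe(
    qs, modification='SELECT id, geometry FROM app_parcel WHERE in_use = %s')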
def test_read_postgis_default(self):
    con = connect('test_geopandas')
    if con is None or not create_db(self.df):
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        con.close()
    validate_boro_df(df)
def export_csv(form):
    """ Creates CSV file(s) in the export/csv folder, filename begins with
    current date."""
    import pandas.io.sql as psql
    import geopandas as gpd
    date_string = DateMapper.current_date_for_filename()
    path = app.config['EXPORT_FOLDER_PATH'] + '/csv/'
    if form.zip.data:
        path = '/tmp/' + date_string + '_openatlas_csv_export'
        if os.path.exists(path):
            shutil.rmtree(path)  # pragma: no cover
        os.makedirs(path)
    tables = {
        'model_class': ['id', 'name', 'code'],
        'model_class_inheritance': ['id', 'super_code', 'sub_code'],
        'model_entity': ['id', 'name', 'description', 'class_code'],
        'model_link': ['id', 'property_code', 'domain_id', 'range_id',
                       'description'],
        'model_link_property': ['id', 'property_code', 'domain_id',
                                'range_id'],
        'model_property': ['id', 'code', 'range_class_code',
                           'domain_class_code', 'name', 'name_inverse'],
        'model_property_inheritance': ['id', 'super_code', 'sub_code'],
        'gis_point': ['id', 'entity_id', 'name', 'description', 'type'],
        'gis_polygon': ['id', 'entity_id', 'name', 'description', 'type']}
    for table, fields in tables.items():
        if getattr(form, table).data:
            if form.timestamps.data:
                fields.append('created')
                fields.append('modified')
            if table in ['gis_point'] and form.gis_format.data == 'coordinates':
                fields.append("ST_X(geom) || ' ' || ST_Y(geom) AS coordinates")
            if table in ['gis_polygon'] and form.gis_format.data == 'coordinates':
                fields.append("""
                    ST_X(public.ST_PointOnSurface(geom)) || ' ' ||
                    ST_Y(public.ST_PointOnSurface(geom)) AS polygon_center_point""")
            elif table in ['gis_point', 'gis_polygon']:
                fields.append('geom')
            sql = "SELECT {fields} FROM {table};".format(
                fields=','.join(fields), table=table.replace('_', '.', 1))
            if table in ['gis_point', 'gis_polygon'] and form.gis_format.data == 'wkt':
                data_frame = gpd.read_postgis(sql, g.db)
            else:
                data_frame = psql.read_sql(sql, g.db)
            file_path = path + '/{date}_{name}.csv'.format(date=date_string,
                                                           name=table)
            data_frame.to_csv(file_path, index=False)
    if form.zip.data:
        info = 'CSV export from: {host}\n'.format(host=request.headers['Host'])
        info += 'Created: {date} by {user}\nOpenAtlas version: {version}'.format(
            date=date_string, user=current_user.username,
            version=app.config['VERSION'])
        with open(path + '/info.txt', "w") as file:
            print(info, file=file)
        zip_file = app.config['EXPORT_FOLDER_PATH'] + '/csv/' + date_string + '_csv'
        shutil.make_archive(zip_file, 'zip', path)
        shutil.rmtree(path)
    return
def test_read_postgis_default(self):
    con = tests.util.connect('test_geopandas')
    if con is None or not tests.util.create_db(self.df):
        raise unittest.case.SkipTest()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        con.close()
    tests.util.validate_boro_df(self, df)
def do_polygon(ctx):
    """polygon workflow"""
    pgconn = get_dbconn('postgis')
    griddelta = 0.02
    west = -134
    north = 49.5
    south = 24.5
    east = -60.1
    lons = np.arange(west, east, griddelta)
    lats = np.arange(south, north, griddelta)
    YSZ = len(lats)
    XSZ = len(lons)
    lons, lats = np.meshgrid(lons, lats)
    affine = Affine(griddelta, 0., west, 0., 0 - griddelta, north)
    ones = np.ones((int(YSZ), int(XSZ)))
    counts = np.zeros((int(YSZ), int(XSZ)))
    df = read_postgis("""
    WITH data as (
        SELECT ST_Forcerhr(ST_Buffer(geom, 0.0005)) as geom,
        rank() OVER (PARTITION by wfo, eventid, extract(year from updated)
                     ORDER by updated)
        from sbw where phenomena = 'FF' and is_emergency)
    select * from data where rank = 1
    """, pgconn, geom_col='geom', index_col=None)
    # print df, sts, ets, west, east, south, north
    zs = zonal_stats(df['geom'], ones, affine=affine, nodata=-1,
                     all_touched=True, raster_out=True)
    for i, z in enumerate(zs):
        aff = z['mini_raster_affine']
        mywest = aff.c
        mynorth = aff.f
        raster = np.flipud(z['mini_raster_array'])
        x0 = int((mywest - west) / griddelta)
        y1 = int((mynorth - south) / griddelta)
        dy, dx = np.shape(raster)
        x1 = x0 + dx
        y0 = y1 - dy
        if x0 < 0 or x1 >= XSZ or y0 < 0 or y1 >= YSZ:
            # print raster.mask.shape, west, x0, x1, XSZ, north, y0, y1, YSZ
            continue
        counts[y0:y1, x0:x1] += np.where(raster.mask, 0, 1)
    maxv = np.max(counts)
    if maxv < 8:
        bins = np.arange(1, 8, 1)
    else:
        bins = np.linspace(1, maxv + 3, 10, dtype='i')
    ctx['bins'] = bins
    ctx['data'] = counts
    ctx['lats'] = lats
    ctx['lons'] = lons
def dohuc(huc):
    """Do what we need to do for this huc"""
    cursor = PGCONN.cursor()
    zdbf = Dbf5("zst%s.dbf" % (huc, ), codec='utf-8')
    zst = zdbf.to_dataframe()
    zst.columns = ['value', 'count', 'area', 'max', 'fpath', 'gridorder']
    zst.sort_values(['fpath', 'gridorder'], inplace=True, ascending=True)
    # print(zst)
    df = read_postgis("""
        SELECT fpath, fid, st_transform(geom, 4326) as geo, huc_12
        from flowpaths where huc_12 = %s and scenario = 0
    """, PGCONN, params=(huc, ), geom_col='geo', index_col='fpath')
    for col in ['F0_lon', 'F0_lat', 'F0_elev']:
        df[col] = None
    for gorder in range(1, 7):
        df['G%s_elev' % (gorder, )] = None
        df['G%s_len' % (gorder, )] = None
        df['G%s_contribarea' % (gorder, )] = None
    for ofe in range(1, 18):
        df['ofe%s_pos' % (ofe, )] = None
    for fpath, row in df.iterrows():
        # 1) lat/lon of 'F0' first point
        (df.at[fpath, 'F0_lon'],
         df.at[fpath, 'F0_lat']) = np.asarray(row['geo'].xy)[:, 0]
        # 3) elevation of F0 and G1 through G6 nodes
        for gorder in range(1, 7):
            # Contributing area
            df.at[fpath, 'G%s_contribarea' % (gorder, )] = find_ca(
                zst, fpath, gorder)
            cursor.execute("""
                select max(elevation), min(elevation), max(length)
                from flowpaths p JOIN flowpath_points t
                    on (p.fid = t.flowpath)
                where p.scenario = %s and huc_12 = %s and fpath = %s
            """, (SCEN2CODE[gorder], huc, fpath))
            row2 = cursor.fetchone()
            df.at[fpath, 'F0_elev'] = row2[0]  # overwrite each time
            df.at[fpath, 'G%s_elev' % (gorder, )] = row2[1]
            # 4) horizontal distance from F0 to G1-G6
            df.at[fpath, 'G%s_len' % (gorder, )] = row2[2]
        # 5) OFE positions along this path
        slpfn = "/i/%s/slp/%s/%s/%s_%s.slp" % (SCEN2CODE[6], huc[:8],
                                               huc[8:], huc, fpath)
        lines = open(slpfn).readlines()
        ofes = int(lines[5])
        pos = 0
        for ofe, ln in enumerate(range(7, 7 + ofes * 2, 2)):
            pos += float(lines[ln].split()[1])
            df.at[fpath, "ofe%s_pos" % (ofe + 1, )] = pos
    del df['geo']
    del df['fid']
    # 6) Generic properties, perhaps can ignore?
    return df
def test_read_postgis_custom_geom_col(self):
    con = connect('test_geopandas')
    geom_col = "the_geom"
    if con is None or not create_postgis(self.df, geom_col=geom_col):
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con, geom_col=geom_col)
    finally:
        con.close()
    validate_boro_df(df)
def test_read_postgis_custom_geom_col(self):
    con = tests.util.connect('test_geopandas')
    if con is None or not tests.util.create_db(self.df):
        raise unittest.case.SkipTest()
    try:
        sql = """SELECT borocode, boroname, shape_leng, shape_area,
                 geom AS __geometry__ FROM nybb;"""
        df = read_postgis(sql, con, geom_col='__geometry__')
    finally:
        con.close()
    tests.util.validate_boro_df(self, df)
def test_read_postgis_default(self):
    con = tests.util.connect('test_geopandas')
    if con is None or not tests.util.create_db(self.df):
        raise unittest.case.SkipTest()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        if PANDAS_NEW_SQL_API:
            # It's not really a connection, it's an engine
            con = con.connect()
        con.close()
    tests.util.validate_boro_df(self, df)
def test_read_postgis_binary(self, df_nybb):
    """Tests that geometry read as binary is accepted."""
    try:
        con = connect_spatialite()
    except Exception:
        raise pytest.skip()
    else:
        geom_col = df_nybb.geometry.name
        create_spatialite(con, df_nybb)
        sql = ('SELECT ogc_fid, borocode, boroname, shape_leng, shape_area, '
               'ST_AsBinary("{0}") AS "{0}" FROM nybb').format(geom_col)
        df = read_postgis(sql, con, geom_col=geom_col)
        validate_boro_df(df)
    finally:
        if 'con' in locals():
            con.close()
def test_read_postgis_custom_geom_col(self):
    con = connect('test_geopandas')
    if con is None or not create_db(self.df):
        raise pytest.skip()
    try:
        sql = """SELECT borocode, boroname, shape_leng, shape_area,
                 geom AS __geometry__ FROM nybb;"""
        df = read_postgis(sql, con, geom_col='__geometry__')
    finally:
        con.close()
    validate_boro_df(df)
def test_read_postgis_default(self):
    con = connect('test_geopandas')
    if con is None or not create_postgis(self.df):
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        con.close()
    validate_boro_df(df)
    # no crs defined on the created geodatabase, and none specified
    # by user; should not be set to 0, as from get_srid failure
    assert df.crs is None
def geoselect(self, sql, geom_col="geom"):
    """ Select PostgreSQL / PostGIS data into a GeoPandas GeoDataFrame. """
    if self.debug is True:
        self.info("Geoselect - %s" % sql)
    gdf = gpd.read_postgis(sql, self.con, geom_col=geom_col)
    if len(gdf) == 0:
        self.warning("... Empty table")
    if self.debug is True:
        self.info("... Query executed")
    return gdf
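# --- Usage sketch (not from the source) for geoselect() above, assuming a
# wrapper class that holds an open connection in self.con plus info()/warning()
# logging helpers; the PgTool name and its constructor are hypothetical.
db = PgTool(conn_string='dbname=gis user=reader')
roads = db.geoselect("SELECT id, name, geom FROM roads", geom_col='geom')
print(len(roads), roads.geometry.name)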
def test_read_postgis_override_srid(self):
    """Tests that a user specified CRS overrides the geodatabase SRID."""
    orig_crs = self.df.crs
    created = create_postgis(self.df, srid=4269)
    con = connect('test_geopandas')
    if con is None or not created:
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con, crs=orig_crs)
    finally:
        con.close()
    validate_boro_df(df)
    assert df.crs == orig_crs
def test_append_before_table_exists(self, engine_postgis, df_nybb):
    """
    Tests that insert works with if_exists='append' when table does not
    exist yet.
    """
    engine = engine_postgis
    table = "nybb"
    # If table exists, delete it before trying to write with defaults
    drop_table_if_exists(engine, table)
    write_postgis(df_nybb, con=engine, name=table, if_exists="append")
    # Check that the row order matches
    sql = "SELECT * FROM {table};".format(table=table)
    df = read_postgis(sql, engine, geom_col="geometry")
    validate_boro_df(df)
def test_read_postgis_binary(self):
    """Tests that geometry read as binary is accepted."""
    try:
        con = connect_spatialite()
    except Exception:
        raise pytest.skip()
    else:
        geom_col = self.df.geometry.name
        create_spatialite(con, self.df)
        sql = ('SELECT ogc_fid, borocode, boroname, shape_leng, shape_area, '
               'ST_AsBinary("{0}") AS "{0}" FROM nybb').format(geom_col)
        df = read_postgis(sql, con, geom_col=geom_col)
        validate_boro_df(df)
    finally:
        if 'con' in locals():
            con.close()
def test_read_postgis_select_geom_as(self):
    """Tests that a SELECT {geom} AS {some_other_geom} works."""
    con = connect('test_geopandas')
    orig_geom = "geom"
    out_geom = "the_geom"
    if con is None or not create_postgis(self.df, geom_col=orig_geom):
        raise pytest.skip()
    try:
        sql = """SELECT borocode, boroname, shape_leng, shape_area,
                 {} as {} FROM nybb;""".format(orig_geom, out_geom)
        df = read_postgis(sql, con, geom_col=out_geom)
    finally:
        con.close()
    validate_boro_df(df)
def test_read_postgis_override_srid(self):
    """Tests that a user specified CRS overrides the geodatabase SRID."""
    orig_crs = self.df.crs
    created = create_postgis(self.df, srid=4269)
    con = connect('test_geopandas')
    if con is None or not created:
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con, crs=orig_crs)
    finally:
        con.close()
    validate_boro_df(df)
    assert df.crs == orig_crs
def test_write_postgis_replace_when_table_exists(self, engine_postgis,
                                                 df_nybb):
    """
    Tests that replacing a table is possible when: if_replace='replace'.
    """
    engine = engine_postgis
    table = "nybb"
    # Ensure table exists
    write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
    # Overwrite
    write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
    # Validate
    sql = "SELECT * FROM {table};".format(table=table)
    df = read_postgis(sql, engine, geom_col="geometry")
    validate_boro_df(df)
def get_points_geom(geom):
    gdf = gpd.read_file(geom)
    geometry = gdf.geometry.values[0]
    Session = app.get_persistent_store_database('layers', as_sessionmaker=True)
    session = Session()
    int_geom = wkt.dumps(geometry)
    query = session.query(Points).filter(
        Points.geometry.intersects(int_geom)).statement
    points_gdf = gpd.read_postgis(sql=query, con=session.bind,
                                  geom_col='geometry')
    points_json = points_gdf.to_json()
    session.close()
    return points_json
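# --- Usage sketch (not from the source) for get_points_geom() above: any
# vector file geopandas can read supplies the mask geometry (the shapefile
# path is hypothetical). The return value is a GeoJSON string built from the
# Points rows intersecting the first feature of that file.
points_json = get_points_geom('/tmp/study_area.shp')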
def test_read_postgis_get_srid(self):
    """Tests that an SRID can be read from a geodatabase (GH #451)."""
    crs = {"init": "epsg:4269"}
    df_reproj = self.df.to_crs(crs)
    created = create_postgis(df_reproj, srid=4269)
    con = connect('test_geopandas')
    if con is None or not created:
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        con.close()
    validate_boro_df(df)
    assert df.crs == crs
def test_read_postgis_get_srid(self, df_nybb):
    """Tests that an SRID can be read from a geodatabase (GH #451)."""
    crs = "epsg:4269"
    df_reproj = df_nybb.to_crs(crs)
    created = create_postgis(df_reproj, srid=4269)
    con = connect("test_geopandas")
    if con is None or not created:
        raise pytest.skip()
    try:
        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)
    finally:
        con.close()
    validate_boro_df(df)
    assert df.crs == crs
def test_read_postgis_null_geom(self, df_nybb):
    """Tests that geometry with NULL is accepted."""
    try:
        con = connect_spatialite()
    except Exception:
        raise pytest.skip()
    else:
        geom_col = df_nybb.geometry.name
        df_nybb.geometry.iat[0] = None
        create_spatialite(con, df_nybb)
        sql = ('SELECT ogc_fid, borocode, boroname, shape_leng, shape_area, '
               'AsEWKB("{0}") AS "{0}" FROM nybb').format(geom_col)
        df = read_postgis(sql, con, geom_col=geom_col)
        validate_boro_df(df)
    finally:
        if 'con' in locals():
            con.close()
def main():
    """Go Main Go."""
    pgconn = get_dbconn("idep")
    df = read_postgis(
        """
        select f.huc_12, count(*) as fps,
        st_transform(h.simple_geom, 4326) as geo
        from flowpaths f JOIN huc12 h on (f.huc_12 = h.huc_12)
        WHERE f.scenario = 0 and h.scenario = 0
        GROUP by f.huc_12, geo ORDER by fps ASC
        """,
        pgconn,
        index_col=None,
        geom_col="geo",
    )
    bins = np.arange(1, 42, 2)
    cmap = plt.get_cmap("copper")
    cmap.set_over("white")
    cmap.set_under("thistle")
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    mp = MapPlot(
        continentalcolor="thistle",
        nologo=True,
        sector="custom",
        south=36.8,
        north=45.0,
        west=-99.2,
        east=-88.9,
        subtitle="",
        title=(
            "DEP HUCs with <40 Flowpaths (%.0f/%.0f %.2f%%)"
            % (
                len(df[df["fps"] < 40].index),
                len(df.index),
                len(df[df["fps"] < 40].index) / len(df.index) * 100.0,
            )
        ),
    )
    for _i, row in df.iterrows():
        c = cmap(norm([row["fps"]]))[0]
        arr = np.asarray(row["geo"].exterior)
        points = mp.ax.projection.transform_points(
            ccrs.Geodetic(), arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec="None", zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.draw_colorbar(bins, cmap, norm, title="Count")
    mp.postprocess(filename="/tmp/huc12_cnts.png")
def importSHP_from_DB(self, tableName):
    ## DB DATA IMPORT
    sql = f"select * from {tableName}"
    # engine = create_engine(f"postgresql://{user}:{password}@localhost:{port}/{database}")
    SHP = gpd.read_postgis(sql, self.connector, geom_col="geom", crs=None,
                           index_col=None, coerce_float=True,
                           parse_dates=None, params=None, chunksize=None)
    SHP.to_file(f'/home/mateus/Desktop/Shapefile tratado/{tableName}',
                driver='ESRI Shapefile', schema=None, index=None)
    return
def find_5near_stations(lon, lat):
    """ Find 5 closest Indego Bike Stations. """
    engine = get_sql_engine()
    bikestation5 = text(
        """
        SELECT name, "addressStreet" as address,
        "bikesAvailable" as available_bikes,
        geom, ST_X(geom) as lon, ST_Y(geom) as lat,
        ST_Distance(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326)::geography,
                    geom::geography) AS distance
        FROM indego_rt1130
        ORDER BY 7 ASC
        LIMIT 5
        """
    )
    near_bike = gpd.read_postgis(bikestation5, con=engine,
                                 params={"lon": lon, "lat": lat})
    return near_bike
def find_5near_takeouts(lon, lat):
    """ Find 5 closest Chinese Takeouts. """
    engine = get_sql_engine()
    ctakeouts5 = text(
        """
        SELECT "NAME" as name, "ADDRESS" as address,
        geom, ST_X(geom) as lon, ST_Y(geom) as lat,
        ST_Distance(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326)::geography,
                    geom::geography) AS distance
        FROM chinese_takeout
        ORDER BY 6 ASC
        LIMIT 5
        """
    )
    near_takeouts = gpd.read_postgis(ctakeouts5, con=engine,
                                     params={"lon": lon, "lat": lat})
    return near_takeouts
def get_layer_csv(layer_name, layer_type):
    Session = app.get_persistent_store_database('layers', as_sessionmaker=True)
    session = Session()
    if layer_type == 'points':
        query = session.query(Points).filter(
            Points.layer_name == layer_name).statement
    else:
        query = session.query(Polygons).filter(
            Polygons.layer_name == layer_name).statement
    layer_gdf = gpd.read_postgis(sql=query, con=session.bind,
                                 geom_col='geometry')
    layer_df = json_normalize(layer_gdf['attr_dict'])
    final_df = pd.concat([layer_gdf, layer_df],
                         axis=1).drop(['attr_dict', 'id'], axis=1)
    session.close()
    return final_df
def get_neighborhood_buildings(nname):
    """Get all buildings for a neighborhood"""
    engine = get_sql_engine()
    vacant_buildings = text("""
        SELECT "ADDRESS" as address,
        "BLDG_DESC" as building_description,
        "OPA_ID" as opa_id,
        v.geom as geom
        FROM vacant_buildings as v
        JOIN philadelphia_neighborhoods as n
        ON ST_Intersects(v.geom, n.geom)
        WHERE n.neighborhood_name = :nname
    """)
    buildings = gpd.read_postgis(vacant_buildings, con=engine,
                                 params={"nname": nname})
    return buildings
def get_values(self, wkt_filter=None):
    where_clause = "WHERE location IS NOT NULL"
    if wkt_filter is not None:
        where_clause += \
            " AND ST_Intersects(location, " \
            "ST_GeomFromEWKT('SRID=4326;{}'))".format(wkt_filter)
    sql = "SELECT * FROM {}.{} {};".format(
        settings.NIAMOTO_DIMENSIONS_SCHEMA, self.name, where_clause)
    with Connector.get_connection() as connection:
        df = gpd.read_postgis(
            sql, connection,
            index_col='id',
            geom_col='location',
        )
    return df
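# --- Usage sketch (not from the source) for get_values() above: the optional
# wkt_filter is spliced into ST_Intersects() as an EWKT literal, so passing a
# WGS84 polygon in WKT form limits the rows returned. SomeDimension and the
# bounding polygon below are hypothetical.
dim = SomeDimension()
df_all = dim.get_values()
df_clip = dim.get_values(
    wkt_filter='POLYGON((166 -22, 167 -22, 167 -21, 166 -21, 166 -22))')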
def get_geojson(
    organization_id: int,
    db: Session = Depends(get_db),
) -> Dict:
    """ generate geojson from organization """
    organization_in_db = organization.get(db, id=organization_id)
    if not organization_in_db:
        raise HTTPException(status_code=404, detail="Organization not found")
    sql = f"SELECT * FROM public.tree WHERE organization_id = {organization_in_db.id}"
    df = gpd.read_postgis(sql, db.bind)
    data = df.to_json()
    response = json.loads(data)
    return response
def test_read_postgis_custom_geom_col(self):
    con = connect('test_geopandas')
    if con is None or not create_db(self.df):
        raise unittest.case.SkipTest()
    try:
        sql = """SELECT borocode, boroname, shape_leng, shape_area,
                 geom AS __geometry__ FROM nybb;"""
        df = read_postgis(sql, con, geom_col='__geometry__')
    finally:
        if PANDAS_NEW_SQL_API:
            # It's not really a connection, it's an engine
            con = con.connect()
        con.close()
    validate_boro_df(self, df)
def handler(begints, endts, wfo, only_new, ph):
    """Handler"""
    pgconn = get_dbconn("postgis")
    begints = begints.replace(tzinfo=timezone.utc)
    endts = endts.replace(tzinfo=timezone.utc)
    params = {"begints": begints, "endts": endts}
    wfolimiter = ""
    statuslimiter = ""
    phlimiter = ""
    if ph is not None:
        params["ph"] = tuple(ph)
        phlimiter = "AND phenomena IN :ph "
    if wfo is not None:
        params["wfo"] = tuple(wfo)
        wfolimiter = " and wfo in :wfo "
    if only_new:
        statuslimiter = " and status = 'NEW' "
    df = read_postgis(
        text(
            f"""
            SELECT issue at time zone 'UTC' as utc_issue,
            expire at time zone 'UTC' as utc_expire,
            polygon_begin at time zone 'UTC' as utc_polygon_begin,
            polygon_end at time zone 'UTC' as utc_polygon_end,
            w.phenomena || '.' || w.significance as ph_sig,
            w.wfo, eventid, phenomena, significance,
            null as nws_color, null as event_label, status, geom
            from sbw w WHERE w.polygon_begin >= :begints
            and w.polygon_begin < :endts {wfolimiter} {statuslimiter}
            {phlimiter}
            ORDER by w.polygon_begin ASC
            """
        ),
        pgconn,
        geom_col="geom",
        params=params,
        index_col=None,
    )
    df["nws_color"] = df["ph_sig"].apply(NWS_COLORS.get)
    df["event_label"] = df["ph_sig"].apply(
        lambda x: get_ps_string(*x.split("."))
    )
    return df
def find_5near_hospitals(lon, lat):
    """ Find 5 closest hospitals. """
    engine = get_sql_engine()
    hospital5 = text(
        """
        SELECT "HOSPITAL_NAME" AS name, "STREET_ADDRESS" as address,
        "PHONE_NUMBER" as contact,
        geom, ST_X(geom) AS lon, ST_Y(geom) AS lat,
        ST_Distance(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326)::geography,
                    geom::geography) AS distance
        FROM philly_hospital
        ORDER BY 7 ASC
        LIMIT 5
        """
    )
    near_hospital = gpd.read_postgis(hospital5, con=engine,
                                     params={"lon": lon, "lat": lat})
    return near_hospital
def import_donnes_base(ref_connexion):
    """
    Open a connection to the reference server and fetch the data.
    Input:
        ref_connexion: connection reference string, per the Outils module
        (Id_connexions file) and the Connexion_transferts module.
    Output:
        df: dataframe as downloaded from the database.
    """
    with ct.ConnexionBdd('local_otv') as c:
        requete = """with jointure as (
            select t.*, v1.cnt nb_intrsct_src,
                   st_astext(v1.the_geom) as src_geom,
                   v2.cnt as nb_intrsct_tgt,
                   st_astext(v2.the_geom) as tgt_geom
            from public.traf2015_bdt17_ed15_l t
            left join public.traf2015_bdt17_ed15_l_vertices_pgr v1
                on t.source = v1.id
            left join public.traf2015_bdt17_ed15_l_vertices_pgr v2
                on t.target = v2.id)
        select j.* from jointure j, zone_test_agreg z
        where st_intersects(z.geom, j.geom)"""
        df = gp.read_postgis(requete, c.connexionPsy)
        return df
def get_target_fields():
    target_fields = gpd.read_postgis('select * from target_fields;', engine,
                                     geom_col='geometry')
    logger.debug("target_fields:\n%s", target_fields)
    # Drop fields whose caption marks them as fallow ('пар')
    target_fields = target_fields[target_fields.caption.map(
        lambda caption: caption is None or ('пар' not in caption))]
    # logger.debug("target_fields.head(20).to_string():\n%s",
    #              target_fields.head(20).to_string())
    target_fields.drop(columns=['class_number', 'flregion'], inplace=True)
    logger.debug("target_fields.head(20).to_string():\n%s",
                 target_fields.head(20).to_string())
    logger.debug("target_fields.shape: %s", target_fields.shape)
    # for column in target_fields.columns:
    #     unique_vals = target_fields[column].unique()
    #     logger.debug("%s: %s", column, len(unique_vals))
    #     if len(unique_vals) < 20:
    #         for unique_val in unique_vals:
    #             logger.debug("unique_val: %s", unique_val)
    return target_fields
def get_fields_data():
    fields_data = gpd.read_postgis('select * from geometry_yield_by_year;',
                                   engine, geom_col='geometry')
    logger.debug("fields_data:\n%s", fields_data.head(20).to_string())
    fields_data = fields_data[(fields_data.year >= 2016) &
                              (fields_data.year <= 2019)].copy()
    # Keep only spring wheat (soft and durum) crops
    fields_data = fields_data[
        (fields_data.cult == 'Пшеница мягкая яровая (Triticum aestivum L)') |
        (fields_data.cult == 'Пшеница твердая яровая (Triticum durum Desf)')
    ].copy()
    # Keep only the three northern grain-belt regions of Kazakhstan
    fields_data: pd.DataFrame = fields_data[
        (fields_data.region == 'Костанайская область') |
        (fields_data.region == 'Северо-Казахстанская область') |
        (fields_data.region == 'Акмолинская область')].copy()
    # describe(fields_data)
    fields_data.dropna(subset=['yield'], inplace=True)
    logger.debug("fields_data:\n%s", fields_data)
    return fields_data
def find_5near_markets(lon, lat):
    """ Find 5 closest farmers markets. """
    engine = get_sql_engine()
    fmarkets5 = text(
        """
        SELECT "NAME" as name, "ADDRESS" as address, "TIME" as time,
        geom, ST_X(geom) as lon, ST_Y(geom) as lat,
        ST_Distance(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326)::geography,
                    geom::geography) AS distance
        FROM farmers_markets
        ORDER BY 7 ASC
        LIMIT 5
        """
    )
    near_markets = gpd.read_postgis(fmarkets5, con=engine,
                                    params={"lon": lon, "lat": lat})
    return near_markets
def main(argv):
    """Go!"""
    sts = utc(int(argv[1]), int(argv[2]), int(argv[3]),
              int(argv[4]), int(argv[5]))
    ets = utc(int(argv[1]), int(argv[2]), int(argv[3]),
              int(argv[6]), int(argv[7]))
    pgconn = get_dbconn('nwa')
    df = read_postgis("""
        SELECT to_char(issue at time zone 'UTC',
                       'YYYY-MM-DDThh24:MI:SSZ') as utc_issue,
        to_char(expire at time zone 'UTC',
                'YYYY-MM-DDThh24:MI:SSZ') as utc_expire,
        geom, case when emergency then 'T' else 'F' end as emergency,
        team from nwa_warnings WHERE issue >= %s and issue < %s
    """, pgconn, params=(sts, ets), geom_col='geom')
    df.to_file('workshop%s.shp' % (sts.year, ))
    print("Wrote %s records to shapefile" % (len(df.index), ))
def handler(pe, duration, days):
    """Handle the request, return dict"""
    pgconn = get_dbconn("iem")
    sql = f"""
    WITH data as (
        SELECT c.station, c.valid, c.value,
        ST_x(geom) as lon, ST_Y(geom) as lat, geom,
        row_number() OVER (PARTITION by c.station)
        from current_shef c JOIN stations s on (c.station = s.id)
        WHERE physical_code = '{pe}' and duration = '{duration}'
        and valid >= now() - '{days} days'::interval and value > -9999)
    SELECT station,
    to_char(valid at time zone 'UTC', 'YYYY-MM-DDThh24:MI:SSZ') as utc_valid,
    value, lon, lat, geom from data where row_number = 1
    """
    df = read_postgis(sql, pgconn, geom_col="geom")
    return df
def get_water_bodies(db, case_study_name, values=('water', 'wetland')):
    """Get the geometries with `water` or `wetland` values for the key
    `natural` in the OSM database for a given case study.
    """
    # Build the IN (...) list from the requested values rather than
    # hardcoding it, so the `values` argument is actually honored
    in_list = ', '.join("'%s'" % v for v in values)
    query = f"""
    SELECT osm_polygon.way AS geom, osm_polygon.natural AS value
    FROM osm_polygon, datafusion
    WHERE osm_polygon.natural IN ({in_list})
    AND datafusion.name = '{case_study_name}'
    AND ST_Intersects(osm_polygon.way, datafusion.geom)
    """
    water_bodies = gpd.read_postgis(query, db)
    water_bodies.crs = {'init': 'epsg:4326'}
    return water_bodies
def main():
    query = ("select id as geo_id, type as geo_type, geom_valid "
             "from movoto.geographic_boundary "
             "where type='CITY' and geom_valid is not null")
    geo_df = gpd.read_postgis(query, con=engine, geom_col="geom_valid",
                              crs='epsg:4326')
    # geo_df = pd.read_parquet("./geo.parquet")
    print("Geography Boundary data Loaded", "Moving For Transformation")
    # geo_df.rename(columns={"geom": "geom_valid", "id": "geo_id",
    #                        "type": "geo_type"}, inplace=True)
    print("Geography Boundary Transformed")
    print(geo_df.dtypes)
    # process(geo_df.iloc[0:1000, :])
    parallelProcessing(geo_df, num_processes)
    end = time.time()
    total_time = str((end - start) / 60)
    print("#" * 120)  # separator line
    print("Total Time Taken: ", total_time + " minutes")
def test_write_postgis_default(self, df_nybb):
    """Tests that GeoDataFrame can be written to PostGIS with defaults."""
    engine = connect_engine("test_geopandas")
    if engine is None:
        raise pytest.skip()
    table = "nybb"
    # If table exists, delete it before trying to write with defaults
    drop_table_if_exists(engine, table)
    try:
        # Write to db
        write_postgis(df_nybb, con=engine, name=table, if_exists="fail")
        # Validate
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)
    finally:
        engine.dispose()
def get_zipcode_stations(add):
    """Get all stations for a zipcode"""
    name = get_zipcode_names(add)
    engine = get_sql_engine()
    neighborhood_stations = text(
        """
        SELECT "name" as name,
        "addressStreet" as address,
        "bikesAvailable" as available_bikes,
        v.geom as geom,
        ST_X(v.geom) as lon, ST_Y(v.geom) as lat
        FROM indego_rt1130 as v
        JOIN philly_zipcode as n
        ON ST_Intersects(v.geom, n.geom)
        WHERE n.code = :name
        """
    )
    stations = gpd.read_postgis(neighborhood_stations, con=engine,
                                params={"name": name})
    return stations
def test_write_postgis_replace_when_table_exists(self, df_nybb):
    """
    Tests that replacing a table is possible when: if_replace='replace'.
    """
    engine = connect_engine("test_geopandas")
    if engine is None:
        raise pytest.skip()
    table = "nybb"
    try:
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
        # Validate
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)
    except ValueError as e:
        raise e
    finally:
        engine.dispose()
def read_spatialite(sql, conn, geom_col='geometry', crs=None,
                    index_col=None, coerce_float=True, params=None,
                    db_path=None):
    """
    Wrap :py:func:`geopandas.read_postgis()` and allow reading from
    spatialite.

    Returns
    -------
    gdf: GeoDataframe

    Example
    -------
    >>> # With an already instantiated connection object (conn):
    >>> gdf = read_spatialite("SELECT PK_UID, pop_t, gdp FROM countries",
                              conn, geom_col="GEOM")
    >>> # Without being already connected to the database:
    >>> gdf = read_spatialite("SELECT PK_UID, pop_t, gdp FROM countries",
                              None, geom_col="GEOM",
                              db_path='/home/mthh/tmp/db.sqlite')
    """
    from geopandas import read_postgis
    if '*' in sql:
        raise ValueError('Column names have to be specified')
    if not conn and db_path:
        conn = db_connect(db_path)
    elif not conn:
        raise ValueError(
            'A connection object or a path to the DB have to be provided')
    if sql.lower().find('select') == 0 and sql.find(' ') == 6:
        sql = sql[:7] \
            + "HEX(ST_AsBinary({0})) as {0}, ".format(geom_col) + sql[7:]
    else:
        raise ValueError('Unable to understand the query')
    return read_postgis(
        sql, conn, geom_col=geom_col, crs=crs, index_col=index_col,
        coerce_float=coerce_float, params=params)
def plotIndicadorSemiarido(indicator_id, title, pessimist):
    f, ax = plt.subplots(1, figsize=(20, 20), frameon=False)
    ax.set_axis_off()
    plt.axis('off')
    f.set_size_inches(160, 160)
    values = gpd.read_postgis(sqlvalues.format(indicator_id), conn,
                              geom_col='geom', coerce_float=False)
    if len(values) == 0:
        return
    gpd.plotting.plot_polygon_collection(ax=ax, geoms=values['geom'],
                                         color=values['color'],
                                         linewidth=8.0, edgecolor='#d7d7d7')
    minx, miny, maxx, maxy = values.total_bounds
    r = 1.5
    # ax.set_title(title, fontsize=20)
    # f.suptitle(title, fontsize=20)
    dx = maxx - minx
    dy = maxy - miny
    ax.set_xlim(minx - dx / r, maxx + dx / r)
    ax.set_ylim(miny - dy / r, maxy + dy / r)
    # legenda = ax.get_legend()
    # ax.legend(title='Legenda')
    fname = r'd:\temp\sismoi\{2}-{0}_{1}.png'.format(
        title.replace(' ', '_').replace('/', '_'),
        '1-verde' if pessimist == 0.0 else '1-vermelho', indicator_id)
    f.savefig(fname, facecolor='#8fc8ff', pad_inches=0,
              bbox_inches='tight', dpi=dpi)
    plt.close(f)
    print('{0} saved.'.format(fname))
    crop(fname, (10, 225, 1500, 1175), fname)
def geo_select(self, sql, geom_col='geom'):
    """
    Like the select function, but also extracts the geometry thanks to
    GeoPandas.
    NOTE: Output geometry column is always renamed geom.
    :return: Pandas DataFrame
    """
    con = pg.connect(self.conn_string)
    geo_dataframe = gpd.read_postgis(sql, con, geom_col=geom_col)
    # Rename geometry column
    if geom_col != "geom":
        geo_dataframe.rename(columns={geom_col: "geom"}, inplace=True)
        # Must declare the new geometry field.
        geo_dataframe.set_geometry('geom', inplace=True)
    # Find the srid of the geometry field and define it in the GeoDataFrame
    sql2 = ("select ST_SRID(%(geom_column)s) from "
            "(%(original_query)s) as a limit 1;")
    data = {"geom_column": AsIs(geom_col),
            "original_query": AsIs(sql.replace(";", ""))}
    srid = psql.read_sql(sql2, con, params=data).values[0][0]
    geo_dataframe.crs = {'init': 'epsg:%s' % srid, 'no_defs': True}
    con.close()
    return geo_dataframe
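# --- Usage sketch (not from the source) for geo_select() above: whatever the
# source geometry column is called, the result exposes it as 'geom', with the
# CRS probed from the data via ST_SRID(). The PostgisClient class name and
# the parcels table are hypothetical.
db = PostgisClient(conn_string='dbname=gis user=reader')
parcels = db.geo_select('SELECT id, the_geom FROM parcels;',
                        geom_col='the_geom')
assert parcels.geometry.name == 'geom'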
def main():
    """Go Main Go."""
    pgconn = get_dbconn('idep')
    df = read_postgis("""
        select f.huc_12, count(*) as fps,
        st_transform(h.simple_geom, 4326) as geo
        from flowpaths f JOIN huc12 h on (f.huc_12 = h.huc_12)
        WHERE f.scenario = 0 and h.scenario = 0
        GROUP by f.huc_12, geo ORDER by fps ASC
    """, pgconn, index_col=None, geom_col='geo')
    bins = np.arange(1, 42, 2)
    cmap = plt.get_cmap('copper')
    cmap.set_over('white')
    cmap.set_under('thistle')
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    mp = MapPlot(
        continentalcolor='thistle', nologo=True,
        sector='custom',
        south=36.8, north=45.0, west=-99.2, east=-88.9,
        subtitle='',
        title=('DEP HUCs with <40 Flowpaths (%.0f/%.0f %.2f%%)' % (
            len(df[df['fps'] < 40].index), len(df.index),
            len(df[df['fps'] < 40].index) / len(df.index) * 100.)))
    for _i, row in df.iterrows():
        c = cmap(norm([row['fps'], ]))[0]
        arr = np.asarray(row['geo'].exterior)
        points = mp.ax.projection.transform_points(
            ccrs.Geodetic(), arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec='None', zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.draw_colorbar(bins, cmap, norm, title='Count')
    mp.postprocess(filename='/tmp/huc12_cnts.png')
import psycopg2
from geopandas import read_postgis
from pandas.io.sql import read_sql

years = 8.0
pgconn = psycopg2.connect(database='idep', host='iemdb', user='******')

# Get the initial geometries
df = read_postgis("""
    SELECT huc_12, geom from huc12 WHERE states ~* 'IA' and scenario = 0
""", pgconn, index_col='huc_12', crs='EPSG:5070')

titles2 = {0: 'base',
           7: '4yr',
           9: '3yr',
           10: '4yrnt',  # notill
           11: '3yrnt'}  # notill
for scenario in titles2.keys():
    df2 = read_sql("""
        SELECT r.huc_12,
        sum(avg_loss) * 4.463 / %s as detach,
        sum(avg_delivery) * 4.463 / %s as delivery,
        sum(avg_runoff) / 25.4 / %s as runoff
        from results_by_huc12 r, huc12 h
        WHERE r.huc_12 = h.huc_12 and h.states ~* 'IA'
        and r.scenario = %s and h.scenario = 0
        and r.valid < '2016-01-01' and r.valid > '2008-01-01'
        GROUP by r.huc_12
    """, pgconn, params=(years, years, years, scenario),
        index_col='huc_12')
    p = titles2[scenario]
                 nocaption=True,
                 title='DEP %s %s' % (V2NAME[v], title),
                 caption='Daily Erosion Project')
    # Check that we have data for this date!
    cursor.execute("""
        SELECT value from properties where key = 'last_date_0'
    """)
    lastts = datetime.datetime.strptime(cursor.fetchone()[0], '%Y-%m-%d')
    floor = datetime.date(2007, 1, 1)
    df = read_postgis("""
    WITH data as (
        SELECT huc_12, sum(""" + v + """) as d from results_by_huc12
        WHERE scenario = %s and valid >= %s and valid <= %s
        GROUP by huc_12)
    SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
    from huc12 i LEFT JOIN data d ON (i.huc_12 = d.huc_12)
    WHERE i.scenario = %s and i.states ~* 'IA'
    """, pgconn, params=(scenario, ts, ts2, scenario),
        geom_col='geo', index_col=None)
    df['data'] = df['data'] * V2MULTI[v]
    if df['data'].max() < 0.01:
        bins = [0.01, 0.02, 0.03, 0.04, 0.05]
    else:
        bins = V2RAMP[v]
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    patches = []
    # m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
    for i, row in df.iterrows():
def main(argv):
    """Go Main Go"""
    v = argv[1]
    agg = argv[2]
    ts = datetime.date(2008, 1, 1)
    ts2 = datetime.date(2017, 12, 31)
    scenario = 0
    # suggested for runoff and precip
    if V2UNITS[v] in ['mm', 'inches']:
        colors = ['#ffffa6', '#9cf26d', '#76cc94', '#6399ba', '#5558a1']
    # suggested for detachment
    elif v in ['avg_loss', 'avg_loss_metric']:
        colors = ['#cbe3bb', '#c4ff4d', '#ffff4d', '#ffc44d', '#ff4d4d',
                  '#c34dee']
    # suggested for delivery
    elif v in ['avg_delivery', 'avg_delivery_metric']:
        colors = ['#ffffd2', '#ffff4d', '#ffe0a5', '#eeb74d', '#ba7c57',
                  '#96504d']
    cmap = mpcolors.ListedColormap(colors, 'james')
    cmap.set_under('white')
    cmap.set_over('black')
    pgconn = get_dbconn('idep')
    title = "for %s" % (ts.strftime("%-d %B %Y"),)
    if ts != ts2:
        title = "between %s and %s" % (ts.strftime("%-d %b %Y"),
                                       ts2.strftime("%-d %b %Y"))
    mp = MapPlot(axisbg='#EEEEEE', nologo=True, sector='iowa',
                 nocaption=True,
                 title=("DEP %s %s %s") % (
                     V2NAME[v.replace("_metric", "")],
                     "Yearly Average" if agg == 'avg' else 'Total', title),
                 caption='Daily Erosion Project')
    df = read_postgis("""
    WITH data as (
        SELECT huc_12, extract(year from valid) as yr,
        sum(""" + v.replace("_metric", "") + """) as d from results_by_huc12
        WHERE scenario = %s and valid >= %s and valid <= %s
        GROUP by huc_12, yr),
    agg as (
        SELECT huc_12, """ + agg + """(d) as d from data GROUP by huc_12)
    SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
    from huc12 i LEFT JOIN agg d ON (i.huc_12 = d.huc_12)
    WHERE i.scenario = %s and i.states ~* 'IA'
    """, pgconn, params=(scenario, ts, ts2, scenario),
        geom_col='geo', index_col=None)
    df['data'] = df['data'] * V2MULTI[v]
    if df['data'].max() < 0.01:
        bins = [0.01, 0.02, 0.03, 0.04, 0.05]
    else:
        bins = np.array(V2RAMP[v]) * (10. if agg == 'sum' else 1.)
    norm = mpcolors.BoundaryNorm(bins, cmap.N)
    # m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
    for _, row in df.iterrows():
        c = cmap(norm([row['data'], ]))[0]
        arr = np.asarray(row['geo'].exterior)
        points = mp.ax.projection.transform_points(ccrs.Geodetic(),
                                                   arr[:, 0], arr[:, 1])
        p = Polygon(points[:, :2], fc=c, ec='k', zorder=2, lw=0.1)
        mp.ax.add_patch(p)
    mp.drawcounties()
    mp.drawcities()
    lbl = [round(_, 2) for _ in bins]
    u = "%s, Avg: %.2f" % (V2UNITS[v], df['data'].mean())
    mp.draw_colorbar(bins, cmap, norm, clevlabels=lbl, units=u,
                     title="%s :: %s" % (V2NAME[v], V2UNITS[v]))
    plt.savefig('%s_%s_%s%s.png' % (ts.year, ts2.year, v,
                                    "_sum" if agg == 'sum' else ''))