def del_topoerror_shps(db, shps, epsg, outfolder):
    """
    Remove topological errors from Feature Class data using PostGIS
    """

    import os
    from glass.pys         import obj_to_lst
    from glass.ng.prop.sql import cols_name
    from glass.ng.sql.q    import q_to_ntbl
    from glass.g.it.db     import shp_to_psql
    from glass.g.it.shp    import dbtbl_to_shp

    shps = obj_to_lst(shps)

    TABLES = shp_to_psql(db, shps, srsEpsgCode=epsg, api="shp2pgsql")

    NTABLE = [q_to_ntbl(
        db, "nt_{}".format(t),
        "SELECT {cols}, ST_MakeValid({tbl}.geom) AS geom FROM {tbl}".format(
            cols=", ".join(["{}.{}".format(TABLES[t], x) for x in cols_name(
                db, TABLES[t], sanitizeSpecialWords=None
            ) if x != 'geom']),
            tbl=TABLES[t]
        ), api='psql'
    ) for t in range(len(TABLES))]

    for t in range(len(NTABLE)):
        dbtbl_to_shp(
            db, NTABLE[t], "geom", os.path.join(outfolder, TABLES[t]),
            tableIsQuery=None, api='pgsql2shp')

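# Hedged usage sketch for del_topoerror_shps: the database name, EPSG code and
# file paths below are hypothetical and assume a reachable PostgreSQL/PostGIS
# instance already configured for the glass package.
def _example_del_topoerror_shps():
    del_topoerror_shps(
        'topo_db',                                   # target PostgreSQL database
        ['/tmp/parcels.shp', '/tmp/buildings.shp'],  # layers with invalid geometries
        3763,                                        # EPSG code of the input data
        '/tmp/fixed'                                 # folder for the corrected layers
    )
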
def st_dissolve(db, table, geomColumn, outTable, whrClause=None,
                diss_cols=None, outTblIsFile=None, api='sqlite'):
    """
    Dissolve a Polygon table

    API options:
    * sqlite
    * psql
    """

    from glass.pys import obj_to_lst

    diss_cols = obj_to_lst(diss_cols) if diss_cols else None
    geomcol = "geometry" if api == 'sqlite' else 'geom'

    sql = (
        "SELECT{selCols} ST_UnaryUnion(ST_Collect({geom})) AS {gout} "
        "FROM {tbl}{whr}{grpBy}"
    ).format(
        selCols="" if not diss_cols else " {},".format(", ".join(diss_cols)),
        geom=geomColumn, tbl=table,
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        grpBy="" if not diss_cols else " GROUP BY {}".format(
            ", ".join(diss_cols)),
        gout=geomcol
    )

    if outTblIsFile:
        if api == 'sqlite':
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, sql, outTable, api_gis='ogr')

        elif api == 'psql':
            from glass.g.it.shp import dbtbl_to_shp

            # Export the dissolve query itself, not the original table
            dbtbl_to_shp(
                db, sql, geomcol, outTable, api='pgsql2shp',
                tableIsQuery=True)

    else:
        from glass.ng.sql.q import q_to_ntbl

        q_to_ntbl(db, outTable, sql, api='ogr2ogr' if api == 'sqlite' else 'psql')

    return outTable

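# Hedged usage sketch for st_dissolve: the database, table and column names
# are hypothetical; it assumes a PostGIS table 'land_use' already exists in
# the database 'gis_db'.
def _example_st_dissolve():
    # Dissolve polygons by the 'lulc_class' attribute and store the result
    # as a new table in the same database
    st_dissolve(
        'gis_db', 'land_use', 'geom', 'land_use_diss',
        diss_cols='lulc_class', outTblIsFile=None, api='psql'
    )
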
def st_buffer(db, inTbl, bfDist, geomCol, outTbl, bufferField="geometry",
              whrClause=None, dissolve=None, cols_select=None,
              outTblIsFile=None):
    """
    Using Buffer on PostGIS Data
    """

    from glass.pys import obj_to_lst

    dissolve = obj_to_lst(dissolve) if dissolve != "ALL" else "ALL"

    SEL_COLS  = "" if not cols_select else ", ".join(obj_to_lst(cols_select))
    DISS_COLS = "" if not dissolve or dissolve == "ALL" else ", ".join(dissolve)
    GRP_BY = "" if not dissolve else "{}, {}".format(SEL_COLS, DISS_COLS) if \
        SEL_COLS != "" and DISS_COLS != "" else SEL_COLS \
        if SEL_COLS != "" else DISS_COLS if DISS_COLS != "" else ""

    Q = (
        "SELECT{sel}{spFunc}{geom}, {_dist}{endFunc} AS {bf} "
        "FROM {t}{whr}{grpBy}"
    ).format(
        sel=" " if not cols_select else " {}, ".format(SEL_COLS),
        spFunc="ST_Buffer(" if not dissolve else
            "ST_UnaryUnion(ST_Collect(ST_Buffer(",
        geom=geomCol, _dist=bfDist,
        endFunc=")" if not dissolve else ")))",
        t=inTbl,
        grpBy=" GROUP BY {}".format(GRP_BY) if GRP_BY != "" else "",
        whr="" if not whrClause else " WHERE {}".format(whrClause),
        bf=bufferField
    )

    if not outTblIsFile:
        from glass.ng.sql.q import q_to_ntbl

        outTbl = q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        from glass.g.it.shp import dbtbl_to_shp

        dbtbl_to_shp(
            db, Q, bufferField, outTbl, api='pgsql2shp',
            tableIsQuery=True)

    return outTbl

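# Hedged usage sketch for st_buffer: the database, table and distance values
# are hypothetical; it assumes a PostGIS table 'roads' with a 'geom' column.
def _example_st_buffer():
    # 50 metre buffer around every road, dissolved into a single geometry and
    # exported directly to an ESRI Shapefile
    st_buffer(
        'gis_db', 'roads', 50, 'geom', '/tmp/roads_buf50.shp',
        bufferField='geom', dissolve="ALL", outTblIsFile=True
    )
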
def remove_deadend(inShp, outShp, db=None):
    """
    Remove deadend
    """

    from glass.pys.oss      import fprop
    from glass.ng.sql.db    import create_db
    from glass.g.it.db      import shp_to_psql
    from glass.g.gp.cln.sql import rm_deadend
    from glass.g.it.shp     import dbtbl_to_shp

    # Create DB
    if not db:
        db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

    else:
        from glass.ng.prop.sql import db_exists

        isDb = db_exists(db)

        if not isDb:
            create_db(db, api='psql')

    # Send data to Database
    inTbl = shp_to_psql(db, inShp, api="shp2pgsql", encoding="LATIN1")

    # Produce result
    out_tbl = rm_deadend(db, inTbl, fprop(outShp, 'fn', forceLower=True))

    # Export result
    return dbtbl_to_shp(
        db, out_tbl, "geom", outShp, inDB='psql', tableIsQuery=None,
        api="pgsql2shp"
    )

def break_lines_on_points(lineShp, pntShp, outShp, lnhidonpnt,
                          api='shply', db=None):
    """
    Break lines on points location

    api's available:
    - shply (shapely);
    - psql (postgis);
    """

    if api == 'shply':
        # shply_break_lines_on_points is assumed to be available elsewhere
        # in this module (shapely based implementation)
        result = shply_break_lines_on_points(
            lineShp, pntShp, lnhidonpnt, outShp)

    elif api == 'psql':
        from glass.pys.oss      import fprop
        from glass.ng.sql.db    import create_db
        from glass.g.it.db      import shp_to_psql
        from glass.g.it.shp     import dbtbl_to_shp
        from glass.g.gp.brk.sql import split_lines_on_pnt

        # Create DB
        if not db:
            db = create_db(fprop(lineShp, 'fn', forceLower=True), api='psql')

        else:
            from glass.ng.prop.sql import db_exists

            isDb = db_exists(db)

            if not isDb:
                db = create_db(db, api='psql')

        # Send Data to DB
        lnhTbl = shp_to_psql(db, lineShp, api="shp2pgsql")
        pntTbl = shp_to_psql(db, pntShp, api="shp2pgsql")

        # Get result
        outTbl = split_lines_on_pnt(
            db, lnhTbl, pntTbl,
            fprop(outShp, 'fn', forceLower=True), lnhidonpnt, 'gid')

        # Export result
        result = dbtbl_to_shp(
            db, outTbl, "geom", outShp, inDB='psql', tableIsQuery=None,
            api="pgsql2shp")

    else:
        raise ValueError("API {} is not available".format(api))

    return result

def check_autofc_overlap(checkShp, epsg, dbname, outOverlaps):
    """
    Check if the features of one Feature Class overlaps each other
    """

    import os
    from glass.ng.sql.db import create_db
    from glass.ng.sql.q  import q_to_ntbl
    from glass.g.it.db   import shp_to_psql
    from glass.g.it.shp  import dbtbl_to_shp

    create_db(dbname, api='psql')

    # Send data to postgresql
    table = shp_to_psql(dbname, checkShp, srsEpsgCode=epsg, api="pandas")

    # Produce result
    q = (
        "SELECT foo.* FROM ("
            "SELECT * FROM {t}"
        ") AS foo, ("
            "SELECT cat AS relcat, geom AS tst_geom FROM {t}"
        ") AS foo2 "
        "WHERE ("
            "ST_Overlaps(geom, tst_geom) IS TRUE OR "
            "ST_Equals(geom, tst_geom) IS TRUE OR "
            "ST_Contains(geom, tst_geom) IS TRUE"
        ") AND cat <> relcat"
    ).format(t=table)

    resultTable = os.path.splitext(os.path.basename(outOverlaps))[0]
    q_to_ntbl(dbname, resultTable, q, api='psql')

    dbtbl_to_shp(
        dbname, resultTable, "geom", outOverlaps, api='psql', epsg=epsg)

    return outOverlaps

def line_intersect_to_pnt(inShp, outShp, db=None):
    """
    Get Points where two line features of the same feature class intersects.
    """

    from glass.pys.oss      import fprop
    from glass.g.it.shp     import dbtbl_to_shp
    from glass.ng.sql.db    import create_db
    from glass.g.it.db      import shp_to_psql
    from glass.g.gp.ovl.sql import line_intersection_pnt

    # Create DB if necessary
    if not db:
        db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

    else:
        from glass.ng.prop.sql import db_exists

        isDb = db_exists(db)

        if not isDb:
            create_db(db, api='psql')

    # Send data to DB
    inTbl = shp_to_psql(db, inShp, api="shp2pgsql")

    # Get result
    outTbl = line_intersection_pnt(
        db, inTbl, fprop(outShp, 'fn', forceLower=True))

    # Export data from DB
    outShp = dbtbl_to_shp(
        db, outTbl, "geom", outShp, inDB='psql', tableIsQuery=None,
        api="pgsql2shp")

    return outShp

def proj(inShp, outShp, outEPSG, inEPSG=None,
         gisApi='ogr', sql=None, db_name=None):
    """
    Project Geodata using GIS

    API's Available:
    * ogr;
    * ogr2ogr;
    * pandas;
    * ogr2ogr_SQLITE;
    * psql;
    """

    import os

    if gisApi == 'ogr':
        """
        Using ogr Python API
        """

        if not inEPSG:
            raise ValueError(
                'To use ogr API, you should specify the EPSG Code of the'
                ' input data using inEPSG parameter')

        from osgeo             import ogr
        from glass.g.lyr.fld   import copy_flds
        from glass.g.prop.feat import get_gtype
        from glass.g.prop      import drv_name
        from glass.g.prop.prj  import get_sref_from_epsg, get_trans_param
        from glass.pys.oss     import fprop

        def copyShp(out, outDefn, lyr_in, trans):
            for f in lyr_in:
                g = f.GetGeometryRef()
                g.Transform(trans)

                new = ogr.Feature(outDefn)
                new.SetGeometry(g)

                for i in range(0, outDefn.GetFieldCount()):
                    new.SetField(
                        outDefn.GetFieldDefn(i).GetNameRef(), f.GetField(i))

                out.CreateFeature(new)
                new.Destroy()
                f.Destroy()

        # ####### #
        # Project #
        # ####### #
        transP = get_trans_param(inEPSG, outEPSG)

        inData = ogr.GetDriverByName(drv_name(inShp)).Open(inShp, 0)

        inLyr = inData.GetLayer()
        out = ogr.GetDriverByName(drv_name(outShp)).CreateDataSource(outShp)

        outlyr = out.CreateLayer(
            fprop(outShp, 'fn'), get_sref_from_epsg(outEPSG),
            geom_type=get_gtype(inShp, name=None, py_cls=True, gisApi='ogr'))

        # Copy fields to the output
        copy_flds(inLyr, outlyr)

        # Copy/transform features from the input to the output
        outlyrDefn = outlyr.GetLayerDefn()
        copyShp(outlyr, outlyrDefn, inLyr, transP)

        inData.Destroy()
        out.Destroy()

    elif gisApi == 'ogr2ogr':
        """
        Transform SRS of any OGR Compliant Data.
        Save the transformed data in a new file.
        """

        if not inEPSG:
            from glass.g.prop.prj import get_shp_epsg
            inEPSG = get_shp_epsg(inShp)

        if not inEPSG:
            raise ValueError('To use ogr2ogr, you must specify inEPSG')

        from glass.pys    import execmd
        from glass.g.prop import drv_name

        cmd = (
            'ogr2ogr -f "{}" {} {}{} -s_srs EPSG:{} -t_srs EPSG:{}'
        ).format(
            drv_name(outShp), outShp, inShp,
            '' if not sql else ' -dialect sqlite -sql "{}"'.format(sql),
            str(inEPSG), str(outEPSG)
        )

        outcmd = execmd(cmd)

    elif gisApi == 'ogr2ogr_SQLITE':
        """
        Transform SRS of a SQLITE DB table.
        Save the transformed data in a new table.
        """

        from glass.pys import execmd

        if not inEPSG:
            raise ValueError((
                'With ogr2ogr_SQLITE, the definition of inEPSG is '
                'mandatory.'))

        # TODO: Verify if database is sqlite

        db, tbl = inShp['DB'], inShp['TABLE']
        sql = 'SELECT * FROM {}'.format(tbl) if not sql else sql

        outcmd = execmd(
            ('ogr2ogr -update -append -f "SQLite" {db} -nln "{nt}" '
             '-dialect sqlite -sql "{_sql}" -s_srs EPSG:{inepsg} '
             '-t_srs EPSG:{outepsg} {db}').format(
                 db=db, nt=outShp, _sql=sql, inepsg=str(inEPSG),
                 outepsg=str(outEPSG)))

    elif gisApi == 'pandas':
        # Test if input Shp is GeoDataframe
        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp

        df = shp_to_obj(inShp)

        # Project df
        newDf = df.to_crs('EPSG:{}'.format(str(outEPSG)))

        # Save as file
        return df_to_shp(newDf, outShp)

    elif gisApi == 'psql':
        from glass.ng.sql.db import create_db
        from glass.pys.oss   import fprop
        from glass.g.it.db   import shp_to_psql
        from glass.g.it.shp  import dbtbl_to_shp
        from glass.g.prj.sql import sql_proj

        # Create Database
        if not db_name:
            db_name = create_db(
                fprop(outShp, 'fn', forceLower=True), api='psql')

        else:
            from glass.ng.prop.sql import db_exists

            isDb = db_exists(db_name)

            if not isDb:
                create_db(db_name, api='psql')

        # Import Data
        inTbl = shp_to_psql(db_name, inShp, api='shp2pgsql', encoding="LATIN1")

        # Transform
        oTbl = sql_proj(
            db_name, inTbl, fprop(outShp, 'fn', forceLower=True),
            outEPSG, geomCol='geom', newGeom='geom')

        # Export
        outShp = dbtbl_to_shp(
            db_name, oTbl, 'geom', outShp, api='psql', epsg=outEPSG)

    else:
        raise ValueError('Sorry, API {} is not available'.format(gisApi))

    return outShp

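# Hedged usage sketch for proj: the file paths and EPSG codes are hypothetical.
def _example_proj():
    # Reproject a Shapefile from ETRS89/Portugal TM06 (EPSG:3763) to WGS84
    # (EPSG:4326) using the ogr2ogr command line
    proj(
        '/tmp/roads_3763.shp', '/tmp/roads_4326.shp', 4326,
        inEPSG=3763, gisApi='ogr2ogr'
    )
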
def grs_vec_roads(osmdb, lineTbl, polyTbl):
    """
    Select Roads for GRASS GIS
    """

    import datetime
    from glass.ng.prop.sql     import row_num
    from glass.g.it.shp        import dbtbl_to_shp
    from glass.g.gp.prox.bfing import _buffer
    from glass.g.gp.gen        import dissolve
    from glass.g.tbl.grs       import add_table

    # Roads to GRASS GIS
    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)

    if not NR:
        return None, {0: ('count_rows_roads', time_b - time_a)}

    roadsVect = dbtbl_to_shp(
        osmdb, lineTbl, "geometry", "all_roads",
        where="roads IS NOT NULL", inDB='sqlite', outShpIsGRASS=True)
    time_c = datetime.datetime.now().replace(microsecond=0)

    # Buildings to GRASS GIS
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)

    if NB:
        from glass.g.gp.prox import grs_near as near
        from glass.g.tbl.grs import update_table

        builds = dbtbl_to_shp(
            osmdb, polyTbl, "geometry", "all_builds",
            where="building IS NOT NULL", filterByReg=True,
            inDB='sqlite', outShpIsGRASS=True)
        time_e = datetime.datetime.now().replace(microsecond=0)

        near(roadsVect, builds, nearDistCol="todist", maxDist=12, as_cmd=True)
        time_f = datetime.datetime.now().replace(microsecond=0)

        update_table(
            roadsVect, "bf_roads", "round(todist,0)", "\"todist > 0\"",
            lyrN=1, ascmd=True)
        time_g = datetime.datetime.now().replace(microsecond=0)

    else:
        time_e = None
        time_f = None
        time_g = None

    # Run Buffer tool
    roadsBf = _buffer(
        roadsVect, "bf_roads", "bf_roads", api='grass', geom_type="line")
    time_h = datetime.datetime.now().replace(microsecond=0)

    # Dissolve Roads
    roadsDiss = dissolve(roadsBf, "diss_roads", "roads", api="grass")

    add_table(roadsDiss, None, lyrN=1, asCMD=True)
    time_i = datetime.datetime.now().replace(microsecond=0)

    return roadsDiss, {
        0: ('count_rows_roads', time_b - time_a),
        1: ('import_roads', time_c - time_b),
        2: ('count_rows_build', time_d - time_c),
        3: None if not time_e else ('import_builds', time_e - time_d),
        4: None if not time_f else ('near_analysis', time_f - time_e),
        5: None if not time_g else ('update_buffer_tbl', time_g - time_f),
        6: ('buffer_roads', time_h - time_g if time_g else time_h - time_d),
        7: ('diss_roads', time_i - time_h)
    }

def grs_rst_roads(osmdb, lineTbl, polyTbl, dataFolder, LULC_CLS):
    """
    Raster Roads for GRASS
    """

    import datetime
    from glass.g.it.shp            import dbtbl_to_shp
    from glass.g.dp.torst          import grsshp_to_grsrst as shp_to_rst
    from glass.g.gp.prox.bfing.sql import splite_buffer
    from glass.ng.prop.sql         import row_num

    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)

    if not NR:
        return None, {0: ('count_rows_roads', time_b - time_a)}

    roadFile = splite_buffer(
        osmdb, lineTbl, "bf_roads", "geometry", 'bfu_roads',
        #os.path.join(dataFolder, 'bf_roads.gml'),
        whrClause="roads IS NOT NULL",
        outTblIsFile=None, dissolve="ALL")
    time_c = datetime.datetime.now().replace(microsecond=0)

    #roadGrs = shp_to_grs(roadFile, "bf_roads", filterByReg=True, asCMD=True)
    roadGrs = dbtbl_to_shp(
        osmdb, roadFile, "geom", 'bf_roads', notTable=True,
        outShpIsGRASS=True, inDB='sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)

    roadRst = shp_to_rst(roadGrs, int(LULC_CLS), "rst_roads", cmd=True)
    time_e = datetime.datetime.now().replace(microsecond=0)

    # Builds to GRASS and to RASTER
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_f = datetime.datetime.now().replace(microsecond=0)

    if NB:
        from glass.g.rst.alg  import rstcalc
        from glass.g.rst.rcls import set_null, null_to_value

        buildsShp = dbtbl_to_shp(
            osmdb, polyTbl, "geom", "all_builds",
            where="building IS NOT NULL", notTable=True,
            outShpIsGRASS=True, inDB='sqlite')
        time_g = datetime.datetime.now().replace(microsecond=0)

        buildsRst = shp_to_rst(buildsShp, 1, "rst_builds", cmd=True)
        time_h = datetime.datetime.now().replace(microsecond=0)

        # Buildings to nodata | Nodata to 0
        null_to_value(buildsRst, 0, as_cmd=True)
        time_i = datetime.datetime.now().replace(microsecond=0)

        set_null(buildsRst, 1, ascmd=True)
        time_j = datetime.datetime.now().replace(microsecond=0)

        # Do the math: roads + builds | if builds and roads at the same cell
        # cell will be null in the road layer
        roadsRes = rstcalc(
            "{} + {}".format(roadRst, buildsRst), "cls_roads", api="grass")
        time_l = datetime.datetime.now().replace(microsecond=0)

        return {
            LULC_CLS: roadsRes
        }, {
            0: ('count_rows_roads', time_b - time_a),
            1: ('buffer_roads', time_c - time_b),
            2: ('import_roads', time_d - time_c),
            3: ('roads_to_rst', time_e - time_d),
            4: ('count_build', time_f - time_e),
            5: ('builds_to_grs', time_g - time_f),
            6: ('builds_to_rst', time_h - time_g),
            7: ('bnull_to_val', time_i - time_h),
            8: ('builds_to_nd', time_j - time_i),
            9: ('roads_build_mc', time_l - time_j)
        }

    else:
        return {
            LULC_CLS: roadRst
        }, {
            0: ('count_rows_roads', time_b - time_a),
            1: ('buffer_roads', time_c - time_b),
            2: ('import_roads', time_d - time_c),
            3: ('roads_to_rst', time_e - time_d),
            4: ('count_build', time_f - time_e)
        }

def v_break_at_points(workspace, loc, lineShp, pntShp, db, srs,
                      out_correct, out_tocorrect):
    """
    Break lines at points - Based on GRASS GIS v.edit

    Use PostGIS to sanitize the result

    TODO: Confirm utility
    Problem: GRASS GIS always uses the first line to break.
    """

    import os
    from glass.g.it.db    import shp_to_psql
    from glass.g.it.shp   import dbtbl_to_shp
    from glass.g.wenv.grs import run_grass
    from glass.pys.oss    import fprop
    from glass.ng.sql.db  import create_db
    from glass.ng.sql.q   import q_to_ntbl

    tmpFiles = os.path.join(workspace, loc)

    gbase = run_grass(workspace, location=loc, srs=srs)

    import grass.script       as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, workspace, loc, 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp

    grsLine = shp_to_grs(lineShp, fprop(lineShp, 'fn', forceLower=True))

    # vedit_break is assumed to be defined/imported elsewhere in this module
    vedit_break(grsLine, pntShp, geomType='line')

    LINES = grs_to_shp(
        grsLine, os.path.join(tmpFiles, grsLine + '_v1.shp'), 'line')

    # Sanitize output of v.edit.break using PostGIS
    create_db(db, overwrite=True, api='psql')
    LINES_TABLE = shp_to_psql(
        db, LINES, srsEpsgCode=srs,
        pgTable=fprop(LINES, 'fn', forceLower=True), api="shp2pgsql")

    # Delete old/original lines and keep only the broken ones
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
            "SELECT cat, COUNT(cat) AS cat_count, "
            "MAX(ST_Length(geom)) AS max_len "
            "FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count = 1 OR foo.cat_count = 2 OR ("
            "foo.cat_count = 3 AND ST_Length({t}.geom) <= foo.max_len)"
    ).format(t=LINES_TABLE)

    CORR_LINES = q_to_ntbl(
        db, "{}_corrected".format(LINES_TABLE), Q, api='psql')

    # TODO: Delete Rows that have exactly the same geometry

    # Highlight problems that the user must solve case by case
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
            "SELECT cat, COUNT(cat) AS cat_count FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count > 3"
    ).format(t=LINES_TABLE)

    ERROR_LINES = q_to_ntbl(
        db, "{}_not_corr".format(LINES_TABLE), Q, api='psql')

    dbtbl_to_shp(db, CORR_LINES, "geom", out_correct, api="pgsql2shp")
    dbtbl_to_shp(db, ERROR_LINES, "geom", out_tocorrect, api="pgsql2shp")

def feat_within(db, inTbl, inGeom, withinTbl, withinGeom, outTbl,
                inTblCols=None, withinCols=None, outTblIsFile=None,
                apiToUse='OGR_SPATIALITE', geom_col=None):
    """
    Get Features within other Geometries in withinTbl
    e.g. Intersect points with Polygons

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.
    """

    from glass.pys import obj_to_lst

    if not inTblCols and not withinCols:
        colSelect = "intbl.*, witbl.*"

    else:
        if inTblCols and not withinCols:
            colSelect = ", ".join([
                "intbl.{}".format(c) for c in obj_to_lst(inTblCols)
            ])

        elif not inTblCols and withinCols:
            colSelect = ", ".join([
                "witbl.{}".format(c) for c in obj_to_lst(withinCols)
            ])

        else:
            colSelect = "{}, {}".format(
                ", ".join(["intbl.{}".format(c) for c in obj_to_lst(inTblCols)]),
                ", ".join(["witbl.{}".format(c) for c in obj_to_lst(withinCols)])
            )

    Q = (
        "SELECT {selcols} FROM {in_tbl} AS intbl "
        "INNER JOIN {within_tbl} AS witbl ON "
        "ST_Within(intbl.{in_geom}, witbl.{wi_geom})"
    ).format(
        selcols=colSelect, in_tbl=inTbl, within_tbl=withinTbl,
        in_geom=inGeom, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, Q, outTbl, api_gis='ogr')

        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='ogr2ogr')

    elif apiToUse == 'POSTGIS':
        if outTblIsFile:
            if not geom_col:
                raise ValueError((
                    "To export a PostGIS table to file, geom_col "
                    "must be specified!"
                ))

            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                db, Q, geom_col, outTbl, api="pgsql2shp", tableIsQuery=True)

        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ).format(apiToUse))

    return outTbl

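# Hedged usage sketch for feat_within: database, tables and columns are
# hypothetical; it assumes PostGIS tables 'accidents' (points) and
# 'districts' (polygons) are already loaded in 'gis_db'.
def _example_feat_within():
    # Keep only the accident points that fall inside a district and attach
    # the district name to each of them
    feat_within(
        'gis_db', 'accidents', 'geom', 'districts', 'geom',
        'accidents_by_district',
        inTblCols=['fid', 'severity', 'geom'], withinCols=['dist_name'],
        apiToUse='POSTGIS'
    )
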
def dsn_data_collection_by_multibuffer(inBuffers, workspace, db, datasource,
                                       keywords=None):
    """
    Extract Digital Social Network Data for each sub-buffer in buffer.
    A sub-buffer is a buffer with a radius equals to the main buffer radius /2
    and with a central point at North, South, East, West, Northeast, Northwest,
    Southwest and Southeast of the main buffer central point.

    inBuffers = {
        "lisbon" : {
            'x'      : -89004.994779,  # in meters
            'y'      : -102815.866054, # in meters
            'radius' : 10000,
            'epsg'   : 3763
        },
        "london" : {
            'x'      : -14210.551441,  # in meters
            'y'      : 6711542.47559,  # in meters
            'radius' : 10000,
            'epsg'   : 3857
        }
    }
    or
    inBuffers = {
        "lisbon" : {
            "path" : /path/to/file.shp,
            "epsg" : 3763
        }
    }

    keywords = ['flood', 'accident', 'fire apartment', 'graffiti', 'homeless']

    datasource = 'facebook' or datasource = 'flickr'
    TODO: Only works for Flickr and Facebook
    """

    import os
    from osgeo import ogr
    from glass.pys             import obj_to_lst
    from glass.ng.sql.db       import create_db
    from glass.ng.sql.q        import q_to_ntbl
    from glass.g.wt.sql        import df_to_db
    from glass.g.it.db         import shp_to_psql
    from glass.g.it.shp        import dbtbl_to_shp
    from glass.g.gp.prox.bfing import get_sub_buffers, dic_buffer_array_to_shp

    if datasource == 'flickr':
        from glass.g.acq.dsn.flickr import photos_location

    elif datasource == 'facebook':
        from glass.g.acq.dsn.fb.places import places_by_query

    keywords = obj_to_lst(keywords)
    keywords = ["None"] if not keywords else keywords

    # Create Database to Store Data
    create_db(db, overwrite=True, api='psql')

    for city in inBuffers:
        # Get Smaller Buffers
        if "path" in inBuffers[city]:
            # Get X, Y and Radius
            from glass.g.prop.feat.bf import bf_prop

            __bfprop = bf_prop(
                inBuffers[city]["path"], inBuffers[city]["epsg"], isFile=True)

            inBuffers[city]["x"]      = __bfprop["X"]
            inBuffers[city]["y"]      = __bfprop["Y"]
            inBuffers[city]["radius"] = __bfprop["R"]

        inBuffers[city]["list_buffer"] = [{
            'X': inBuffers[city]["x"], 'Y': inBuffers[city]["y"],
            'RADIUS': inBuffers[city]['radius'], 'cardeal': 'major'
        }] + get_sub_buffers(
            inBuffers[city]["x"], inBuffers[city]["y"],
            inBuffers[city]["radius"])

        # Smaller Buffers to File
        multiBuffer = os.path.join(workspace, 'buffers_{}.shp'.format(city))
        dic_buffer_array_to_shp(
            inBuffers[city]["list_buffer"], multiBuffer,
            inBuffers[city]['epsg'], fields={'cardeal': ogr.OFTString})

        # Retrieve data for each keyword and buffer
        # Record these elements in one dataframe
        c = None
        tblData = None
        for bf in inBuffers[city]["list_buffer"]:
            for k in keywords:
                if datasource == 'flickr':
                    tmpData = photos_location(
                        bf, inBuffers[city]["epsg"],
                        keyword=k if k != 'None' else None,
                        epsg_out=inBuffers[city]["epsg"],
                        onlySearchAreaContained=False
                    )

                elif datasource == 'facebook':
                    tmpData = places_by_query(
                        bf, inBuffers[city]["epsg"],
                        keyword=k if k != 'None' else None,
                        epsgOut=inBuffers[city]["epsg"],
                        onlySearchAreaContained=False
                    )

                if type(tmpData) == int:
                    print("No data found for buffer '{}' and keyword '{}'".format(
                        bf['cardeal'], k))
                    continue

                tmpData["keyword"]   = k
                tmpData["buffer_or"] = bf["cardeal"]

                if not c:
                    tblData = tmpData
                    c = 1
                else:
                    tblData = tblData.append(tmpData, ignore_index=True)

        inBuffers[city]["data"] = tblData

        # Get data columns names
        cols = inBuffers[city]["data"].columns.values
        dataColumns = [
            c for c in cols if c != 'geom' and c != 'keyword' \
                and c != 'buffer_or' and c != 'geometry'
        ]

        # Send data to PostgreSQL
        if 'geometry' in cols:
            cgeom = 'geometry'
        else:
            cgeom = 'geom'

        inBuffers[city]["table"] = 'tbldata_{}'.format(city)

        df_to_db(
            db, inBuffers[city]["data"], inBuffers[city]["table"],
            api='psql', epsg=inBuffers[city]["epsg"],
            geomType='POINT', colGeom=cgeom)

        # Send Buffers data to PostgreSQL
        inBuffers[city]["pg_buffer"] = shp_to_psql(
            db, multiBuffer, pgTable='buffers_{}'.format(city),
            api="shp2pgsql", srsEpsgCode=inBuffers[city]["epsg"])

        inBuffers[city]["filter_table"] = q_to_ntbl(
            db, "filter_{}".format(inBuffers[city]["table"]), (
                "SELECT srcdata.*, "
                "array_agg(buffersg.cardeal ORDER BY buffersg.cardeal) "
                "AS intersect_buffer FROM ("
                    "SELECT {cols}, keyword, geom, "
                    "array_agg(buffer_or ORDER BY buffer_or) AS extracted_buffer "
                    "FROM {pgtable} "
                    "GROUP BY {cols}, keyword, geom"
                ") AS srcdata, ("
                    "SELECT cardeal, geom AS bfg FROM {bftable}"
                ") AS buffersg "
                "WHERE ST_Intersects(srcdata.geom, buffersg.bfg) IS TRUE "
                "GROUP BY {cols}, keyword, geom, extracted_buffer"
            ).format(
                cols=", ".join(dataColumns),
                pgtable=inBuffers[city]["table"],
                bftable=inBuffers[city]["pg_buffer"]
            ), api='psql')

        inBuffers[city]["outside_table"] = q_to_ntbl(
            db, "outside_{}".format(inBuffers[city]["table"]), (
                "SELECT * FROM ("
                    "SELECT srcdata.*, "
                    "array_agg(buffersg.cardeal ORDER BY buffersg.cardeal) "
                    "AS not_intersect_buffer FROM ("
                        "SELECT {cols}, keyword, geom, "
                        "array_agg(buffer_or ORDER BY buffer_or) AS extracted_buffer "
                        "FROM {pgtable} "
                        "GROUP BY {cols}, keyword, geom"
                    ") AS srcdata, ("
                        "SELECT cardeal, geom AS bfg FROM {bftable}"
                    ") AS buffersg "
                    "WHERE ST_Intersects(srcdata.geom, buffersg.bfg) IS NOT TRUE "
                    "GROUP BY {cols}, keyword, geom, extracted_buffer"
                ") AS foo WHERE array_length(not_intersect_buffer, 1) = 9"
            ).format(
                cols=", ".join(dataColumns),
                pgtable=inBuffers[city]["table"],
                bftable=inBuffers[city]["pg_buffer"]
            ), api='psql')

        # Union these two tables
        inBuffers[city]["table"] = q_to_ntbl(
            db, "data_{}".format(city), (
                "SELECT * FROM {intbl} UNION ALL "
                "SELECT {cols}, keyword, geom, extracted_buffer, "
                "CASE WHEN array_length(not_intersect_buffer, 1) = 9 "
                "THEN '{array_symbol}' ELSE not_intersect_buffer END AS "
                "intersect_buffer FROM {outbl}"
            ).format(
                intbl=inBuffers[city]["filter_table"],
                outbl=inBuffers[city]["outside_table"],
                cols=", ".join(dataColumns),
                array_symbol='{' + '}'
            ), api='psql')

        """
        Get Buffers table with related info:
        -> pnt_obtidos = number of points retrieved with this buffer
        -> pnt_obtidos_fora = number of points retrieved with this buffer but
           located outside of it
        -> pnt_intersect = number of points intersecting this buffer
        -> pnt_intersect_non_obtain = number of points intersecting this buffer
           but not retrieved with it
        """
        inBuffers[city]["pg_buffer"] = q_to_ntbl(
            db, "dt_{}".format(inBuffers[city]["pg_buffer"]), (
                "SELECT main.*, get_obtidos.pnt_obtidos, "
                "obtidos_fora.pnt_obtidos_fora, intersecting.pnt_intersect, "
                "int_not_obtained.pnt_intersect_non_obtain "
                "FROM {bf_table} AS main "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_obtidos "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.extracted_buffer) "
                    "GROUP BY gid, cardeal"
                ") AS get_obtidos ON main.gid = get_obtidos.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_obtidos_fora "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.extracted_buffer) "
                    "WHERE ST_Intersects(bf.geom, dt.geom) IS NOT TRUE "
                    "GROUP BY gid, cardeal"
                ") AS obtidos_fora ON main.gid = obtidos_fora.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_intersect "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.intersect_buffer) "
                    "GROUP BY gid, cardeal"
                ") AS intersecting ON main.gid = intersecting.gid "
                "LEFT JOIN ("
                    "SELECT gid, cardeal, COUNT(gid) AS pnt_intersect_non_obtain "
                    "FROM {bf_table} AS bf "
                    "INNER JOIN {dt_table} AS dt "
                    "ON bf.cardeal = ANY(dt.intersect_buffer) "
                    "WHERE NOT (bf.cardeal = ANY(dt.extracted_buffer)) "
                    "GROUP BY gid, cardeal"
                ") AS int_not_obtained "
                "ON main.gid = int_not_obtained.gid "
                "ORDER BY main.gid"
            ).format(
                bf_table=inBuffers[city]["pg_buffer"],
                dt_table=inBuffers[city]["table"]
            ), api='psql')

        """
        Get Points table with related info:
        -> nobtido = number of times a point was retrieved
        -> obtido_e_intersect = number of times a point was retrieved with a
           buffer it intersects
        -> obtido_sem_intersect = number of times a point was retrieved with a
           buffer it does not intersect
        -> nintersect = number of times a point intersects a buffer
        -> intersect_sem_obtido = number of times a point was not retrieved
           despite intersecting the buffer
        """
        inBuffers[city]["table"] = q_to_ntbl(
            db, "info_{}".format(city), (
                "SELECT {cols}, dt.keyword, dt.geom, "
                "CAST(dt.extracted_buffer AS text) AS extracted_buffer, "
                "CAST(dt.intersect_buffer AS text) AS intersect_buffer, "
                "array_length(extracted_buffer, 1) AS nobtido, "
                "SUM(CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END) AS obtido_e_intersect, "
                "(array_length(extracted_buffer, 1) - SUM("
                    "CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END)) AS obtido_sem_intersect, "
                "array_length(intersect_buffer, 1) AS nintersect, "
                "(array_length(intersect_buffer, 1) - SUM("
                    "CASE WHEN ST_Intersects(bf.geom, dt.geom) IS TRUE "
                    "THEN 1 ELSE 0 END)) AS intersect_sem_obtido "
                "FROM {dt_table} AS dt "
                "INNER JOIN {bf_table} AS bf "
                "ON bf.cardeal = ANY(dt.extracted_buffer) "
                "GROUP BY {cols}, dt.keyword, dt.geom, "
                "dt.extracted_buffer, dt.intersect_buffer"
            ).format(
                dt_table=inBuffers[city]["table"],
                bf_table=inBuffers[city]["pg_buffer"],
                cols=", ".join(["dt.{}".format(x) for x in dataColumns])
            ), api='psql')

        # Export Results
        dbtbl_to_shp(
            db, inBuffers[city]["table"], 'geom',
            os.path.join(workspace, "{}.shp".format(inBuffers[city]["table"])),
            api='psql', epsg=inBuffers[city]["epsg"])

        dbtbl_to_shp(
            db, inBuffers[city]["pg_buffer"], 'geom',
            os.path.join(workspace, "{}.shp".format(inBuffers[city]["pg_buffer"])),
            api='psql', epsg=inBuffers[city]["epsg"])

    return inBuffers

def dsnsearch_by_cell(GRID_PNT, EPSG, RADIUS, DATA_SOURCE, db, OUTPUT_TABLE):
    """
    Search for data in DSN and other platforms by cell
    """

    import time
    from glass.g.rd.shp            import shp_to_obj
    from glass.ng.sql.db           import create_db
    from glass.g.acq.dsn.fb.places import places_by_query
    from glass.g.prj.obj           import df_prj
    from glass.ng.pd               import merge_df
    from glass.g.it.shp            import dbtbl_to_shp
    from glass.ng.sql.q            import q_to_ntbl
    from glass.g.wt.sql            import df_to_db

    # Open GRID SHP
    GRID_DF = shp_to_obj(GRID_PNT)
    GRID_DF = df_prj(GRID_DF, 4326) if EPSG != 4326 else GRID_DF

    GRID_DF["lng"]     = GRID_DF.geometry.x.astype(float)
    GRID_DF["lat"]     = GRID_DF.geometry.y.astype(float)
    GRID_DF["grid_id"] = GRID_DF.index

    # GET DATA
    RESULTS = []

    def get_data(row, datasrc):
        if datasrc == 'facebook':
            d = places_by_query(
                {'x': row.lng, 'y': row.lat, 'r': RADIUS}, 4326,
                keyword=None, epsgOut=EPSG, _limit='100',
                onlySearchAreaContained=None
            )

        else:
            raise ValueError(
                '{} as datasource is not a valid value'.format(datasrc))

        if type(d) == int:
            return

        d['grid_id'] = row.grid_id

        RESULTS.append(d)

        time.sleep(5)

    GRID_DF.apply(lambda x: get_data(x, DATA_SOURCE), axis=1)

    RT = merge_df(RESULTS)

    # Create DB
    create_db(db, overwrite=True, api='psql')

    # Send Data to PostgreSQL
    df_to_db(
        db, RT, "{}_data".format(DATA_SOURCE), EPSG, "POINT",
        colGeom='geometry' if 'geometry' in RT.columns.values else 'geom')

    COLS = [
        x for x in RT.columns.values if x != "geometry" and \
            x != 'geom' and x != "grid_id"
    ] + ["geom"]

    GRP_BY_TBL = q_to_ntbl(
        db, "{}_grpby".format(DATA_SOURCE), (
            "SELECT {cols}, CAST(array_agg(grid_id) AS text) AS grid_id "
            "FROM {dtsrc}_data GROUP BY {cols}"
        ).format(cols=", ".join(COLS), dtsrc=DATA_SOURCE), api='psql')

    dbtbl_to_shp(db, GRP_BY_TBL, "geom", OUTPUT_TABLE, api="psql", epsg=EPSG)

    return OUTPUT_TABLE

def feat_not_within(db, inTbl, inGeom, withinTbl, withinGeom, outTbl,
                    inTblCols=None, outTblIsFile=None,
                    apiToUse='OGR_SPATIALITE', geom_col=None):
    """
    Get features not Within with any of the features in withinTbl

    apiToUse options:
    * OGR_SPATIALITE;
    * POSTGIS.
    """

    from glass.pys import obj_to_lst

    Q = (
        "SELECT {selCols} FROM {tbl} AS in_tbl WHERE ("
        "in_tbl.{in_geom} NOT IN ("
            "SELECT inin_tbl.{in_geom} FROM {wi_tbl} AS wi_tbl "
            "INNER JOIN {tbl} AS inin_tbl ON "
            "ST_Within(wi_tbl.{wi_geom}, inin_tbl.{in_geom})"
        "))"
    ).format(
        selCols="*" if not inTblCols else ", ".join(obj_to_lst(inTblCols)),
        tbl=inTbl, in_geom=inGeom, wi_tbl=withinTbl, wi_geom=withinGeom
    )

    if apiToUse == "OGR_SPATIALITE":
        if outTblIsFile:
            from glass.g.tbl.filter import sel_by_attr

            sel_by_attr(db, Q, outTbl, api_gis='ogr')

        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='ogr2ogr')

    elif apiToUse == "POSTGIS":
        if outTblIsFile:
            if not geom_col:
                raise ValueError((
                    "To export a PostGIS table to file, "
                    "geom_col must be specified"
                ))

            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                db, Q, geom_col, outTbl, api='pgsql2shp', tableIsQuery=True)

        else:
            from glass.ng.sql.q import q_to_ntbl

            q_to_ntbl(db, outTbl, Q, api='psql')

    else:
        raise ValueError((
            "API {} is not available. OGR_SPATIALITE and POSTGIS "
            "are the only valid options"
        ).format(apiToUse))

    return outTbl

def lnh_to_polygons(inShp, outShp, api='saga', db=None):
    """
    Line to Polygons

    API's Available:
    * saga;
    * grass;
    * pygrass;
    * psql;
    """

    if api == 'saga':
        """
        http://www.saga-gis.org/saga_tool_doc/7.0.0/shapes_polygons_3.html

        Converts lines to polygons. Line arcs are closed to polygons simply
        by connecting the last point with the first. Optionally, parts of
        polylines can be merged into one polygon.
        """

        from glass.pys import execmd

        rcmd = execmd((
            "saga_cmd shapes_polygons 3 -POLYGONS {} "
            "LINES {} -SINGLE 1 -MERGE 1"
        ).format(outShp, inShp))

    elif api == 'grass' or api == 'pygrass':
        # Do it using GRASS GIS

        import os
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss    import fprop

        # Create GRASS GIS Session
        wk = os.path.dirname(outShp)
        lo = fprop(outShp, 'fn', forceLower=True)

        gs = run_grass(wk, lo, srs=inShp)

        import grass.script       as grass
        import grass.script.setup as gsetup

        gsetup.init(gs, wk, lo, 'PERMANENT')

        # Import Packages
        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Send data to GRASS GIS
        lnh_shp = shp_to_grs(
            inShp, fprop(inShp, 'fn', forceLower=True),
            asCMD=True if api == 'grass' else None)

        # Build Polylines
        # line_to_polyline, geomtype_to_geomtype and boundary_to_areas are
        # assumed to be imported elsewhere in this module
        pol_lnh = line_to_polyline(
            lnh_shp, "polylines", asCmd=True if api == 'grass' else None)

        # Polyline to boundary
        bound = geomtype_to_geomtype(
            pol_lnh, 'bound_shp', 'line', 'boundary',
            cmd=True if api == 'grass' else None)

        # Boundary to Area
        areas_shp = boundary_to_areas(
            bound, lo, useCMD=True if api == 'grass' else None)

        # Export data
        outShp = grs_to_shp(
            areas_shp, outShp, 'area', asCMD=True if api == 'grass' else None)

    elif api == 'psql':
        """ Do it using PostGIS """
        from glass.pys.oss      import fprop
        from glass.ng.sql.db    import create_db
        from glass.g.it.db      import shp_to_psql
        from glass.g.it.shp     import dbtbl_to_shp
        from glass.g.dp.cg.sql  import lnh_to_polg
        from glass.g.prop.prj   import get_shp_epsg

        # Create DB
        if not db:
            db = create_db(fprop(inShp, 'fn', forceLower=True), api='psql')

        else:
            from glass.ng.prop.sql import db_exists

            isDB = db_exists(db)

            if not isDB:
                create_db(db, api='psql')

        # Send data to DB
        in_tbl = shp_to_psql(db, inShp, api="shp2pgsql")

        # Get Result
        result = lnh_to_polg(db, in_tbl, fprop(outShp, 'fn', forceLower=True))

        # Export Result
        outShp = dbtbl_to_shp(
            db, result, "geom", outShp, api='psql',
            epsg=get_shp_epsg(inShp))

    else:
        raise ValueError("API {} is not available".format(api))

    return outShp

def shps_to_shp(shps, outShp, api="ogr2ogr", fformat='.shp', dbname=None):
    """
    Get all features in several Shapefiles and save them in one file

    api options:
    * ogr2ogr;
    * psql;
    * pandas;
    * grass;
    """

    import os

    if type(shps) != list:
        # Check if is dir
        if os.path.isdir(shps):
            from glass.pys.oss import lst_ff

            # List shps in dir
            shps = lst_ff(shps, file_format=fformat)

        else:
            raise ValueError((
                'shps should be a list with paths for Feature Classes or a '
                'path to a folder with Feature Classes'
            ))

    if api == "ogr2ogr":
        from glass.pys    import execmd
        from glass.g.prop import drv_name

        out_drv = drv_name(outShp)

        # Create output and copy some features of one layer (first in shps)
        cmdout = execmd('ogr2ogr -f "{}" {} {}'.format(
            out_drv, outShp, shps[0]
        ))

        # Append remaining layers
        lcmd = [execmd(
            'ogr2ogr -f "{}" -update -append {} {}'.format(
                out_drv, outShp, shps[i]
            )
        ) for i in range(1, len(shps))]

    elif api == 'pandas':
        """
        Merge SHP using pandas
        """

        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp

        if type(shps) != list:
            raise ValueError('shps should be a list with paths for Feature Classes')

        dfs = [shp_to_obj(shp) for shp in shps]

        result = dfs[0]

        for df in dfs[1:]:
            result = result.append(df, ignore_index=True, sort=True)

        df_to_shp(result, outShp)

    elif api == 'psql':
        import os
        from glass.ng.sql.tbl import tbls_to_tbl, del_tables
        from glass.g.it.db    import shp_to_psql

        if not dbname:
            from glass.ng.sql.db import create_db
            from glass.pys.oss   import fprop

            # No database was given; create one named after the output file
            dbname = create_db(fprop(outShp, 'fn', forceLower=True), api='psql')

        pg_tbls = shp_to_psql(dbname, shps, api="shp2pgsql")

        if os.path.isfile(outShp):
            from glass.pys.oss import fprop
            outbl = fprop(outShp, 'fn')

        else:
            outbl = outShp

        tbls_to_tbl(dbname, pg_tbls, outbl)

        if outbl != outShp:
            from glass.g.it.shp import dbtbl_to_shp

            dbtbl_to_shp(
                dbname, outbl, 'geom', outShp, inDB='psql', api="pgsql2shp")

        del_tables(dbname, pg_tbls)

    elif api == 'grass':
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss    import fprop, lst_ff
        from glass.g.prop.prj import get_shp_epsg

        lshps = lst_ff(shps, file_format='.shp')

        epsg = get_shp_epsg(lshps[0])

        gwork      = os.path.dirname(outShp)
        outshpname = fprop(outShp, "fn")
        loc        = f'loc_{outshpname}'

        gbase = run_grass(gwork, loc=loc, srs=epsg)

        import grass.script.setup as gsetup

        gsetup.init(gbase, gwork, loc, 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Import data
        gshps = [shp_to_grs(s, fprop(s, 'fn'), asCMD=True) for s in lshps]

        # vpatch is assumed to be imported elsewhere in this module
        patch = vpatch(gshps, outshpname)

        grs_to_shp(patch, outShp, "area")

    else:
        raise ValueError("{} API is not available".format(api))

    return outShp

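# Hedged usage sketch for shps_to_shp: the folder and output path are
# hypothetical; it assumes the folder only holds Shapefiles with the same
# schema and spatial reference system.
def _example_shps_to_shp():
    # Merge every Shapefile inside a folder into a single layer using ogr2ogr
    shps_to_shp('/tmp/tiles', '/tmp/all_tiles.shp', api='ogr2ogr')
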