def matrix_od(origins, destinations, networkShp, speedLimitCol, onewayCol,
              grsWorkspace, grsLocation, outputShp):
    """
    Produce an Origin-Destination matrix using GRASS GIS.

    Opens a GRASS GIS session on the given workspace/location (SRS taken
    from the network shapefile), loads the road network, computes the OD
    matrix and exports it as a line shapefile.
    """

    from glass.pys.oss import fprop
    from glass.g.wenv.grs import run_grass

    # Launch the GRASS GIS session
    grsb = run_grass(
        grsWorkspace, grassBIN="grass76",
        location=grsLocation, srs=networkShp
    )

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(grsb, grsWorkspace, grsLocation, 'PERMANENT')

    # GRASS conversion utilities
    from glass.g.it.shp import shp_to_grs, grs_to_shp

    # Load the road network into GRASS GIS
    network_grs = shp_to_grs(
        networkShp, fprop(networkShp, 'fn', forceLower=True))

    # Compute the distance matrix
    od_matrix = prod_matrix(
        origins, destinations, network_grs, speedLimitCol, onewayCol)

    # Export the result (lyrN=3 as required by the matrix output)
    return grs_to_shp(od_matrix, outputShp, "line", lyrN=3)
def run_viewshed_by_cpu(tid, obs, dem, output,
                        vis_basename='vis', maxdst=None, obselevation=None):
    # Worker: compute one viewshed raster per observer point in ``obs``
    # (iterated via ``iterrows`` — presumably a GeoDataFrame; confirm),
    # inside a worker-specific GRASS GIS location named after ``tid``.
    #
    # NOTE(review): relies on names not defined in this function:
    # ``run_grass``, ``os`` and ``obs_id`` (the observer-id column name)
    # must exist at module scope — confirm.

    # Create GRASS GIS location
    loc_name = 'loc_' + str(tid)
    gbase = run_grass(output, location=loc_name, srs=dem)

    # Start GRASS GIS Session
    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, output, loc_name, 'PERMANENT')

    from glass.g.it.rst import rst_to_grs, grs_to_rst
    from glass.g.rst.surf import grs_viewshed

    # Send DEM to GRASS GIS
    grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)

    # Produce Viewshed for each point in obs
    for idx, row in obs.iterrows():
        # Viewshed named "<vis_basename>_<observer id>"
        vrst = grs_viewshed(
            grs_dem, (row.geometry.x, row.geometry.y),
            '{}_{}'.format(vis_basename, str(row[obs_id])),
            max_dist=maxdst, obs_elv=obselevation
        )

        # Export each viewshed raster as GeoTIFF into ``output``
        frst = grs_to_rst(vrst, os.path.join(output, vrst + '.tif'))
def shp_to_shp(inshp, outshp, gisApi='ogr', supportForSpatialLite=None):
    """
    Convert a vectorial file to another file format.

    API's Available:
    * ogr;
    * grass;

    When using gisApi='ogr' - Set supportForSpatialLite to True if outShp
    is a sqlite db and if you want SpatialLite support for that database.
    """

    if gisApi == 'ogr':
        from glass.pys import execmd
        from glass.g.prop import drv_name

        drv = drv_name(outshp)

        # SpatialLite support only makes sense for SQLite outputs
        lite_opt = (
            ' -dsco "SPATIALITE=YES"'
            if drv == 'SQLite' and supportForSpatialLite else ''
        )

        # Build and run the ogr2ogr command
        cmdout = execmd('ogr2ogr -f "{drv}" {out} {_in}{lite}'.format(
            drv=drv, out=outshp, _in=inshp, lite=lite_opt
        ))

    elif gisApi == 'grass':
        # TODO identify input geometry type

        import os
        from glass.pys.oss import fprop
        from glass.g.wenv.grs import run_grass
        from glass.g.prop.prj import get_epsg

        # Start GRASS GIS Session on the output's folder
        ws = os.path.dirname(outshp)
        loc = f'loc_{fprop(outshp, "fn")}'

        gbase = run_grass(ws, location=loc, srs=get_epsg(inshp))

        import grass.script.setup as gsetup

        gsetup.init(gbase, ws, loc, 'PERMANENT')

        from glass.g.it.shp import grs_to_shp, shp_to_grs

        # Import, then export in the target format
        grs_lyr = shp_to_grs(inshp, fprop(inshp, 'fn'))

        grs_to_shp(grs_lyr, outshp, 'area')

    else:
        raise ValueError('Sorry, API {} is not available'.format(gisApi))

    return outshp
def clip(inFeat, clipFeat, outFeat, api_gis="grass", clip_by_region=None):
    """
    Clip Analysis

    api_gis Options:
    * grass
    * pygrass
    * ogr2ogr
    """

    from glass.pys.oss import fprop

    if api_gis in ("pygrass", "grass"):
        import os
        from glass.g.wenv.grs import run_grass
        from glass.g.prop.prj import get_epsg

        # Session parameters derived from the output path
        work    = os.path.dirname(outFeat)
        refname = fprop(outFeat, 'fn')
        loc     = f"loc_{refname}"

        grsbase = run_grass(work, location=loc, srs=get_epsg(inFeat))

        import grass.script.setup as gsetup

        gsetup.init(grsbase, work, loc, 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp
        from glass.g.prop.feat import feat_count

        # Import both layers into GRASS GIS
        shp = shp_to_grs(inFeat, fprop(inFeat, 'fn'))
        clp = shp_to_grs(clipFeat, fprop(clipFeat, 'fn'))

        # Clip (command-line module when api_gis == "grass")
        rslt = grsclip(
            shp, clp, refname,
            cmd=True if api_gis == "grass" else None,
            clip_by_region=clip_by_region
        )

        # Export
        grs_to_shp(rslt, outFeat, 'area')

    elif api_gis == 'ogr2ogr':
        from glass.pys import execmd
        from glass.g.prop import drv_name

        rcmd = execmd((
            "ogr2ogr -f \"{}\" {} {} -clipsrc {} -clipsrclayer {}"
        ).format(
            drv_name(outFeat), outFeat, inFeat,
            clipFeat, fprop(clipFeat, 'fn')
        ))

    else:
        raise ValueError("{} is not available!".format(api_gis))

    return outFeat
def run_slope(tid, inrsts, outfolder, oname, percentage):
    """
    Thread function: compute slope rasters for every DEM in ``inrsts``.

    tid        - worker id, used to name the GRASS GIS location;
    inrsts     - DataFrame with a ``mdt`` column holding DEM paths;
    outfolder  - folder receiving slope-in-degrees rasters (also hosts
                 the GRASS location);
    oname      - basename for the output rasters;
    percentage - if truthy, a folder path that also receives
                 slope-in-percent rasters.

    NOTE(review): depends on module-level names ``run_grass``, ``fprop``,
    ``re`` and ``os`` — confirm they exist at module scope.
    """

    iirsts = inrsts.mdt.tolist()

    # Create GRASS GIS Location
    loc_name = f'thread_{str(tid)}'
    gbase = run_grass(
        outfolder, location=loc_name, srs=iirsts[0]
    )

    # Start GRASS GIS Session
    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, outfolder, loc_name, 'PERMANENT')

    from glass.g.it.rst import rst_to_grs, grs_to_rst
    from glass.g.rst.surf import slope
    from glass.g.wenv.grs import rst_to_region

    for rst in iirsts:
        # Import data
        mdt = rst_to_grs(rst, fprop(rst, 'fn'))

        # Set region
        rst_to_region(mdt)

        # Get ID in name
        mdt_id = re.search(r'\d+', mdt).group()

        # Get slope
        if percentage:
            slope_perc = slope(
                mdt, f"pp_{oname}_{mdt_id}", data='percent'
            )

        slope_degr = slope(
            mdt, f"{oname}_{mdt_id}", data='degrees'
        )

        # Export
        if percentage:
            # BUGFIX: the percent raster was previously exported under the
            # degrees raster's name; use its own name instead.
            grs_to_rst(slope_perc, os.path.join(
                percentage, slope_perc + '.tif'
            ))

        grs_to_rst(slope_degr, os.path.join(
            outfolder, slope_degr + '.tif'
        ))
def union(lyrA, lyrB, outShp, api_gis="grass"):
    """
    Calculates the geometric union of the overlayed polygon layers, i.e.
    the intersection plus the symmetrical difference of layers A and B.

    API's Available:
    * saga;
    * grass;
    * pygrass;
    """

    if api_gis == "saga":
        from glass.pys import execmd

        rcmd = execmd((
            "saga_cmd shapes_polygons 17 -A {} -B {} -RESULT {} -SPLIT 1"
        ).format(lyrA, lyrB, outShp))

    elif api_gis in ("pygrass", "grass"):
        import os
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss import fprop
        from glass.g.prop.prj import get_epsg

        # GRASS session parameters derived from the output path
        refname = fprop(outShp)
        ws      = os.path.dirname(outShp)
        loc     = f"loc_{refname}"

        gbase = run_grass(ws, location=loc, srs=get_epsg(lyrA))

        import grass.script.setup as gs

        gs.init(gbase, ws, loc, 'PERMANENT')

        # Import data
        from glass.g.it.shp import shp_to_grs, grs_to_shp

        lyr_a = shp_to_grs(lyrA, fprop(lyrA, 'fn'), asCMD=True)
        lyr_b = shp_to_grs(lyrB, fprop(lyrB, 'fn'), asCMD=True)

        # Union the two layers (command-line module when api_gis == "grass")
        shpunion = grsunion(
            lyr_a, lyr_b, refname,
            cmd=True if api_gis == "grass" else None
        )

        # Export data
        grs_to_shp(shpunion, outShp, "area")

    else:
        raise ValueError("{} is not available!".format(api_gis))

    return outShp
def bash_matrix_od(origins, destinationShp, network, costCol, oneway,
                   grsWork, output):
    """
    Produce matrix OD using GRASS GIS - BASH MODE

    Splits the origins into chunks of 100 features, computes one partial
    OD matrix per chunk, and merges the partial results into ``output``.

    NOTE(review): relies on module-level names ``os`` and ``prod_matrix``
    not defined here — confirm they exist at module scope.
    NOTE(review): ``run_grass`` creates a location named 'location', but
    the loop initializes locations named "grs_loc_{e}" which are never
    created here — verify where those locations come from.
    """

    from glass.g.wenv.grs import run_grass
    from glass.pys.oss import fprop, mkdir
    from glass.g.dp.split import splitShp_by_range
    from glass.g.dp.mge import shps_to_shp

    # SPLIT ORIGINS IN PARTS (chunks of 100 features)
    originsFld = mkdir(os.path.join(grsWork, 'origins_parts'))

    originsList = splitShp_by_range(origins, 100, originsFld)

    # Open an GRASS GIS Session
    gbase = run_grass(
        grsWork, grassBIN="grass76", location='location', srs=network)

    import grass.script as grass
    import grass.script.setup as gsetup

    RESULTS = []
    R_FOLDER = mkdir(os.path.join(grsWork, 'res_parts'))

    for e in range(len(originsList)):
        # One GRASS location per chunk
        gsetup.init(gbase, grsWork, "grs_loc_{}".format(e), 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Add Data to GRASS GIS
        rdvMain = shp_to_grs(network, fprop(network, 'fn', forceLower=True))

        # Produce Matrix
        result_part = prod_matrix(
            originsList[e], destinationShp, rdvMain, costCol, oneway)

        # Export Result
        shp = grs_to_shp(
            result_part, os.path.join(R_FOLDER, result_part + '.shp'),
            geom_type="line", lyrN=3)

        RESULTS.append(shp)

    # Merge all partial matrices into the final output
    shps_to_shp(RESULTS, output, api='pandas')

    return output
def clip_and_union(la, lb, cell, work, proc, output):
    """
    Worker: clip layers ``la`` and ``lb`` to the extent of ``cell`` and
    union the two clipped layers, exporting the result to ``output``.

    Returns ``output`` on success, or None when either clipped layer has
    no features inside the cell.

    NOTE(review): depends on module-level names ``shpext_to_rst``,
    ``run_grass``, ``fprop``, ``grsclip``, ``grsunion`` and ``os`` —
    confirm they exist at module scope.
    """

    # Reference raster defining the processing region for this cell
    ref_rst = shpext_to_rst(
        cell,
        os.path.join(os.path.dirname(cell), fprop(cell, 'fn') + '.tif'),
        cellsize=10
    )

    # Start GRASS GIS Session
    loc = "proc_" + str(proc)
    grsbase = run_grass(work, location=loc, srs=ref_rst)

    import grass.script.setup as gsetup

    gsetup.init(grsbase, work, loc, 'PERMANENT')

    # Import GRASS GIS modules
    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.prop.feat import feat_count

    # Add data to GRASS, restricted to the current region
    a = shp_to_grs(la, fprop(la, 'fn'), filterByReg=True, asCMD=True)
    b = shp_to_grs(lb, fprop(lb, 'fn'), filterByReg=True, asCMD=True)

    # Nothing to do when either layer is empty inside this cell
    if not feat_count(a, gisApi="grass", work=work, loc=loc):
        return

    if not feat_count(b, gisApi="grass", work=work, loc=loc):
        return

    # Clip
    a_clip = grsclip(
        a, None, "{}_clip".format(a), cmd=True, clip_by_region=True)
    b_clip = grsclip(
        b, None, "{}_clip".format(b), cmd=True, clip_by_region=True)

    # Union
    u_shp = grsunion(a_clip, b_clip, f"un_{fprop(cell, 'fn')}", cmd=True)

    # Export
    o = grs_to_shp(u_shp, output, "area")

    # BUGFIX: the export path was previously computed but never returned
    return o
def grscliprst(in_rst, clip_ext, outrst):
    """
    Clip Raster using GRASS GIS

    The clip-extent raster defines both the computational region and the
    mask applied before exporting the clipped raster.
    """

    import os
    from glass.pys.oss import fprop
    from glass.g.wenv.grs import run_grass
    from glass.g.wenv.grs import rst_to_region
    from glass.g.prop.prj import get_epsg

    # The input raster must carry a recognisable SRS
    srs_code = get_epsg(in_rst)

    if not srs_code:
        raise ValueError(
            'Cannot get EPSG code of Extent Template File ({})'.format(in_rst))

    ws = os.path.dirname(outrst)
    grs_loc = 'loc_' + fprop(outrst, 'fn')

    # Create GRASS GIS Session
    gb = run_grass(ws, location=grs_loc, srs=srs_code)

    import grass.script.setup as gsetup

    gsetup.init(gb, ws, grs_loc, 'PERMANENT')

    # GRASS GIS modules
    from glass.g.it.rst import rst_to_grs, grs_to_rst, grs_to_mask

    # Add data to GRASS GIS
    grs_rst  = rst_to_grs(in_rst, fprop(in_rst, 'fn'), as_cmd=True)
    grs_clip = rst_to_grs(clip_ext, fprop(clip_ext, 'fn'), as_cmd=True)

    # Region and mask both follow the clip raster
    rst_to_region(grs_clip)
    grs_to_mask(grs_clip)

    # Export result
    return grs_to_rst(grs_rst, outrst)
def rsts_to_shps(rstfolder, outfolder, rsttemplate):
    """
    Convert every raster in a folder to a Shapefile.

    this script uses GRASS GIS
    """

    import os
    from glass.pys.oss import lst_ff, fprop
    from glass.g.wenv.grs import run_grass

    # List Raster Files
    tifs = lst_ff(rstfolder, file_format='tif')

    # Start GRASS GIS Session
    grs_loc = 'convrst'
    gb = run_grass(outfolder, location=grs_loc, srs=rsttemplate)

    import grass.script.setup as gsetup

    gsetup.init(gb, outfolder, grs_loc, 'PERMANENT')

    from glass.g.it.rst import rst_to_grs
    from glass.g.it.shp import grs_to_shp

    for tif in tifs:
        # Import the raster into GRASS GIS
        in_rst = rst_to_grs(tif, fprop(tif, 'fn'), as_cmd=True)

        # Raster to polygon layer
        poly = rst_to_polyg(
            in_rst, in_rst + "_shp",
            rstColumn="value", gisApi='grasscmd'
        )

        # Export polygons as shapefile
        grs_to_shp(poly, os.path.join(outfolder, in_rst + '.shp'), 'area')

    return outfolder
def multi_run(ti, df, ofolder):
    """
    Worker: union each pair of shapefiles listed in ``df`` (one pair per
    row, columns ``shp_a`` and ``shp_b``), using one GRASS GIS location
    per worker, and export each union result into ``ofolder``.

    NOTE(review): depends on module-level names ``run_grass``, ``fprop``,
    ``srs_epsg`` and ``os`` — confirm they exist at module scope.
    """

    loc_name = 'loc_{}'.format(str(ti))
    grsbase = run_grass(ofolder, location=loc_name, srs=srs_epsg)

    import grass.script.setup as gsetup

    gsetup.init(grsbase, ofolder, loc_name, 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.gp.ovl import grsunion

    for idx, row in df.iterrows():
        # Import data into GRASS GIS
        # BUGFIX: the shapefiles were previously read from the whole
        # DataFrame (df.shp_a / df.shp_b) instead of the current row.
        lyr_a = shp_to_grs(row.shp_a, fprop(row.shp_a, 'fn'), asCMD=True)
        lyr_b = shp_to_grs(row.shp_b, fprop(row.shp_b, 'fn'), asCMD=True)

        # Run Union
        shpUnion = grsunion(
            lyr_a, lyr_b,
            f"{lyr_a[:10]}_{lyr_b[:10]}", cmd=True
        )

        # Export data
        result = grs_to_shp(
            shpUnion, os.path.join(ofolder, shpUnion + '.shp'), "area")
def shp_to_rst(shp, inSource, cellsize, nodata, outRaster, epsg=None,
               rst_template=None, snapRst=None, api='gdal'):
    """
    Feature Class to Raster

    cellsize will be ignored if rst_template is defined

    * API's Available:
    - gdal;
    - pygrass;
    - grass;
    """

    if api == 'gdal':
        from osgeo import gdal, ogr
        from glass.g.prop import drv_name

        # Spatial reference for the output (from the shape or given EPSG)
        if not epsg:
            from glass.g.prop.prj import get_shp_sref
            srs = get_shp_sref(shp).ExportToWkt()
        else:
            from glass.g.prop.prj import epsg_to_wkt
            srs = epsg_to_wkt(epsg)

        # Get Extent
        dtShp = ogr.GetDriverByName(drv_name(shp)).Open(shp, 0)

        lyr = dtShp.GetLayer()

        if not rst_template:
            if not snapRst:
                x_min, x_max, y_min, y_max = lyr.GetExtent()
                x_res = int((x_max - x_min) / cellsize)
                y_res = int((y_max - y_min) / cellsize)
            else:
                from glass.g.prop.rst import adjust_ext_to_snap

                x_min, y_max, y_res, x_res, cellsize = adjust_ext_to_snap(
                    shp, snapRst)
        else:
            from glass.g.rd.rst import rst_to_array

            img_temp = gdal.Open(rst_template)
            geo_transform = img_temp.GetGeoTransform()

            y_res, x_res = rst_to_array(rst_template).shape

        # Create output
        # BUGFIX: Driver.Create's 4th positional argument is the band
        # count, not the data type; gdal.GDT_Byte (== 1) was previously
        # passed as the band count, leaving the type at its default.
        dtRst = gdal.GetDriverByName(drv_name(outRaster)).Create(
            outRaster, x_res, y_res, 1, gdal.GDT_Byte)

        if not rst_template:
            dtRst.SetGeoTransform((x_min, cellsize, 0, y_max, 0, -cellsize))
        else:
            dtRst.SetGeoTransform(geo_transform)

        dtRst.SetProjection(str(srs))

        bnd = dtRst.GetRasterBand(1)
        bnd.SetNoDataValue(nodata)

        # Burn value 1 on every cell touched by a feature
        gdal.RasterizeLayer(dtRst, [1], lyr, burn_values=[1])

        del lyr
        dtShp.Destroy()

    elif api == 'grass' or api == 'pygrass':
        """
        Use GRASS GIS

        - Start Session
        - Import data
        - Convert
        - Export
        """

        import os
        from glass.pys.oss import fprop
        from glass.g.wenv.grs import run_grass
        from glass.g.prop.prj import get_epsg

        # Create GRASS GIS Session
        ws   = os.path.dirname(outRaster)
        loc  = fprop(outRaster, 'fn')
        epsg = get_epsg(shp)

        gbase = run_grass(ws, location=loc, srs=epsg)

        import grass.script.setup as gsetup

        gsetup.init(gbase, ws, loc, 'PERMANENT')

        # Import Packages
        from glass.g.it.shp import shp_to_grs
        from glass.g.it.rst import grs_to_rst
        from glass.g.wenv.grs import shp_to_region

        # Shape to GRASS GIS
        gshp = shp_to_grs(shp, fprop(shp, 'fn'), asCMD=True)

        # Set Region
        shp_to_region(gshp, cellsize)

        # Convert
        grst = grsshp_to_grsrst(gshp, inSource, gshp + '__rst', api="grass")

        # Export
        grs_to_rst(grst, outRaster, as_cmd=True)

    else:
        raise ValueError('API {} is not available'.format(api))

    return outRaster
def raster_based(osmdata, nomenclature, refRaster, lulcRst,
                 overwrite=None, dataStore=None, roadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information

    A raster based approach.

    TODO: Add detailed description
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import pandas
    import copy

    # ************************************************************************ #
    # glass dependencies #
    # ************************************************************************ #
    from glass.pys.oss import mkdir, fprop
    from glass.g.prop import check_isRaster
    from glass.g.prop.prj import get_rst_epsg
    from glass.g.wenv.grs import run_grass

    if roadsAPI == 'POSTGIS':
        from glass.ng.sql.db import create_db
        from glass.g.it.db import osm_to_psql
        from glass.ete.osm2lulc.mod2 import roads_sqdb
        from glass.ng.sql.bkup import dump_db
        from glass.ng.sql.db import drop_db
    else:
        from glass.g.it.osm import osm_to_sqdb
        from glass.ete.osm2lulc.mod2 import grs_rst_roads
    from glass.ete.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, osmlulc_rsttbl
    from glass.ete.osm2lulc.utils import get_ref_raster
    from glass.ete.osm2lulc.mod1 import grs_rst
    from glass.ete.osm2lulc.m3_4 import rst_area
    from glass.ete.osm2lulc.mod5 import basic_buffer
    from glass.ete.osm2lulc.mod6 import rst_pnt_to_build

    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if Nomenclature is valid
    # BUGFIX: the last comparison used "==", which reset a valid
    # "GLOBE_LAND_30" choice to "URBAN_ATLAS" while letting unknown values
    # pass through unchanged; "!=" makes URBAN_ATLAS the fallback for
    # unknown nomenclatures and keeps the three valid ones untouched.
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature != "GLOBE_LAND_30" else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    workspace = os.path.join(os.path.dirname(
        lulcRst), 'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Ref Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)

    from glass.ete.osm2lulc import PRIORITIES, osmTableData, LEGEND

    # NOTE: fixed the original "__priorites" typo; the value is assigned
    # once here and reused for the priority rule below.
    __priorities = PRIORITIES[nomenclature]
    __legend     = LEGEND[nomenclature]

    time_b = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(
            osmdata, 'fn', forceLower=True), overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg, api=roadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True
    )
    time_e = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(
        workspace, grassBIN='grass78', location='grloc', srs=epsg)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME glass MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from glass.g.it.rst import rst_to_grs, grs_to_rst
    from glass.g.rst.mos import rsts_to_mosaic
    from glass.g.wenv.grs import rst_to_region

    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # mergeOut maps each class code to the list of rasters produced for it
    mergeOut = {}
    # ************************************************************************ #

    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    """
    selOut = {
        cls_code : rst_name, ...
    }
    """
    selOut, timeCheck1 = grs_rst(
        osm_db, osmTableData['polygons'], api=roadsAPI)

    for cls in selOut:
        mergeOut[cls] = [selOut[cls]]

    time_g = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    """
    roads = {
        cls_code : rst_name, ...
    }
    """
    if roadsAPI != 'POSTGIS':
        roads, timeCheck2 = grs_rst_roads(
            osm_db, osmTableData['lines'], osmTableData['polygons'],
            workspace, 1221 if nomenclature != "GLOBE_LAND_30" else 801
        )
    else:
        roadCls = 1221 if nomenclature != "GLOBE_LAND_30" else 801

        roads, timeCheck2 = roads_sqdb(
            osm_db, osmTableData['lines'], osmTableData['polygons'],
            apidb='POSTGIS', asRst=roadCls
        )

        roads = {roadCls : roads}

    for cls in roads:
        if cls not in mergeOut:
            mergeOut[cls] = [roads[cls]]
        else:
            mergeOut[cls].append(roads[cls])

    time_h = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    """
    auOut = {
        cls_code : rst_name, ...
    }
    """
    if nomenclature != 'GLOBE_LAND_30':
        auOut, timeCheck3 = rst_area(
            osm_db, osmTableData['polygons'], UPPER=True, api=roadsAPI)

        for cls in auOut:
            if cls not in mergeOut:
                mergeOut[cls] = [auOut[cls]]
            else:
                mergeOut[cls].append(auOut[cls])

        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None

    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    """
    alOut = {
        cls_code : rst_name, ...
    }
    """
    if nomenclature != 'GLOBE_LAND_30':
        alOut, timeCheck4 = rst_area(
            osm_db, osmTableData['polygons'], UPPER=None, api=roadsAPI)

        for cls in alOut:
            if cls not in mergeOut:
                mergeOut[cls] = [alOut[cls]]
            else:
                mergeOut[cls].append(alOut[cls])

        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None

    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    """
    bfOut = {
        cls_code : rst_name, ...
    }
    """
    bfOut, timeCheck5 = basic_buffer(
        osm_db, osmTableData['lines'], workspace, apidb=roadsAPI)

    for cls in bfOut:
        if cls not in mergeOut:
            mergeOut[cls] = [bfOut[cls]]
        else:
            mergeOut[cls].append(bfOut[cls])

    time_m = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        buildsOut, timeCheck7 = rst_pnt_to_build(
            osm_db, osmTableData['points'], osmTableData['polygons'],
            api_db=roadsAPI
        )

        # NOTE(review): unlike the other rules, buildsOut[cls] is used as a
        # list here (assigned / concatenated directly) — confirm its type.
        for cls in buildsOut:
            if cls not in mergeOut:
                mergeOut[cls] = buildsOut[cls]
            else:
                mergeOut[cls] += buildsOut[cls]

        time_n = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map #
    # ************************************************************************ #
    """
    Merge all results for one cls into one raster
    mergeOut = {
        cls_code : [rst_name, rst_name, ...], ...
    }
    into
    mergeOut = {
        cls_code : patched_raster, ...
    }
    """
    for cls in mergeOut:
        if len(mergeOut[cls]) == 1:
            mergeOut[cls] = mergeOut[cls][0]
        else:
            mergeOut[cls] = rsts_to_mosaic(
                mergeOut[cls], 'mosaic_{}'.format(str(cls)), api="grass")

    time_o = datetime.datetime.now().replace(microsecond=0)

    """
    Merge all Class Raster using a priority rule
    """
    lst_rst = []
    for cls in __priorities:
        if cls not in mergeOut:
            continue
        else:
            lst_rst.append(mergeOut[cls])

    outGrs = rsts_to_mosaic(lst_rst, os.path.splitext(
        os.path.basename(lulcRst))[0], api="grass")

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Check if lulc Rst has a valid format; default to GeoTIFF otherwise
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        from glass.pys.oss import fprop

        lulcRst = os.path.join(
            os.path.dirname(lulcRst), fprop(lulcRst, 'fn') + '.tif')

    grs_to_rst(outGrs, lulcRst, as_cmd=True)
    osmlulc_rsttbl(nomenclature, os.path.join(
        os.path.dirname(lulcRst), os.path.basename(lulcRst) + '.vat.dbf'))

    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    # Output path plus a per-stage timing report
    return lulcRst, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('set_grass', time_f - time_e),
        5  : ('rule_1', time_g - time_f, timeCheck1),
        6  : ('rule_2', time_h - time_g, timeCheck2),
        7  : None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8  : None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9  : ('rule_5',
              time_m - time_j if timeCheck4 else time_m - time_h, timeCheck5),
        10 : None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11 : ('merge_rst', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('export_rst', time_q - time_p)
    }
def vector_based(osmdata, nomenclature, refRaster, lulcShp,
                 overwrite=None, dataStore=None, RoadsAPI='POSTGIS'):
    """
    Convert OSM Data into Land Use/Land Cover Information

    A vector based approach.

    TODO: Add a detailed description.

    RoadsAPI Options:
    * GRASS
    * SQLITE
    * POSTGIS
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import datetime
    import os
    import copy

    # ************************************************************************ #
    # glass dependencies #
    # ************************************************************************ #
    from glass.pys.oss import fprop, mkdir
    from glass.g.wenv.grs import run_grass

    if RoadsAPI == 'POSTGIS':
        from glass.ng.sql.db import create_db
        from glass.g.it.db import osm_to_psql
        from glass.ng.sql.db import drop_db
        from glass.ng.sql.bkup import dump_db
    else:
        from glass.g.it.osm import osm_to_sqdb
    from glass.ete.osm2lulc.utils import osm_project, add_lulc_to_osmfeat, get_ref_raster
    from glass.g.dp.mge import shps_to_shp
    from glass.ete.osm2lulc.mod1 import grs_vector
    if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS':
        from glass.ete.osm2lulc.mod2 import roads_sqdb
    else:
        from glass.ete.osm2lulc.mod2 import grs_vec_roads
    from glass.ete.osm2lulc.m3_4 import grs_vect_selbyarea
    from glass.ete.osm2lulc.mod5 import grs_vect_bbuffer
    from glass.ete.osm2lulc.mod6 import vector_assign_pntags_to_build
    from glass.g.dp.mge import same_attr_to_shp
    from glass.g.prj import def_prj

    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcShp)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcShp)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if Nomenclature is valid
    # BUGFIX: the last comparison used "==", which reset a valid
    # "GLOBE_LAND_30" choice to "URBAN_ATLAS" while letting unknown values
    # pass through unchanged; "!=" makes URBAN_ATLAS the fallback for
    # unknown nomenclatures and keeps the three valid ones untouched.
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature != "GLOBE_LAND_30" else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    # Create workspace for temporary files
    workspace = os.path.join(os.path.dirname(
        lulcShp), 'osmtolulc') if not dataStore else dataStore

    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace)

    # Get Reference Raster
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=10)

    from glass.ete.osm2lulc import osmTableData, PRIORITIES, LEGEND

    __priorities = PRIORITIES[nomenclature]
    __legend = LEGEND[nomenclature]

    time_b = datetime.datetime.now().replace(microsecond=0)

    if RoadsAPI != 'POSTGIS':
        # ******************************************************************** #
        # Convert OSM file to SQLITE DB #
        # ******************************************************************** #
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    else:
        # Convert OSM file to POSTGRESQL DB #
        osm_db = create_db(fprop(
            osmdata, 'fn', forceLower=True), overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)

    time_c = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(
        osm_db, osmTableData, nomenclature,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI
    )
    time_d = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg,
        api='SQLITE' if RoadsAPI != 'POSTGIS' else RoadsAPI,
        isGlobeLand=None if nomenclature != 'GLOBE_LAND_30' else True
    )
    time_e = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Start a GRASS GIS Session #
    # ************************************************************************ #
    grass_base = run_grass(
        workspace, grassBIN='grass78', location='grloc', srs=epsg)
    #import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT')

    # ************************************************************************ #
    # IMPORT SOME glass MODULES FOR GRASS GIS #
    # ************************************************************************ #
    from glass.g.gp.ovl import erase
    from glass.g.wenv.grs import rst_to_region
    from glass.g.gp.gen import dissolve
    from glass.g.tbl.grs import add_and_update, reset_table, update_table
    from glass.g.tbl.col import add_fields
    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.it.rst import rst_to_grs

    # ************************************************************************ #
    # SET GRASS GIS LOCATION EXTENT #
    # ************************************************************************ #
    extRst = rst_to_grs(refRaster, 'extent_raster')
    rst_to_region(extRst)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    osmShps = []

    # ************************************************************************ #
    # 1 - Selection Rule #
    # ************************************************************************ #
    ruleOneShp, timeCheck1 = grs_vector(
        osm_db, osmTableData['polygons'], apidb=RoadsAPI)

    osmShps.append(ruleOneShp)
    time_g = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 2 - Get Information About Roads Location #
    # ************************************************************************ #
    ruleRowShp, timeCheck2 = roads_sqdb(
        osm_db, osmTableData['lines'], osmTableData['polygons'],
        apidb=RoadsAPI
    ) if RoadsAPI == 'SQLITE' or RoadsAPI == 'POSTGIS' else grs_vec_roads(
        osm_db, osmTableData['lines'], osmTableData['polygons'])

    osmShps.append(ruleRowShp)
    time_h = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 3 - Area Upper than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleThreeShp, timeCheck3 = grs_vect_selbyarea(
            osm_db, osmTableData['polygons'], UPPER=True, apidb=RoadsAPI)

        osmShps.append(ruleThreeShp)
        time_l = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck3 = None
        time_l = None

    # ************************************************************************ #
    # 4 - Area Lower than #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleFourShp, timeCheck4 = grs_vect_selbyarea(
            osm_db, osmTableData['polygons'], UPPER=False, apidb=RoadsAPI)

        osmShps.append(ruleFourShp)
        time_j = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck4 = None
        time_j = None

    # ************************************************************************ #
    # 5 - Get data from lines table (railway | waterway) #
    # ************************************************************************ #
    ruleFiveShp, timeCheck5 = grs_vect_bbuffer(
        osm_db, osmTableData["lines"], api_db=RoadsAPI)

    osmShps.append(ruleFiveShp)
    time_m = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # 7 - Assign untagged Buildings to tags #
    # ************************************************************************ #
    if nomenclature != "GLOBE_LAND_30":
        ruleSeven11, ruleSeven12, timeCheck7 = vector_assign_pntags_to_build(
            osm_db, osmTableData['points'], osmTableData['polygons'],
            apidb=RoadsAPI
        )

        if ruleSeven11:
            osmShps.append(ruleSeven11)

        if ruleSeven12:
            osmShps.append(ruleSeven12)

        time_n = datetime.datetime.now().replace(microsecond=0)
    else:
        timeCheck7 = None
        time_n = datetime.datetime.now().replace(microsecond=0)

    # ************************************************************************ #
    # Produce LULC Map #
    # ************************************************************************ #
    """
    Get Shps with all geometries related with one class - One Shape for Classe
    """
    _osmShps = []
    for i in range(len(osmShps)):
        if not osmShps[i]:
            continue

        _osmShps.append(grs_to_shp(
            osmShps[i], os.path.join(workspace, osmShps[i] + '.shp'),
            'auto', lyrN=1, asCMD=True, asMultiPart=None
        ))

    for shp in _osmShps:
        def_prj(os.path.splitext(shp)[0] + '.prj', epsg=epsg, api='epsgio')

    _osmShps = same_attr_to_shp(
        _osmShps, "cat", workspace, "osm_", resultDict=True)
    del osmShps

    time_o = datetime.datetime.now().replace(microsecond=0)

    """
    Merge all Classes into one feature class using a priority rule
    """
    osmShps = {}
    for cls in _osmShps:
        if cls == '1':
            osmShps[1221] = shp_to_grs(_osmShps[cls], "osm_1221", asCMD=True)
        else:
            osmShps[int(cls)] = shp_to_grs(
                _osmShps[cls], "osm_" + cls, asCMD=True)

    # Erase overlapping areas by priority
    osmNameRef = copy.deepcopy(osmShps)

    for e in range(len(__priorities)):
        if e + 1 == len(__priorities):
            break

        if __priorities[e] not in osmShps:
            continue
        else:
            for i in range(e + 1, len(__priorities)):
                if __priorities[i] not in osmShps:
                    continue
                else:
                    osmShps[__priorities[i]] = erase(
                        osmShps[__priorities[i]], osmShps[__priorities[e]],
                        "{}_{}".format(osmNameRef[__priorities[i]], e),
                        notTbl=True, api='pygrass'
                    )

    time_p = datetime.datetime.now().replace(microsecond=0)

    # Export all classes
    lst_merge = []
    a = None
    for i in range(len(__priorities)):
        if __priorities[i] not in osmShps:
            continue

        if not a:
            # First exported class carries both 'cls' and 'leg' columns
            reset_table(
                osmShps[__priorities[i]],
                {'cls' : 'varchar(5)', 'leg' : 'varchar(75)'},
                {'cls' : str(__priorities[i]),
                 'leg' : str(__legend[__priorities[i]])}
            )
            a = 1
        else:
            add_and_update(
                osmShps[__priorities[i]],
                {'cls' : 'varchar(5)'},
                {'cls' : str(__priorities[i])}
            )

        ds = dissolve(
            osmShps[__priorities[i]],
            'dl_{}'.format(str(__priorities[i])), 'cls', api="grass")

        add_fields(ds, {'leg': 'varchar(75)'}, api="grass")
        update_table(ds, 'leg', str(__legend[__priorities[i]]), 'leg is null')

        lst_merge.append(grs_to_shp(
            ds, os.path.join(
                workspace, "lulc_{}.shp".format(str(__priorities[i]))),
            'auto', lyrN=1, asCMD=True, asMultiPart=None
        ))

    time_q = datetime.datetime.now().replace(microsecond=0)

    # Ensure the output carries a shapefile extension
    if fprop(lulcShp, 'ff') != '.shp':
        lulcShp = os.path.join(
            os.path.dirname(lulcShp), fprop(lulcShp, 'fn') + '.shp')

    shps_to_shp(lst_merge, lulcShp, api='pandas')

    # Check if prj of lulcShp exists and create it if necessary
    prj_ff = os.path.splitext(lulcShp)[0] + '.prj'
    if not os.path.exists(prj_ff):
        def_prj(prj_ff, epsg=epsg, api='epsgio')

    time_r = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if RoadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    # Output path plus a per-stage timing report
    return lulcShp, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('set_grass', time_f - time_e),
        5  : ('rule_1', time_g - time_f, timeCheck1),
        6  : ('rule_2', time_h - time_g, timeCheck2),
        7  : None if not timeCheck3 else ('rule_3', time_l - time_h, timeCheck3),
        8  : None if not timeCheck4 else ('rule_4', time_j - time_l, timeCheck4),
        9  : ('rule_5',
              time_m - time_j if timeCheck4 else time_m - time_h, timeCheck5),
        10 : None if not timeCheck7 else ('rule_7', time_n - time_m, timeCheck7),
        11 : ('disj_cls', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('export_cls', time_q - time_p),
        14 : ('merge_cls', time_r - time_q)
    }
def pop_within_area(mapunits, mapunits_id, outcol, subunits,
                    subunits_id, pop_col, mapunits_fk,
                    area_shp, output,
                    res_areas=None, res_areas_fk=None):
    """
    Compute, for each map unit, the % of its population living inside
    the polygons of area_shp.

    Originally used to calculate the % of population exposed to noise
    above 65 dB; also useful to estimate population within X minutes
    of some kind of facility.

    Population is distributed from subunits to the intersected area
    proportionally to area (areal weighting). If res_areas /
    res_areas_fk are given, residential areas are used instead of the
    full subunit polygons as the population-bearing geometry.

    Parameters
    ----------
    mapunits : str - path to map units layer (output resolution).
    mapunits_id : str - id column of mapunits.
    outcol : str - name of the new indicator column.
    subunits : str - path to statistical subunits with population.
    subunits_id : str - id column of subunits.
    pop_col : str - population column in subunits.
    mapunits_fk : str - column of subunits referencing mapunits.
    area_shp : str - polygons of interest (e.g. noise > 65 dB).
    output : str - path to output shapefile.
    res_areas, res_areas_fk : optional residential areas layer and its
        column referencing subunits.

    Returns the output path.
    """

    import os
    import pandas as pd
    from glass.g.rd.shp   import shp_to_obj
    from glass.g.wt.rst   import shpext_to_rst
    from glass.g.wt.shp   import obj_to_shp
    from glass.pys.oss    import mkdir, fprop
    from glass.g.gp.ovl   import grsintersection
    from glass.g.prop.prj import get_epsg
    from glass.g.wenv.grs import run_grass

    # Prepare GRASS GIS Workspace configuration
    oname = fprop(output, 'fn')
    gw = mkdir(os.path.join(os.path.dirname(output), 'ww_' + oname), overwrite=True)

    # Boundary to Raster (used only as SRS/extent template for the location)
    w_epsg = get_epsg(area_shp)
    ref_rst = shpext_to_rst(mapunits, os.path.join(gw, 'extent.tif'), cellsize=10, epsg=w_epsg)

    # Create GRASS GIS Session
    loc = 'loc_' + oname
    gbase = run_grass(gw, location=loc, srs=ref_rst)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, gw, loc, 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp

    # Send data to GRASS GIS - residential areas when available,
    # otherwise the subunits themselves carry the population
    grs_res = shp_to_grs(
        res_areas if res_areas and res_areas_fk else subunits,
        fprop(res_areas if res_areas and res_areas_fk else subunits, 'fn'),
        asCMD=True)

    grs_ash = shp_to_grs(area_shp, fprop(area_shp, 'fn'), asCMD=True)

    # Run intersection
    int_ = grsintersection(grs_res, grs_ash, f'i_{grs_res}_{grs_ash}', api='grass')

    # Export result
    res_int = grs_to_shp(int_, os.path.join(gw, int_ + '.shp'), 'area')

    # Compute new indicator
    mapunits_df = shp_to_obj(mapunits)
    subunits_df = shp_to_obj(subunits)

    if res_areas and res_areas_fk:
        resareas_df = shp_to_obj(res_areas)

    int______df = shp_to_obj(res_int)

    # For each subunit (bgri), get inhabited area with population
    if res_areas and res_areas_fk:
        resareas_df['gtarea'] = resareas_df.geometry.area

        # Group By - total residential area per subunit
        respop = pd.DataFrame({
            'areav': resareas_df.groupby([res_areas_fk])['gtarea'].agg('sum')
        }).reset_index()

        # Join with subunits df
        respop.rename(columns={res_areas_fk: 'jtblfid'}, inplace=True)
        subunits_df = subunits_df.merge(respop, how='left', left_on=subunits_id, right_on='jtblfid')
        subunits_df.drop(['jtblfid'], axis=1, inplace=True)
    else:
        # No residential areas: whole subunit area is the denominator
        subunits_df['areav'] = subunits_df.geometry.area

    # For each subunit, get area intersecting area_shp
    int______df['gtarea'] = int______df.geometry.area

    # GRASS v.overlay prefixes the first layer's columns with 'a_'
    int_id = 'a_' + res_areas_fk if res_areas and res_areas_fk else \
        'a_' + subunits_id

    area_int = pd.DataFrame({
        'areai': int______df.groupby([int_id])['gtarea'].agg('sum')
    }).reset_index()

    # Join with main subunits df
    area_int.rename(columns={int_id: 'jtblfid'}, inplace=True)

    subunits_df = subunits_df.merge(area_int, how='left', left_on=subunits_id, right_on='jtblfid')
    subunits_df.drop(['jtblfid'], axis=1, inplace=True)

    subunits_df.areai = subunits_df.areai.fillna(0)
    subunits_df.areav = subunits_df.areav.fillna(0)

    # Areal weighting: population affected proportional to intersected area
    subunits_df['pop_af'] = (subunits_df.areai * subunits_df[pop_col]) / subunits_df.areav

    subunits_pop = pd.DataFrame(subunits_df.groupby([mapunits_fk]).agg({
        pop_col: 'sum', 'pop_af': 'sum'
    }))
    subunits_pop.reset_index(inplace=True)

    # Produce final table - mapunits table with new indicator
    subunits_pop.rename(columns={mapunits_fk: 'jtblid'}, inplace=True)

    mapunits_df = mapunits_df.merge(subunits_pop, how='left', left_on=mapunits_id, right_on='jtblid')
    mapunits_df[outcol] = (mapunits_df.pop_af * 100) / mapunits_df[pop_col]

    mapunits_df.drop(['jtblid', pop_col, 'pop_af'], axis=1, inplace=True)

    obj_to_shp(mapunits_df, 'geometry', w_epsg, output)

    return output
def shparea_by_mapunitpopulation(polygons, mapunits, units_id, outcol,
                                 output, units_pop=None, areacol=None):
    """
    Compute polygons' area per map unit, optionally normalised by the
    unit's population.

    Intersects `polygons` with `mapunits` in GRASS GIS, sums the
    intersected area per unit and writes it (or area / population, when
    units_pop is given) to a new column in a copy of mapunits.

    Parameters
    ----------
    polygons : str - layer whose area is measured.
    mapunits : str - map units layer.
    units_id : str - id column of mapunits.
    outcol : str - name of the output indicator column.
    output : str - path of output shapefile.
    units_pop : str, optional - population column of mapunits; if
        given, outcol = area / population.
    areacol : str, optional - name for the intermediate area column;
        if omitted, a temporary column is used and dropped.

    Returns the output path.
    """

    import os
    import pandas as pd
    from glass.g.wt.rst   import shpext_to_rst
    from glass.pys.oss    import mkdir, fprop
    from glass.g.gp.ovl   import grsintersection
    from glass.g.prop.prj import get_epsg
    from glass.g.wenv.grs import run_grass
    from glass.g.rd.shp   import shp_to_obj
    from glass.g.wt.shp   import obj_to_shp

    # Drop the area column at the end only when the caller did not ask for it
    delareacol = 1 if not areacol else 0
    # Without population, the area itself is the indicator (goes in outcol)
    areacol = outcol if not units_pop else areacol if areacol else 'areav'

    # Prepare GRASS GIS Workspace configuration
    oname = fprop(output, 'fn')
    gw = mkdir(os.path.join(os.path.dirname(output), 'ww_' + oname), overwrite=True)

    # Boundary to raster (SRS/extent template for the GRASS location)
    w_epsg = get_epsg(mapunits)
    ref_rst = shpext_to_rst(mapunits, os.path.join(gw, 'extent.tif'), cellsize=10, epsg=w_epsg)

    # Sanitize columns - keep only the id and geometry before the overlay
    popunits_df_tmp = shp_to_obj(mapunits)

    drop_cols = [
        c for c in popunits_df_tmp.columns.values
        if c != units_id and c != 'geometry'
    ]
    popunits_df_tmp.drop(drop_cols, axis=1, inplace=True)

    popunits_i = obj_to_shp(popunits_df_tmp, 'geometry', w_epsg, os.path.join(gw, 'popunits.shp'))

    # Create GRASS GIS Session
    _l = 'loc_' + oname
    gbase = run_grass(gw, location=_l, srs=ref_rst)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, gw, _l, 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp

    # Data to GRASS GIS
    g_popunits = shp_to_grs(popunits_i, fprop(mapunits, 'fn'), asCMD=True)
    g_polygons = shp_to_grs(polygons, fprop(polygons, 'fn'), asCMD=True)

    # Run intersection
    i_shp = grsintersection(g_popunits, g_polygons, f'i_{g_popunits[:5]}_{g_polygons[:5]}', cmd=True)

    # Export result
    i_res = grs_to_shp(i_shp, os.path.join(gw, i_shp + '.shp'), 'area')

    # Open intersection result and mapunits
    mapunits_df = shp_to_obj(mapunits)
    int_df = shp_to_obj(i_res)

    int_df['garea'] = int_df.geometry.area

    # 'a_' prefix comes from GRASS v.overlay naming of first-layer columns
    int_gp = pd.DataFrame({
        areacol: int_df.groupby(['a_' + units_id])['garea'].agg('sum')
    }).reset_index()

    mapunits_df = mapunits_df.merge(int_gp, how='left', left_on=units_id, right_on='a_' + units_id)

    if units_pop:
        mapunits_df[outcol] = mapunits_df[areacol] / mapunits_df[units_pop]

    dc = ['a_' + units_id, areacol] if units_pop and delareacol else ['a_' + units_id]

    mapunits_df.drop(dc, axis=1, inplace=True)

    obj_to_shp(mapunits_df, 'geometry', w_epsg, output)

    return output
def match_cellsize_and_clip(rstBands, refRaster, outFolder, clipShp=None):
    """
    Resample images to make them with the same resolution and clip

    Good to resample Sentinel bands with more than 10 meters.

    Every raster in rstBands is resampled to refRaster's grid (GRASS
    region); when clipShp is given, the region and a MASK are also set
    from the clip polygon before exporting.

    Dependencies:
    * GRASS GIS;
    * GDAL/OGR.

    Returns the list of exported GeoTIFF paths (one per input band).
    """

    import os
    from glass.g.prop.prj import get_rst_epsg
    from glass.g.wenv.grs import run_grass
    from glass.pys.oss    import fprop, mkdir

    # Check if outfolder exists
    if not os.path.exists(outFolder):
        mkdir(outFolder, overwrite=None)

    # Get EPSG from refRaster
    epsg = get_rst_epsg(refRaster, returnIsProj=None)

    """
    Start GRASS GIS Session
    """
    GRS_WORKSPACE = mkdir(os.path.join(outFolder, 'grswork'))
    grsb = run_grass(
        GRS_WORKSPACE, grassBIN='grass78', location='resample',
        srs=epsg
    )

    import grass.script.setup as gsetup

    gsetup.init(grsb, GRS_WORKSPACE, 'resample', 'PERMANENT')

    """
    Import packages related with GRASS GIS
    """
    from glass.g.it.rst   import rst_to_grs, grs_to_rst
    from glass.g.wenv.grs import rst_to_region
    from glass.g.it.shp   import shp_to_grs
    from glass.g.dp.torst import grsshp_to_grsrst as shp_to_rst
    from glass.g.it.rst   import grs_to_mask

    # Send Ref Raster to GRASS GIS and set region
    extRst = rst_to_grs(refRaster, 'ext_rst')
    rst_to_region(extRst)

    # Import all bands in rstBands; they are resampled on export
    # because the region now matches refRaster
    grs_bands = [rst_to_grs(i, fprop(i, 'fn')) for i in rstBands]

    if clipShp:
        # Add clipShp to GRASS
        grs_clip = shp_to_grs(clipShp, fprop(clipShp, 'fn'), asCMD=True)

        # SHP to Raster
        rstClip = shp_to_rst(
            grs_clip, 1, f'rst_{grs_clip}', cmd=True
        )

        # Set region using the rasterized clip polygon
        rst_to_region(rstClip)

        # Set mask so cells outside the clip polygon become NoData
        grs_to_mask(rstClip)

    # Export bands
    return [grs_to_rst(
        i, os.path.join(outFolder, i + '.tif')
    ) for i in grs_bands]
def joinLines_by_spatial_rel_raster(mainLines, mainId, joinLines,
                                    joinCol, outfile, epsg):
    """
    Join Attributes based on a spatial overlap.
    A raster-based approach.

    Rasterizes both line layers, combines the rasters, and for each
    main line keeps the joinLines category sharing the most cells with
    it (ties broken by the smallest category value). The chosen value
    is written to a new joinCol column of mainLines.

    Parameters
    ----------
    mainLines : str - path to the lines receiving the attribute.
    mainId : str - id column of mainLines (used to rasterize).
    joinLines : str - path to the lines providing the attribute.
    joinCol : str - attribute column of joinLines to transfer.
    outfile : str - path of the output shapefile.
    epsg : int - SRS code of the inputs.

    Returns the outfile path.
    """

    import os
    import pandas

    from glass.g.rd.shp   import shp_to_obj
    from glass.g.wt.shp   import df_to_shp
    from glass.g.gp.ext   import shpext_to_boundshp
    from glass.g.dp.torst import shp_to_rst
    from glass.g.it.pd    import df_to_geodf
    from glass.g.wenv.grs import run_grass
    from glass.ng.pd.joins import join_dfs
    from glass.ng.pd.agg  import df_groupBy
    from glass.pys.oss    import fprop, mkdir

    # FIX: the original passed 'tmp_dt' to os.path.dirname
    # (os.path.dirname(mainLines, 'tmp_dt')), raising TypeError;
    # the subfolder name belongs to os.path.join.
    workspace = mkdir(os.path.join(os.path.dirname(mainLines), 'tmp_dt'))

    # Create boundary file
    boundary = shpext_to_boundshp(mainLines, os.path.join(workspace, "bound.shp"), epsg)

    boundRst = shp_to_rst(
        boundary, None, 5, -99,
        os.path.join(workspace, "rst_base.tif"), epsg=epsg, api='gdal')

    # Start GRASS GIS Session
    gbase = run_grass(workspace, location="grs_loc", srs=boundRst)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, workspace, "grs_loc", "PERMANENT")

    from glass.g.rst.local import combine
    from glass.g.prop.rst  import get_rst_report_data
    from glass.g.it.shp    import shp_to_grs, grs_to_shp
    # Deliberately shadows the gdal-based shp_to_rst imported above;
    # inside the session we rasterize with GRASS
    from glass.g.dp.torst  import grsshp_to_grsrst as shp_to_rst

    # Add data to GRASS GIS
    mainVector = shp_to_grs(mainLines, fprop(mainLines, 'fn', forceLower=True))
    joinVector = shp_to_grs(joinLines, fprop(joinLines, 'fn', forceLower=True))

    mainRst = shp_to_rst(mainVector, mainId, f"rst_{mainVector}")
    joinRst = shp_to_rst(joinVector, joinCol, f"rst_{joinVector}")

    combRst = combine(mainRst, joinRst, "combine_rst", api="pygrass")

    combine_data = get_rst_report_data(combRst, UNITS="c")

    combDf = pandas.DataFrame(
        combine_data, columns=["comb_cat", "rst_1", "rst_2", "ncells"])

    # Drop cells where the main lines do not overlap any join line
    combDf = combDf[combDf["rst_2"] != '0']
    combDf["ncells"] = combDf["ncells"].astype(int)

    # For each main category, keep the join category with most shared cells
    gbdata = df_groupBy(combDf, ["rst_1"], "MAX", "ncells")
    fTable = join_dfs(gbdata, combDf, ["rst_1", "ncells"], ["rst_1", "ncells"])

    fTable["rst_2"] = fTable["rst_2"].astype(int)
    # Tie-break when several join categories share the max cell count
    fTable = df_groupBy(fTable, ["rst_1", "ncells"], STAT='MIN', STAT_FIELD="rst_2")

    # Export main lines with GRASS cat column and join the winner values
    mainLinesCat = grs_to_shp(
        mainVector, os.path.join(workspace, mainVector + '.shp'), 'line')
    mainLinesDf = shp_to_obj(mainLinesCat)

    resultDf = join_dfs(mainLinesDf, fTable, "cat", "rst_1", onlyCombinations=None)

    resultDf.rename(columns={"rst_2": joinCol}, inplace=True)

    resultDf = df_to_geodf(resultDf, "geometry", epsg)

    df_to_shp(resultDf, outfile)

    return outfile
def check_shape_diff(SHAPES_TO_COMPARE, OUT_FOLDER, REPORT, DB,
                     GRASS_REGION_TEMPLATE):
    """
    Script to check differences between pairs of Feature Classes

    Given several Feature Classes (FC), each with a given attribute,
    this compares - for every possible pair of FCs - the differences
    in the distribution of that attribute's values: it unions each
    pair, loads the result into PostGIS, computes agreement area /
    percentage and a confusion matrix per pair, and exports everything
    to a spreadsheet REPORT.

    SHAPES_TO_COMPARE is a dict {path: attribute_column}. Raster
    inputs are vectorized first.

    * Dependencies:
    - GRASS;
    - PostgreSQL;
    - PostGIS.

    Returns the REPORT path.
    """

    import datetime
    import os
    import pandas
    from glass.ng.sql.q       import q_to_obj
    from glass.ng.it          import db_to_tbl
    from glass.g.wt.sql       import df_to_db
    from glass.g.dp.rst.toshp import rst_to_polyg
    from glass.g.it.db        import shp_to_psql
    from glass.g.dp.tomtx     import tbl_to_area_mtx
    from glass.g.prop         import check_isRaster
    from glass.pys.oss        import fprop
    from glass.ng.sql.db      import create_db
    from glass.ng.sql.tbl     import tbls_to_tbl
    from glass.ng.sql.q       import q_to_ntbl

    # Check if folder exists, if not create it
    if not os.path.exists(OUT_FOLDER):
        from glass.pys.oss import mkdir
        mkdir(OUT_FOLDER, overwrite=None)
    else:
        raise ValueError('{} already exists!'.format(OUT_FOLDER))

    from glass.g.wenv.grs import run_grass

    gbase = run_grass(
        OUT_FOLDER, grassBIN='grass78', location='shpdif',
        srs=GRASS_REGION_TEMPLATE)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gbase, OUT_FOLDER, 'shpdif', 'PERMANENT')

    from glass.g.it.shp  import shp_to_grs, grs_to_shp
    from glass.g.it.rst  import rst_to_grs
    from glass.g.tbl.col import rn_cols

    # Convert to SHAPE if file is Raster
    # Normalizes every input to a shapefile whose compare column is lulc_<i>
    i = 0
    _SHP_TO_COMPARE = {}
    for s in SHAPES_TO_COMPARE:
        isRaster = check_isRaster(s)

        if isRaster:
            # To GRASS
            rstName = fprop(s, 'fn')
            inRst = rst_to_grs(s, "rst_" + rstName, as_cmd=True)
            # To Vector
            d = rst_to_polyg(
                inRst, rstName, rstColumn="lulc_{}".format(i), gisApi="grass")

            # Export Shapefile
            shp = grs_to_shp(d, os.path.join(OUT_FOLDER, d + '.shp'), "area")
            _SHP_TO_COMPARE[shp] = "lulc_{}".format(i)
        else:
            # To GRASS
            grsV = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of column with comparing value
            ncol = "lulc_{}".format(str(i))
            rn_cols(grsV, {SHAPES_TO_COMPARE[s]: "lulc_{}".format(str(i))}, api="grass")

            # Export
            shp = grs_to_shp(grsV, os.path.join(OUT_FOLDER, grsV + '_rn.shp'), "area")
            _SHP_TO_COMPARE[shp] = "lulc_{}".format(str(i))

        i += 1

    SHAPES_TO_COMPARE = _SHP_TO_COMPARE
    __SHAPES_TO_COMPARE = SHAPES_TO_COMPARE

    # Create database
    create_db(DB, api='psql')

    """ Union SHAPEs """

    UNION_SHAPE = {}
    FIX_GEOM = {}

    SHPS = list(__SHAPES_TO_COMPARE.keys())
    # All unordered pairs of input layers
    for i in range(len(SHPS)):
        for e in range(i + 1, len(SHPS)):
            # Optimized Union
            # NOTE(review): optimized_union_anls is expected to be defined
            # at module level (it is, later in this file)
            print("Union between {} and {}".format(SHPS[i], SHPS[e]))
            time_a = datetime.datetime.now().replace(microsecond=0)
            __unShp = optimized_union_anls(
                SHPS[i], SHPS[e],
                os.path.join(OUT_FOLDER, "un_{}_{}.shp".format(i, e)),
                GRASS_REGION_TEMPLATE,
                os.path.join(OUT_FOLDER, "work_{}_{}".format(i, e)),
                multiProcess=True)
            time_b = datetime.datetime.now().replace(microsecond=0)
            print(time_b - time_a)

            # Rename cols (drop the a_/b_ prefixes set by v.overlay)
            unShp = rn_cols(__unShp, {
                "a_" + __SHAPES_TO_COMPARE[SHPS[i]]: __SHAPES_TO_COMPARE[SHPS[i]],
                "b_" + __SHAPES_TO_COMPARE[SHPS[e]]: __SHAPES_TO_COMPARE[SHPS[e]]
            })

            UNION_SHAPE[(SHPS[i], SHPS[e])] = unShp

    # Send data to postgresql
    SYNTH_TBL = {}

    for uShp in UNION_SHAPE:
        # Send data to PostgreSQL
        union_tbl = shp_to_psql(DB, UNION_SHAPE[uShp], api='shp2pgsql')

        # Produce table with % of area equal in both maps
        areaMapTbl = q_to_ntbl(DB, "{}_syn".format(union_tbl), (
            "SELECT CAST('{lulc_1}' AS text) AS lulc_1, "
            "CAST('{lulc_2}' AS text) AS lulc_2, "
            "round("
                "CAST(SUM(g_area) / 1000000 AS numeric), 4"
            ") AS agree_area, round("
                "CAST((SUM(g_area) / MIN(total_area)) * 100 AS numeric), 4"
            ") AS agree_percentage, "
            "round("
                "CAST(MIN(total_area) / 1000000 AS numeric), 4"
            ") AS total_area FROM ("
                "SELECT {map1_cls}, {map2_cls}, ST_Area(geom) AS g_area, "
                "CASE "
                    "WHEN {map1_cls} = {map2_cls} "
                    "THEN 1 ELSE 0 "
                "END AS isthesame, total_area FROM {tbl}, ("
                    "SELECT SUM(ST_Area(geom)) AS total_area FROM {tbl}"
                ") AS foo2"
            ") AS foo WHERE isthesame = 1 "
            "GROUP BY isthesame").format(
                lulc_1=fprop(uShp[0], 'fn'), lulc_2=fprop(uShp[1], 'fn'),
                map1_cls=__SHAPES_TO_COMPARE[uShp[0]],
                map2_cls=__SHAPES_TO_COMPARE[uShp[1]],
                tbl=union_tbl
        ), api='psql')

        # Produce confusion matrix for the pair in comparison
        matrixTbl = tbl_to_area_mtx(
            DB, union_tbl,
            __SHAPES_TO_COMPARE[uShp[0]],
            __SHAPES_TO_COMPARE[uShp[1]],
            union_tbl + '_mtx')

        SYNTH_TBL[uShp] = {"TOTAL": areaMapTbl, "MATRIX": matrixTbl}

    # UNION ALL TOTAL TABLES
    total_table = tbls_to_tbl(
        DB, [SYNTH_TBL[k]["TOTAL"] for k in SYNTH_TBL], 'total_table')

    # Create table with % of agreement between each pair of maps
    mapsNames = q_to_obj(DB, (
        "SELECT lulc FROM ("
            "SELECT lulc_1 AS lulc FROM {tbl} GROUP BY lulc_1 "
            "UNION ALL "
            "SELECT lulc_2 AS lulc FROM {tbl} GROUP BY lulc_2"
        ") AS lu GROUP BY lulc ORDER BY lulc").format(
            tbl=total_table), db_api='psql').lulc.tolist()

    FLDS_TO_PIVOT = ["agree_percentage", "total_area"]

    # Pivot (crosstab) query: symmetric matrix of value per map pair
    Q = (
        "SELECT * FROM crosstab('"
            "SELECT CASE "
                "WHEN foo.lulc_1 IS NOT NULL THEN foo.lulc_1 ELSE jtbl.tmp1 "
            "END AS lulc_1, CASE "
                "WHEN foo.lulc_2 IS NOT NULL THEN foo.lulc_2 ELSE jtbl.tmp2 "
            "END AS lulc_2, CASE "
                "WHEN foo.{valCol} IS NOT NULL THEN foo.{valCol} ELSE 0 "
            "END AS agree_percentage FROM ("
                "SELECT lulc_1, lulc_2, {valCol} FROM {tbl} UNION ALL "
                "SELECT lulc_1, lulc_2, {valCol} FROM ("
                    "SELECT lulc_1 AS lulc_2, lulc_2 AS lulc_1, {valCol} "
                    "FROM {tbl}"
                ") AS tst"
            ") AS foo FULL JOIN ("
                "SELECT lulc_1 AS tmp1, lulc_2 AS tmp2 FROM ("
                    "SELECT lulc_1 AS lulc_1 FROM {tbl} GROUP BY lulc_1 "
                    "UNION ALL "
                    "SELECT lulc_2 AS lulc_1 FROM {tbl} GROUP BY lulc_2"
                ") AS tst_1, ("
                    "SELECT lulc_1 AS lulc_2 FROM {tbl} GROUP BY lulc_1 "
                    "UNION ALL "
                    "SELECT lulc_2 AS lulc_2 FROM {tbl} GROUP BY lulc_2"
                ") AS tst_2 WHERE lulc_1 = lulc_2 GROUP BY lulc_1, lulc_2"
            ") AS jtbl ON foo.lulc_1 = jtbl.tmp1 AND foo.lulc_2 = jtbl.tmp2 "
            "ORDER BY lulc_1, lulc_2"
        "') AS ct("
            "lulc_map text, {crossCols}"
        ")"
    )

    TOTAL_AGREE_TABLE = None
    TOTAL_AREA_TABLE = None
    # First field pivots into the agreement table, second into the area table
    for f in FLDS_TO_PIVOT:
        if not TOTAL_AGREE_TABLE:
            TOTAL_AGREE_TABLE = q_to_ntbl(DB, "agreement_table", Q.format(
                tbl=total_table, valCol=f,
                crossCols=", ".join([
                    "{} numeric".format(map_) for map_ in mapsNames])
            ), api='psql')
        else:
            TOTAL_AREA_TABLE = q_to_ntbl(DB, "area_table", Q.format(
                tbl=total_table, valCol=f,
                crossCols=", ".join([
                    "{} numeric".format(map_) for map_ in mapsNames])
            ), api='psql')

    # Union Mapping - which union shapefile belongs to each pair
    UNION_MAPPING = pandas.DataFrame([
        [k[0], k[1], fprop(UNION_SHAPE[k], 'fn')] for k in UNION_SHAPE],
        columns=['shp_a', 'shp_b', 'union_shp']
    )

    UNION_MAPPING = df_to_db(DB, UNION_MAPPING, 'union_map', api='psql')

    # Export Results
    TABLES = [UNION_MAPPING, TOTAL_AGREE_TABLE, TOTAL_AREA_TABLE] + [
        SYNTH_TBL[x]["MATRIX"] for x in SYNTH_TBL
    ]

    SHEETS = ["union_map", "agreement_percentage", "area_with_data_km"] + [
        "{}_{}".format(
            fprop(x[0], 'fn')[:15], fprop(x[1], 'fn')[:15]
        ) for x in SYNTH_TBL
    ]

    db_to_tbl(
        DB, ["SELECT * FROM {}".format(x) for x in TABLES],
        REPORT, sheetsNames=SHEETS, dbAPI='psql'
    )

    return REPORT
def shps_to_shp(shps, outShp, api="ogr2ogr", fformat='.shp', dbname=None):
    """
    Get all features in several Shapefiles and save them in one file

    shps may be a list of Feature Class paths or a path to a folder
    with Feature Classes (files matching fformat).

    api options:
    * ogr2ogr;
    * pandas;
    * psql;
    * grass;

    Returns the outShp path.
    """

    import os

    if type(shps) != list:
        # Check if is dir
        if os.path.isdir(shps):
            from glass.pys.oss import lst_ff
            # List shps in dir
            shps = lst_ff(shps, file_format=fformat)
        else:
            raise ValueError((
                'shps should be a list with paths for Feature Classes or a path to '
                'folder with Feature Classes'
            ))

    if api == "ogr2ogr":
        from glass.pys    import execmd
        from glass.g.prop import drv_name

        out_drv = drv_name(outShp)

        # Create output and copy some features of one layer (first in shps)
        cmdout = execmd('ogr2ogr -f "{}" {} {}'.format(
            out_drv, outShp, shps[0]
        ))

        # Append remaining layers
        lcmd = [execmd(
            'ogr2ogr -f "{}" -update -append {} {}'.format(
                out_drv, outShp, shps[i]
            )
        ) for i in range(1, len(shps))]

    elif api == 'pandas':
        """
        Merge SHP using pandas
        """
        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp

        if type(shps) != list:
            raise ValueError('shps should be a list with paths for Feature Classes')

        dfs = [shp_to_obj(shp) for shp in shps]

        result = dfs[0]

        for df in dfs[1:]:
            result = result.append(df, ignore_index=True, sort=True)

        df_to_shp(result, outShp)

    elif api == 'psql':
        import os
        from glass.ng.sql.tbl import tbls_to_tbl, del_tables
        from glass.g.it.db    import shp_to_psql

        if not dbname:
            # NOTE(review): creating a database named None looks wrong -
            # presumably a default name should be generated here; kept
            # as the original behaved. TODO confirm intent.
            from glass.ng.sql.db import create_db
            create_db(dbname, api='psql')

        pg_tbls = shp_to_psql(dbname, shps, api="shp2pgsql")

        if os.path.isfile(outShp):
            from glass.pys.oss import fprop
            outbl = fprop(outShp, 'fn')
        else:
            outbl = outShp

        tbls_to_tbl(dbname, pg_tbls, outbl)

        if outbl != outShp:
            from glass.g.it.shp import dbtbl_to_shp
            dbtbl_to_shp(
                dbname, outbl, 'geom', outShp, inDB='psql', api="pgsql2shp")

        del_tables(dbname, pg_tbls)

    elif api == 'grass':
        from glass.g.wenv.grs import run_grass
        from glass.pys.oss    import fprop, lst_ff
        from glass.g.prop.prj import get_shp_epsg

        # NOTE(review): lst_ff is called on shps, which at this point may be
        # a list (not a folder path) - verify lst_ff accepts lists.
        lshps = lst_ff(shps, file_format='.shp')

        epsg = get_shp_epsg(lshps[0])

        gwork = os.path.dirname(outShp)
        outshpname = fprop(outShp, "fn")
        loc = f'loc_{outshpname}'
        # FIX: keyword was loc=loc; run_grass takes location= everywhere
        # else in this module
        gbase = run_grass(gwork, location=loc, srs=epsg)

        import grass.script.setup as gsetup

        gsetup.init(gbase, gwork, loc, 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp

        # Import data
        gshps = [shp_to_grs(s, fprop(s, 'fn'), asCMD=True) for s in lshps]

        # NOTE(review): vpatch (GRASS v.patch wrapper) is not imported in
        # this function - it must exist at module level; confirm.
        patch = vpatch(gshps, outshpname)

        grs_to_shp(patch, outShp, "area")

    else:
        # FIX: original raised the unformatted template string
        raise ValueError("{} API is not available".format(api))

    return outShp
def snap_points_to_near_line(lineShp, pointShp, epsg, workGrass,
                             outPoints, location='overlap_pnts', api='grass',
                             movesShp=None):
    """
    Move points to overlap near line

    API's Available:
    * grass;
    * saga.

    Parameters
    ----------
    lineShp : str - path to the lines layer.
    pointShp : str - path to the points to be snapped.
    epsg : int - SRS code of the inputs.
    workGrass : str - GRASS GIS workspace (grass api only).
    outPoints : str - path of the output (snapped) points.
    location : str - GRASS location name (grass api only).
    api : str - 'grass' or 'saga'.
    movesShp : str - optional output with the movement vectors
        (saga api only).

    Returns the outPoints path.
    """

    if api == 'grass':
        """
        Uses GRASS GIS to find near lines.
        """

        import os
        import numpy
        from geopandas        import GeoDataFrame
        from glass.pys.oss    import fprop
        from glass.g.wenv.grs import run_grass
        from glass.g.rd.shp   import shp_to_obj
        from glass.g.wt.shp   import df_to_shp

        # Create GRASS GIS Location
        grassBase = run_grass(workGrass, location=location, srs=epsg)

        import grass.script as grass
        import grass.script.setup as gsetup

        gsetup.init(grassBase, workGrass, location, 'PERMANENT')

        # Import some GRASS GIS tools
        from glass.g.gp.prox  import grs_near as near
        from glass.g.tbl.attr import geomattr_to_db
        from glass.g.it.shp   import shp_to_grs, grs_to_shp

        # Import data into GRASS GIS
        grsLines = shp_to_grs(lineShp, fprop(lineShp, 'fn', forceLower=True))
        grsPoint = shp_to_grs(pointShp, fprop(pointShp, 'fn', forceLower=True))

        # Get distance from points to near line
        near(grsPoint, grsLines, nearCatCol="tocat", nearDistCol="todistance")

        # Get coord of start/end points of polylines
        geomattr_to_db(grsLines, ['sta_pnt_x', 'sta_pnt_y'], 'start', 'line')
        geomattr_to_db(grsLines, ['end_pnt_x', 'end_pnt_y'], 'end', 'line')

        # Export data from GRASS GIS
        # FIX: in the original, the geometry-type and asMultiPart arguments
        # were accidentally placed inside os.path.join(...), which raised
        # TypeError; the lines layer was also exported as 'point'.
        ogrPoint = grs_to_shp(
            grsPoint, os.path.join(workGrass, grsPoint + '.shp'),
            'point', asMultiPart=True)
        ogrLine = grs_to_shp(
            grsLines, os.path.join(workGrass, grsLines + '.shp'),
            'line', asMultiPart=True)

        # Points to GeoDataFrame
        pntDf = shp_to_obj(ogrPoint)
        # Lines to GeoDataFrame
        lnhDf = shp_to_obj(ogrLine)

        # Erase unecessary fields
        pntDf.drop(["todistance"], axis=1, inplace=True)
        lnhDf.drop([
            c for c in lnhDf.columns.values
            if c != 'geometry' and c != 'cat'
            and c != 'sta_pnt_x' and c != 'sta_pnt_y'
            and c != 'end_pnt_x' and c != 'end_pnt_y'
        ], axis=1, inplace=True)

        # Join Geometries - Table with Point Geometry and Geometry of the
        # nearest line
        resultDf = pntDf.merge(
            lnhDf, how='inner', left_on='tocat', right_on='cat')

        # Move points - project each point onto its nearest line
        resultDf['geometry'] = [
            geoms[0].interpolate(geoms[0].project(geoms[1]))
            for geoms in zip(resultDf.geometry_y, resultDf.geometry_x)
        ]

        resultDf.drop(
            ["geometry_x", "geometry_y", "cat_x", "cat_y"],
            axis=1, inplace=True)

        resultDf = GeoDataFrame(
            resultDf, crs={"init": 'epsg:{}'.format(epsg)},
            geometry="geometry")

        # Check if points are equal to any start/end points
        resultDf["x"] = resultDf.geometry.x
        resultDf["y"] = resultDf.geometry.y

        resultDf["check"] = numpy.where(
            (resultDf["x"] == resultDf["sta_pnt_x"]) &
            (resultDf["y"] == resultDf["sta_pnt_y"]),
            1, 0
        )

        # FIX: the second test previously overwrote the first one's result;
        # keep rows already flagged by the start-point test.
        resultDf["check"] = numpy.where(
            (resultDf["x"] == resultDf["end_pnt_x"]) &
            (resultDf["y"] == resultDf["end_pnt_y"]),
            1, resultDf["check"]
        )

        # To file
        df_to_shp(resultDf, outPoints)

    elif api == 'saga':
        """
        Snap Points to Lines using SAGA GIS
        """

        from glass.pys import execmd

        cmd = (
            "saga_cmd shapes_points 19 -INPUT {pnt} -SNAP {lnh} "
            "-OUTPUT {out}{mv}"
        ).format(
            pnt=pointShp, lnh=lineShp, out=outPoints,
            mv="" if not movesShp else " -MOVES {}".format(movesShp)
        )

        outcmd = execmd(cmd)

    else:
        raise ValueError("{} is not available!".format(api))

    return outPoints
def run_viewshed_by_cpu(tid, db, obs, dem, srs,
                        vis_basename='vis', maxdst=None, obselevation=None):
    # Worker for parallel viewshed production: for each observer point in
    # obs (a GeoDataFrame-like with .iterrows() and .geometry), computes a
    # GRASS viewshed raster, compresses the visible-cell indexes into
    # (row/col) interval pairs and appends them to a PostgreSQL table.
    #
    # NOTE(review): obs_id is a free name (not a parameter) - it must be a
    # module-level constant naming the observer-id column; confirm.
    # NOTE(review): this file defines another run_viewshed_by_cpu with a
    # different signature - the later definition shadows the earlier one.
    # NOTE(review): create_db, df_to_db, mkdir, run_grass, gdal, np, pd,
    # get_minmax_fm_seq_values and del_file are assumed to be imported at
    # module level.

    # Create Database (one per worker, suffixed with the thread/task id)
    new_db = create_db("{}_{}".format(db, str(tid)), api='psql')

    # Points to Database
    pnt_tbl = df_to_db(
        new_db, obs, 'pnt_tbl', api='psql',
        epsg=srs, geomType='Point', colGeom='geometry')

    # Create GRASS GIS Session
    workspace = mkdir(os.path.join(
        os.path.dirname(dem), 'work_{}'.format(str(tid))
    ))
    loc_name = 'vis_loc'
    gbase = run_grass(workspace, location=loc_name, srs=dem)

    # Start GRASS GIS Session
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(gbase, workspace, loc_name, 'PERMANENT')

    from glass.g.it.rst   import rst_to_grs, grs_to_rst
    from glass.g.rst.surf import grs_viewshed
    from glass.g.deldt    import del_rst

    # Send DEM to GRASS GIS
    grs_dem = rst_to_grs(dem, 'grs_dem', as_cmd=True)

    # Produce Viewshed for each point in obs
    for idx, row in obs.iterrows():
        # Get Viewshed raster
        vrst = grs_viewshed(
            grs_dem, (row.geometry.x, row.geometry.y),
            '{}_{}'.format(vis_basename, str(row[obs_id])),
            max_dist=maxdst, obs_elv=obselevation
        )

        # Export Raster to File
        frst = grs_to_rst(vrst, os.path.join(workspace, vrst + '.tif'))

        # Raster to Array
        img = gdal.Open(frst)
        num = img.ReadAsArray()

        # Two Dimension to One Dimension
        # Reshape Array
        numone = num.reshape(num.shape[0] * num.shape[1])

        # Get Indexes with visibility (flat indexes of cells equal to 1)
        visnum = np.arange(numone.shape[0]).astype(np.uint32)
        visnum = visnum[numone == 1]

        # Get Indexes intervals (runs of consecutive visible cells)
        visint = get_minmax_fm_seq_values(visnum)

        # Get rows indexes (integer division via uint32 truncation below)
        _visint = visint.reshape(visint.shape[0] * visint.shape[1])
        visrow = _visint / num.shape[1]
        visrow = visrow.astype(np.uint32)

        # Get cols indexes
        viscol = _visint - (visrow * num.shape[1])

        # Reshape back to (n_intervals, 2)
        visrow = visrow.reshape(visint.shape)
        viscol = viscol.reshape(visint.shape)

        # Split array into interval start/end components
        irow, erow = np.vsplit(visrow.T, 1)[0]
        icol, ecol = np.vsplit(viscol.T, 1)[0]

        # Visibility indexes to Pandas DataFrame
        idxnum = np.full(irow.shape, row[obs_id])

        visdf = pd.DataFrame({
            'pntid': idxnum, 'rowi': irow, 'rowe': erow,
            'coli': icol, 'cole': ecol
        })

        # Pandas DF to database
        # Create Visibility table (first iteration creates, rest append)
        df_to_db(
            new_db, visdf, vis_basename,
            api='psql', colGeom=None,
            append=None if not idx else True
        )

        # Delete all variables (free memory before the next point)
        numone = None
        visnum = None
        visint = None
        _visint = None
        visrow = None
        viscol = None
        irow = None
        erow = None
        icol = None
        ecol = None
        idxnum = None
        visdf = None
        del img

        # Delete GRASS GIS File
        del_rst(vrst)

        # Delete TIFF File
        del_file(frst)
        frst = None
def optimized_union_anls(lyr_a, lyr_b, outShp, ref_boundary,
                         workspace=None, multiProcess=None):
    """
    Optimized Union Analysis

    Goal: optimize v.overlay performance for Union operations

    Splits ref_boundary into a fishnet sized by the CPU count, clips both
    layers to each cell, unions cell by cell (sequentially in one GRASS
    location, or in one GRASS location per process when multiProcess is
    truthy) and merges the partial unions into outShp.

    NOTE(review): grsclip and grsunion are free names here - they are
    assumed to be imported at module level; confirm.

    Returns the merged shapefile path.
    """

    import os
    from glass.pys.oss    import fprop, lst_ff
    from glass.pys.oss    import cpu_cores
    from glass.g.smp      import create_fishnet
    from glass.g.wenv.grs import run_grass
    from glass.g.dp.split import eachfeat_to_newshp
    from glass.g.dp.mge   import shps_to_shp
    from glass.g.wt.rst   import shpext_to_rst
    from glass.g.prop.ext import get_ext

    if workspace:
        if not os.path.exists(workspace):
            from glass.pys.oss import mkdir
            mkdir(workspace, overwrite=True)
    else:
        from glass.pys.oss import mkdir
        workspace = mkdir(os.path.join(os.path.dirname(outShp), "union_work"))

    # Create Fishnet - grid dimension chosen from the CPU count
    ncpu = cpu_cores()
    if ncpu == 12:
        nrow = 4
        ncol = 3
    elif ncpu == 8:
        nrow = 4
        ncol = 2
    else:
        nrow = 2
        ncol = 2

    ext = get_ext(ref_boundary)
    width = (ext[1] - ext[0]) / ncol
    height = (ext[3] - ext[2]) / nrow

    gridShp = create_fishnet(
        ref_boundary, os.path.join(workspace, 'ref_grid.shp'),
        width, height, xy_row_col=None)

    # Split Fishnet in several files (one shapefile per cell)
    cellsShp = eachfeat_to_newshp(gridShp, workspace)

    if not multiProcess:
        # INIT GRASS GIS Session
        grsbase = run_grass(workspace, location="grs_loc", srs=ref_boundary)
        import grass.script.setup as gsetup
        gsetup.init(grsbase, workspace, "grs_loc", 'PERMANENT')

        # Add data to GRASS GIS
        from glass.g.it.shp import shp_to_grs
        cellsShp = [
            shp_to_grs(shp, fprop(shp, 'fn'), asCMD=True) for shp in cellsShp
        ]

        LYR_A = shp_to_grs(lyr_a, fprop(lyr_a, 'fn'), asCMD=True)
        LYR_B = shp_to_grs(lyr_b, fprop(lyr_b, 'fn'), asCMD=True)

        # Clip Layers A and B for each CELL in fishnet
        LYRS_A = [
            grsclip(LYR_A, cellsShp[x], LYR_A + "_" + str(x), cmd=True)
            for x in range(len(cellsShp))
        ]
        LYRS_B = [
            grsclip(LYR_B, cellsShp[x], LYR_B + "_" + str(x), cmd=True)
            for x in range(len(cellsShp))
        ]

        # Union SHPS
        UNION_SHP = [
            grsunion(LYRS_A[i], LYRS_B[i], f"un_{str(i)}", cmd=True)
            for i in range(len(cellsShp))
        ]

        # Export Data
        from glass.g.it.shp import grs_to_shp
        _UNION_SHP = [
            grs_to_shp(shp, os.path.join(workspace, shp + ".shp"), "area")
            for shp in UNION_SHP
        ]
    else:
        def clip_and_union(la, lb, cell, work, proc, output):
            # Per-process worker: its own GRASS location, region set from
            # the cell raster; empty clips are skipped (no output written).
            ref_rst = shpext_to_rst(cell, os.path.join(
                os.path.dirname(cell), fprop(cell, 'fn') + '.tif'
            ), cellsize=10)

            # Start GRASS GIS Session
            loc = "proc_" + str(proc)
            grsbase = run_grass(work, location=loc, srs=ref_rst)
            import grass.script.setup as gsetup
            gsetup.init(grsbase, work, loc, 'PERMANENT')

            # Import GRASS GIS modules
            from glass.g.it.shp    import shp_to_grs, grs_to_shp
            from glass.g.prop.feat import feat_count

            # Add data to GRASS (only features inside the region)
            a = shp_to_grs(la, fprop(la, 'fn'), filterByReg=True, asCMD=True)
            b = shp_to_grs(lb, fprop(lb, 'fn'), filterByReg=True, asCMD=True)

            if not feat_count(a, gisApi="grass", work=work, loc=loc):
                return
            if not feat_count(b, gisApi="grass", work=work, loc=loc):
                return

            # Clip
            a_clip = grsclip(
                a, None, "{}_clip".format(a), cmd=True, clip_by_region=True)
            b_clip = grsclip(
                b, None, "{}_clip".format(b), cmd=True, clip_by_region=True)

            # Union
            u_shp = grsunion(
                a_clip, b_clip, f"un_{fprop(cell, 'fn')}", cmd=True)

            # Export
            o = grs_to_shp(u_shp, output, "area")

        import multiprocessing

        thrds = [multiprocessing.Process(
            target=clip_and_union, name="th-{}".format(i),
            args=(lyr_a, lyr_b, cellsShp[i],
                  os.path.join(workspace, "th_{}".format(i)), i,
                  os.path.join(workspace, "uniao_{}.shp".format(i)))
        ) for i in range(len(cellsShp))]

        for t in thrds:
            t.start()
        for t in thrds:
            t.join()

        # Collect the partial outputs that were actually produced
        # (cells with no features in either layer are skipped)
        ff_shp = lst_ff(workspace, file_format='.shp')
        _UNION_SHP = []
        for i in range(len(cellsShp)):
            p = os.path.join(workspace, "uniao_{}.shp".format(i))

            if p in ff_shp:
                _UNION_SHP.append(p)
            else:
                continue

    # Merge all union into the same layer
    MERGED_SHP = shps_to_shp(_UNION_SHP, outShp, api="ogr2ogr")

    return MERGED_SHP
def make_dem(grass_workspace, data, field, output, extent_template,
             method="IDW", cell_size=None, mask=None):
    """
    Create Digital Elevation Model from an elevation vector layer.

    grass_workspace - folder where the GRASS location is created;
    data            - vector file with elevation features;
    field           - attribute column holding elevation values;
    output          - path for the resulting DEM raster;
    extent_template - raster/file defining the EPSG and region extent;
    method          - interpolator: "IDW" (default), "BSPLINE", "SPLINE"
                      or "CONTOUR";
    cell_size       - output resolution forwarded to the extent raster;
    mask            - optional raster mask (used only by "CONTOUR").

    Returns `output`.

    Raises ValueError when the EPSG of extent_template cannot be read.

    NOTE(review): `os` and `ob_ref_rst` are used but not imported in this
    function - presumably available at module level; confirm at file top.
    """

    from glass.pys.oss import fprop
    from glass.g.wenv.grs import run_grass
    from glass.g.prop.prj import get_epsg

    # Location name derived from the data filename (truncated to 5 chars)
    LOC_NAME = fprop(data, 'fn', forceLower=True)[:5] + "_loc"

    # Get EPSG From Raster
    EPSG = get_epsg(extent_template)

    if not EPSG:
        raise ValueError(
            'Cannot get EPSG code of Extent Template File ({})'.format(
                extent_template
            )
        )

    # Know if data geometry are points - spline methods need point input,
    # so the geometry type decides whether vertices must be extracted
    if method == 'BSPLINE' or method == 'SPLINE':
        from glass.g.prop.feat import get_gtype

        data_gtype = get_gtype(data, gisApi='ogr')

    # Create GRASS GIS Location
    grass_base = run_grass(grass_workspace, location=LOC_NAME, srs=EPSG)

    # Start GRASS GIS Session
    import grass.script.setup as gsetup
    gsetup.init(grass_base, grass_workspace, LOC_NAME, 'PERMANENT')

    # Get Extent Raster (written inside the location folder)
    ref_template = ob_ref_rst(extent_template, os.path.join(
        grass_workspace, LOC_NAME
    ), cellsize=cell_size)

    # IMPORT GRASS GIS MODULES #
    from glass.g.it.rst import rst_to_grs, grs_to_rst
    from glass.g.it.shp import shp_to_grs
    from glass.g.wenv.grs import rst_to_region

    # Configure region to match the extent raster
    rst_to_grs(ref_template, 'extent')
    rst_to_region('extent')

    # Convert elevation "data" to GRASS Vector
    elv = shp_to_grs(data, 'elevation')

    OUTPUT_NAME = fprop(output, 'fn', forceLower=True)

    # NOTE(review): outRst is assigned in every branch but never used -
    # export below relies on OUTPUT_NAME instead.
    if method == "BSPLINE":
        from glass.g.rst.itp import bspline

        # Convert to points if necessary
        if data_gtype != 'POINT' and data_gtype != 'MULTIPOINT':
            from glass.g.dp.cg import feat_vertex_to_pnt

            elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        else:
            elev_pnt = elv

        outRst = bspline(elev_pnt, field, OUTPUT_NAME, mway='bicubic',
                         lyrN=1, asCMD=True)

    elif method == "SPLINE":
        from glass.g.rst.itp import surfrst

        # Convert to points if necessary
        if data_gtype != 'POINT' and data_gtype != 'MULTIPOINT':
            from glass.g.dp.cg import feat_vertex_to_pnt
            elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        else:
            elev_pnt = elv

        outRst = surfrst(elev_pnt, field, OUTPUT_NAME, lyrN=1, ascmd=True)

    elif method == "CONTOUR":
        from glass.g.dp.torst import grsshp_to_grsrst as shp_to_rst
        from glass.g.rst.itp import surfcontour

        # Apply mask if mask
        if mask:
            from glass.g.it.rst import grs_to_mask, rst_to_grs

            rst_mask = rst_to_grs(mask, 'rst_mask', as_cmd=True)
            grs_to_mask(rst_mask)

        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_rst(elv, field, 'rst_elevation')

        # Run Interpolator
        outRst = surfcontour(elevRst, OUTPUT_NAME, ascmd=True)

    elif method == "IDW":
        from glass.g.rst.itp import ridw
        from glass.g.rst.alg import rstcalc
        from glass.g.dp.torst import grsshp_to_grsrst as shp_to_rst

        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_rst(elv, field, 'rst_elevation')

        # Multiply cells values by 100 000.0 - works on integer elevations
        # to keep precision through the integer IDW step
        rstcalc('int(rst_elevation * 100000)', 'rst_elev_int', api='pygrass')

        # Run IDW to generate the new DEM
        ridw('rst_elev_int', 'dem_int', numberPoints=15)

        # DEM back to Float (undo the scaling)
        rstcalc('dem_int / 100000.0', OUTPUT_NAME, api='pygrass')

    # Export DEM to a file outside GRASS Workspace
    grs_to_rst(OUTPUT_NAME, output)

    return output
def distance_between_catpoints(srcShp, facilitiesShp, networkShp, speedLimitCol,
                               onewayCol, grsWorkspace, grsLocation, outputShp):
    """
    Compute network paths between source points and facility points.

    Merges srcShp and facilitiesShp into one point layer, builds a GRASS
    network from networkShp with per-arc travel-time costs (forward and
    backward minutes derived from length and the speed-limit column,
    one-way arcs penalized), and writes the resulting path lines
    to outputShp.

    srcShp, facilitiesShp - point Shapefiles (origins / destinations);
    networkShp            - line Shapefile with the road network;
    speedLimitCol         - column with speed limit (km/h);
    onewayCol             - column flagging one-way arcs ('FT'/'TF');
    grsWorkspace,
    grsLocation           - GRASS workspace folder and location name;
    outputShp             - path for the output line Shapefile.

    Returns the path of the exported Shapefile.

    TODO: Work with files with cat
    """

    import os
    from glass.pys.oss import fprop
    from glass.g.wenv.grs import run_grass
    from glass.g.dp.mge import shps_to_shp

    # Merge Source points and Facilities into the same Feature Class
    # (removed previously-unused feature counts of the two inputs)
    POINTS = shps_to_shp([srcShp, facilitiesShp], os.path.join(
        os.path.dirname(outputShp), "points_net.shp"), api='pandas')

    # Open an GRASS GIS Session
    gbase = run_grass(
        grsWorkspace, grassBIN="grass76", location=grsLocation,
        srs=networkShp
    )

    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(gbase, grsWorkspace, grsLocation, 'PERMANENT')

    # Import GRASS GIS Module
    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.tbl.attr import geomattr_to_db
    from glass.g.cp import copy_insame_vector
    from glass.g.tbl import category
    from glass.g.tbl.grs import add_table, update_table
    from glass.g.mob.grstbx.vnet import add_pnts_to_network
    from glass.g.mob.grstbx.vnet import netpath

    # Add Data to GRASS GIS
    rdvMain = shp_to_grs(networkShp, fprop(
        networkShp, 'fn', forceLower=True))
    pntShp = shp_to_grs(POINTS, "points_net")

    # Get closest facility layer:
    # Connect Points to Network
    newNetwork = add_pnts_to_network(rdvMain, pntShp, "rdv_points")

    # Sanitize Network Table and Cost Columns
    newNetwork = category(
        newNetwork, "rdv_points_time", "add",
        LyrN="3", geomType="line"
    )

    add_table(newNetwork, (
        "cat integer,kph double precision,length double precision,"
        "ft_minutes double precision,"
        "tf_minutes double precision,oneway text"
    ), lyrN=3)

    copy_insame_vector(newNetwork, "kph", speedLimitCol, 3, geomType="line")
    copy_insame_vector(newNetwork, "oneway", onewayCol, 3, geomType="line")

    # Arc length in meters feeds the travel-time formulas below
    geomattr_to_db(
        newNetwork, "length", "length", "line",
        createCol=False, unit="meters", lyrN=3
    )

    # Default walking-like speed where no limit is set; then minutes per
    # arc in both travel directions
    update_table(newNetwork, "kph", "3.6", "kph IS NULL", lyrN=3)
    update_table(
        newNetwork, "ft_minutes",
        "(length * 60) / (kph * 1000.0)",
        "ft_minutes IS NULL", lyrN=3
    )
    update_table(
        newNetwork, "tf_minutes",
        "(length * 60) / (kph * 1000.0)",
        "tf_minutes IS NULL", lyrN=3
    )

    # Exagerate Oneway's - huge cost blocks travel against the allowed
    # direction
    update_table(newNetwork, "ft_minutes", "1000", "oneway = 'TF'", lyrN=3)
    update_table(newNetwork, "tf_minutes", "1000", "oneway = 'FT'", lyrN=3)

    # Produce result
    result = netpath(
        newNetwork, "", "ft_minutes", "tf_minutes",
        fprop(outputShp, 'fn'), arcLyr=3, nodeLyr=2
    )

    return grs_to_shp(result, outputShp, geomType="line", lyrN=3)
def shp_diff_fm_ref(refshp, refcol, shps, out_folder, refrst, db=None):
    """
    Check differences between each shp in shps and one reference shape.

    refshp     - reference layer (Shapefile or Raster);
    refcol     - interest column of the reference layer;
    shps       - dict mapping each layer path to its interest column;
    out_folder - folder for all intermediate and final outputs;
    refrst     - raster defining the GRASS location SRS;
    db         - optional database reference forwarded to tbl_to_areamtx.

    For every layer: rasters are polygonized, interest columns are renamed
    to lulc_<i>, each layer is unioned with the reference via
    optimized_union_anls, and a confusion matrix (.xlsx) is produced.

    Returns out_folder.

    Dependencies:
    - GRASS;
    - PostgreSQL with Postgis or GeoPandas;
    """

    import os
    from glass.g.prop import check_isRaster
    from glass.g.wenv.grs import run_grass
    from glass.pys.oss import fprop
    from glass.g.tbl.tomtx import tbl_to_areamtx

    # Check if folder exists, if not create it
    if not os.path.exists(out_folder):
        from glass.pys.oss import mkdir
        mkdir(out_folder)

    # Start GRASS GIS Session
    gbase = run_grass(
        out_folder, grassBIN='grass78', location='shpdif', srs=refrst)

    import grass.script.setup as gsetup
    gsetup.init(gbase, out_folder, 'shpdif', 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.it.rst import rst_to_grs
    from glass.g.tbl.col import rn_cols
    from glass.g.dp.rst.toshp import rst_to_polyg

    # Convert to SHAPE if file is Raster
    # Rename interest columns to lulc_<i> (i == 0 is the reference layer)
    i = 0
    lstff = [refshp] + list(shps.keys())
    __shps = {}
    for s in lstff:
        is_rst = check_isRaster(s)

        if is_rst:
            # To GRASS
            rname = fprop(s, 'fn')
            inrst = rst_to_grs(s, "rst_" + rname, as_cmd=True)

            # To vector
            d = rst_to_polyg(
                inrst, rname,
                rstColumn="lulc_{}".format(str(i)), gisApi="grass"
            )
        else:
            # To GRASS
            d = shp_to_grs(s, fprop(s, 'fn'), asCMD=True)

            # Change name of interest colum
            rn_cols(
                d, {shps[s] if i else refcol: "lulc_{}".format(str(i))},
                api="grass"
            )

        # Export To Shapefile
        # NOTE: the first iteration rebinds the refshp/refcol parameters
        # to the exported reference layer and its renamed column
        if not i:
            refshp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'), 'area')
            refcol = "lulc_{}".format(str(i))
        else:
            shp = grs_to_shp(d, os.path.join(out_folder, d + '.shp'), 'area')
            __shps[shp] = "lulc_{}".format(str(i))

        i += 1

    # Union Shapefiles - each compared layer against the reference
    union_shape = {}
    for shp in __shps:
        # Optimized Union
        sname = fprop(shp, 'fn')
        union_shape[shp] = optimized_union_anls(
            shp, refshp,
            os.path.join(out_folder, sname + '_un.shp'),
            refrst,
            os.path.join(out_folder, "wk_" + sname),
            multiProcess=True
        )

        # Produce confusion matrices
        # (columns in the union carry a_/b_ prefixes for layer vs reference)
        mtxf = tbl_to_areamtx(
            union_shape[shp], "a_" + __shps[shp], 'b_' + refcol,
            os.path.join(out_folder, sname + '.xlsx'),
            db=db, with_metrics=True
        )

    return out_folder
def v_break_at_points(workspace, loc, lineShp, pntShp, db, srs,
                      out_correct, out_tocorrect):
    """
    Break lines at points - Based on GRASS GIS v.edit

    Breaks lineShp at the locations in pntShp inside a GRASS session,
    then uses PostGIS (database `db`, created from scratch) to sanitize
    the result: rows judged correct go to out_correct, ambiguous rows
    that the user must fix manually go to out_tocorrect.

    workspace, loc - GRASS workspace folder and location name;
    srs            - EPSG code for the GRASS location and PostGIS import.

    Returns None; output is written to the two Shapefiles.

    TODO: Confirm utility
    Problem: GRASS GIS always uses the first line to break.

    NOTE(review): vedit_break is called but not imported in this
    function - presumably a module-level import; confirm at file top.
    """

    import os
    from glass.g.it.db import shp_to_psql
    from glass.g.it.shp import dbtbl_to_shp
    from glass.g.wenv.grs import run_grass
    from glass.pys.oss import fprop
    from glass.ng.sql.db import create_db
    from glass.ng.sql.q import q_to_ntbl

    # Intermediate files live inside the GRASS location folder
    tmpFiles = os.path.join(workspace, loc)

    gbase = run_grass(workspace, location=loc, srs=srs)

    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(gbase, workspace, loc, 'PERMANENT')

    from glass.g.it.shp import shp_to_grs, grs_to_shp

    grsLine = shp_to_grs(lineShp, fprop(lineShp, 'fn', forceLower=True))

    # Break lines at the given points (v.edit based)
    vedit_break(grsLine, pntShp, geomType='line')

    LINES = grs_to_shp(
        grsLine, os.path.join(tmpFiles, grsLine + '_v1.shp'), 'line')

    # Sanitize output of v.edit.break using PostGIS
    create_db(db, overwrite=True, api='psql')
    LINES_TABLE = shp_to_psql(
        db, LINES, srsEpsgCode=srs,
        pgTable=fprop(LINES, 'fn', forceLower=True), api="shp2pgsql"
    )

    # Delete old/original lines and stay only with the breaked one:
    # keep cats with 1 or 2 rows, and for cats with 3 rows drop the
    # longest row (the unbroken original)
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
        "SELECT cat, COUNT(cat) AS cat_count, "
        "MAX(ST_Length(geom)) AS max_len "
        "FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count = 1 OR foo.cat_count = 2 OR ("
        "foo.cat_count = 3 AND ST_Length({t}.geom) <= foo.max_len)"
    ).format(t=LINES_TABLE)

    CORR_LINES = q_to_ntbl(
        db, "{}_corrected".format(LINES_TABLE), Q, api='psql')

    # TODO: Delete Rows that have exactly the same geometry

    # Highlight problems that the user must solve case by case
    # (cats with more than 3 rows cannot be resolved automatically)
    Q = (
        "SELECT {t}.*, foo.cat_count FROM {t} INNER JOIN ("
        "SELECT cat, COUNT(cat) AS cat_count FROM {t} GROUP BY cat"
        ") AS foo ON {t}.cat = foo.cat "
        "WHERE foo.cat_count > 3"
    ).format(t=LINES_TABLE)

    ERROR_LINES = q_to_ntbl(
        db, "{}_not_corr".format(LINES_TABLE), Q, api='psql')

    dbtbl_to_shp(db, CORR_LINES, "geom", out_correct, api="pgsql2shp")
    dbtbl_to_shp(db, ERROR_LINES, "geom", out_tocorrect, api="pgsql2shp")
def join_attr_by_distance(mainTable, joinTable, workGrass, epsg_code, output):
    """
    Join to each feature of mainTable the attributes of the nearest
    feature of joinTable.

    A GRASS GIS session (location 'join_loc' inside workGrass) records,
    for every main feature, the category of and distance to its nearest
    join feature; the attribute join itself is done with pandas and the
    result is written to `output`.

    Returns `output`.
    """

    import os

    from glass.g.wenv.grs import run_grass
    from glass.g.rd.shp import shp_to_obj
    from glass.g.it.pd import df_to_geodf
    from glass.g.wt.shp import df_to_shp
    from glass.pys.oss import fprop

    # Spin up the GRASS GIS location
    gb = run_grass(workGrass, location='join_loc', srs=epsg_code)

    import grass.script as grass
    import grass.script.setup as gsetup

    gsetup.init(gb, workGrass, 'join_loc', 'PERMANENT')

    # GRASS tools used below
    from glass.g.gp.prox import grs_near as near
    from glass.g.it.shp import shp_to_grs, grs_to_shp

    # Load both layers into the session
    lyr_main = shp_to_grs(mainTable, fprop(mainTable, 'fn', forceLower=True))
    lyr_join = shp_to_grs(joinTable, fprop(joinTable, 'fn', forceLower=True))

    # For every main feature, store nearest join cat and its distance
    near(lyr_main, lyr_join, nearCatCol="tocat", nearDistCol="todistance")

    # Export both layers back to Shapefiles so pandas can read them
    loc_folder = os.path.join(workGrass, 'join_loc')
    exp_main = grs_to_shp(
        lyr_main, os.path.join(loc_folder, lyr_main + '_grs.shp'),
        None, asMultiPart=True
    )
    exp_join = grs_to_shp(
        lyr_join, os.path.join(loc_folder, lyr_join + '_grs.shp'),
        None, asMultiPart=True
    )

    main_df = shp_to_obj(exp_main)
    join_df = shp_to_obj(exp_join)

    # Attribute join: nearest-cat of main against cat of join
    res = main_df.merge(join_df, how='inner',
                        left_on='tocat', right_on='cat')

    # Drop join-side geometry/cat; keep main's cat under a clearer name
    res = res.drop(["geometry_y", "cat_y"], axis=1)
    res = res.rename(columns={"cat_x": "cat_grass"})

    # Shift ids down by one (presumably aligning GRASS 1-based cats with
    # 0-based indices - confirm against callers)
    res["tocat"] = res["tocat"] - 1
    res["cat_grass"] = res["cat_grass"] - 1

    # Rebuild a GeoDataFrame on main's geometry and write the result
    res = df_to_geodf(res, "geometry_x", epsg_code)

    df_to_shp(res, output)

    return output
def intersection(inShp, intersectShp, outShp, api='geopandas'):
    """
    Intersection between ESRI Shapefiles.

    inShp        - input Shapefile;
    intersectShp - Shapefile to intersect with;
    outShp       - path for the intersection result;
    api          - backend to use.

    API's Available:
    * geopandas;
    * saga;
    * pygrass;
    * grass;

    Returns outShp.

    Raises ValueError when `api` is not one of the supported backends.
    """

    if api == 'geopandas':
        import geopandas

        from glass.g.rd.shp import shp_to_obj
        from glass.g.wt.shp import df_to_shp

        dfShp = shp_to_obj(inShp)
        dfIntersect = shp_to_obj(intersectShp)

        res_interse = geopandas.overlay(
            dfShp, dfIntersect, how='intersection')

        df_to_shp(res_interse, outShp)

    elif api == 'saga':
        from glass.pys import execmd

        cmdout = execmd((
            "saga_cmd shapes_polygons 14 -A {} -B {} -RESULT {} -SPLIT 1"
        ).format(inShp, intersectShp, outShp))

    elif api == 'pygrass' or api == 'grass':
        import os

        from glass.g.wenv.grs import run_grass
        from glass.pys.oss import fprop
        from glass.g.prop.prj import get_epsg
        # FIX: import grsintersection explicitly (consistent with other
        # functions in this module that import it function-locally)
        from glass.g.gp.ovl import grsintersection

        epsg = get_epsg(inShp)

        w = os.path.dirname(outShp)
        refname = fprop(outShp, 'fn')
        loc = f"loc_{refname}"

        grsbase = run_grass(w, location=loc, srs=epsg)

        import grass.script.setup as gsetup

        gsetup.init(grsbase, w, loc, 'PERMANENT')

        from glass.g.it.shp import shp_to_grs, grs_to_shp

        shpa = shp_to_grs(inShp, fprop(inShp, 'fn'))
        shpb = shp_to_grs(intersectShp, fprop(intersectShp, 'fn'))

        # Intersection - the 'grass' api runs the command-line variant
        intshp = grsintersection(
            shpa, shpb, refname,
            True if api == 'grass' else None
        )

        # Export
        r = grs_to_shp(intshp, outShp, 'area')

    else:
        raise ValueError("{} is not available!".format(api))

    return outShp
def lulc_by_cell(tid, boundary, lulc_shps, fishnet, result, workspace):
    """
    Intersect each LULC class layer with a fishnet, per boundary cell.

    tid       - worker/thread id (currently unused in this body);
    boundary  - boundary Shapefile; rasterized to define the GRASS SRS;
    lulc_shps - list of LULC class Shapefiles;
    fishnet   - fishnet Shapefile to intersect each class with;
    result    - folder receiving the exported intersection Shapefiles;
    workspace - folder for the GRASS location and intermediates.

    Also intersects every pair of LULC layers to find overlap areas,
    dissolves them and intersects the dissolved overlaps with the
    fishnet, appending that result too.

    Returns the list of exported Shapefile paths.

    NOTE(review): `fprop` and `os` are used but not imported in this
    function - presumably module-level imports; confirm at file top.
    """

    from glass.g.wenv.grs import run_grass
    from glass.g.dp.torst import shp_to_rst

    bname = fprop(boundary, 'fn')

    # Boundary to Raster (10m cells, nodata 0) - used as SRS reference
    ref_rst = shp_to_rst(
        boundary, None, 10, 0,
        os.path.join(workspace, f'rst_{bname}.tif')
    )

    # Create GRASS GIS Session
    loc_name = 'loc_' + bname
    gbase = run_grass(workspace, location=loc_name, srs=ref_rst)

    import grass.script.setup as gsetup
    gsetup.init(gbase, workspace, loc_name, 'PERMANENT')

    # GRASS GIS Modules
    from glass.g.it.shp import shp_to_grs, grs_to_shp
    from glass.g.gp.ovl import grsintersection
    from glass.g.tbl.attr import geomattr_to_db
    from glass.g.prop.feat import feat_count

    # Send Fishnet to GRASS GIS
    fnet = shp_to_grs(fishnet, fprop(fishnet, 'fn'), asCMD=True)

    # Processing
    ulst = []
    l_lulc_grs = []
    for shp in lulc_shps:
        iname = fprop(shp, 'fn')

        # LULC Class to GRASS GIS (restricted to the current region)
        lulc_grs = shp_to_grs(shp, iname, filterByReg=True, asCMD=True)

        # Skip classes with no features inside the boundary
        if not feat_count(
                lulc_grs, gisApi='grass', work=workspace, loc=loc_name):
            continue

        # Intersect Fishnet | LULC CLass
        union_grs = grsintersection(fnet, lulc_grs, iname + '_i', cmd=True)

        # Get Areas
        geomattr_to_db(union_grs, "areav", "area", "boundary", unit='meters')

        # Export Table
        funion = grs_to_shp(
            union_grs, os.path.join(result, iname + '.shp'), 'area')

        ulst.append(funion)
        l_lulc_grs.append(lulc_grs)

    # Intersect between all LULC SHPS - pairwise, to isolate areas where
    # two classes overlap
    ist_shp = []
    if len(l_lulc_grs) > 1:
        for i in range(len(l_lulc_grs)):
            for e in range(i + 1, len(l_lulc_grs)):
                ishp = grsintersection(
                    l_lulc_grs[i], l_lulc_grs[e],
                    'lulcint_' + str(i) + '_' + str(e),
                    cmd=True
                )

                # Keep only non-empty pairwise overlaps
                if not feat_count(
                        ishp, gisApi='grass', work=workspace, loc=loc_name):
                    continue
                else:
                    ist_shp.append(ishp)

        if len(ist_shp):
            from glass.g.gp.gen import dissolve
            from glass.g.tbl.grs import reset_table

            if len(ist_shp) > 1:
                from glass.g.dp.mge import shps_to_shp

                # Export shapes
                _ist_shp = [
                    grs_to_shp(
                        s, os.path.join(workspace, loc_name, s + '.shp'),
                        'area')
                    for s in ist_shp
                ]

                # Merge Intersections
                merge_shp = shps_to_shp(
                    _ist_shp,
                    os.path.join(workspace, loc_name, 'merge_shp.shp'),
                    api='pandas'
                )

                # Import GRASS
                merge_shp = shp_to_grs(merge_shp, 'merge_shp')
            else:
                merge_shp = ist_shp[0]

            # Dissolve Shape - constant refid column collapses all
            # overlap polygons into one layer
            reset_table(merge_shp, {'refid': 'varchar(2)'}, {'refid': '1'})
            overlay_areas = dissolve(
                merge_shp, 'overlay_areas', 'refid', api='grass')

            # Union Fishnet | Overlay's
            union_ovl = grsintersection(
                fnet, overlay_areas, 'ovl_union', cmd=True)

            funion_ovl = grs_to_shp(
                union_ovl, os.path.join(result, union_ovl + '.shp'), 'area')

            ulst.append(funion_ovl)

    # Export Tables
    return ulst