def FilterAndExport(CLS, cnt):
    """
    Export the features of one LULC class (CLS) to a Shapefile and
    convert that Shapefile to a raster.

    Closure: relies on names from the enclosing scope (api, osmcon, SQL_Q,
    polyTbl, folder, cellsize, srscode, rstTemplate, clsRst, timeGasto) --
    TODO confirm against the enclosing function. ``cnt`` offsets the keys
    used to record elapsed times in ``timeGasto``.
    """
    time_x = datetime.datetime.now().replace(microsecond=0)
    # Export the class features; backend is chosen by the outer-scope
    # ``api`` flag (SpatiaLite via OGR vs PostGIS via pgsql2shp).
    if api == 'SQLITE':
        shp = sel_by_attr(
            osmcon, SQL_Q.format(lc=str(CLS), tbl=polyTbl),
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api_gis='ogr'
        )
    else:
        shp = sel_by_attr(
            osmcon, SQL_Q.format(lc=str(CLS), tbl=polyTbl),
            os.path.join(folder, 'sel_{}.shp'.format(str(CLS))),
            api='pgsql2shp', geom_col="geometry", tableIsQuery=True
        )
    time_y = datetime.datetime.now().replace(microsecond=0)
    # Shapefile to raster (GDAL), aligned to the outer-scope template raster
    rstCls = shp_to_raster(
        shp, None, cellsize, 0,
        os.path.join(folder, 'sel_{}.tif'.format(str(CLS))),
        epsg=srscode, rst_template=rstTemplate, api='gdal'
    )
    time_z = datetime.datetime.now().replace(microsecond=0)
    # Register the class raster and the duration of each step
    clsRst[int(CLS)] = rstCls
    timeGasto[cnt + 1] = ('toshp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('torst_{}'.format(str(CLS)), time_z - time_y)
def selAndExport(CLS, cnt):
    """
    Export the features of one LULC class (CLS) that satisfy the
    area-threshold WHERE clause to a Shapefile, then rasterize it.

    Closure: expects api, osmLink, SQL_Q, WHR, OPERATOR, RULE_COL,
    GEOM_AREA, polyTbl, folder, cellsize, srscode, rstTemplate, clsRst
    and timeGasto from the enclosing scope -- TODO confirm. ``cnt``
    offsets the timing keys written to ``timeGasto``.
    """
    time_x = datetime.datetime.now().replace(microsecond=0)
    # Select into a Shapefile; backend chosen by the outer-scope ``api`` flag
    if api == "SQLITE":
        shpCls = sel_by_attr(
            osmLink,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS
            )),
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, CLS)),
            api_gis='ogr'
        )
    else:
        shpCls = sel_by_attr(
            osmLink,
            SQL_Q.format(c=str(CLS), tbl=polyTbl, w=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=CLS
            )),
            os.path.join(folder, "{}_{}.shp".format(RULE_COL, str(CLS))),
            api='pgsql2shp', geom_col="geometry", tableIsQuery=True
        )
    time_y = datetime.datetime.now().replace(microsecond=0)
    # Shapefile to raster (GDAL) using the outer-scope template
    rst = shp_to_raster(
        shpCls, None, cellsize, 0,
        os.path.join(folder, "{}_{}.tif".format(RULE_COL, CLS)),
        epsg=srscode, rst_template=rstTemplate, api='gdal'
    )
    time_z = datetime.datetime.now().replace(microsecond=0)
    # Record the result raster and per-step durations
    clsRst[int(CLS)] = rst
    timeGasto[cnt + 1] = ('sq_to_shp_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('shp_to_rst_{}'.format(str(CLS)), time_z - time_y)
def exportAndBufferB(CLS, cnt):
    """
    Buffer the line features of one basic_buffer class (CLS) and convert
    the buffered Shapefile to a raster.

    Closure: uses osmLink, lineTbl, folder, cells, srscode, rtemplate,
    st_buffer, shp_to_raster, clsRst and timeGasto from the enclosing
    scope -- TODO confirm. ``cnt`` offsets the timing keys.
    """
    # Run BUFFER Tool
    time_x = datetime.datetime.now().replace(microsecond=0)
    bb_file = st_buffer(
        osmLink, lineTbl, "bf_basic_buffer", "geometry",
        os.path.join(folder, 'bb_rule5_{}.shp'.format(str(int(CLS)))),
        whrClause="basic_buffer={}".format(str(int(CLS))),
        outTblIsFile=True, dissolve=None,
        cols_select="basic_buffer"
    )
    time_y = datetime.datetime.now().replace(microsecond=0)
    # To raster
    rstCls = shp_to_raster(
        bb_file, None, cells, 0,
        os.path.join(folder, 'rst_bbfr_{}.tif'.format(CLS)),
        epsg=srscode, rst_template=rtemplate, api='gdal'
    )
    time_z = datetime.datetime.now().replace(microsecond=0)
    # Register the raster and the duration of each step
    clsRst[CLS] = rstCls
    timeGasto[cnt + 1] = ('buffer_{}'.format(str(CLS)), time_y - time_x)
    timeGasto[cnt + 2] = ('torst_{}'.format(str(CLS)), time_z - time_y)
def exportAndBuffer():
    """
    Buffer the road features and rasterize the buffered geometries.

    Uses names from the enclosing scope (osmdata, lineTbl, folder,
    cellsize, srs, rstTemplate, bfShps, timeGasto); appends the produced
    raster to ``bfShps`` and records per-step durations in ``timeGasto``.
    """
    stamp_begin = datetime.datetime.now().replace(microsecond=0)

    # Buffer every line feature whose roads attribute is filled in
    buffered_roads = splite_buffer(
        osmdata, lineTbl, "bf_roads", "geometry",
        os.path.join(folder, 'bf_roads.gml'),
        whrClause="roads IS NOT NULL",
        outTblIsFile=True, dissolve=None
    )
    stamp_buffer = datetime.datetime.now().replace(microsecond=0)

    # Rasterize the buffered roads with GDAL (nodata = -1)
    roads_raster = shp_to_raster(
        buffered_roads, None, cellsize, -1,
        os.path.join(folder, 'rst_roads.tif'),
        epsg=srs, rst_template=rstTemplate, api="gdal"
    )
    stamp_raster = datetime.datetime.now().replace(microsecond=0)

    bfShps.append(roads_raster)

    timeGasto[1] = ('buffer_roads', stamp_buffer - stamp_begin)
    timeGasto[2] = ('to_rst_roads', stamp_raster - stamp_buffer)
def rst_area(osmLink, polygonTable, UPPER=True, api='SQLITE'):
    """
    Select features with area upper than.
    
    A field with threshold is needed in the database.
    """
    import datetime
    from gasp.fm.sql import query_to_df
    if api == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_grs
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_grs
    from gasp.to.rst import shp_to_raster
    from gasp.osm2lulc.var import GEOM_AREA

    RULE_COL = 'area_upper' if UPPER else 'area_lower'
    OPERATOR = " > " if UPPER else " < "
    WHR = "{ga} {op} t_{r} AND {r}={cls_}"

    # Which LULC classes have features passing the area threshold?
    t_begin = datetime.datetime.now().replace(microsecond=0)
    lulc_classes = query_to_df(osmLink, (
        "SELECT {r} FROM {tbl} WHERE {ga} {op} t_{r} GROUP BY {r}"
    ).format(
        r=RULE_COL, tbl=polygonTable, ga=GEOM_AREA, op=OPERATOR
    ), db_api='psql' if api == 'POSTGIS' else 'sqlite')[RULE_COL].tolist()
    t_classes = datetime.datetime.now().replace(microsecond=0)

    timings = {0: ('check_cls', t_classes - t_begin)}

    # Import every class into GRASS GIS and rasterize it; two timing
    # entries are stored per class (odd key = import, even key = torst).
    class_rasters = {}
    for position, lulc_cls in enumerate(lulc_classes):
        key = 2 * position + 1

        t0 = datetime.datetime.now().replace(microsecond=0)
        vect = db_to_grs(
            osmLink, polygonTable, "{}_{}".format(RULE_COL, lulc_cls),
            where=WHR.format(
                op=OPERATOR, r=RULE_COL, ga=GEOM_AREA, cls_=lulc_cls
            ), notTable=True, filterByReg=True
        )
        t1 = datetime.datetime.now().replace(microsecond=0)
        timings[key] = ('import_{}'.format(lulc_cls), t1 - t0)

        rst = shp_to_raster(
            vect, int(lulc_cls), None, None,
            "rst_{}".format(RULE_COL), api='grass'
        )
        t2 = datetime.datetime.now().replace(microsecond=0)
        timings[key + 1] = ('torst_{}'.format(lulc_cls), t2 - t1)

        class_rasters[int(lulc_cls)] = rst

    return class_rasters, timings
def grs_rst(osmLink, polyTbl, api='SQLITE'):
    """
    Simple selection, convert result to Raster
    """
    import datetime
    from gasp.fm.sql import query_to_df
    if api == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_grs
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_grs
    from gasp.to.rst import shp_to_raster

    # List the distinct classes present in the selection column
    t_begin = datetime.datetime.now().replace(microsecond=0)
    selection_classes = query_to_df(
        osmLink,
        ("SELECT selection FROM {} "
         "WHERE selection IS NOT NULL "
         "GROUP BY selection").format(polyTbl),
        db_api='psql' if api == 'POSTGIS' else 'sqlite').selection.tolist()
    t_classes = datetime.datetime.now().replace(microsecond=0)

    timings = {0: ('check_cls', t_classes - t_begin)}

    # Bring every class into GRASS GIS and turn it into a raster;
    # two timing entries per class (import + rasterize).
    rasters = {}
    for position, sel_cls in enumerate(selection_classes):
        key = 2 * position + 1

        t0 = datetime.datetime.now().replace(microsecond=0)
        vect = db_to_grs(
            osmLink, polyTbl, "rule1_{}".format(str(sel_cls)),
            where="selection = {}".format(str(sel_cls)),
            notTable=True, filterByReg=True
        )
        t1 = datetime.datetime.now().replace(microsecond=0)

        rst = shp_to_raster(
            vect, int(sel_cls), None, None,
            "rst_rule1_{}".format(str(sel_cls)), api='grass'
        )
        t2 = datetime.datetime.now().replace(microsecond=0)

        rasters[int(sel_cls)] = rst
        timings[key] = ('import_{}'.format(sel_cls), t1 - t0)
        timings[key + 1] = ('torst_{}'.format(sel_cls), t2 - t1)

    return rasters, timings
def build12_torst(buildTbl):
    """
    Export each building class found in buildTbl to a Shapefile and
    rasterize it.

    Closure: uses osmLink, apidb, folder, cells, srscode, rstT, resLyr
    and timeGasto from the enclosing scope -- TODO confirm.
    """
    LulcCls = query_to_df(
        osmLink, "SELECT cls FROM {} GROUP BY cls".format(buildTbl),
        db_api='psql' if apidb == 'POSTGIS' else 'sqlite').cls.tolist()
    for lulc_cls in LulcCls:
        time_m = datetime.datetime.now().replace(microsecond=0)
        # To SHP
        if apidb == 'SQLITE':
            shpB = sel_by_attr(
                osmLink,
                "SELECT * FROM {} WHERE cls={}".format(
                    buildTbl, str(lulc_cls)),
                os.path.join(
                    folder, 'nshp_build_{}.shp'.format(lulc_cls)),
                api_gis='ogr')
        else:
            shpB = sel_by_attr(
                osmLink,
                "SELECT * FROM {} WHERE cls={}".format(
                    buildTbl, str(lulc_cls)),
                os.path.join(
                    folder, 'nshp_build_{}.shp'.format(lulc_cls)),
                api='pgsql2shp', geom_col="geometry", tableIsQuery=True)
        time_n = datetime.datetime.now().replace(microsecond=0)
        # To RST
        brst = shp_to_raster(shpB, None, cells, 0, os.path.join(
            folder, 'nrst_build_{}.tif'.format(lulc_cls)),
            srscode, rstT, api='gdal')
        time_o = datetime.datetime.now().replace(microsecond=0)
        resLyr[int(lulc_cls)] = [brst]
        # NOTE(review): timing keys int(cls) and int(cls)+1 can collide
        # between consecutive class codes, overwriting earlier entries --
        # confirm whether timeGasto keys matter downstream.
        timeGasto[int(lulc_cls)] = ('to_shp_{}'.format(str(lulc_cls)),
                                    time_n - time_m)
        timeGasto[int(lulc_cls) + 1] = ('to_rst_n_{}'.format(
            str(lulc_cls)), time_o - time_n)
def toRaster(buildShp, cls):
    """
    Rasterize a buildings Shapefile, if it exists on disk.

    Stores the raster under key 33 of the outer-scope ``resLyr`` dict and
    the elapsed time under key 33 of ``timeGasto``.
    """
    # Nothing to do when the Shapefile was never produced
    if not os.path.exists(buildShp):
        return

    # To Raster
    started = datetime.datetime.now().replace(microsecond=0)
    build_raster = shp_to_raster(
        buildShp, None, cells, 0,
        os.path.join(folder, 'rst_build_{}.tif'.format(cls)),
        srscode, rstT, api='gdal'
    )
    finished = datetime.datetime.now().replace(microsecond=0)

    resLyr[33] = build_raster
    timeGasto[33] = ('to_rst_{}'.format(cls), finished - started)
def exportBuild():
    """
    Export building polygons to a Shapefile and rasterize them.

    Closure: uses osmdata, polyTbl, folder, cellsize, srs, rstTemplate,
    BUILDINGS and timeGasto from the enclosing scope -- TODO confirm.
    Appends the produced raster to BUILDINGS; returns early (without
    exporting) when no building rows exist.
    """
    time_ee = datetime.datetime.now().replace(microsecond=0)
    NB = row_num(osmdata, polyTbl, where="building IS NOT NULL",
                 api='sqlite')
    time_e = datetime.datetime.now().replace(microsecond=0)
    timeGasto[3] = ('check_builds', time_e - time_ee)
    if not NB:
        return
    # Export building geometries to a Shapefile via OGR
    bShp = sel_by_attr(
        osmdata,
        "SELECT geometry FROM {} WHERE building IS NOT NULL".format(
            polyTbl),
        os.path.join(folder, 'road_builds.shp'),
        api_gis='ogr')
    time_f = datetime.datetime.now().replace(microsecond=0)
    # Shapefile to raster (GDAL, nodata = -1)
    bRst = shp_to_raster(bShp, None, cellsize, -1,
                         os.path.join(folder, 'road_builds.tif'),
                         epsg=srs, rst_template=rstTemplate, api='gdal')
    time_g = datetime.datetime.now().replace(microsecond=0)
    BUILDINGS.append(bRst)
    timeGasto[4] = ('export_builds', time_f - time_e)
    timeGasto[5] = ('builds_to_rst', time_g - time_f)
def rst_pnt_to_build(osmLink, pntTable, polyTable, api_db='SQLITE'):
    """
    Replace buildings with tag yes using the info in the Points Layer
    
    Only used for URBAN ATLAS and CORINE LAND COVER

    Returns a tuple (resLayers, timeGasto): resLayers maps each LULC class
    to a list of GRASS raster names; timeGasto maps step indices to
    (label, duration) tuples (or None for skipped steps).
    """
    import datetime
    from gasp.sql.mng.tbl import row_num as cnt_row
    from gasp.fm.sql import query_to_df
    if api_db == 'POSTGIS':
        from gasp.to.shp.grs import psql_to_grs as db_to_shp
    else:
        from gasp.to.shp.grs import sqlite_to_shp as db_to_shp
    from gasp.sql.anls.ovlay import sgbd_get_feat_within
    from gasp.sql.anls.ovlay import sgbd_get_feat_not_within
    from gasp.to.rst import shp_to_raster

    # Buildings (polygons) containing a classified point: the point's
    # class replaces the polygon's generic "yes" tag.
    time_a = datetime.datetime.now().replace(microsecond=0)
    new_build = sgbd_get_feat_within(
        osmLink,
        ("(SELECT buildings AS pnt_build, geometry AS pnt_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(pntTable),
        "pnt_geom",
        ("(SELECT buildings AS poly_build, geometry AS poly_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(polyTable),
        "poly_geom", "new_buildings",
        inTblCols="pnt_build AS cls",
        withinCols="poly_geom AS geometry",
        outTblIsFile=None,
        apiToUse="OGR_SPATIALITE" if api_db != "POSTGIS" else api_db)
    time_b = datetime.datetime.now().replace(microsecond=0)

    # Buildings with no point inside keep class 11
    yes_build = sgbd_get_feat_not_within(
        osmLink,
        ("(SELECT buildings AS poly_build, geometry AS poly_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(polyTable),
        "poly_geom",
        ("(SELECT buildings AS pnt_build, geometry AS pnt_geom "
         "FROM {} WHERE buildings IS NOT NULL)").format(pntTable),
        "pnt_geom", "yes_builds",
        inTblCols="poly_geom AS geometry, 11 AS cls",
        outTblIsFile=None,
        apiToUse="OGR_SPATIALITE" if api_db != "POSTGIS" else api_db)
    time_c = datetime.datetime.now().replace(microsecond=0)

    resLayers = {}
    N11 = cnt_row(osmLink, yes_build,
                  api='psql' if api_db == 'POSTGIS' else 'sqlite')
    time_d = datetime.datetime.now().replace(microsecond=0)

    if N11:
        # Data to GRASS GIS
        grsBuild11 = db_to_shp(osmLink, yes_build, "yes_builds",
                               notTable=True, filterByReg=True)
        time_f = datetime.datetime.now().replace(microsecond=0)
        # To raster
        rstBuild11 = shp_to_raster(grsBuild11, 11, None, None,
                                   "rst_builds11", api="grass")
        time_g = datetime.datetime.now().replace(microsecond=0)
        resLayers[11] = [rstBuild11]
    else:
        # No "yes" buildings: skip both steps and mark them as unset
        time_f = None
        time_g = None

    # Add data into GRASS GIS
    lulcCls = query_to_df(
        osmLink,
        "SELECT cls FROM {} GROUP BY cls".format(new_build),
        db_api='psql' if api_db == 'POSTGIS' else 'sqlite').cls.tolist()

    timeGasto = {
        0: ('intersect', time_b - time_a),
        1: ('disjoint', time_c - time_b),
        2: ('count_b11', time_d - time_c),
        3: None if not time_f else ('import_b11', time_f - time_d),
        4: None if not time_g else ('torst_b11', time_g - time_f),
    }

    # Import/rasterize each reclassified building class; class 11 rasters
    # are appended to the existing list instead of replacing it.
    tk = 5
    for cls in lulcCls:
        time_x = datetime.datetime.now().replace(microsecond=0)
        shp = db_to_shp(osmLink, new_build, "nbuild_{}".format(str(cls)),
                        "cls = {}".format(cls), notTable=True,
                        filterByReg=True)
        time_y = datetime.datetime.now().replace(microsecond=0)
        rstb = shp_to_raster(shp, int(cls), None, None,
                             "rst_nbuild_{}".format(str(cls)), api="grass")
        time_z = datetime.datetime.now().replace(microsecond=0)
        if int(cls) == 11 and int(cls) in resLayers:
            resLayers[int(cls)].append(rstb)
        else:
            resLayers[int(cls)] = [rstb]
        timeGasto[tk] = ('import_bn{}'.format(cls), time_y - time_x)
        timeGasto[tk + 1] = ('torst_bn{}'.format(cls), time_z - time_y)
        tk += 2

    return resLayers, timeGasto
def cost_surface(dem, lulc, cls_lulc, prod_lulc, roads, kph, barr,
                 grass_location, output, grass_path=None):
    """
    Tool for make a cost surface based on the roads, slope, land use and
    physical barriers. Each cell has a value that represents the
    resistance to the movement.

    dem            - path to the Digital Elevation Model raster
    lulc           - Land Use / Land Cover vector dataset
    cls_lulc       - field of ``lulc`` holding the class value
    prod_lulc      - LULC product identifier passed to lulc_weight()
    roads          - roads vector dataset
    kph            - roads speed field, or the string 'pedestrian'
    barr           - physical-barriers vector dataset
    grass_location - path to the GRASS GIS location to create/use
    output         - path for the resulting cost-surface raster
    grass_path     - optional GRASS install path (Windows)
    """
    import os
    from gasp.oss.ops import create_folder
    from gasp.os import os_name
    from gasp.session import run_grass
    from gasp.prop.rst import get_cellsize
    from gasp.prop.rst import rst_distinct
    from .constants import lulc_weight
    from .constants import get_slope_categories

    """
    Auxiliar Methods
    """
    def edit_lulc(shp, fld_cls, new_cls):
        # Import LULC into GRASS and add a 'leg' column mapping the raw
        # class values of fld_cls to the integer keys of new_cls.
        # NOTE(review): FT_TF_GRASS is not imported in this function --
        # presumably a module-level helper; verify it is in scope.
        FT_TF_GRASS(shp, 'lulc', 'None')
        add_field('lulc', 'leg', 'INT')
        for key in new_cls.keys():
            l = new_cls[key]['cls']
            sql = " OR ".join([
                "{campo}='{value}'".format(campo=fld_cls, value=i)
                for i in l
            ])
            update_table('lulc', 'leg', int(key), sql)
        return {'shp': 'lulc', 'fld': 'leg'}

    def combine_to_cost(rst_combined, lst_rst, work, slope_weight,
                        rdv_cos_weight, cellsize, mode_movement):
        # The tool r.report doesn't work properly, for that we need some
        # aditional information: the minimum category of each input raster.
        l = []
        for i in lst_rst:
            FT_TF_GRASS(i, os.path.join(work, i + '.tif'), 'None')
            values = rst_distinct(os.path.join(work, i + '.tif'),
                                  gisApi='gdal')
            l.append(min(values))
        # ******
        # Now, we can procede normaly: parse the r.report output and map
        # every combined category to its (slope, roads/lulc) pair.
        txt_file = os.path.join(work, 'text_combine.txt')
        raster_report(rst_combined, txt_file)
        open_txt = open(txt_file, 'r')
        c = 0
        dic_combine = {}
        for line in open_txt.readlines():
            try:
                if c == 4:
                    dic_combine[0] = [str(l[0]), str(l[1])]
                elif c >= 5:
                    pl = line.split('|')
                    cat = pl[2].split('; ')
                    cat1 = cat[0].split(' ')
                    cat2 = cat[1].split(' ')
                    dic_combine[int(pl[1])] = [cat1[1], cat2[1]]
                c += 1
            except:
                # report footer reached / unparsable line: stop scanning
                break

        # Cost per combined category; values >= 6 in the second raster are
        # road speeds, lower values are LULC classes.
        cst_dic = {}
        for key in dic_combine.keys():
            cls_slope = int(dic_combine[key][0])
            cos_vias = int(dic_combine[key][1])
            if cos_vias >= 6:
                weight4slope = slope_weight[cls_slope]['rdv']
                if mode_movement == 'pedestrian':
                    # fixed 5 km/h walking speed
                    weight4other = (3600.0 * cellsize) / (5.0 * 1000.0)
                else:
                    weight4other = (3600.0 * cellsize) / (cos_vias * 1000.0)
            else:
                weight4slope = slope_weight[cls_slope]['cos']
                weight4other = rdv_cos_weight[cos_vias]['weight']
            # scale up so reclassify can work with integers
            cst_dic[key] = (weight4slope * weight4other) * 10000000.0
        return cst_dic

    def Rules4CstSurface(dic, work):
        # NOTE(review): currently unused; kept for parity with older code.
        txt = open(os.path.join(work, 'cst_surface.txt'), 'w')
        for key in dic.keys():
            txt.write('{cat} = {cst}\n'.format(
                cat=str(key), cst=str(dic[key])))
        txt.close()
        return os.path.join(work, 'cst_surface.txt')

    """
    Prepare GRASS GIS Environment
    """
    workspace = os.path.dirname(grass_location)
    location = os.path.basename(grass_location)
    # Start GRASS GIS Engine
    grass_base = run_grass(workspace, location, dem, win_path=grass_path)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, location, 'PERMANENT')

    # Import GRASS GIS Modules
    from gasp.cpu.grs import grass_converter
    from gasp.spanlst.surf import slope
    from gasp.spanlst.rcls import reclassify
    from gasp.spanlst.rcls import interval_rules
    from gasp.spanlst.rcls import category_rules
    from gasp.spanlst.rcls import grass_set_null
    from gasp.mng.grstbl import add_field, update_table
    from gasp.anls.ovlay import union
    from gasp.to.rst import rst_to_grs, grs_to_rst
    from gasp.to.rst import shp_to_raster
    from gasp.to.shp.grs import shp_to_grs
    from gasp.cpu.grs.spanlst import mosaic_raster
    from gasp.spanlst.local import combine
    from gasp.spanlst.algebra import rstcalc
    from gasp.cpu.grs.spanlst import raster_report

    """Global variables"""
    # Workspace for temporary files
    wTmp = create_folder(os.path.join(workspace, 'tmp'))

    # Cellsize
    # FIX: the keyword belongs to get_cellsize, not to float() --
    # float(x, gisApi=...) raises TypeError.
    cellsize = float(get_cellsize(dem, gisApi='gdal'))
    # Land Use Land Cover weights
    lulcWeight = lulc_weight(prod_lulc, cellsize)
    # Slope classes and weights
    slope_cls = get_slope_categories()

    """Make Cost Surface"""
    # Generate slope raster
    rst_to_grs(dem, 'dem')
    slope('dem', 'rst_slope', api="pygrass")
    # Reclassify Slope
    rulesSlope = interval_rules(slope_cls, os.path.join(wTmp, 'slope.txt'))
    reclassify('rst_slope', 'recls_slope', rulesSlope)

    # LULC - Dissolve, union with barriers and conversion to raster
    # FIX: pass the computed weights dict (lulcWeight), not the imported
    # lulc_weight function -- edit_lulc iterates new_cls.keys().
    lulc_shp = edit_lulc(lulc, cls_lulc, lulcWeight)
    shp_to_grs(barr, 'barriers')
    union(lulc_shp['shp'], 'barriers', 'barrcos', api_gis="grass")
    update_table('barrcos', 'a_' + lulc_shp['fld'], 99, 'b_cat=1')
    shp_to_raster('barrcos', 'a_' + lulc_shp['fld'], None, None,
                  'rst_barrcos', api='pygrass')
    # Reclassify this raster - convert the values 99 to NULL or NODATA
    grass_set_null('rst_barrcos', 99)

    # Add the roads layer to the GRASS GIS
    shp_to_grs(roads, 'rdv')
    if kph == 'pedestrian':
        # constant pseudo-speed so every road cell is rasterized
        add_field('rdv', 'foot', 'INT')
        update_table('rdv', 'foot', 50, 'foot IS NULL')
        shp_to_raster('rdv', 'foot', None, None, 'rst_rdv', api='pygrass')
    else:
        shp_to_raster('rdv', kph, None, None, 'rst_rdv', api='pygrass')

    # Merge LULC/BARR and Roads
    mosaic_raster('rst_rdv', 'rst_barrcos', 'rdv_barrcos')

    # Combine LULC/BARR/ROADS with Slope
    combine('recls_slope', 'rdv_barrcos', 'rst_combine', api="pygrass")

    """
    Estimating cost for every combination at rst_combine
    The order of the rasters on the following list has to be the same
    of GRASS Combine
    """
    # FIX: cellsize/lulcWeight, not the undefined cell_size or the
    # lulc_weight function object.
    cst = combine_to_cost('rst_combine', ['recls_slope', 'rdv_barrcos'],
                          wTmp, slope_cls, lulcWeight, cellsize, kph)

    # Reclassify combined rst
    # FIX: write the rules file into the tmp workspace, not the CWD.
    rulesSurface = category_rules(cst, os.path.join(wTmp, 'r_surface.txt'))
    reclassify('rst_combine', 'cst_tmp', rulesSurface)
    # undo the integer scaling applied in combine_to_cost
    rstcalc('cst_tmp / 10000000.0', 'cst_surface', api='pygrass')
    grs_to_rst('cst_surface', output)
def cstDistance_with_motorway(cst_surface, motorway, fld_motorway,
                              nodes_start, nodes_end, pnt_destiny,
                              grass_location, isolines):
    """
    Produce a surface representing the acumulated cost of each cell to a
    destination point considering the false intersections caused by a non
    planar graph

    NOTE(review): several names used below (ogr, convert, FT_TF_GRASS,
    GRASS_CostDistance, GRASS_RasterCalculator, GDAL_ExtractValuesByPoint,
    OGR_GetDriverName, get_cellsize, lst_outputs) are not imported or
    defined in this function -- presumably module-level; verify before
    relying on this code. ``isolines`` is accepted but never used.
    """
    import os
    from gasp.oss.ops import create_folder
    from gasp.prop.ff import drv_name
    from gasp.cpu.grs.spanlst import rseries
    from gasp.spanlst.algebra import rstcalc
    from gasp.spanlst.dist import rcost
    from gasp.to.rst import rst_to_grs
    from gasp.to.rst import shp_to_raster
    from gasp.cpu.gdl.sampling import gdal_values_to_points
    from pysage.tools_thru_api.gdal.ogr import OGR_CreateNewShape

    """
    Auxiliar Methods
    """
    def dist_to_nodes(pnt_shp, cstSurface, string, w):
        # For every point of pnt_shp: write a one-point Shapefile, import
        # it into GRASS and compute a cost-distance surface to it.
        # Returns {layer_name: [shapefile_path, cost_raster_name]}.
        nodes = ogr.GetDriverByName(drv_name(pnt_shp)).Open(pnt_shp, 0)
        nodesLyr = nodes.GetLayer()
        c = 0
        dicNodes = {}
        for pnt in nodesLyr:
            geom = pnt.GetGeometryRef()
            point = geom.ExportToWkb()
            OGR_CreateNewShape(
                OGR_GetDriverName(pnt_shp),
                os.path.join(w, '{pnt}_{o}.shp'.format(
                    pnt=string, o=str(c))),
                ogr.wkbPoint, [point])
            FT_TF_GRASS(
                os.path.join(w, '{pnt}_{o}.shp'.format(
                    pnt=string, o=str(c))),
                '{pnt}_{o}'.format(pnt=string, o=str(c)), 'None')
            GRASS_CostDistance(
                cstSurface,
                '{pnt}_{o}'.format(pnt=string, o=str(c)),
                'cst_{pnt}_{a}'.format(pnt=string, a=str(c)))
            dicNodes['{pnt}_{o}'.format(pnt=string, o=str(c))] = [
                os.path.join(w, '{pnt}_{o}.shp'.format(
                    pnt=string, o=str(c))),
                'cst_{pnt}_{a}'.format(pnt=string, a=str(c))
            ]
            c += 1
        return dicNodes

    """GRASS GIS Configuration"""
    # Workspace for temporary files
    wTmp = create_folder(
        os.path.join(os.path.dirname(grass_location), 'tmp'))

    """Make Accessibility Map"""
    # Add Cost Surface to GRASS GIS
    convert(cst_surface, 'cst_surface')
    # Add Destination To GRASS
    convert(pnt_destiny, 'destination')

    # Run r.cost with only with a secundary roads network
    rcost('cst_surface', 'destination', 'cst_dist_secun')

    # We have to know if the path through motorway implies minor cost.
    # Add primary roads to grass
    convert(motorway, 'rdv_prim', 'None')

    # We need a cost surface only with the cost of motorway roads
    shp_to_raster('rdv_prim', fld_motorway, None, None, 'rst_rdv',
                  api='pygrass')
    # time (seconds) to cross one cell at the speed stored in rst_rdv
    rstcalc('(3600.0 * {cs}) / (rst_rdv * 1000.0)'.format(
        cs=get_cellsize(cst_surface, gisApi='gdal')), 'cst_motorway',
        api='grass')

    # For each node of entrance into a motorway, we need to know:
    # - the distance to the entrance node;
    # - the distance between the entrance and every exit node
    # - the distance between the exit and the destination
    # Geting the distance to the entrance node
    entranceNodes = dist_to_nodes(nodes_start, 'cst_surface', 'start', wTmp)
    # Geting the distances to all entrance nodes
    exitNodes = dist_to_nodes(nodes_end, 'cst_surface', 'exit', wTmp)

    # Getting the values needed
    for start_pnt in entranceNodes.keys():
        for exit_pnt in exitNodes.keys():
            # Cost along the motorway from this exit
            GRASS_CostDistance(
                'cst_motorway', exit_pnt,
                'cst2exit_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
            FT_TF_GRASS(
                'cst2exit_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])),
                os.path.join(wTmp, 'cst2exit_{a}_{b}.tif'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1]))),
                'None')
            # Sample the motorway cost surface at the entrance node
            cst_start_exit = GDAL_ExtractValuesByPoint(
                entranceNodes[start_pnt][0],
                os.path.join(wTmp, 'cst2exit_{a}_{b}.tif'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1]))))
            # Export the exit cost surface once, then sample it at the
            # destination point
            if os.path.isfile(os.path.join(
                    wTmp, exitNodes[exit_pnt][1] + '.tif')) == False:
                FT_TF_GRASS(
                    exitNodes[exit_pnt][1],
                    os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif'),
                    'None')
            cst_exit_destination = GDAL_ExtractValuesByPoint(
                pnt_destiny,
                os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif'))
            # Total cost via this entrance/exit pair
            GRASS_RasterCalculator(
                '{rst} + {a} + {b}'.format(
                    rst=entranceNodes[start_pnt][1],
                    a=str(cst_start_exit[0]),
                    b=str(min(cst_exit_destination))),
                'cst_path_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
            lst_outputs.append('cst_path_{a}_{b}'.format(
                a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
    lst_outputs.append('cst_dist_secun')
    # Cell-wise minimum over all candidate paths = final accessibility map
    rseries(lst_outputs, 'isocronas', 'minimum')
def infovalue(landslides, variables, iv_rst, dataEpsg):
    """
    Informative Value using GDAL Library

    landslides - raster OR vector dataset with the landslide inventory
                 (points are buffered by 100 units; polygons rasterized)
    variables  - list of predictor raster paths; all must share the same
                 dimension (at least two are required by the final sum)
    iv_rst     - path for the output Informative Value raster
    dataEpsg   - EPSG code for the output raster

    Returns iv_rst. Raises ValueError when the variable rasters do not
    share the same shape.
    """
    import os
    import math
    import numpy
    from osgeo import gdal
    from gasp.fm.rst import rst_to_array
    from gasp.fm import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.prop.rst import rst_shape
    from gasp.prop.rst import count_cells
    from gasp.prop.rst import get_cellsize
    from gasp.stats.rst import frequencies
    from gasp.oss.ops import create_folder
    from gasp.to.rst import array_to_raster

    # Create Workspace for temporary files
    workspace = create_folder(os.path.join(
        os.path.dirname(landslides), 'tmp'))

    # Get Variables Raster Shape and see if there is any difference
    varShapes = rst_shape(variables, gisApi='gdal')
    for i in range(1, len(variables)):
        if varShapes[variables[i - 1]] != varShapes[variables[i]]:
            raise ValueError(
                ('All rasters must have the same dimension! '
                 'Raster {} and Raster {} have not the same shape!').format(
                     variables[i - 1], variables[i]))

    # See if landslides are raster or not
    # Try to open as raster
    try:
        land_rst = rst_to_array(landslides)
        lrows, lcols = land_rst.shape
        if [lrows, lcols] != varShapes[variables[0]]:
            raise ValueError(
                ("Raster with Landslides ({}) has to have the same "
                 "dimension that Raster Variables").format(landslides))
        # FIX: keep the raster path for the frequencies() call below --
        # previously only the vector branch assigned land_raster, so
        # raster input raised NameError later on.
        land_raster = landslides
    except:
        # NOTE(review): bare except preserved from the original dispatch;
        # it also swallows the shape ValueError raised just above.
        # Landslides are not Raster -- open as Feature Class and see if
        # it is Point or Polygon.
        land_df = tbl_to_obj(landslides)
        geomType = get_geom_type(land_df, geomCol="geometry",
                                 gisApi='pandas')
        if geomType == 'Polygon' or geomType == 'MultiPolygon':
            # it will be converted to raster bellow
            land_poly = landslides
        elif geomType == 'Point' or geomType == 'MultiPoint':
            # Do a Buffer
            from gasp.anls.prox.bf import geodf_buffer_to_shp
            land_poly = geodf_buffer_to_shp(
                land_df, 100,
                os.path.join(workspace, 'landslides_buffer.shp'))
        # Convert To Raster
        from gasp.to.rst import shp_to_raster
        land_raster = shp_to_raster(
            land_poly, None,
            get_cellsize(variables[0], gisApi='gdal'), -9999,
            os.path.join(workspace, 'landslides_rst.tif'),
            rst_template=variables[0], api='gdal')
        land_rst = rst_to_array(land_raster)

    # Get Number of cells of each raster and number of cells
    # with landslides
    landsldCells = frequencies(land_raster)[1]
    totalCells = count_cells(variables[0])

    # Get number of cells by classe in variable
    freqVar = {r: frequencies(r) for r in variables}

    # Re-key class 0 as -1 (0 is reserved as "no class" below).
    for rst in freqVar:
        # FIX: iterate over a snapshot of the keys -- mutating the dict
        # while iterating it raises RuntimeError in Python 3.
        for cls in list(freqVar[rst]):
            if cls == 0:
                freqVar[rst][-1] = freqVar[rst][cls]
                del freqVar[rst][cls]
            else:
                continue

    # Get cell number with landslides by class
    varArray = {r: rst_to_array(r) for r in variables}
    for r in varArray:
        numpy.place(varArray[r], varArray[r] == 0, -1)
    landArray = {r: land_rst * varArray[r] for r in varArray}
    freqLndVar = {r: frequencies(landArray[r]) for r in landArray}

    # Estimate VI for each class on every variable
    vi = {}
    for var in freqVar:
        vi[var] = {}
        for cls in freqVar[var]:
            if cls in freqLndVar[var]:
                vi[var][cls] = math.log10(
                    (float(freqLndVar[var][cls]) / freqVar[var][cls]) /
                    (float(landsldCells) / totalCells))
            else:
                # sentinel: class with no landslide cells
                vi[var][cls] = 9999

    # Replace Classes without VI, from 9999 to minimum VI
    vis = []
    for d in vi.values():
        vis += d.values()
    min_vi = min(vis)
    for r in vi:
        for cls in vi[r]:
            if vi[r][cls] == 9999:
                vi[r][cls] = min_vi
            else:
                continue

    # Replace cls by vi in rst_arrays (-128 marks nodata)
    resultArrays = {v: numpy.zeros(varArray[v].shape) for v in varArray}
    for v in varArray:
        numpy.place(resultArrays[v], resultArrays[v] == 0, -128)
    for v in varArray:
        for cls in vi[v]:
            numpy.place(resultArrays[v], varArray[v] == cls, vi[v][cls])

    # Sum all arrays and save the result as raster
    vi_rst = resultArrays[variables[0]] + resultArrays[variables[1]]
    for v in range(2, len(variables)):
        vi_rst = vi_rst + resultArrays[variables[v]]
    numpy.place(vi_rst, vi_rst == len(variables) * -128, -128)

    # FIX: use variables[0] as the template raster -- the original used
    # variables[i], leaking the index from the shape-check loop above
    # (NameError for a single variable, fragile otherwise).
    result = array_to_raster(
        vi_rst, iv_rst, variables[0], dataEpsg,
        gdal.GDT_Float32, noData=-128, gisApi='gdal')

    return iv_rst
def roads_sqdb(osmcon, lnhTbl, plTbl, apidb='SQLITE', asRst=None):
    """
    Roads procedings using SQLITE (or POSTGIS, per ``apidb``).

    Buffers road lines by a per-feature distance (bf_roads); when
    buildings exist, the buffer distance is first shrunk to the distance
    to the nearest building (clamped to [1, 12]). Returns a tuple
    (roads layer, timings dict); the layer is a GRASS vector, or a GRASS
    raster when ``asRst`` (a class code) is given. Returns
    (None, timings) when there are no road rows.
    """
    import datetime
    from gasp.sql.mng.tbl import row_num as cnt_rows
    if apidb == 'SQLITE':
        from gasp.sql.anls.prox import splite_buffer as st_buffer
        from gasp.to.shp.grs import sqlite_to_shp as db_to_shp
    else:
        from gasp.sql.anls.prox import st_buffer
        from gasp.to.shp.grs import psql_to_grs as db_to_shp

    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = cnt_rows(osmcon, lnhTbl, where="roads IS NOT NULL",
                  api='psql' if apidb == 'POSTGIS' else 'sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)

    # No roads: nothing else to do
    if not NR:
        return None, {0: ('count_rows_roads', time_b - time_a)}

    NB = cnt_rows(osmcon, plTbl, where="building IS NOT NULL",
                  api='psql' if apidb == 'POSTGIS' else 'sqlite')
    time_c = datetime.datetime.now().replace(microsecond=0)

    if NB:
        # Buildings exist: adjust each road's buffer distance using the
        # distance to the nearest building.
        from gasp.sql.mng.qw import exec_write_q
        ROADS_Q = "(SELECT{} roads, bf_roads, geometry FROM {} WHERE roads IS NOT NULL)".format(
            "" if apidb == 'SQLITE' else " gid,", lnhTbl)
        if apidb == 'SQLITE':
            from gasp.sql.anls.prox import splite_near
            nroads = splite_near(
                osmcon, ROADS_Q, plTbl, "geometry", "geometry",
                "near_roads", whrNear="building IS NOT NULL")
            time_d = datetime.datetime.now().replace(microsecond=0)
            # Update buffer distance field
            exec_write_q(osmcon, [
                ("UPDATE near_roads SET bf_roads = CAST(round(dist_near, 0) AS integer) "
                 "WHERE dist_near >= 1 AND dist_near <= 12"),
                ("UPDATE near_roads SET bf_roads = 1 WHERE dist_near >= 0 AND "
                 "dist_near < 1")
            ], api='sqlite')
            time_e = datetime.datetime.now().replace(microsecond=0)
        else:
            from gasp.sql.anls.prox import st_near
            nroads = st_near(
                osmcon, ROADS_Q, 'gid', 'geometry',
                "(SELECT * FROM {} WHERE building IS NOT NULL)".format(
                    plTbl),
                "geometry", "near_roads", untilDist="12",
                near_col="dist_near")
            time_d = datetime.datetime.now().replace(microsecond=0)
            # Same buffer-distance update, plus a spatial index on the
            # near_roads geometry.
            exec_write_q(osmcon, [
                ("UPDATE near_roads SET "
                 "bf_roads = CAST(round(CAST(dist_near AS numeric), 0) AS integer) "
                 "WHERE dist_near >= 1 AND dist_near <= 12"),
                ("UPDATE near_roads SET bf_roads = 1 WHERE dist_near >= 0 AND "
                 "dist_near < 1"),
                ("CREATE INDEX near_dist_idx ON near_roads USING gist (geometry)")
            ], api='psql')
            time_e = datetime.datetime.now().replace(microsecond=0)
    else:
        # No buildings: buffer the raw roads selection directly
        nroads = ("(SELECT roads, bf_roads, geometry "
                  "FROM {} WHERE roads IS NOT NULL) AS foo").format(lnhTbl)
        time_d = None
        time_e = None

    # Execute Buffer
    bfTbl = st_buffer(osmcon, nroads, "bf_roads", "geometry", "bf_roads",
                      cols_select="roads", outTblIsFile=None,
                      dissolve="ALL")
    time_f = datetime.datetime.now().replace(microsecond=0)

    # Send data to GRASS GIS
    roadsGrs = db_to_shp(osmcon, bfTbl, "froads",
                         notTable=None, filterByReg=True)
    time_g = datetime.datetime.now().replace(microsecond=0)

    if asRst:
        # Optionally rasterize with asRst as the burned class value
        from gasp.to.rst import shp_to_raster
        roadsGrs = shp_to_raster(roadsGrs, int(asRst), None, None,
                                 "rst_roads", api="grass")
        time_h = datetime.datetime.now().replace(microsecond=0)
    else:
        time_h = None

    return roadsGrs, {
        0: ('count_rows_roads', time_b - time_a),
        1: ('count_rows_build', time_c - time_b),
        2: None if not time_d else ('near_analysis', time_d - time_c),
        3: None if not time_e else ('update_buffer_tbl', time_e - time_d),
        4: ('buffer_roads', time_f - time_e if time_e else time_f - time_c),
        5: ('import_roads', time_g - time_f),
        6: None if not time_h else ('roads_to_raster', time_h - time_g)
    }
def basic_buffer(osmLink, lineTable, dataFolder, apidb='SQLITE'):
    """
    Data from Lines table to Polygons using a basic buffering stratagie
    """
    import datetime
    from gasp.fm.sql import query_to_df
    if apidb == 'POSTGIS':
        from gasp.sql.anls.prox import st_buffer
    else:
        from gasp.sql.anls.prox import splite_buffer as st_buffer
    from gasp.to.rst import shp_to_raster
    from gasp.to.shp.grs import shp_to_grs

    # Distinct buffer classes found in the lines table
    t_begin = datetime.datetime.now().replace(microsecond=0)
    buffer_classes = query_to_df(
        osmLink,
        ("SELECT basic_buffer FROM {} WHERE basic_buffer IS NOT NULL "
         "GROUP BY basic_buffer").format(lineTable),
        db_api='psql' if apidb == 'POSTGIS' else 'sqlite'
    ).basic_buffer.tolist()
    t_classes = datetime.datetime.now().replace(microsecond=0)

    timings = {0: ('check_cls', t_classes - t_begin)}

    # For each class: buffer, import into GRASS, rasterize; three timing
    # entries are recorded per class.
    rasters = {}
    for position, bf_cls in enumerate(buffer_classes):
        key = 3 * position + 1

        # Run BUFFER Tool
        t0 = datetime.datetime.now().replace(microsecond=0)
        bf_file = st_buffer(
            osmLink, lineTable, "bf_basic_buffer", "geometry",
            os.path.join(
                dataFolder, 'bb_rule5_{}.shp'.format(str(int(bf_cls)))),
            whrClause="basic_buffer={}".format(str(int(bf_cls))),
            outTblIsFile=True, dissolve="ALL",
            cols_select="basic_buffer"
        )
        t1 = datetime.datetime.now().replace(microsecond=0)

        # Data TO GRASS
        vect = shp_to_grs(bf_file, "bb_{}".format(int(bf_cls)),
                          asCMD=True, filterByReg=True)
        t2 = datetime.datetime.now().replace(microsecond=0)

        # Data to Raster
        rst = shp_to_raster(vect, int(bf_cls), None, None,
                            "rbb_{}".format(int(bf_cls)), api="grass")
        t3 = datetime.datetime.now().replace(microsecond=0)

        rasters[int(bf_cls)] = rst
        timings[key] = ('do_buffer_{}'.format(bf_cls), t1 - t0)
        timings[key + 1] = ('import_{}'.format(bf_cls), t2 - t1)
        timings[key + 2] = ('torst_{}'.format(bf_cls), t3 - t2)

    return rasters, timings
def make_DEM(grass_workspace, data, field, output, extent_template,
             method="IDW"):
    """
    Create Digital Elevation Model

    Methods Available:
    * IDW;
    * BSPLINE;
    * SPLINE;
    * CONTOUR

    grass_workspace - folder where the GRASS GIS location is created;
    data            - vector file with the elevation features;
    field           - attribute column holding the elevation value;
    output          - path of the output DEM raster file;
    extent_template - raster whose extent/SRS define the working region;
    method          - interpolation strategy (one of the four above).

    Returns the ``output`` path.

    NOTE(review): if ``method`` is none of the four known values, no branch
    runs and the export uses an interpolation result that was never produced
    — confirm callers always pass a valid method.
    """
    from gasp.oss import get_filename
    from gasp.session import run_grass
    from gasp.prop.rst import get_epsg_raster

    # Location name derived from the input file name (max 5 chars + suffix)
    LOC_NAME = get_filename(data, forceLower=True)[:5] + "_loc"

    # Get EPSG From Raster (the template defines the location's SRS)
    EPSG = get_epsg_raster(extent_template)

    # Create GRASS GIS Location
    grass_base = run_grass(grass_workspace, location=LOC_NAME, srs=EPSG)

    # Start GRASS GIS Session
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, grass_workspace, LOC_NAME, 'PERMANENT')

    # IMPORT GRASS GIS MODULES #
    from gasp.to.rst import rst_to_grs, grs_to_rst
    from gasp.to.shp.grs import shp_to_grs
    from gasp.prop.grs import rst_to_region

    # Configure region: import the template raster and snap the GRASS
    # computational region to it
    rst_to_grs(extent_template, 'extent')
    rst_to_region('extent')

    # Convert elevation "data" to GRASS Vector
    elv = shp_to_grs(data, 'elevation')

    OUTPUT_NAME = get_filename(output, forceLower=True)

    if method == "BSPLINE":
        # Convert to points (bspline interpolates from point data)
        from gasp.cpu.grs.mng.feat import feat_vertex_to_pnt
        from gasp.spanlst.interp import bspline
        elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        outRst = bspline(elev_pnt, field, OUTPUT_NAME, lyrN=1, asCMD=True)

    elif method == "SPLINE":
        # Convert to points (surfrst interpolates from point data)
        from gasp.cpu.grs.mng.feat import feat_vertex_to_pnt
        from gasp.spanlst.interp import surfrst
        elev_pnt = feat_vertex_to_pnt(elv, "elev_pnt", nodes=None)
        outRst = surfrst(elev_pnt, field, OUTPUT_NAME, lyrN=1, ascmd=True)

    elif method == "CONTOUR":
        from gasp.to.rst import shp_to_raster
        from gasp.spanlst.interp import surfcontour
        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_raster(
            elv, field, None, None, 'rst_elevation', api="pygrass")
        # Run Interpolator
        outRst = surfcontour(elevRst, OUTPUT_NAME, ascmd=True)

    elif method == "IDW":
        from gasp.spanlst.interp import ridw
        from gasp.spanlst.algebra import rstcalc
        from gasp.to.rst import shp_to_raster
        # Elevation (GRASS Vector) to Raster
        elevRst = shp_to_raster(
            elv, field, None, None, 'rst_elevation', api='pygrass')
        # Multiply cells values by 100 000.0 — work in scaled integers to
        # keep precision through the integer IDW step
        rstcalc('int(rst_elevation * 100000)', 'rst_elev_int', api='pygrass')
        # Run IDW to generate the new DEM
        ridw('rst_elev_int', 'dem_int', numberPoints=15)
        # DEM to Float (undo the 100000 scaling)
        rstcalc('dem_int / 100000.0', OUTPUT_NAME, api='pygrass')

    # Export DEM to a file outside GRASS Workspace
    grs_to_rst(OUTPUT_NAME, output)

    return output
def mean_time_in_povoated_areas(network, rdv_name, stat_units, popFld,
                                destinations, output, workspace,
                                ONEWAY=True, GRID_REF_CELLSIZE=10):
    """
    Receive statistical units and some destinations. Estimates the mean
    distance to that destinations for each statistical unit.

    The mean for each statistical will be calculated using a point grid:
    -> Statistical unit to grid point;
    -> Distance from grid point to destination;
    -> Mean of these distances.

    This method will only do the math for areas (statistic units) with
    population.

    network           - ArcGIS network dataset used for routing;
    rdv_name          - name of the roads layer inside the network dataset;
    stat_units        - polygon shapefile with the statistical units;
    popFld            - population column used to split units with/without
                        population;
    destinations      - destination features for the closest-facility run;
    output            - path of the merged result shapefile;
    workspace         - folder for all intermediate files;
    ONEWAY            - honour one-way restrictions in the network;
    GRID_REF_CELLSIZE - cell size of the rasterisation that produces the
                        point grid.

    Returns ``output``. Requires the ArcGIS Network Analyst extension;
    raises ValueError when it is not available.
    """

    import os
    import arcpy
    from gasp.cpu.arcg.lyr import feat_lyr
    from gasp.cpu.arcg.anls.exct import select_by_attr
    from gasp.cpu.arcg.mng.fld import field_statistics
    from gasp.cpu.arcg.mng.fld import add_field
    from gasp.cpu.arcg.mng.gen import merge
    from gasp.mng.gen import copy_feat
    from gasp.mob.arctbx.closest import closest_facility
    from gasp.to.shp.arcg import rst_to_pnt
    from gasp.to.rst import shp_to_raster

    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    else:
        raise ValueError('Network analyst extension is not avaiable')

    arcpy.env.overwriteOutput = True

    WORK = workspace

    # Add field — work on a copy so the input shapefile is untouched
    stat_units = copy_feat(
        stat_units,
        os.path.join(WORK, os.path.basename(stat_units)), gisApi='arcpy')
    add_field(stat_units, "TIME", "DOUBLE", "10", precision="3")

    # Split stat_units into two layers
    # One with population
    # One with no population
    withPop = select_by_attr(
        stat_units, '{}>0'.format(popFld),
        os.path.join(WORK, 'with_pop.shp'))
    noPop = select_by_attr(
        stat_units, '{}=0'.format(popFld),
        os.path.join(WORK, 'no_pop.shp'))

    # For each statistic unit with population
    withLyr = feat_lyr(withPop)
    cursor = arcpy.UpdateCursor(withLyr)

    # FID tracks the row being processed; assumes cursor order matches FID
    # order (0, 1, 2, ...) — TODO(review): confirm this holds for the layer.
    FID = 0
    for feature in cursor:
        # Create a new file with only the current unit
        unity = select_by_attr(
            withLyr, 'FID = {}'.format(str(FID)),
            os.path.join(WORK, 'unit_{}.shp'.format(str(FID))))

        # Convert to raster (grid of GRID_REF_CELLSIZE cells over the unit)
        rst_unity = shp_to_raster(
            unity, "FID", GRID_REF_CELLSIZE, None,
            os.path.join(WORK, 'unit_{}.tif'.format(str(FID))),
            api='arcpy')

        # Convert to point — one origin point per raster cell
        pnt_unity = rst_to_pnt(
            rst_unity,
            os.path.join(WORK, 'pnt_un_{}.shp'.format(str(FID))))

        # Execute closest facilitie
        CLOSEST_TABLE = os.path.join(WORK, 'cls_fac_{}.dbf'.format(str(FID)))
        closest_facility(
            network, rdv_name, destinations, pnt_unity, CLOSEST_TABLE,
            oneway_restriction=ONEWAY)

        # Get Mean of the travel minutes over all grid points
        MEAN_TIME = field_statistics(CLOSEST_TABLE, 'Total_Minu', 'MEAN')[0]

        # Record Mean on the unit's TIME field
        feature.setValue("TIME", MEAN_TIME)
        cursor.updateRow(feature)

        FID += 1

    # Recombine populated (with TIME filled) and unpopulated units
    merge([withPop, noPop], output)

    return output
def joinLines_by_spatial_rel_raster(mainLines, mainId, joinLines,
                                    joinCol, outfile, epsg):
    """
    Join Attributes based on a spatial overlap.
    An raster based approach

    Rasterises both line layers, combines the rasters and, for each feature
    of ``mainLines``, picks the ``joinCol`` value of the ``joinLines``
    feature it overlaps the most (max cell count; ties broken by the
    smallest ``joinCol`` value).

    mainLines - line shapefile receiving the attribute;
    mainId    - unique id column of mainLines used for rasterisation;
    joinLines - line shapefile providing the attribute;
    joinCol   - integer attribute column to transfer (0 treated as nodata);
    outfile   - path of the output shapefile;
    epsg      - EPSG code of both datasets.

    Returns ``outfile``.
    """

    import os
    import pandas
    from gasp.to.geom import regulardf_to_geodf
    from gasp.session import run_grass
    from gasp.oss import get_filename
    from gasp.oss.ops import create_folder
    from gasp.mng.ext import shpextent_to_boundary
    from gasp.mng.joins import join_dfs
    from gasp.mng.df import df_groupBy
    from gasp.to.rst import shp_to_raster
    from gasp.fm import tbl_to_obj
    from gasp.to.shp import df_to_shp

    # fix: os.path.dirname takes a single path argument; the original
    # os.path.dirname(mainLines, 'tmp_dt') raised TypeError at runtime.
    workspace = create_folder(os.path.join(
        os.path.dirname(mainLines), 'tmp_dt'
    ))

    # Create boundary file covering the extent of mainLines
    boundary = shpextent_to_boundary(
        mainLines, os.path.join(workspace, "bound.shp"),
        epsg
    )

    # Reference raster for the GRASS location (5 m cells, -99 nodata)
    boundRst = shp_to_raster(
        boundary, None, 5, -99,
        os.path.join(workspace, "rst_base.tif"), epsg=epsg, api='gdal')

    # Start GRASS GIS Session
    gbase = run_grass(workspace, location="grs_loc", srs=boundRst)

    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(gbase, workspace, "grs_loc", "PERMANENT")

    from gasp.spanlst.local import combine
    from gasp.cpu.grs.spanlst import get_rst_report_data
    from gasp.to.shp.grs import shp_to_grs, grs_to_shp
    from gasp.to.rst import shp_to_raster

    # Add data to GRASS GIS
    mainVector = shp_to_grs(
        mainLines, get_filename(mainLines, forceLower=True))
    joinVector = shp_to_grs(
        joinLines, get_filename(joinLines, forceLower=True))

    # Rasterise: cells carry mainId resp. joinCol values
    mainRst = shp_to_raster(
        mainVector, mainId, None, None, "rst_" + mainVector, api='pygrass'
    )
    joinRst = shp_to_raster(
        joinVector, joinCol, None, None, "rst_" + joinVector, api='pygrass'
    )

    # Cross-tabulate both rasters: each combined category maps to a
    # (mainId, joinCol) pair with its cell count
    combRst = combine(mainRst, joinRst, "combine_rst", api="pygrass")

    combine_data = get_rst_report_data(combRst, UNITS="c")

    combDf = pandas.DataFrame(combine_data, columns=[
        "comb_cat", "rst_1", "rst_2", "ncells"
    ])

    # Drop cells with no joinLines value (joinCol == 0 means nodata here)
    combDf = combDf[combDf["rst_2"] != '0']

    combDf["ncells"] = combDf["ncells"].astype(int)

    # Keep, for each main feature, the overlap with the largest cell count
    gbdata = df_groupBy(combDf, ["rst_1"], "MAX", "ncells")

    fTable = join_dfs(gbdata, combDf, ["rst_1", "ncells"], ["rst_1", "ncells"])

    fTable["rst_2"] = fTable["rst_2"].astype(int)
    # Resolve ties on ncells deterministically: smallest joinCol value wins
    fTable = df_groupBy(
        fTable, ["rst_1", "ncells"], STAT='MIN', STAT_FIELD="rst_2")

    # Export the (categorised) main lines back out of GRASS and join the
    # chosen joinCol values by category
    mainLinesCat = grs_to_shp(
        mainVector, os.path.join(workspace, mainVector + '.shp'), 'line')

    mainLinesDf = tbl_to_obj(mainLinesCat)

    resultDf = join_dfs(
        mainLinesDf, fTable, "cat", "rst_1", onlyCombinations=None
    )

    resultDf.rename(columns={"rst_2": joinCol}, inplace=True)

    resultDf = regulardf_to_geodf(resultDf, "geometry", epsg)

    df_to_shp(resultDf, outfile)

    return outfile
def pg_num_roads(osmLink, nom, lnhTbl, polyTbl, folder, cellsize, srs, rstT):
    """
    Select, Calculate Buffer distance using POSTGIS, make buffer of roads
    and convert roads to raster

    When buildings exist, the buffer distance of each road segment is set
    from its distance to the nearest building (clamped to the 1-12 m range);
    otherwise the pre-computed ``bf_roads`` column is used as-is.

    osmLink  - PostGIS connection parameters;
    nom      - nomenclature name (selects the output LULC class code);
    lnhTbl   - lines table (roads);
    polyTbl  - polygons table (buildings);
    folder   - folder for intermediate/output files;
    cellsize - output raster cell size;
    srs      - EPSG code for the output raster;
    rstT     - template raster defining the output grid.

    Returns ({lulc_class: raster_path}, timings) or (None, timings) when
    there are no roads. Timings is {step_index: (label, timedelta) | None}.
    """

    import datetime
    import os
    # NOTE: the original also did "from osgeo import gdal" — removed, the
    # name was never used in this function (shp_to_raster handles GDAL).
    from gasp.sql.mng.tbl import row_num
    from gasp.sql.anls.prox import st_buffer
    from gasp.to.rst import shp_to_raster

    # There are roads?
    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmLink, lnhTbl, where="roads IS NOT NULL", api='psql')
    time_b = datetime.datetime.now().replace(microsecond=0)

    if not NR:
        return None, {0: ('count_rows_roads', time_b - time_a)}

    # There are buildings?
    NB = row_num(osmLink, polyTbl, where="building IS NOT NULL", api='psql')
    time_c = datetime.datetime.now().replace(microsecond=0)

    if NB:
        from gasp.sql.anls.prox import st_near
        from gasp.sql.mng.qw import exec_write_q

        # Distance of every road to the nearest building (capped at 12 m)
        nroads = st_near(
            osmLink,
            ("(SELECT gid, roads, bf_roads, geometry FROM {} "
             "WHERE roads IS NOT NULL)").format(lnhTbl),
            "gid", "geometry",
            ("(SELECT * FROM {} WHERE building IS NOT NULL)").format(polyTbl),
            "geometry", "near_roads", untilDist="12", near_col="dist_near")
        time_d = datetime.datetime.now().replace(microsecond=0)

        # Buffer distance = rounded distance-to-building when within 1-12 m
        exec_write_q(osmLink, [(
            "UPDATE near_roads SET "
            "bf_roads = CAST(round(CAST(dist_near AS numeric), 0) AS integer) "
            "WHERE dist_near >= 1 AND dist_near <= 12"
        ), "CREATE INDEX near_dist_idx ON near_roads USING gist (geometry)"])
        time_e = datetime.datetime.now().replace(microsecond=0)

    else:
        # No buildings: buffer with the pre-existing bf_roads values
        nroads = ("(SELECT roads, bf_roads, geometry FROM {} "
                  "WHERE roads IS NOT NULL) AS foo").format(lnhTbl)
        time_d = None
        time_e = None

    # Execute Buffer
    bufferShp = st_buffer(
        osmLink, nroads, "bf_roads", "geometry",
        os.path.join(folder, "bf_roads.shp"),
        cols_select="roads", outTblIsFile=True, dissolve=None)
    time_f = datetime.datetime.now().replace(microsecond=0)

    # Convert to Raster
    roadsRst = shp_to_raster(
        bufferShp, None, cellsize, 0,
        os.path.join(folder, "rst_roads.tif"),
        epsg=srs, rst_template=rstT, api='gdal')
    time_g = datetime.datetime.now().replace(microsecond=0)

    # Roads class code depends on the nomenclature in use
    LULC_CLS = '1221' if nom != "GLOBE_LAND_30" else '801'

    return {int(LULC_CLS): roadsRst}, {
        0: ('count_rows_roads', time_b - time_a),
        1: ('count_rows_build', time_c - time_b),
        2: None if not time_d else ('near_analysis', time_d - time_c),
        3: None if not time_e else ('update_buffer_tbl', time_e - time_d),
        4: ('buffer_roads', time_f - time_e if time_e else time_f - time_c),
        5: ('roads_to_raster', time_g - time_f)
    }
def grs_rst_roads(osmdb, lineTbl, polyTbl, dataFolder, LULC_CLS):
    """
    Raster Roads for GRASS

    Buffer roads in the SQLite/SpatiaLite db, import the buffers into the
    current GRASS GIS session and rasterise them with value ``LULC_CLS``.
    When buildings exist, building cells are punched out of the roads
    raster (roads overlapping buildings become nodata).

    osmdb      - SQLite/SpatiaLite database;
    lineTbl    - lines table (roads);
    polyTbl    - polygons table (buildings);
    dataFolder - folder for intermediate files (currently unused — the
                 buffer is kept as a db table, see commented path below);
    LULC_CLS   - class value burned into the road cells.

    Returns ({LULC_CLS: grass_raster}, timings) or (None, timings) when
    there are no roads. NOTE(review): the result key is LULC_CLS as passed
    (not cast to int), unlike sibling functions — confirm callers expect
    this. Assumes an active GRASS GIS session.
    """

    import os
    import datetime
    from gasp.to.shp.grs import shp_to_grs, sqlite_to_shp
    from gasp.to.rst import shp_to_raster
    from gasp.sql.anls.prox import splite_buffer
    from gasp.sql.mng.tbl import row_num

    time_a = datetime.datetime.now().replace(microsecond=0)
    NR = row_num(osmdb, lineTbl, where="roads IS NOT NULL", api='sqlite')
    time_b = datetime.datetime.now().replace(microsecond=0)

    if not NR:
        return None, {0: ('count_rows_roads', time_b - time_a)}

    # Buffer all roads by their bf_roads distance into db table bfu_roads
    roadFile = splite_buffer(
        osmdb, lineTbl, "bf_roads", "geometry", 'bfu_roads',
        #os.path.join(dataFolder, 'bf_roads.gml'),
        whrClause="roads IS NOT NULL",
        outTblIsFile=None, dissolve="ALL")
    time_c = datetime.datetime.now().replace(microsecond=0)

    #roadGrs = shp_to_grs(roadFile, "bf_roads", filterByReg=True, asCMD=True)
    # Import the buffered roads from the db straight into GRASS
    roadGrs = sqlite_to_shp(osmdb, "bfu_roads", 'bf_roads', notTable=True)
    time_d = datetime.datetime.now().replace(microsecond=0)

    # Burn the LULC class into all road-buffer cells
    roadRst = shp_to_raster(
        roadGrs, int(LULC_CLS), None, None, "rst_roads", api="grass")
    time_e = datetime.datetime.now().replace(microsecond=0)

    # Builds to GRASS and to RASTER
    NB = row_num(osmdb, polyTbl, where="building IS NOT NULL", api='sqlite')
    time_f = datetime.datetime.now().replace(microsecond=0)

    if NB:
        from gasp.spanlst.algebra import rstcalc
        from gasp.spanlst.rcls import set_null, null_to_value

        buildsShp = sqlite_to_shp(
            osmdb, polyTbl, "all_builds",
            where="building IS NOT NULL", notTable=True)
        time_g = datetime.datetime.now().replace(microsecond=0)

        buildsRst = shp_to_raster(
            buildsShp, 1, None, None, "rst_builds", api="grass")
        time_h = datetime.datetime.now().replace(microsecond=0)

        # Buildings to nodata | Nodata to 0
        # Invert the builds mask: background becomes 0, building cells
        # become null — so adding it to the roads raster nulls out roads
        # wherever a building stands.
        null_to_value(buildsRst, 0, as_cmd=True)
        time_i = datetime.datetime.now().replace(microsecond=0)
        set_null(buildsRst, 1, ascmd=True)
        time_j = datetime.datetime.now().replace(microsecond=0)

        # Do the math: roads + builds | if builds and roads at the same cell
        # cell will be null in the road layer
        roadsRes = rstcalc(
            "{} + {}".format(roadRst, buildsRst), "cls_roads", api="grass")
        time_l = datetime.datetime.now().replace(microsecond=0)

        return {
            LULC_CLS: roadsRes
        }, {
            0: ('count_rows_roads', time_b - time_a),
            1: ('buffer_roads', time_c - time_b),
            2: ('import_roads', time_d - time_c),
            3: ('roads_to_rst', time_e - time_d),
            4: ('count_build', time_f - time_e),
            5: ('builds_to_grs', time_g - time_f),
            6: ('builds_to_rst', time_h - time_g),
            7: ('bnull_to_val', time_i - time_h),
            8: ('builds_to_nd', time_j - time_i),
            9: ('roads_build_mc', time_l - time_j)
        }

    else:
        return {
            LULC_CLS: roadRst
        }, {
            0: ('count_rows_roads', time_b - time_a),
            1: ('buffer_roads', time_c - time_b),
            2: ('import_roads', time_d - time_c),
            3: ('roads_to_rst', time_e - time_d),
            4: ('count_build', time_f - time_e)
        }
def get_ref_raster(refBoundBox, folder, cellsize=None):
    """
    Get Reference Raster

    Normalise ``refBoundBox`` (raster or vector file) into a raster with a
    projected SRS. Vector inputs are rasterised; inputs in a geographic SRS
    are reprojected to EPSG:3857 first.

    Returns (reference_raster_path, epsg_code). Raises ValueError on
    unsupported formats or when no EPSG code can be read.
    """

    import os

    from gasp.prop.ff import check_isRaster

    # Check if refRaster is really a Raster
    if check_isRaster(refBoundBox):
        # Raster input: just ensure a projected SRS
        from gasp.prop.rst import get_epsg_raster

        srs_code, projected = get_epsg_raster(refBoundBox, returnIsProj=True)

        if not srs_code:
            raise ValueError(
                'Cannot get epsg code from {}'.format(refBoundBox))

        if projected:
            return refBoundBox, srs_code

        # Geographic SRS — reproject raster to a projected one
        from gasp.mng.prj import gdal_reproject_raster

        reprojected = gdal_reproject_raster(
            refBoundBox, os.path.join(folder, 'refrst_3857.tif'),
            srs_code, 3857,
            cellsize=2 if not cellsize else cellsize)

        return reprojected, 3857

    # Not a raster: it must be a shapefile-like vector
    from gasp.prop.ff import check_isShp

    if not check_isShp(refBoundBox):
        raise ValueError((
            'refRaster File has an invalid file format. Please give a file '
            'with one of the following extensions: '
            'shp, gml, json, kml, tif or img'))

    # Check SRS and see if it is a projected SRS
    from gasp.prop.prj import get_epsg_shp

    srs_code, projected = get_epsg_shp(refBoundBox, returnIsProj=True)

    if not srs_code:
        raise ValueError('Cannot get epsg code from {}'.format(refBoundBox))

    if projected:
        ref_vector = refBoundBox
    else:
        # A conversion between SRS is needed
        from gasp.mng.prj import project

        ref_vector = project(
            refBoundBox, os.path.join(folder, 'tmp_ref_shp.shp'),
            outEPSG=3857, inEPSG=srs_code, gisApi='ogr2ogr')
        srs_code = 3857

    # Convert to Raster
    from gasp.to.rst import shp_to_raster

    out_raster = shp_to_raster(
        ref_vector, None, 2 if not cellsize else cellsize, -1,
        os.path.join(folder, 'ref_raster.tif'), api='gdal')

    return out_raster, srs_code