def kernel_density(pnt_feat, popField, radius, template, outRst):
    """
    Kernel density estimation. If any point is currently in selection,
    only the selected points are taken into account.
    """
    
    import os
    from gasp             import exec_cmd
    from gasp.prop.ext    import rst_ext
    from gasp.prop.rst    import get_cellsize
    from gasp.oss         import get_filename
    from gasp.to.rst.saga import saga_to_geotiff
    
    left, right, bottom, top = rst_ext(template, gisApi='gdal')
    cellsize = get_cellsize(template, gisApi='gdal')
    
    # SAGA GIS writes its own grid format; convert to GeoTiff at the end
    SAGA_RASTER = os.path.join(
        os.path.dirname(outRst),
        'saga_{}.sgrd'.format(get_filename(outRst))
    )
    
    cmd = (
        "saga_cmd grid_gridding 6 -POINTS {} -POPULATION {} "
        "-RADIUS {} -TARGET_DEFINITION 0 -TARGET_USER_SIZE {} "
        "-TARGET_USER_XMIN {} -TARGET_USER_XMAX {} "
        "-TARGET_USER_YMIN {} -TARGET_USER_YMAX {} "
        "-TARGET_OUT_GRID {}"
    ).format(
        pnt_feat, popField, str(radius), str(abs(cellsize)),
        str(left), str(right), str(bottom), str(top),
        SAGA_RASTER
    )
    
    outcmd = exec_cmd(cmd)
    
    # Convert to GeoTiff
    saga_to_geotiff(SAGA_RASTER, outRst)
    
    return outRst
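# Usage sketch for kernel_density (paths, field name and radius below are
# hypothetical, for illustration only): build a density surface from a point
# layer, weighted by a population field, aligned with a template raster.
#
# kdens = kernel_density(
#     '/data/schools.shp',       # point feature class
#     'students',                # population/weight field
#     1000,                      # search radius in map units
#     '/data/template.tif',      # raster defining extent and cellsize
#     '/data/school_density.tif'
# )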
def conditional_dependence(movs, indp):
    """
    Estimate conditional dependence between several rasters
    """
    
    import math
    from decimal import Decimal
    from gasp.prop.feat import feat_count
    from gasp.prop.rst  import get_cellsize, count_cells
    from gasp.stats.rst import frequencies
    
    def foundPredT(dic):
        # Sum of class value * number of cells in that class
        PredT = 0.0
        for value in dic.keys():
            count = dic[value]
            PredT += (float(value) * count)
        return PredT
    
    def foundTStd(dic, c):
        TVar = 0.0
        for value in dic.keys():
            count = dic[value]
            TVar += (float(value) * count * c) ** 2
        TStd = math.sqrt(TVar)
        return TStd
    
    def calcCondIndp(Z):
        # Polynomial approximation of the standard normal
        # cumulative distribution function
        pi = math.pi
        const6 = 0.2316419
        b1 =  0.31938153
        b2 = -0.356563782
        b3 =  1.781477937
        b4 = -1.821255978
        b5 =  1.330274429
        Z = float(Z)
        X = Z if Z >= 0.0 else -Z
        t = 1.0 / (const6 * X + 1.0)
        pid = 2.0 * pi
        XX = math.exp(-X * X / 2.0) / math.sqrt(pid)
        PZ = (b1 * t) + (b2 * t * t) + \
             (b3 * (t ** 3)) + (b4 * (t ** 4)) + (b5 * (t ** 5))
        PZ = 1.0 - (PZ * XX)
        if Z < 0:
            PZ = 1.0 - PZ
        return PZ
    
    # Count phenomena occurrences - total number of landslides
    ExpNumTP = Decimal(feat_count(movs, gisApi='ogr'))
    
    # Count the number of raster cells
    NrCell = count_cells(indp[0])
    
    # Get the cellsize of the rasters
    cellsize = Decimal(get_cellsize(indp[0], gisApi='gdal'))
    
    # Calculate UnitArea (Decimal is used consistently here, since mixing
    # Decimal and float operands raises a TypeError)
    area_km  = (cellsize * cellsize * NrCell) / Decimal(1000000)
    UnitArea = (area_km / ExpNumTP) / Decimal(40)
    ConvFac  = float((cellsize ** 2) / Decimal(1000000) / UnitArea)
    
    # For each raster, count the number of cells in each class
    sudoDic = {var: frequencies(var) for var in indp}
    
    # Calculate Conditional Dependence for every pair of rasters
    l_overall, l_ric, l_tac = {}, {}, {}
    for rst in indp:
        # Calculate PredT
        PredT = foundPredT(sudoDic[rst]) * ConvFac
        
        for _rst_ in indp:
            # Calculate TStd
            TStd = foundTStd(sudoDic[_rst_], ConvFac)
            TS = (PredT - float(ExpNumTP)) / TStd
            
            n = float(ExpNumTP)
            T = PredT
            P = calcCondIndp(TS) * 100.0
            
            if P > 50.0:
                overallCI = 100.0 * (100.0 - P) / 50.0
            else:
                overallCI = 100.0 * (100.0 - (50.0 + (50.0 - P))) / 50.0
            
            l_overall[(rst, _rst_)] = "%.2f" % overallCI
            
            ric = n / T
            l_ric[(rst, _rst_)] = "%.2f" % ric
            l_tac[(rst, _rst_)] = "%.2f" % P
    
    return {'OVERALL': l_overall, 'RIC': l_ric, 'TAC': l_tac}
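# Usage sketch for conditional_dependence (hypothetical inputs): a landslide
# inventory plus a list of classified predictor rasters with identical extent
# and cellsize. Keys of the returned dictionaries are (raster, raster) pairs.
#
# ci = conditional_dependence(
#     '/data/landslides.shp',
#     ['/data/slope_cls.tif', '/data/lithology_cls.tif'])
# print(ci['TAC'][('/data/slope_cls.tif', '/data/lithology_cls.tif')])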
def infovalue(landslides, variables, iv_rst, dataEpsg):
    """
    Informative Value using GDAL Library
    """
    
    import os
    import math
    import numpy
    from osgeo import gdal
    from gasp.fm.rst    import rst_to_array
    from gasp.fm        import tbl_to_obj
    from gasp.prop.feat import get_geom_type
    from gasp.prop.rst  import rst_shape
    from gasp.prop.rst  import count_cells
    from gasp.prop.rst  import get_cellsize
    from gasp.stats.rst import frequencies
    from gasp.oss.ops   import create_folder
    from gasp.to.rst    import array_to_raster
    
    # Create workspace for temporary files
    workspace = create_folder(os.path.join(
        os.path.dirname(landslides), 'tmp'))
    
    # Get the shape of the variable rasters and check that they all agree
    varShapes = rst_shape(variables, gisApi='gdal')
    for i in range(1, len(variables)):
        if varShapes[variables[i - 1]] != varShapes[variables[i]]:
            raise ValueError((
                'All rasters must have the same dimension! '
                'Raster {} and Raster {} do not have the same shape!'
            ).format(variables[i - 1], variables[i]))
    
    # See if landslides are raster or not
    # Try to open as raster
    try:
        land_rst = rst_to_array(landslides)
        lrows, lcols = land_rst.shape
        
        if [lrows, lcols] != varShapes[variables[0]]:
            raise ValueError((
                "Raster with Landslides ({}) must have the same "
                "dimension as the Variable Rasters").format(landslides))
        
        land_raster = landslides
    
    except:
        # Landslides are not Raster
        # Open as Feature Class
        # See if it is Point or Polygon
        land_df = tbl_to_obj(landslides)
        geomType = get_geom_type(land_df, geomCol="geometry",
                                 gisApi='pandas')
        
        if geomType == 'Polygon' or geomType == 'MultiPolygon':
            # It will be converted to raster below
            land_poly = landslides
        
        elif geomType == 'Point' or geomType == 'MultiPoint':
            # Do a Buffer
            from gasp.anls.prox.bf import geodf_buffer_to_shp
            land_poly = geodf_buffer_to_shp(
                land_df, 100,
                os.path.join(workspace, 'landslides_buffer.shp'))
        
        # Convert to Raster
        from gasp.to.rst import shp_to_raster
        land_raster = shp_to_raster(
            land_poly, None, get_cellsize(variables[0], gisApi='gdal'),
            -9999, os.path.join(workspace, 'landslides_rst.tif'),
            rst_template=variables[0], api='gdal')
        
        land_rst = rst_to_array(land_raster)
    
    # Get the total number of cells and the number of cells with landslides
    landsldCells = frequencies(land_raster)[1]
    totalCells = count_cells(variables[0])
    
    # Get the number of cells of each class in every variable
    freqVar = {r: frequencies(r) for r in variables}
    
    for rst in freqVar:
        # Re-key class 0 as -1 (0 cannot be used as a class code because
        # numpy.place below uses 0/-1 as flag values); iterate over a copy
        # of the keys since the dictionary is mutated
        for cls in list(freqVar[rst].keys()):
            if cls == 0:
                freqVar[rst][-1] = freqVar[rst][cls]
                del freqVar[rst][cls]
    
    # Get the number of cells with landslides in each class
    varArray = {r: rst_to_array(r) for r in variables}
    for r in varArray:
        numpy.place(varArray[r], varArray[r] == 0, -1)
    
    landArray = {r: land_rst * varArray[r] for r in varArray}
    freqLndVar = {r: frequencies(landArray[r]) for r in landArray}
    
    # Estimate the Informative Value of each class of every variable
    vi = {}
    for var in freqVar:
        vi[var] = {}
        for cls in freqVar[var]:
            if cls in freqLndVar[var]:
                vi[var][cls] = math.log10(
                    (float(freqLndVar[var][cls]) / freqVar[var][cls]) /
                    (float(landsldCells) / totalCells))
            else:
                vi[var][cls] = 9999
    
    # Replace classes without VI (flagged as 9999) with the minimum VI
    vis = []
    for d in vi.values():
        vis += list(d.values())
    
    min_vi = min(vis)
    
    for r in vi:
        for cls in vi[r]:
            if vi[r][cls] == 9999:
                vi[r][cls] = min_vi
    
    # Replace cls by vi in rst_arrays
    resultArrays = {v: numpy.zeros(varArray[v].shape) for v in varArray}
    for v in varArray:
        numpy.place(resultArrays[v], resultArrays[v] == 0, -128)
    
    for v in varArray:
        for cls in vi[v]:
            numpy.place(resultArrays[v], varArray[v] == cls, vi[v][cls])
    
    # Sum all arrays and save the result as raster
    vi_rst = resultArrays[variables[0]] + resultArrays[variables[1]]
    for v in range(2, len(variables)):
        vi_rst = vi_rst + resultArrays[variables[v]]
    
    # Cells that are NoData in every variable stay NoData in the result
    numpy.place(vi_rst, vi_rst == len(variables) * -128, -128)
    
    result = array_to_raster(
        vi_rst, iv_rst, variables[0], dataEpsg,
        gdal.GDT_Float32, noData=-128, gisApi='gdal')
    
    return iv_rst
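# Usage sketch for infovalue (hypothetical inputs): a landslide layer
# (raster, polygon or point) and classified predictor rasters that share
# shape and cellsize; the EPSG code must match the data.
#
# infovalue(
#     '/data/landslides.shp',
#     ['/data/slope_cls.tif', '/data/aspect_cls.tif'],
#     '/data/informative_value.tif', 3763)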
def gdal_slope(dem, srs, slope, unit='DEGREES'):
    """
    Create Slope Raster
    
    TODO: Test and see if it is running correctly
    """
    
    import numpy
    import math
    from osgeo import gdal
    from scipy.ndimage import convolve
    from gasp.fm.rst   import rst_to_array
    from gasp.prop.rst import get_cellsize, get_nodata
    from gasp.to.rst   import array_to_raster
    
    # ################ #
    # Global Variables #
    # ################ #
    cellsize = get_cellsize(dem, gisApi='gdal')
    # Get NoData Value
    NoData = get_nodata(dem, gisApi='gdal')
    
    # #################### #
    # Produce Slope Raster #
    # #################### #
    # Get Elevation array
    arr_dem = rst_to_array(dem)
    
    # We need an array with the number of neighbour cells that have values
    with_data = numpy.zeros((arr_dem.shape[0], arr_dem.shape[1]))
    numpy.place(with_data, arr_dem != NoData, 1.0)
    
    mask = numpy.array([[1, 1, 1],
                        [1, 0, 1],
                        [1, 1, 1]])
    arr_neigh = convolve(with_data, mask, mode='constant')
    numpy.place(arr_dem, arr_dem == NoData, 0.0)
    
    # The rate of change in the x direction for the center cell e is:
    kernel_dz_dx_left = numpy.array([[0, 0, 1],
                                     [0, 0, 2],
                                     [0, 0, 1]])
    kernel_dz_dx_right = numpy.array([[1, 0, 0],
                                      [2, 0, 0],
                                      [1, 0, 0]])
    dz_dx = (
        convolve(arr_dem, kernel_dz_dx_left, mode='constant') -
        convolve(arr_dem, kernel_dz_dx_right, mode='constant')
    ) / (arr_neigh * cellsize)
    
    # The rate of change in the y direction for cell e is:
    kernel_dz_dy_left = numpy.array([[0, 0, 0],
                                     [0, 0, 0],
                                     [1, 2, 1]])
    kernel_dz_dy_right = numpy.array([[1, 2, 1],
                                      [0, 0, 0],
                                      [0, 0, 0]])
    dz_dy = (
        convolve(arr_dem, kernel_dz_dy_left, mode='constant') -
        convolve(arr_dem, kernel_dz_dy_right, mode='constant')
    ) / (arr_neigh * cellsize)
    
    # Taking the rate of change in the x and y directions, the slope
    # for the center cell e is calculated using:
    rise_run = ((dz_dx) ** 2 + (dz_dy) ** 2) ** 0.5
    if unit == 'DEGREES':
        arr_slope = numpy.arctan(rise_run) * 57.29578
    elif unit == 'PERCENT_RISE':
        arr_slope = numpy.tan(numpy.arctan(rise_run)) * 100.0
    else:
        raise ValueError('unit must be DEGREES or PERCENT_RISE')
    
    # Estimate the slope of the cells with fewer than 8 neighbours
    aux_dem = rst_to_array(dem)
    index_vizinhos = numpy.where(arr_neigh < 8)
    for idx in range(len(index_vizinhos[0])):
        # Get the value of the cell and of its 3x3 neighbourhood
        # (a..i, row by row; missing/NoData neighbours fall back to e)
        lnh = index_vizinhos[0][idx]
        col = index_vizinhos[1][idx]
        e = aux_dem[lnh][col]
        
        a = aux_dem[lnh - 1][col - 1]
        if a == NoData:
            a = e
        if lnh == 0 or col == 0:
            a = e
        
        b = aux_dem[lnh - 1][col]
        if b == NoData:
            b = e
        if lnh == 0:
            b = e
        
        try:
            c = aux_dem[lnh - 1][col + 1]
            if c == NoData:
                c = e
            if lnh == 0:
                c = e
        except IndexError:
            c = e
        
        d = aux_dem[lnh][col - 1]
        if d == NoData:
            d = e
        if col == 0:
            d = e
        
        try:
            f = aux_dem[lnh][col + 1]
            if f == NoData:
                f = e
        except IndexError:
            f = e
        
        try:
            g = aux_dem[lnh + 1][col - 1]
            if g == NoData:
                g = e
            if col == 0:
                g = e
        except IndexError:
            g = e
        
        try:
            h = aux_dem[lnh + 1][col]
            if h == NoData:
                h = e
        except IndexError:
            h = e
        
        try:
            i = aux_dem[lnh + 1][col + 1]
            if i == NoData:
                i = e
        except IndexError:
            i = e
        
        dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize)
        dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize)
        rise_run = ((dz_dx) ** 2 + (dz_dy) ** 2) ** 0.5
        
        if unit == 'DEGREES':
            arr_slope[lnh][col] = math.atan(rise_run) * 57.29578
        elif unit == 'PERCENT_RISE':
            arr_slope[lnh][col] = math.tan(math.atan(rise_run)) * 100.0
    
    # Restore NoData in the cells that had no data originally
    numpy.place(arr_slope, aux_dem == NoData, numpy.nan)
    
    array_to_raster(
        arr_slope, slope, dem, srs, cellsize, gdal.GDT_Float64,
        gisApi='gdal'
    )
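# Usage sketch for gdal_slope (hypothetical paths): derive a slope raster in
# degrees from a DEM; srs is the EPSG code of the DEM.
#
# gdal_slope('/data/dem.tif', 3763, '/data/slope.tif', unit='DEGREES')
#
# The 3x3 kernels above implement Horn's method: for a window
#   a b c
#   d e f
#   g h i
# dz/dx = ((c + 2f + i) - (a + 2d + g)) / (8 * cellsize)
# dz/dy = ((g + 2h + i) - (a + 2b + c)) / (8 * cellsize)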
def cost_surface(dem, lulc, cls_lulc, prod_lulc, roads, kph, barr,
                 grass_location, output, grass_path=None):
    """
    Tool to create a cost surface based on the roads, slope, land use and
    physical barriers. Each cell gets a value that represents the
    resistance to movement.
    """
    
    import os
    from gasp.oss.ops  import create_folder
    from gasp.os       import os_name
    from gasp.session  import run_grass
    from gasp.prop.rst import get_cellsize
    from gasp.prop.rst import rst_distinct
    from .constants    import lulc_weight
    from .constants    import get_slope_categories
    
    """
    Auxiliary Methods
    """
    def edit_lulc(shp, fld_cls, new_cls):
        # The GRASS conversion/table tools (shp_to_grs, add_field,
        # update_table) are imported into the enclosing scope below,
        # before this helper is called
        shp_to_grs(shp, 'lulc')
        
        # Add a legend field and fill it according to the class
        # mapping in new_cls
        add_field('lulc', 'leg', 'INT')
        for key in new_cls.keys():
            l = new_cls[key]['cls']
            sql = " OR ".join(["{campo}='{value}'".format(
                campo=fld_cls, value=i) for i in l])
            update_table('lulc', 'leg', int(key), sql)
        
        return {'shp': 'lulc', 'fld': 'leg'}
    
    def combine_to_cost(rst_combined, lst_rst, work, slope_weight,
                        rdv_cos_weight, cellsize, mode_movement):
        # The r.report tool doesn't work properly on its own, so we need
        # some additional information: the minimum category of each
        # combined raster
        l = []
        for i in lst_rst:
            grs_to_rst(i, os.path.join(work, i + '.tif'))
            values = rst_distinct(
                os.path.join(work, i + '.tif'), gisApi='gdal')
            l.append(min(values))
        
        # Now we can proceed normally: parse the report to map every
        # combined category to its (slope class, roads/lulc class) pair
        txt_file = os.path.join(work, 'text_combine.txt')
        raster_report(rst_combined, txt_file)
        
        open_txt = open(txt_file, 'r')
        c = 0
        dic_combine = {}
        for line in open_txt.readlines():
            try:
                if c == 4:
                    dic_combine[0] = [str(l[0]), str(l[1])]
                elif c >= 5:
                    pl = line.split('|')
                    cat = pl[2].split('; ')
                    cat1 = cat[0].split(' ')
                    cat2 = cat[1].split(' ')
                    dic_combine[int(pl[1])] = [cat1[1], cat2[1]]
                c += 1
            except:
                break
        
        cst_dic = {}
        for key in dic_combine.keys():
            cls_slope = int(dic_combine[key][0])
            cos_vias  = int(dic_combine[key][1])
            
            if cos_vias >= 6:
                # Cell is on a road: the cost depends on the speed
                # (a fixed 5 km/h for pedestrians)
                weight4slope = slope_weight[cls_slope]['rdv']
                if mode_movement == 'pedestrian':
                    weight4other = (3600.0 * cellsize) / (5.0 * 1000.0)
                else:
                    weight4other = (3600.0 * cellsize) / (cos_vias * 1000.0)
            else:
                weight4slope = slope_weight[cls_slope]['cos']
                weight4other = rdv_cos_weight[cos_vias]['weight']
            
            # Scale to large integers for r.reclass
            cst_dic[key] = (weight4slope * weight4other) * 10000000.0
        
        return cst_dic
    
    def Rules4CstSurface(dic, work):
        txt = open(os.path.join(work, 'cst_surface.txt'), 'w')
        for key in dic.keys():
            txt.write('{cat} = {cst}\n'.format(
                cat=str(key), cst=str(dic[key])))
        txt.close()
        return os.path.join(work, 'cst_surface.txt')
    
    """
    Prepare GRASS GIS Environment
    """
    workspace = os.path.dirname(grass_location)
    location  = os.path.basename(grass_location)
    
    # Start GRASS GIS Engine
    grass_base = run_grass(workspace, location, dem, win_path=grass_path)
    import grass.script as grass
    import grass.script.setup as gsetup
    gsetup.init(grass_base, workspace, location, 'PERMANENT')
    
    # Import GRASS GIS Modules
    from gasp.cpu.grs         import grass_converter
    from gasp.spanlst.surf    import slope
    from gasp.spanlst.rcls    import reclassify
    from gasp.spanlst.rcls    import interval_rules
    from gasp.spanlst.rcls    import category_rules
    from gasp.spanlst.rcls    import grass_set_null
    from gasp.mng.grstbl      import add_field, update_table
    from gasp.anls.ovlay      import union
    from gasp.to.rst          import rst_to_grs, grs_to_rst
    from gasp.to.rst          import shp_to_raster
    from gasp.to.shp.grs      import shp_to_grs
    from gasp.cpu.grs.spanlst import mosaic_raster
    from gasp.spanlst.local   import combine
    from gasp.spanlst.algebra import rstcalc
    from gasp.cpu.grs.spanlst import raster_report
    
    """Global variables"""
    # Workspace for temporary files
    wTmp = create_folder(os.path.join(workspace, 'tmp'))
    # Cellsize
    cellsize = float(get_cellsize(dem, gisApi='gdal'))
    # Land Use Land Cover weights
    lulcWeight = lulc_weight(prod_lulc, cellsize)
    # Slope classes and weights
    slope_cls = get_slope_categories()
    
    """Make Cost Surface"""
    # Generate slope raster
    rst_to_grs(dem, 'dem')
    slope('dem', 'rst_slope', api="pygrass")
    # Reclassify Slope
    rulesSlope = interval_rules(slope_cls, os.path.join(wTmp, 'slope.txt'))
    reclassify('rst_slope', 'recls_slope', rulesSlope)
    
    # LULC - Dissolve, union with barriers and conversion to raster
    lulc_shp = edit_lulc(lulc, cls_lulc, lulcWeight)
    shp_to_grs(barr, 'barriers')
    union(lulc_shp['shp'], 'barriers', 'barrcos', api_gis="grass")
    update_table('barrcos', 'a_' + lulc_shp['fld'], 99, 'b_cat=1')
    shp_to_raster('barrcos', 'a_' + lulc_shp['fld'], None, None,
                  'rst_barrcos', api='pygrass')
    # Reclassify this raster - convert the value 99 to NULL/NODATA
    grass_set_null('rst_barrcos', 99)
    
    # Add the roads layer to GRASS GIS
    shp_to_grs(roads, 'rdv')
    if kph == 'pedestrian':
        add_field('rdv', 'foot', 'INT')
        update_table('rdv', 'foot', 50, 'foot IS NULL')
        shp_to_raster('rdv', 'foot', None, None, 'rst_rdv', api='pygrass')
    else:
        shp_to_raster('rdv', kph, None, None, 'rst_rdv', api='pygrass')
    
    # Merge LULC/BARR and Roads
    mosaic_raster('rst_rdv', 'rst_barrcos', 'rdv_barrcos')
    
    # Combine LULC/BARR/ROADS with Slope
    combine('recls_slope', 'rdv_barrcos', 'rst_combine', api="pygrass")
    
    """
    Estimate the cost of every combination in rst_combine
    
    The order of the rasters in the following list has to match the order
    used in the GRASS combine
    """
    cst = combine_to_cost(
        'rst_combine', ['recls_slope', 'rdv_barrcos'], wTmp,
        slope_cls, lulcWeight, cellsize, kph)
    
    # Reclassify the combined raster and scale the costs back down
    rulesSurface = category_rules(cst, os.path.join(wTmp, 'r_surface.txt'))
    reclassify('rst_combine', 'cst_tmp', rulesSurface)
    rstcalc('cst_tmp / 10000000.0', 'cst_surface', api='pygrass')
    
    grs_to_rst('cst_surface', output)
    
    return output
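# Usage sketch for cost_surface (hypothetical paths and parameters): build a
# resistance surface from a DEM, a land cover layer, roads with a speed
# field, and a barrier layer, inside a new GRASS GIS location.
#
# cost_surface(
#     '/data/dem.tif', '/data/lulc.shp', 'CLS_FIELD', 2,
#     '/data/roads.shp', 'SPEED_KPH', '/data/barriers.shp',
#     '/grassdata/cstloc', '/data/cost_surface.tif')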
def cstDistance_with_motorway(cst_surface, motorway, fld_motorway,
                              nodes_start, nodes_end, pnt_destiny,
                              grass_location, isolines):
    """
    Produce a surface representing the accumulated cost of each cell to a
    destination point, considering the false intersections caused by a
    non-planar graph
    
    NOTE: assumes a GRASS GIS session bound to grass_location is already
    active (as set up by cost_surface, for instance)
    """
    
    import os
    from osgeo import ogr
    from gasp.oss.ops           import create_folder
    from gasp.prop.ff           import drv_name
    from gasp.prop.rst          import get_cellsize
    from gasp.cpu.grs.spanlst   import rseries
    from gasp.spanlst.algebra   import rstcalc
    from gasp.spanlst.dist      import rcost
    from gasp.to.rst            import rst_to_grs, grs_to_rst
    from gasp.to.rst            import shp_to_raster
    from gasp.to.shp.grs        import shp_to_grs
    from gasp.cpu.gdl.sampling  import gdal_values_to_points
    from pysage.tools_thru_api.gdal.ogr import OGR_CreateNewShape
    
    """
    Auxiliary Methods
    """
    def dist_to_nodes(pnt_shp, cstSurface, string, w):
        # For each point in pnt_shp, write a single-point shapefile,
        # import it into GRASS GIS and compute the cost-distance surface
        # from that point over cstSurface
        nodes = ogr.GetDriverByName(drv_name(pnt_shp)).Open(pnt_shp, 0)
        nodesLyr = nodes.GetLayer()
        
        c = 0
        dicNodes = {}
        for pnt in nodesLyr:
            geom = pnt.GetGeometryRef()
            point = geom.ExportToWkb()
            OGR_CreateNewShape(
                drv_name(pnt_shp),
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                ogr.wkbPoint, [point]
            )
            shp_to_grs(
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                '{pnt}_{o}'.format(pnt=string, o=str(c))
            )
            rcost(
                cstSurface,
                '{pnt}_{o}'.format(pnt=string, o=str(c)),
                'cst_{pnt}_{a}'.format(pnt=string, a=str(c))
            )
            dicNodes['{pnt}_{o}'.format(pnt=string, o=str(c))] = [
                os.path.join(w, '{pnt}_{o}.shp'.format(pnt=string, o=str(c))),
                'cst_{pnt}_{a}'.format(pnt=string, a=str(c))
            ]
            c += 1
        
        return dicNodes
    
    """GRASS GIS Configuration"""
    # Workspace for temporary files
    wTmp = create_folder(os.path.join(
        os.path.dirname(grass_location), 'tmp'))
    
    """Make Accessibility Map"""
    # Add Cost Surface to GRASS GIS
    rst_to_grs(cst_surface, 'cst_surface')
    # Add Destination to GRASS GIS
    shp_to_grs(pnt_destiny, 'destination')
    
    # Run r.cost with only the secondary roads network
    rcost('cst_surface', 'destination', 'cst_dist_secun')
    
    # We have to know if the path through the motorway implies a lower cost.
    # Add primary roads to GRASS GIS
    shp_to_grs(motorway, 'rdv_prim')
    
    # We need a cost surface only with the cost of motorway roads
    shp_to_raster('rdv_prim', fld_motorway, None, None, 'rst_rdv',
                  api='pygrass')
    rstcalc(
        '(3600.0 * {cs}) / (rst_rdv * 1000.0)'.format(
            cs=get_cellsize(cst_surface, gisApi='gdal')),
        'cst_motorway', api='grass')
    
    # For each node of entrance into the motorway, we need to know:
    # - the distance to the entrance node;
    # - the distance between the entrance and every exit node;
    # - the distance between the exit and the destination.
    # Getting the distance to every entrance node
    entranceNodes = dist_to_nodes(nodes_start, 'cst_surface', 'start', wTmp)
    # Getting the distance to every exit node
    exitNodes = dist_to_nodes(nodes_end, 'cst_surface', 'exit', wTmp)
    
    # Getting the values needed
    lst_outputs = []
    for start_pnt in entranceNodes.keys():
        for exit_pnt in exitNodes.keys():
            # Cost, over the motorway, from this exit node
            rcost(
                'cst_motorway', exit_pnt,
                'cst2exit_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
            grs_to_rst(
                'cst2exit_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])),
                os.path.join(wTmp, 'cst2exit_{a}_{b}.tif'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
            )
            
            # Cost between this entrance and this exit
            cst_start_exit = gdal_values_to_points(
                entranceNodes[start_pnt][0],
                os.path.join(wTmp, 'cst2exit_{a}_{b}.tif'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
            )
            
            # Cost between this exit and the destination
            if not os.path.isfile(
                    os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif')):
                grs_to_rst(
                    exitNodes[exit_pnt][1],
                    os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif'))
            
            cst_exit_destination = gdal_values_to_points(
                pnt_destiny,
                os.path.join(wTmp, exitNodes[exit_pnt][1] + '.tif'))
            
            # Total cost of the path through the motorway
            rstcalc(
                '{rst} + {a} + {b}'.format(
                    rst=entranceNodes[start_pnt][1],
                    a=str(cst_start_exit[0]),
                    b=str(min(cst_exit_destination))),
                'cst_path_{a}_{b}'.format(
                    a=str(start_pnt[-1]), b=str(exit_pnt[-1])),
                api='grass')
            
            lst_outputs.append('cst_path_{a}_{b}'.format(
                a=str(start_pnt[-1]), b=str(exit_pnt[-1])))
    
    lst_outputs.append('cst_dist_secun')
    
    # For each cell, keep the cheapest of all alternatives
    rseries(lst_outputs, 'isocronas', 'minimum')
    grs_to_rst('isocronas', isolines)
    
    return isolines
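# Usage sketch for cstDistance_with_motorway (hypothetical paths): combine a
# local cost surface with a motorway network whose entrances/exits are given
# as point layers, producing a minimum accumulated-cost surface towards a
# destination.
#
# cstDistance_with_motorway(
#     '/data/cost_surface.tif', '/data/motorway.shp', 'SPEED_KPH',
#     '/data/entrances.shp', '/data/exits.shp', '/data/destination.shp',
#     '/grassdata/cstloc', '/data/acc_cost.tif')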
def update_globe_land_cover(original_globe_raster, osm_urban_atlas_raster,
                            osm_globe_raster, epsg, updated_globe_raster,
                            detailed_globe_raster):
    """
    Update the original Globe Land 30 with the result of the conversion of
    OSM data to the Globe Land Cover nomenclature;
    
    Also updates the previously updated Globe Land 30 with the result of
    the conversion of OSM data to the Urban Atlas nomenclature
    """
    
    import os
    import numpy as np
    from osgeo import gdal
    from gasp.fm.rst   import rst_to_array
    from gasp.prop.rst import get_cellsize
    from gasp.prop.rst import get_nodata
    from gasp.to.rst   import array_to_raster
    
    # ############################# #
    # Convert images to numpy array #
    # ############################# #
    np_globe_original = rst_to_array(original_globe_raster)
    np_globe_osm      = rst_to_array(osm_globe_raster)
    np_ua_osm         = rst_to_array(osm_urban_atlas_raster)
    
    # ################################## #
    # Check the dimension of both images #
    # ################################## #
    if np_globe_original.shape != np_globe_osm.shape:
        return ('The Globe Land 30 raster (original) does not have the same '
                'number of columns/lines as the Globe Land 30 raster '
                'derived from OSM data')
    elif np_globe_original.shape != np_ua_osm.shape:
        return ('The Globe Land 30 raster (original) does not have the same '
                'number of columns/lines as the Urban Atlas raster derived '
                'from OSM data')
    elif np_globe_osm.shape != np_ua_osm.shape:
        return ('The Globe Land 30 raster derived from OSM data does not '
                'have the same number of columns/lines as the Urban Atlas '
                'raster derived from OSM data')
    
    # ############## #
    # Check Cellsize #
    # ############## #
    cell_of_rsts = get_cellsize(
        [original_globe_raster, osm_globe_raster, osm_urban_atlas_raster],
        xy=True, gisApi='gdal')
    cell_globe_original = cell_of_rsts[original_globe_raster]
    cell_globe_osm      = cell_of_rsts[osm_globe_raster]
    cell_ua_osm         = cell_of_rsts[osm_urban_atlas_raster]
    
    if cell_globe_original != cell_globe_osm:
        return ('The cellsize of the Globe Land 30 raster (original) is not '
                'the same as the Globe Land 30 raster derived from OSM data')
    elif cell_globe_original != cell_ua_osm:
        return ('The cellsize of the Globe Land 30 raster (original) is not '
                'the same as the Urban Atlas raster derived from OSM data')
    elif cell_ua_osm != cell_globe_osm:
        return ('The cellsize of the Globe Land 30 raster derived from OSM '
                'data is not the same as the Urban Atlas raster derived '
                'from OSM data')
    
    # ############################# #
    # Get the Value of NoData Cells #
    # ############################# #
    nodata_glob_original = get_nodata(original_globe_raster, gisApi='gdal')
    nodata_glob_osm      = get_nodata(osm_globe_raster, gisApi='gdal')
    nodata_ua_osm        = get_nodata(osm_urban_atlas_raster, gisApi='gdal')
    
    # ######################################## #
    # Create a new map - Globe Land 30 Updated #
    # ######################################## #
    """
    Create a new array filled with zeros...
    
    1) The zeros will be replaced by the values of the Globe Land raster
    derived from OSM.
    
    2) The remaining zeros will be replaced by the values of the original
    Globe Land at the cells with NULL data in the Globe Land derived
    from OSM.
    The meta array identifies the origin of each value in the updated
    raster:
    1 - Original Raster
    2 - OSM Derived Raster
    """
    update_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))
    update_meta_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))
    
    # 1)
    np.copyto(update_array, np_globe_osm, 'no',
              np_globe_osm != nodata_glob_osm)
    # 1) meta
    np.place(update_meta_array, update_array != 0, 2)
    # 2) meta
    np.place(update_meta_array, update_array == 0, 1)
    # 2)
    np.copyto(update_array, np_globe_original, 'no', update_array == 0)
    # 2) meta
    np.place(update_meta_array, update_array == nodata_glob_original,
             int(nodata_glob_original))
    # noData to int
    np.place(update_array, update_array == nodata_glob_original,
             int(nodata_glob_original))
    
    updated_meta = os.path.join(
        os.path.dirname(updated_globe_raster),
        '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(updated_globe_raster))[0],
            e=os.path.splitext(os.path.basename(updated_globe_raster))[1]
        )
    )
    # Create Updated Globe Cover 30
    array_to_raster(
        update_array, updated_globe_raster, original_globe_raster, epsg,
        gdal.GDT_Int32, noData=int(nodata_glob_original), gisApi='gdal')
    # Create Updated Globe Cover 30 meta
    array_to_raster(
        update_meta_array, updated_meta, original_globe_raster, epsg,
        gdal.GDT_Int32, noData=int(nodata_glob_original), gisApi='gdal')
    
    # ################################################# #
    # Create a new map - Globe Land 30 Detailed with UA #
    # ################################################# #
    np_update = rst_to_array(updated_globe_raster)
    
    detailed_array = np.zeros((np_update.shape[0], np_update.shape[1]))
    detailed_meta_array = np.zeros((np_update.shape[0], np_update.shape[1]))
    
    """
    Replace class 80 of Globe Land by classes 11, 12, 13, 14 of
    the Urban Atlas
    
    The meta array identifies the origin of each value in the detailed
    raster:
    1 - Updated Raster
    2 - UA Raster derived from OSM
    """
    # Globe - maintain some classes
    np.place(detailed_array, np_update == 30, 8)
    np.place(detailed_meta_array, np_update == 30, 1)
    
    np.place(detailed_array, np_update == 40, 9)
    np.place(detailed_meta_array, np_update == 40, 1)
    
    np.place(detailed_array, np_update == 50, 10)
    np.place(detailed_meta_array, np_update == 50, 1)
    
    np.place(detailed_array, np_update == 10, 5)
    np.place(detailed_meta_array, np_update == 10, 1)
    
    # Water bodies: UA class 50 or Globe class 60
    np.place(detailed_array,
             np.logical_or(np_ua_osm == 50, np_update == 60), 7)
    np.place(detailed_meta_array,
             np.logical_or(np_ua_osm == 50, np_update == 60), 1)
    
    # Urban - Where Urban Atlas IS NOT NULL
    np.place(detailed_array, np_ua_osm == 11, 1)
    np.place(detailed_meta_array, np_ua_osm == 11, 2)
    
    np.place(detailed_array, np_ua_osm == 12, 2)
    np.place(detailed_meta_array, np_ua_osm == 12, 2)
    
    np.place(detailed_array, np_ua_osm == 13, 3)
    np.place(detailed_meta_array, np_ua_osm == 13, 2)
    
    np.place(detailed_array, np_ua_osm == 14, 4)
    np.place(detailed_meta_array, np_ua_osm == 14, 2)
    
    # Urban Atlas - Class 30 to 6
    np.place(detailed_array, np_ua_osm == 30, 6)
    np.place(detailed_meta_array, np_ua_osm == 30, 2)
    
    # Create Detailed Globe Cover 30
    array_to_raster(
        detailed_array, detailed_globe_raster, original_globe_raster,
        epsg, gdal.GDT_Int32, noData=0, gisApi='gdal')
    
    # Create Detailed Globe Cover 30 meta
    detailed_meta = os.path.join(
        os.path.dirname(detailed_globe_raster),
        '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(detailed_globe_raster))[0],
            e=os.path.splitext(os.path.basename(detailed_globe_raster))[1]
        )
    )
    array_to_raster(
        detailed_meta_array, detailed_meta, original_globe_raster, epsg,
        gdal.GDT_Int32, noData=0, gisApi='gdal')
    
    return updated_globe_raster, detailed_globe_raster
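# Usage sketch for update_globe_land_cover (hypothetical paths): all three
# input rasters must share shape and cellsize; two new rasters (plus their
# *_meta companions) are written.
#
# update_globe_land_cover(
#     '/data/globe30_original.tif', '/data/osm_ua.tif',
#     '/data/osm_globe30.tif', 3857,
#     '/data/globe30_updated.tif', '/data/globe30_detailed.tif')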
def GDAL_Hidric_Balance(meta_file=None):
    """
    Hydric balance estimation from monthly temperature and precipitation
    rasters, following a Thornthwaite-style formulation
    """
    
    import os
    import json
    import numpy as np
    from osgeo import gdal
    from gasp.fm.rst   import rst_to_array
    from gasp.to.rst   import array_to_raster
    from gasp.prop.rst import get_cellsize
    
    if not meta_file:
        meta_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'HidricBalance_example.json')
    
    def DecodeJson(json_file):
        with open(json_file, 'r') as t:
            d = json.load(t)
        return d
    
    def SomaRstOnLst(l):
        # Cumulative sum of a list of arrays; returns the total
        for i in range(1, len(l)):
            l[i] = l[i] + l[i - 1]
        return l[-1]
    
    def indexCaloricoAnual(tempMensal):
        # Annual heat index: sum of the monthly indexes (T/5)^1.514
        lst_ICM = []
        for rst in tempMensal:
            rst_array = rst_to_array(rst)
            rst_icm = (rst_array / 5.0) ** 1.514
            lst_ICM.append(rst_icm)
        ica = SomaRstOnLst(lst_ICM)
        return ica
    
    def EvapotranspiracaoPotencial(tMensal, ICAnual, insolacao):
        dias_mes = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
        a = 0.492 + (0.0179 * ICAnual) - (0.0000771 * ICAnual ** 2) + \
            (0.000000675 * ICAnual ** 3)
        
        lst_k = []
        ETP_value = []
        for mes in range(len(dias_mes)):
            # Correction factor for day length and number of days
            k = (float(insolacao[mes]) * float(dias_mes[mes])) / 360.0
            lst_k.append(k)
        
        for raster in range(len(tMensal)):
            rst_array = rst_to_array(tMensal[raster])
            etp = 16.0 * ((10.0 * rst_array / ICAnual) ** a)
            ETP = etp * lst_k[raster]
            ETP_value.append(ETP)
        
        return ETP_value
    
    def DefClimatico(precipitacao, EvapoT_Potencial):
        Exd_Hid = []
        dClimaC = []
        for raster in range(len(precipitacao)):
            rst_array = rst_to_array(precipitacao[raster])
            excedente_hidrico = rst_array - EvapoT_Potencial[raster]
            Exd_Hid.append(excedente_hidrico)
        
        for rst in range(len(Exd_Hid)):
            cop = np.zeros((Exd_Hid[rst].shape[0], Exd_Hid[rst].shape[1]))
            np.copyto(cop, Exd_Hid[rst], 'no')
            if rst == 0:
                np.place(cop, cop > 0, 0)
                dClimaC.append(cop)
            else:
                np.place(cop, cop > 0, 0)
                dClimaC.append(cop + dClimaC[rst - 1])
        
        return [Exd_Hid, dClimaC]
    
    def reservaUtil(textura, excedenteHid, defice):
        lst_ru = []
        for rst in range(len(excedenteHid)):
            ru = textura * np.exp(defice[rst] / textura)
            np.copyto(ru, textura, 'no', defice[rst] == 0)
            if rst == 0:
                lst_ru.append(ru)
            else:
                ex_hid_mes_anterior = np.zeros((ru.shape[0], ru.shape[1]))
                np.place(ex_hid_mes_anterior, excedenteHid[rst - 1] < 0, 1)
                ex_hid_este_mes = np.zeros((ru.shape[0], ru.shape[1]))
                np.place(ex_hid_este_mes, excedenteHid[rst] > 0, 1)
                recarga = ex_hid_mes_anterior + ex_hid_este_mes
                no_caso_recarga = lst_ru[rst - 1] + excedenteHid[rst]
                
                if 2 in np.unique(recarga):
                    np.copyto(ru, no_caso_recarga, 'no', recarga == 2)
                else:
                    ex_hid_mes_anterior = np.zeros((ru.shape[0], ru.shape[1]))
                    np.place(ex_hid_mes_anterior,
                             excedenteHid[rst - 1] > 0, 1)
                    ex_hid_este_mes = np.zeros((ru.shape[0], ru.shape[1]))
                    np.place(ex_hid_este_mes,
                             excedenteHid[rst] > excedenteHid[rst - 1], 1)
                    recarga = ex_hid_mes_anterior + ex_hid_este_mes
                    no_caso_recarga = lst_ru[rst - 1] + excedenteHid[rst]
                    np.copyto(ru, no_caso_recarga, 'no', recarga == 2)
                
                lst_ru.append(ru)
        
        return lst_ru
    
    def VariacaoReservaUtil(lst_ru):
        lst_vru = []
        for rst in range(len(lst_ru)):
            if rst == 0:
                vru = lst_ru[-1] - lst_ru[rst]
            else:
                vru = lst_ru[rst - 1] - lst_ru[rst]
            lst_vru.append(vru)
        return lst_vru
    
    def ETR(precipitacao, vru, etp):
        lst_etr = []
        for rst in range(len(precipitacao)):
            p_array = rst_to_array(precipitacao[rst])
            etr = p_array + vru[rst]
            np.copyto(etr, etp[rst], 'no', p_array > etp[rst])
            lst_etr.append(etr)
        return lst_etr
    
    def DeficeHidrico(etp, etr):
        return [etp[rst] - etr[rst] for rst in range(len(etp))]
    
    # Read the inputs from the JSON meta file (the key names below are
    # assumptions about its schema, not confirmed by the source)
    meta = DecodeJson(meta_file)
    raster_textura   = meta['textura']       # soil texture raster (assumed key)
    rst_precipitacao = meta['precipitacao']  # folder with monthly precipitation rasters (assumed key)
    temperatura      = meta['temperatura']   # list of 12 monthly temperature rasters (assumed key)
    file_insolacao   = meta['insolacao']     # text file with monthly insolation values (assumed key)
    rst_saida        = meta['saida']         # output raster path (assumed key)
    epsg             = meta['epsg']          # EPSG code of the data (assumed key)
    
    def ListRaster(folder, ext):
        # Minimal stand-in for a legacy helper: list the rasters in a folder
        return sorted(
            os.path.join(folder, f) for f in os.listdir(folder)
            if f.endswith('.' + ext))
    
    def fileTexto(txt):
        # Minimal stand-in for a legacy helper: one insolation value per line
        with open(txt, 'r') as f:
            return [line.strip() for line in f if line.strip()]
    
    rst_textura = rst_to_array(raster_textura)
    
    # List the rasters with precipitation values
    precipitacao = ListRaster(rst_precipitacao, "img")
    
    ica = indexCaloricoAnual(temperatura)
    n_dias = fileTexto(file_insolacao)
    EvapotranspiracaoP = EvapotranspiracaoPotencial(temperatura, ica, n_dias)
    
    Defice_climatico = DefClimatico(precipitacao, EvapotranspiracaoP)
    excedente_hidrico = Defice_climatico[0]
    defice_climatico_cumulativo = Defice_climatico[1]
    
    reserva_util = reservaUtil(
        rst_textura, excedente_hidrico, defice_climatico_cumulativo)
    
    vru = VariacaoReservaUtil(reserva_util)
    etr = ETR(precipitacao, vru, EvapotranspiracaoP)
    
    def_hidrico = DeficeHidrico(EvapotranspiracaoP, etr)
    
    # Sum the monthly hydric deficits
    rst_hidrico = SomaRstOnLst(def_hidrico)
    
    array_to_raster(
        rst_hidrico, rst_saida, temperatura[0], epsg,
        get_cellsize(temperatura[0], gisApi='gdal'),
        gdal.GDT_Float64, gisApi='gdal')
    
    return rst_saida
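# Usage sketch for GDAL_Hidric_Balance, with a hypothetical meta file
# matching the assumed schema above:
#
# {
#     "textura":      "/data/soil_texture.tif",
#     "precipitacao": "/data/precipitation_monthly",
#     "temperatura":  ["/data/temp_01.tif", "...", "/data/temp_12.tif"],
#     "insolacao":    "/data/insolation.txt",
#     "saida":        "/data/hydric_balance.tif",
#     "epsg":         3763
# }
#
# GDAL_Hidric_Balance('/data/hidric_balance_meta.json')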
def osm2lulc(osmdata, nomenclature, refRaster, lulcRst,
             epsg=3857, overwrite=None, dataStore=None, roadsAPI='SQLITE'):
    """
    Convert OSM data into Land Use/Land Cover information
    
    A matrix-based approach
    
    roadsAPI Options:
    * SQLITE
    * POSTGIS
    """
    
    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import os
    import numpy
    import datetime
    import json
    from threading import Thread
    from osgeo import gdal
    
    # ************************************************************************ #
    # Dependencies #
    # ************************************************************************ #
    from gasp.fm.rst   import rst_to_array
    from gasp.prop.rst import get_cellsize
    from gasp.oss.ops  import create_folder, copy_file
    if roadsAPI == 'POSTGIS':
        from gasp.sql.mng.db     import create_db
        from gasp.osm2lulc.utils import osm_to_pgsql
        from gasp.osm2lulc.mod2  import pg_num_roads
    else:
        from gasp.osm2lulc.utils import osm_to_sqdb
        from gasp.osm2lulc.mod2  import num_roads
    from gasp.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from gasp.osm2lulc.mod1  import num_selection
    from gasp.osm2lulc.m3_4  import num_selbyarea
    from gasp.osm2lulc.mod5  import num_base_buffer
    from gasp.osm2lulc.mod6  import num_assign_builds
    from gasp.to.rst         import array_to_raster
    
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(
            os.path.dirname(lulcRst)))
    
    conPGSQL = json.load(open(os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'con-postgresql.json'
    ), 'r')) if roadsAPI == 'POSTGIS' else None
    
    time_a = datetime.datetime.now().replace(microsecond=0)
    
    from gasp.osm2lulc.var import osmTableData, PRIORITIES
    
    workspace = os.path.join(
        os.path.dirname(lulcRst), 'num_osmto') if not dataStore else dataStore
    
    # Check if workspace exists
    if os.path.exists(workspace):
        if overwrite:
            create_folder(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        create_folder(workspace, overwrite=None)
    
    CELLSIZE = get_cellsize(refRaster, xy=False, gisApi='gdal')
    time_b = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        conPGSQL["DATABASE"] = create_db(
            conPGSQL, os.path.splitext(os.path.basename(osmdata))[0],
            overwrite=True)
        osm_db = osm_to_pgsql(osmdata, conPGSQL)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # Add LULC Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(
        conPGSQL if roadsAPI == 'POSTGIS' else osm_db,
        osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        conPGSQL if roadsAPI == 'POSTGIS' else osm_db, epsg,
        api=roadsAPI,
        isGlobeLand=None if nomenclature != "GLOBE_LAND_30" else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    mergeOut  = {}
    timeCheck = {}
    RULES = [1, 2, 3, 4, 5, 7]
    
    def run_rule(ruleID):
        time_start = datetime.datetime.now().replace(microsecond=0)
        _osmdb = copy_file(
            osm_db, os.path.splitext(osm_db)[0] + '_r{}.sqlite'.format(ruleID)
        ) if roadsAPI == 'SQLITE' else None
        # ******************************************************************** #
        # 1 - Selection Rule #
        # ******************************************************************** #
        if ruleID == 1:
            res, tm = num_selection(
                conPGSQL if not _osmdb else _osmdb, osmTableData['polygons'],
                workspace, CELLSIZE, epsg, refRaster, api=roadsAPI)
        # ******************************************************************** #
        # 2 - Get Information About Roads Location #
        # ******************************************************************** #
        elif ruleID == 2:
            res, tm = num_roads(
                _osmdb, nomenclature, osmTableData['lines'],
                osmTableData['polygons'], workspace, CELLSIZE, epsg,
                refRaster
            ) if _osmdb else pg_num_roads(
                conPGSQL, nomenclature,
                osmTableData['lines'], osmTableData['polygons'],
                workspace, CELLSIZE, epsg, refRaster)
        # ******************************************************************** #
        # 3 - Area Upper than #
        # ******************************************************************** #
        elif ruleID == 3:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    conPGSQL if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=True, api=roadsAPI)
            else:
                return
        # ******************************************************************** #
        # 4 - Area Lower than #
        # ******************************************************************** #
        elif ruleID == 4:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    conPGSQL if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace,
                    CELLSIZE, epsg, refRaster, UPPER=False, api=roadsAPI)
            else:
                return
        # ******************************************************************** #
        # 5 - Get data from lines table (railway | waterway) #
        # ******************************************************************** #
        elif ruleID == 5:
            res, tm = num_base_buffer(
                conPGSQL if not _osmdb else _osmdb, osmTableData['lines'],
                workspace, CELLSIZE, epsg, refRaster, api=roadsAPI)
        # ******************************************************************** #
        # 7 - Assign untagged Buildings to tags #
        # ******************************************************************** #
        elif ruleID == 7:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_assign_builds(
                    conPGSQL if not _osmdb else _osmdb,
                    osmTableData['points'], osmTableData['polygons'],
                    workspace, CELLSIZE, epsg, refRaster, apidb=roadsAPI)
            else:
                return
        
        time_end = datetime.datetime.now().replace(microsecond=0)
        mergeOut[ruleID] = res
        timeCheck[ruleID] = {'total': time_end - time_start, 'detailed': tm}
    
    thrds = []
    for r in RULES:
        thrds.append(Thread(
            name="to_{}".format(str(r)), target=run_rule, args=(r,)))
    
    for t in thrds:
        t.start()
    for t in thrds:
        t.join()
    
    # Merge all results into one raster
    compileResults = {}
    for rule in mergeOut:
        for cls in mergeOut[rule]:
            if cls not in compileResults:
                if type(mergeOut[rule][cls]) == list:
                    compileResults[cls] = mergeOut[rule][cls]
                else:
                    compileResults[cls] = [mergeOut[rule][cls]]
            else:
                if type(mergeOut[rule][cls]) == list:
                    compileResults[cls] += mergeOut[rule][cls]
                else:
                    compileResults[cls].append(mergeOut[rule][cls])
    
    time_m = datetime.datetime.now().replace(microsecond=0)
    
    # All rasters to array
    arrayRst = {}
    for cls in compileResults:
        for raster in compileResults[cls]:
            if not raster:
                continue
            array = rst_to_array(raster)
            if cls not in arrayRst:
                arrayRst[cls] = [array.astype(numpy.uint8)]
            else:
                arrayRst[cls].append(array.astype(numpy.uint8))
    time_n = datetime.datetime.now().replace(microsecond=0)
    
    # Sum the rasters of each class
    for cls in arrayRst:
        if len(arrayRst[cls]) == 1:
            sumArray = arrayRst[cls][0]
        else:
            sumArray = arrayRst[cls][0]
            for i in range(1, len(arrayRst[cls])):
                sumArray = sumArray + arrayRst[cls][i]
        arrayRst[cls] = sumArray
    time_o = datetime.datetime.now().replace(microsecond=0)
    
    # Apply priority rule
    __priorities = PRIORITIES[nomenclature + "_NUMPY"]
    
    for lulcCls in __priorities:
        __lulcCls = 1222 if lulcCls == 98 else 1221 if lulcCls == 99 else \
            802 if lulcCls == 82 else 801 if lulcCls == 81 else lulcCls
        if __lulcCls not in arrayRst:
            continue
        else:
            numpy.place(arrayRst[__lulcCls], arrayRst[__lulcCls] > 0, lulcCls)
    
    for i in range(len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i not in arrayRst:
            continue
        else:
            for e in range(i + 1, len(__priorities)):
                lulc_e = 1222 if __priorities[e] == 98 else 1221 \
                    if __priorities[e] == 99 else \
                    802 if __priorities[e] == 82 else 801 \
                    if __priorities[e] == 81 else __priorities[e]
                if lulc_e not in arrayRst:
                    continue
                else:
                    numpy.place(
                        arrayRst[lulc_e],
                        arrayRst[lulc_i] == __priorities[i], 0)
    time_p = datetime.datetime.now().replace(microsecond=0)
    
    # Merge all rasters
    startCls = None
    for i in range(len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i in arrayRst:
            resultSum = arrayRst[lulc_i]
            startCls = i
            break
    
    if startCls is None:
        return 'NoResults'
    
    for i in range(startCls + 1, len(__priorities)):
        lulc_i = 1222 if __priorities[i] == 98 else 1221 \
            if __priorities[i] == 99 else 802 if __priorities[i] == 82 \
            else 801 if __priorities[i] == 81 else __priorities[i]
        if lulc_i not in arrayRst:
            continue
        resultSum = resultSum + arrayRst[lulc_i]
    
    # Save Result
    numpy.place(resultSum, resultSum == 0, 1)
    array_to_raster(
        resultSum, lulcRst, refRaster, epsg, gdal.GDT_Byte,
        noData=1, gisApi='gdal')
    time_q = datetime.datetime.now().replace(microsecond=0)
    
    return lulcRst, {
        0  : ('set_settings', time_b - time_a),
        1  : ('osm_to_sqdb', time_c - time_b),
        2  : ('cls_in_sqdb', time_d - time_c),
        3  : ('proj_data', time_e - time_d),
        4  : ('rule_1', timeCheck[1]['total'], timeCheck[1]['detailed']),
        5  : ('rule_2', timeCheck[2]['total'], timeCheck[2]['detailed']),
        6  : None if 3 not in timeCheck else
             ('rule_3', timeCheck[3]['total'], timeCheck[3]['detailed']),
        7  : None if 4 not in timeCheck else
             ('rule_4', timeCheck[4]['total'], timeCheck[4]['detailed']),
        8  : ('rule_5', timeCheck[5]['total'], timeCheck[5]['detailed']),
        9  : None if 7 not in timeCheck else
             ('rule_7', timeCheck[7]['total'], timeCheck[7]['detailed']),
        10 : ('rst_to_array', time_n - time_m),
        11 : ('sum_cls', time_o - time_n),
        12 : ('priority_rule', time_p - time_o),
        13 : ('merge_rst', time_q - time_p)
    }
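# Usage sketch for osm2lulc (hypothetical paths and nomenclature value):
# convert an .osm/.pbf extract into a LULC raster aligned with a reference
# raster, using the SQLITE backend; the second return value carries the
# per-step timings.
#
# lulc, timings = osm2lulc(
#     '/data/coimbra.osm', 'URBAN_ATLAS', '/data/ref.tif',
#     '/data/lulc_ua.tif', epsg=3763, overwrite=True)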