def kernel_density(pnt_feat, popField, radius, template, outRst):
    """
    Kernel density estimation.

    If any point is currently in selection only selected points are
    taken into account.

    pnt_feat - point feature class with the observations
    popField - population field used to weight each point
    radius   - kernel search radius
    template - raster whose extent/cellsize define the output grid
    outRst   - path to the output GeoTIFF

    Runs SAGA's grid_gridding module 6 and converts the SAGA grid to TIFF.
    """

    import os
    # FIX: execmd was used below but never imported (NameError at runtime)
    from glass.pys        import execmd
    from glass.g.it.rst   import saga_to_tif
    from glass.g.prop.rst import rst_ext, get_cellsize
    from glass.pys.oss    import fprop

    # Output grid geometry comes from the template raster
    left, right, bottom, top = rst_ext(template)
    cellsize = get_cellsize(template)

    # SAGA writes its native .sgrd next to the requested output
    SAGA_RASTER = os.path.join(
        os.path.dirname(outRst),
        'saga_{}.sgrd'.format(fprop(outRst, 'fn')))

    cmd = (
        "saga_cmd grid_gridding 6 -POINTS {} -POPULATION {} "
        "-RADIUS {} -TARGET_DEFINITION 0 -TARGET_USER_SIZE {} "
        "-TARGET_USER_XMIN {} -TARGET_USER_XMAX {} "
        "-TARGET_USER_YMIN {} -TARGET_USER_YMAX {} "
        "-TARGET_OUT_GRID {}"
    ).format(
        pnt_feat, popField, str(radius), str(abs(cellsize)),
        str(left), str(right), str(bottom), str(top),
        SAGA_RASTER)

    outcmd = execmd(cmd)

    # Convert to tiff
    saga_to_tif(SAGA_RASTER, outRst)

    return outRst
def rstext_to_rst(inrst, outrst, cellsize=None, epsg=None, rstval=None):
    """
    Raster Extent to Raster

    Produce a new raster (outrst) covering exactly the extent of inrst,
    optionally with a given cellsize, EPSG code and constant cell value.
    """

    from glass.g.prop.rst import rst_ext, get_cellsize
    from glass.g.wt.rst   import ext_to_rst

    # Extent of the source raster
    left, right, bottom, top = rst_ext(inrst)

    # When no EPSG is given, inherit the one from the source raster
    if not epsg:
        from glass.g.prop.prj import get_rst_epsg

        epsg = get_rst_epsg(inrst)

    # When no cellsize is given, inherit it from the source raster too
    csize = cellsize if cellsize else get_cellsize(inrst)

    # Write the output raster
    ext_to_rst(
        (left, top), (right, bottom), outrst,
        cellsize=csize, epsg=epsg, rstvalue=rstval)

    return outrst
def lnd8_dn_to_ref(folder, img_format, meta_json, outWorkspace, srs):
    """
    Landsat 8 digital numbers to surface reflectance.

    Reads every band raster in `folder` (matching `img_format`), applies the
    per-band calibration coefficients stored in `meta_json` (keyed by band
    file name) and writes one corrected raster per band into `outWorkspace`.

    NOTE(review): `srs` is never used in this function — confirm whether a
    reprojection step was intended.
    """

    import math
    import json
    import os
    from glass.pys.oss import lst_ff
    from glass.g.rd.rst import rst_to_array
    from glass.g.prop.rst import get_cellsize, rst_stats
    from glass.g.wt.rst import obj_to_rst

    def Get_RTA(Ml, Qcalc, Al):
        """
        Top-of-Atmosphere Radiance.

        Ml    - band-specific multiplicative rescaling factor
        Qcalc - original satellite image (digital numbers)
        Al    - band-specific additive rescaling factor
        """

        Llambda = Ml * Qcalc + Al

        return Llambda

    def GetIrraSolar(d, Lmax, Pmax):
        """
        Mean exoatmospheric solar irradiance (ESUN).

        d    - Earth-Sun distance (based on the acquisition day of year)
        Lmax - maximum radiance
        Pmax - maximum reflectance
        """
        return (math.pi * d**2) * (Lmax / Pmax)

    def GetRefAparente(d, esun, rta, Z):
        """
        Apparent (top-of-atmosphere) reflectance.

        Z - solar zenith angle

        NOTE(review): this evaluates as (pi*rta*d**2/esun) * cos(Z); the
        standard TOA reflectance formula divides by (esun * cos(Z)).
        Also Z arrives in degrees while math.cos expects radians — confirm
        both before relying on the output.
        """
        pp = math.pi * rta * d**2 / esun * math.cos(Z)
        return pp

    def GetRefSuperfice(DNmin, Ml, Al, IrrSolar, Z, d, RefAparente):
        """
        Surface reflectance (dark-object-subtraction-style correction).

        NOTE(review): precedence here gives (cos(Z)/pi) * d**2 and
        (... / IrrSolar) * cos(Z); verify against the intended formula,
        and note Z is in degrees while math.cos expects radians.
        """
        Lp = (Ml * DNmin + Al - 0.01 * IrrSolar) * (math.cos(Z) / math.pi * d**2)
        p = math.pi * (RefAparente - Lp) * d**2 / IrrSolar * math.cos(Z)
        return p

    # Band rasters to process
    lst_bands = lst_ff(folder, file_format=img_format)
    # Calibration metadata, keyed per band file name
    # NOTE(review): the file handle is never closed — consider a with-block.
    json_file = open(meta_json, 'r')
    json_data = json.load(json_file)
    # NOTE(review): cellsize is computed but never used afterwards.
    cellsize = get_cellsize(lst_bands[0], gisApi='gdal')

    # Estimate Surface Reflectance for each band
    for bnd in lst_bands:
        # Convert images to numpy array
        img = rst_to_array(bnd)
        # Calculations of each pixel; store results on a new numpy array
        rta_array = Get_RTA(
            json_data[u"RADIANCE_MULT_BAND"][os.path.basename(bnd)],
            img,
            json_data[u"RADIANCE_ADD_BAND"][os.path.basename(bnd)])
        solar_irradiation = GetIrraSolar(
            json_data[u"EARTH_SUN_DISTANCE"],
            json_data[u"RADIANCE_MAXIMUM_BAND"][os.path.basename(bnd)],
            json_data[u"REFLECTANCE_MAXIMUM_BAND"][os.path.basename(bnd)])
        # 90 - SUN_ELEVATION = solar zenith angle (degrees)
        ref_aparente_array = GetRefAparente(
            json_data[u"EARTH_SUN_DISTANCE"], solar_irradiation, rta_array,
            90 - json_data[u"SUN_ELEVATION"])
        new_map = GetRefSuperfice(
            rst_stats(bnd, api='gdal')['MIN'],
            json_data[u"RADIANCE_MULT_BAND"][os.path.basename(bnd)],
            json_data[u"RADIANCE_ADD_BAND"][os.path.basename(bnd)],
            solar_irradiation, 90 - json_data[u"SUN_ELEVATION"],
            json_data[u"EARTH_SUN_DISTANCE"], ref_aparente_array)
        # Write the corrected band next to the others, same file name
        obj_to_rst(
            new_map, os.path.join(outWorkspace, os.path.basename(bnd)), bnd)
def update_globe_land_cover(original_globe_raster, osm_urban_atlas_raster,
                            osm_globe_raster, epsg, updated_globe_raster,
                            detailed_globe_raster):
    """
    Update the original Glob Land 30 with the result of the conversion of
    OSM DATA to the Globe Land Cover nomenclature;

    Also updates the previous updated Glob Land 30 with the result of the
    conversion of osm data to the Urban Atlas Nomenclature.

    Fixes applied vs. the previous revision:
    * element-wise numpy.logical_or instead of Python `or` on arrays
      (which raises ValueError for multi-element arrays);
    * the class-code/meta placement pairs for classes 30/40/50/10 now write
      the meta flag into detailed_meta_array (they previously overwrote the
      class code in detailed_array with 1);
    * the detailed meta path is derived from detailed_globe_raster (it was
      previously derived from the not-yet-defined detailed_meta name,
      raising NameError).
    """

    import os
    import numpy as np
    from glass.g.rd.rst import rst_to_array
    from glass.g.prop.rst import get_cellsize, get_nodata
    from glass.g.wt.rst import obj_to_rst

    # ############################# #
    # Convert images to numpy array #
    # ############################# #
    np_globe_original = rst_to_array(original_globe_raster)
    np_globe_osm = rst_to_array(osm_globe_raster)
    np_ua_osm = rst_to_array(osm_urban_atlas_raster)

    # ################################## #
    # Check the dimension of both images #
    # ################################## #
    if np_globe_original.shape != np_globe_osm.shape:
        return (
            'The Globe Land 30 raster (original) do not have the same number'
            ' of columns/lines comparing with the Globe Land 30 derived '
            'from OSM data')
    elif np_globe_original.shape != np_ua_osm.shape:
        return (
            'The Globe Land 30 raster (original) do not have the same '
            'number of columns/lines comparing with the Urban Atlas raster '
            'derived from OSM data')
    elif np_globe_osm.shape != np_ua_osm.shape:
        return (
            'The Globe Land 30 derived from OSM data do not have the same '
            'number of columns/lines comparing with the Urban Atlas raster '
            'derived from OSM data')

    # ############## #
    # Check Cellsize #
    # ############## #
    cell_of_rsts = get_cellsize(
        [original_globe_raster, osm_globe_raster, osm_urban_atlas_raster],
        xy=True, gisApi='gdal')
    cell_globe_original = cell_of_rsts[original_globe_raster]
    cell_globe_osm = cell_of_rsts[osm_globe_raster]
    cell_ua_osm = cell_of_rsts[osm_urban_atlas_raster]
    if cell_globe_original != cell_globe_osm:
        return (
            'The cellsize of the Globe Land 30 raster (original) is not the '
            'same comparing with the Globe Land 30 derived from OSM data')
    elif cell_globe_original != cell_ua_osm:
        return (
            'The cellsize of the Globe Land 30 raster (original) is not the '
            'same comparing with the Urban Atlas raster derived from OSM data')
    elif cell_ua_osm != cell_globe_osm:
        return (
            'The cellsize of the Globe Land 30 derived from OSM data is not '
            'the same comparing with the Urban Atlas raster derived from '
            'OSM data')

    # ############################# #
    # Get the Value of Nodata Cells #
    # ############################# #
    nodata_glob_original = get_nodata(original_globe_raster, gisApi='gdal')
    nodata_glob_osm = get_nodata(osm_globe_raster, gisApi='gdal')
    nodata_ua_osm = get_nodata(osm_urban_atlas_raster, gisApi='gdal')

    # ######################################## #
    # Create a new map - Globe Land 30 Updated #
    # ######################################## #
    """
    Create a new array with zeros...

    1) The zeros will be replaced by the values in the Globe Land derived
    from OSM.

    2) The zeros will be replaced by the values in the Original Globe Land
    at the cells with NULL data in the Globe Land derived from OSM.

    The meta array will identify values origins in the updated raster:
    1 - Original Raster
    2 - OSM Derived Raster
    """
    update_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))
    update_meta_array = np.zeros(
        (np_globe_original.shape[0], np_globe_original.shape[1]))
    # 1)
    np.copyto(update_array, np_globe_osm, 'no',
              np_globe_osm != nodata_glob_osm)
    # 1) meta
    np.place(update_meta_array, update_array != 0, 2)
    # 2) meta
    np.place(update_meta_array, update_array == 0, 1)
    # 2)
    np.copyto(update_array, np_globe_original, 'no', update_array == 0)
    # 2) meta
    np.place(update_meta_array, update_array == nodata_glob_original,
             int(nodata_glob_original))
    # noData to int
    np.place(update_array, update_array == nodata_glob_original,
             int(nodata_glob_original))

    updated_meta = os.path.join(
        os.path.dirname(updated_globe_raster),
        '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(updated_globe_raster))[0],
            e=os.path.splitext(os.path.basename(updated_globe_raster))[1]))
    # Create Updated Globe Cover 30
    obj_to_rst(update_array, updated_globe_raster, original_globe_raster,
               noData=int(nodata_glob_original))
    # Create Updated Globe Cover 30 meta
    obj_to_rst(update_meta_array, updated_meta, original_globe_raster,
               noData=int(nodata_glob_original))

    # ################################################# #
    # Create a new map - Globe Land 30 Detailed with UA #
    # ################################################# #
    np_update = rst_to_array(updated_globe_raster)
    detailed_array = np.zeros((np_update.shape[0], np_update.shape[1]))
    detailed_meta_array = np.zeros((np_update.shape[0], np_update.shape[1]))
    """
    Replace 80 Globe Land for 11, 12, 13, 14 of Urban Atlas

    The meta array will identify values origins in the detailed raster:
    1 - Updated Raster
    2 - UA Derived Raster from OSM
    """
    # Globe - Maintain some classes (value in detailed, origin flag in meta)
    np.place(detailed_array, np_update == 30, 8)
    np.place(detailed_meta_array, np_update == 30, 1)
    np.place(detailed_array, np_update == 40, 9)
    np.place(detailed_meta_array, np_update == 40, 1)
    np.place(detailed_array, np_update == 50, 10)
    np.place(detailed_meta_array, np_update == 50, 1)
    np.place(detailed_array, np_update == 10, 5)
    np.place(detailed_meta_array, np_update == 10, 1)
    # Water bodies (element-wise OR of the two water sources)
    water_mask = np.logical_or(np_ua_osm == 50, np_update == 60)
    np.place(detailed_array, water_mask, 7)
    np.place(detailed_meta_array, water_mask, 1)
    # Urban - Where Urban Atlas IS NOT NULL
    np.place(detailed_array, np_ua_osm == 11, 1)
    np.place(detailed_meta_array, np_ua_osm == 11, 2)
    np.place(detailed_array, np_ua_osm == 12, 2)
    np.place(detailed_meta_array, np_ua_osm == 12, 2)
    np.place(detailed_array, np_ua_osm == 13, 3)
    np.place(detailed_meta_array, np_ua_osm == 13, 2)
    np.place(detailed_array, np_ua_osm == 14, 4)
    np.place(detailed_meta_array, np_ua_osm == 14, 2)
    # Urban Atlas - Class 30 to 6
    np.place(detailed_array, np_ua_osm == 30, 6)
    np.place(detailed_meta_array, np_ua_osm == 30, 2)
    # Create Detailed Globe Cover 30
    obj_to_rst(detailed_array, detailed_globe_raster, original_globe_raster,
               noData=0)
    # Create Detailed Globe Cover 30 meta
    detailed_meta = os.path.join(
        os.path.dirname(detailed_globe_raster),
        '{n}_meta{e}'.format(
            n=os.path.splitext(os.path.basename(detailed_globe_raster))[0],
            e=os.path.splitext(os.path.basename(detailed_globe_raster))[1]))
    obj_to_rst(detailed_meta_array, detailed_meta, original_globe_raster,
               noData=0)
def conditional_dependence(movs, indp):
    """
    Estimate conditional dependence between several rasters.

    movs - point feature class with the phenomena occurrences (landslides)
    indp - list of raster paths (independent variables / predictors)

    Returns a dict with three tables ('OVERALL', 'RIC', 'TAC'), each keyed
    by (raster, raster) pairs with formatted-string values.
    """

    import math
    from decimal import Decimal
    from glass.g.prop.feat import feat_count
    from glass.g.prop.rst import get_cellsize, count_cells
    from glass.g.prop.rst import frequencies

    def foundPredT(dic):
        # Sum of class_value * count over a class-frequency table
        PredT = 0.0
        for value in dic.keys():
            count = dic[value]
            PredT += (float(value) * count)
        return PredT

    def foundTStd(dic, c):
        # sqrt of the sum of (value * count * c)^2 over a frequency table
        TVar = 0.0
        for value in dic.keys():
            count = dic[value]
            TVar += (float(value) * count * c)**2
        TStd = math.sqrt(TVar)
        return TStd

    def calcCondIndp(Z):
        # Standard normal CDF via the Abramowitz & Stegun 26.2.17
        # polynomial approximation (constants const6, b1..b5 below).
        pi = math.pi
        const6 = 0.2316419
        b1 = 0.31938153
        b2 = -0.356563782
        b3 = 1.781477937
        b4 = -1.821255978
        b5 = 1.330274429
        Z = float(Z)
        X = Z
        if X < 0.0:
            X = -Z
        t = 1.0 / (const6 * X + 1.0)
        pid = 2.0 * pi
        XX = -X * X / 2.0
        XX = math.exp(XX) / math.sqrt(pid)
        PZ = (b1 * t) + (b2 * t * t) + (b3 * (t**3)) + \
            (b4 * (t**4)) + (b5 * (t**5))
        PZ = 1.0 - (PZ * XX)
        # symmetry: Phi(-z) = 1 - Phi(z)
        if Z < 0:
            PZ = 1.0 - PZ
        return PZ

    # Count phenomena ocorrences - total number of landslides
    ExpNumTP = Decimal(feat_count(movs, gisApi='ogr'))
    # Count the number of cell raster
    NrCell = count_cells(indp[0])
    # Get Cellsize of the raster's
    cellsize = Decimal(get_cellsize(indp[0], gisApi='gdal'))
    # Calculate UnitArea (km2)
    area_km = Decimal(((cellsize * cellsize) * NrCell) / 1000000.0)
    UnitArea = Decimal((area_km / ExpNumTP) / 40.0)
    # NOTE(review): ConvFac is a Decimal, but it is later multiplied with
    # plain floats (PredT *= ConvFac, foundTStd) — Decimal/float mixing
    # raises TypeError in standard Python; confirm this code path runs.
    ConvFac = Decimal((cellsize**2) / 1000000.0 / UnitArea)
    # Count the number of times that one class has representation in the
    # raster; put this associations in a ditionary
    sudoDic = {var: frequencies(var) for var in indp}
    # Calculate Conditional Dependence
    nr = 1
    for rst in indp:
        # NOTE(review): these dicts are re-created on every outer iteration,
        # so the returned result only keeps the pairs involving the LAST
        # raster — they probably should be initialised before the loop;
        # confirm the intended behaviour.
        l_overall = {}
        l_ric = {}
        l_tac = {}
        # Calculate PredT
        PredT = foundPredT(sudoDic[rst])
        PredT *= ConvFac
        for _rst_ in indp:
            # Calculate TStd
            TStd = foundTStd(sudoDic[_rst_], ConvFac)
            # Standardised difference between predicted and observed counts
            TS = (PredT - ExpNumTP) / TStd
            n = ExpNumTP
            T = PredT
            P = calcCondIndp(TS) * 100.0
            if P > 50.0:
                overallCI = 100.0 * (100.0 - P) / 50.0
            else:
                overallCI = 100.0 * (100.0 - (50.0 + (50.0 - P))) / 50.0
            l_overall[(rst, _rst_)] = "%.2f" % overallCI
            ric = n / T
            l_ric[(rst, _rst_)] = "%.2f" % ric
            l_tac[(rst, _rst_)] = "%.2f" % P
        nr += 1
    return {'OVERALL': l_overall, 'RIC': l_ric, 'TAC': l_tac}
def gdal_slope(dem, srs, slope, unit='DEGREES'):
    """
    Create Slope Raster.

    dem   - path to the elevation raster
    srs   - spatial reference (not used in the computation; kept for
            interface compatibility)
    slope - path to the output slope raster
    unit  - 'DEGREES' or 'PERCENT_RISE'

    Interior cells are computed in bulk with 3x3 convolution kernels
    (Horn-style finite differences); cells with fewer than 8 valued
    neighbours are recomputed one-by-one with explicit edge handling.

    TODO: Test and see if is running correctly
    """

    import numpy
    import math
    from osgeo import gdal
    from scipy.ndimage import convolve
    from glass.g.rd.rst import rst_to_array
    from glass.g.wt.rst import obj_to_rst
    from glass.g.prop.rst import get_cellsize, get_nodata

    # Fail early on an invalid unit instead of hitting a NameError when
    # arr_slope is first assigned below.
    if unit != 'DEGREES' and unit != 'PERCENT_RISE':
        raise ValueError("unit must be 'DEGREES' or 'PERCENT_RISE'")

    # ################ #
    # Global Variables #
    # ################ #
    cellsize = get_cellsize(dem, gisApi='gdal')
    # Get Nodata Value
    NoData = get_nodata(dem)

    # #################### #
    # Produce Slope Raster #
    # #################### #
    # Get Elevation array
    arr_dem = rst_to_array(dem)

    # We have to get a array with the number of nearst cells with values
    with_data = numpy.zeros((arr_dem.shape[0], arr_dem.shape[1]))
    numpy.place(with_data, arr_dem != NoData, 1.0)
    mask = numpy.array([[1, 1, 1],
                        [1, 0, 1],
                        [1, 1, 1]])
    arr_neigh = convolve(with_data, mask, mode='constant')
    numpy.place(arr_dem, arr_dem == NoData, 0.0)

    # The rate of change in the x direction for the center cell e is:
    kernel_dz_dx_left = numpy.array([[0, 0, 1],
                                     [0, 0, 2],
                                     [0, 0, 1]])
    kernel_dz_dx_right = numpy.array([[1, 0, 0],
                                      [2, 0, 0],
                                      [1, 0, 0]])
    dz_dx = (convolve(arr_dem, kernel_dz_dx_left, mode='constant') -
             convolve(arr_dem, kernel_dz_dx_right, mode='constant')) / (
                 arr_neigh * cellsize)

    # The rate of change in the y direction for cell e is:
    kernel_dz_dy_left = numpy.array([[0, 0, 0],
                                     [0, 0, 0],
                                     [1, 2, 1]])
    kernel_dz_dy_right = numpy.array([[1, 2, 1],
                                      [0, 0, 0],
                                      [0, 0, 0]])
    dz_dy = (convolve(arr_dem, kernel_dz_dy_left, mode='constant') -
             convolve(arr_dem, kernel_dz_dy_right, mode='constant')) / (
                 arr_neigh * cellsize)

    # Taking the rate of change in the x and y direction, the slope for the
    # center cell e is calculated using
    rise_run = ((dz_dx)**2 + (dz_dy)**2)**0.5
    if unit == 'DEGREES':
        arr_slope = numpy.arctan(rise_run) * 57.29578
    elif unit == 'PERCENT_RISE':
        arr_slope = numpy.tan(numpy.arctan(rise_run)) * 100.0

    # Estimate the slope for the cells with less than 8 neigh
    aux_dem = rst_to_array(dem)
    index_vizinhos = numpy.where(arr_neigh < 8)
    for idx in range(len(index_vizinhos[0])):
        # Get Value of the cell
        lnh = index_vizinhos[0][idx]
        col = index_vizinhos[1][idx]
        e = aux_dem[lnh][col]

        # 3x3 neighbourhood (a..i); out-of-range or NoData neighbours fall
        # back to the centre value e. NOTE: negative indexes wrap around in
        # numpy, which is why the explicit lnh==0 / col==0 guards exist.
        # FIX: the bare `except:` clauses were narrowed to IndexError —
        # the try bodies only perform indexing and comparisons.
        a = aux_dem[lnh - 1][col - 1]
        if a == NoData:
            a = e
        if lnh == 0 or col == 0:
            a = e

        b = aux_dem[lnh - 1][col]
        if b == NoData:
            b = e
        if lnh == 0:
            b = e

        try:
            c = aux_dem[lnh - 1][col + 1]
            if c == NoData:
                c = e
            if lnh == 0:
                c = e
        except IndexError:
            c = e

        d = aux_dem[lnh][col - 1]
        if d == NoData:
            d = e
        if col == 0:
            d = e

        try:
            f = aux_dem[lnh][col + 1]
            if f == NoData:
                f = e
        except IndexError:
            f = e

        try:
            g = aux_dem[lnh + 1][col - 1]
            if g == NoData:
                g = e
            if col == 0:
                g = e
        except IndexError:
            g = e

        try:
            h = aux_dem[lnh + 1][col]
            if h == NoData:
                h = e
        except IndexError:
            h = e

        try:
            i = aux_dem[lnh + 1][col + 1]
            if i == NoData:
                i = e
        except IndexError:
            i = e

        dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize)
        dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize)
        rise_sun = ((dz_dx)**2 + (dz_dy)**2)**0.5
        if unit == 'DEGREES':
            arr_slope[lnh][col] = math.atan(rise_sun) * 57.29578
        elif unit == 'PERCENT_RISE':
            arr_slope[lnh][col] = math.tan(math.atan(rise_sun)) * 100.0

    # Del value originally nodata
    numpy.place(arr_slope, aux_dem == NoData, numpy.nan)
    #arr_slope[lnh][col] = slope_degres
    obj_to_rst(arr_slope, slope, dem)
def adjust_ext_to_snap(outExt, snapRst):
    """
    Adjust extent for a output raster to snap with other raster.

    outExt  - path to a Shapefile or Raster whose extent will be adjusted
    snapRst - path to the raster to snap to

    Returns (left, top, n_row, n_col, cellsize) describing the snapped
    output grid. Extent tuples used below are (left, right, bottom, top).
    """

    from glass.g.prop import check_isShp, check_isRaster
    from glass.g.prop.rst import rst_ext, get_cellsize
    from glass.g.gobj import new_pnt, create_polygon

    # Check if outExt is a raster or not
    isRst = check_isRaster(outExt)

    if isRst:
        shpAExt = rst_ext(outExt)
    else:
        isShp = check_isShp(outExt)
        if isShp:
            from glass.g.prop.feat import get_ext
            shpAExt = get_ext(outExt)
        else:
            raise ValueError(
                ("outExt value should be a path to a SHP or to a Raster file"))

    # Check if snapRst is a raster
    isRst = check_isRaster(snapRst)
    if not isRst:
        raise ValueError(("snapRst should be a path to a raster file"))

    # Get snapRst Extent
    snapRstExt = rst_ext(snapRst)

    # Get cellsize
    csize = get_cellsize(snapRst)

    # Find extent point of outExt inside the two extents
    # This will be used as pseudo origin
    # (closed ring of the snap raster's corner points)
    snapRstPnt = [
        new_pnt(snapRstExt[0], snapRstExt[3]),
        new_pnt(snapRstExt[1], snapRstExt[3]),
        new_pnt(snapRstExt[1], snapRstExt[2]),
        new_pnt(snapRstExt[0], snapRstExt[2]),
        new_pnt(snapRstExt[0], snapRstExt[3]),
    ]

    poly_snap_rst = create_polygon(snapRstPnt)

    # Corner points of the output extent
    outExtPnt = {
        'top_left': new_pnt(shpAExt[0], shpAExt[3]),
        'top_right': new_pnt(shpAExt[1], shpAExt[3]),
        'bottom_right': new_pnt(shpAExt[1], shpAExt[2]),
        'bottom_left': new_pnt(shpAExt[0], shpAExt[2])
    }

    # Which corners of outExt fall inside the snap raster's extent
    out_rst_pseudo = {}
    for pnt in outExtPnt:
        out_rst_pseudo[pnt] = outExtPnt[pnt].Intersects(poly_snap_rst)

    # First overlapping corner, checked in this fixed preference order
    pseudoOrigin = outExtPnt['top_left'] if out_rst_pseudo['top_left'] else \
        outExtPnt['bottom_left'] if out_rst_pseudo['bottom_left'] else \
        outExtPnt['top_right'] if out_rst_pseudo['top_right'] else \
        outExtPnt['bottom_right'] if out_rst_pseudo['bottom_right'] else None

    if not pseudoOrigin:
        raise ValueError(('Extents doesn\'t have overlapping areas'))

    # Name of the chosen corner ("<y>_<x>"), used to pick formulas below
    pseudoOriginName = 'top_left' if out_rst_pseudo['top_left'] else \
        'bottom_left' if out_rst_pseudo['bottom_left'] else \
        'top_right' if out_rst_pseudo['top_right'] else \
        'bottom_right' if out_rst_pseudo['bottom_right'] else None

    # Get out Raster Shape
    n_col = int((shpAExt[1] - shpAExt[0]) / csize)
    n_row = int((shpAExt[3] - shpAExt[2]) / csize)

    # Get Output Raster real origin/top left
    yName, xName = pseudoOriginName.split('_')

    if xName == 'left':
        # Obtain left of output Raster
        # (snap left plus a whole number of cells)
        left_out_rst = snapRstExt[0] + (
            csize * int((shpAExt[0] - snapRstExt[0]) / csize))
    else:
        # obtain right of output Raster
        right_out_rst = snapRstExt[1] - (
            csize * int((snapRstExt[1] - shpAExt[1]) / csize))
        # Use right to obtain left coordinate
        left_out_rst = right_out_rst - (n_col * csize)

    if yName == 'top':
        # Obtain top of output Raster
        top_out_rst = snapRstExt[3] - (
            csize * int((snapRstExt[3] - shpAExt[3]) / csize))
    else:
        # obtain bottom of output raster
        bot_out_rst = snapRstExt[2] + (
            csize * int((shpAExt[2] - snapRstExt[2]) / csize))
        # use bottom to find the top of the output raster
        top_out_rst = bot_out_rst + (n_row * csize)

    return left_out_rst, top_out_rst, n_row, n_col, csize
def osm2lulc(osmdata, nomenclature, refRaster, lulcRst,
             overwrite=None, dataStore=None, roadsAPI='POSTGIS'):
    """
    Convert OSM data into Land Use/Land Cover Information.

    A matrix based approach.

    osmdata      - OSM file (e.g. .xml/.pbf) with the source data
    nomenclature - 'URBAN_ATLAS' | 'CORINE_LAND_COVER' | 'GLOBE_LAND_30'
                   (any other value falls back to 'URBAN_ATLAS')
    refRaster    - raster defining the reference area/grid
    lulcRst      - path to the output LULC raster
    overwrite    - overwrite the workspace if it already exists
    dataStore    - optional workspace folder (default: 'num_osmto' next to
                   lulcRst)
    roadsAPI     - 'SQLITE' | 'POSTGIS' backend used for the roads rules

    Returns (lulcRst, timings_dict).

    FIX vs. previous revision: the nomenclature validation used
    `nomenclature == "GLOBE_LAND_30"`, which rewrote the valid
    GLOBE_LAND_30 option to URBAN_ATLAS and let truly invalid values pass
    through untouched; the condition is now `!=` so only unknown values
    fall back to URBAN_ATLAS.
    """

    # ************************************************************************ #
    # Python Modules from Reference Packages #
    # ************************************************************************ #
    import os
    import numpy
    import datetime
    from threading import Thread
    from osgeo import gdal
    # ************************************************************************ #
    # Dependencies #
    # ************************************************************************ #
    from glass.g.rd.rst import rst_to_array
    from glass.g.prop import check_isRaster
    from glass.g.prop.rst import get_cellsize
    from glass.pys.oss import mkdir, copy_file
    from glass.pys.oss import fprop
    if roadsAPI == 'POSTGIS':
        from glass.ng.sql.db import create_db
        from glass.g.it.db import osm_to_psql
        from glass.ete.osm2lulc.mod2 import pg_num_roads
        from glass.ng.sql.bkup import dump_db
        from glass.ng.sql.db import drop_db
    else:
        from glass.g.it.osm import osm_to_sqdb
        from glass.ete.osm2lulc.mod2 import num_roads
    from glass.ete.osm2lulc.utils import osm_project, add_lulc_to_osmfeat
    from glass.ete.osm2lulc.utils import osmlulc_rsttbl
    from glass.ete.osm2lulc.utils import get_ref_raster
    from glass.ete.osm2lulc.mod1 import num_selection
    from glass.ete.osm2lulc.m3_4 import num_selbyarea
    from glass.ete.osm2lulc.mod5 import num_base_buffer
    from glass.ete.osm2lulc.mod6 import num_assign_builds
    from glass.g.wt.rst import obj_to_rst
    # ************************************************************************ #
    # Global Settings #
    # ************************************************************************ #
    # Check if input parameters exists!
    if not os.path.exists(os.path.dirname(lulcRst)):
        raise ValueError('{} does not exist!'.format(os.path.dirname(lulcRst)))

    if not os.path.exists(osmdata):
        raise ValueError(
            'File with OSM DATA ({}) does not exist!'.format(osmdata))

    if not os.path.exists(refRaster):
        raise ValueError(
            'File with reference area ({}) does not exist!'.format(refRaster))

    # Check if Nomenclature is valid; unknown values default to URBAN_ATLAS
    nomenclature = "URBAN_ATLAS" if nomenclature != "URBAN_ATLAS" and \
        nomenclature != "CORINE_LAND_COVER" and \
        nomenclature != "GLOBE_LAND_30" else nomenclature

    time_a = datetime.datetime.now().replace(microsecond=0)

    workspace = os.path.join(
        os.path.dirname(lulcRst), 'num_osmto') if not dataStore else dataStore

    # Check if workspace exists:
    if os.path.exists(workspace):
        if overwrite:
            mkdir(workspace, overwrite=True)
        else:
            raise ValueError('Path {} already exists'.format(workspace))
    else:
        mkdir(workspace, overwrite=None)

    # Get Ref Raster and EPSG
    refRaster, epsg = get_ref_raster(refRaster, workspace, cellsize=2)
    CELLSIZE = get_cellsize(refRaster, gisApi='gdal')

    from glass.ete.osm2lulc import osmTableData, PRIORITIES

    time_b = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Convert OSM file to SQLITE DB or to POSTGIS DB #
    # ************************************************************************ #
    if roadsAPI == 'POSTGIS':
        osm_db = create_db(fprop(osmdata, 'fn', forceLower=True),
                           overwrite=True)
        osm_db = osm_to_psql(osmdata, osm_db)
    else:
        osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite'))
    time_c = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Add Lulc Classes to OSM_FEATURES by rule #
    # ************************************************************************ #
    add_lulc_to_osmfeat(osm_db, osmTableData, nomenclature, api=roadsAPI)
    time_d = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # Transform SRS of OSM Data #
    # ************************************************************************ #
    osmTableData = osm_project(
        osm_db, epsg, api=roadsAPI,
        isGlobeLand=None if nomenclature != "GLOBE_LAND_30" else True)
    time_e = datetime.datetime.now().replace(microsecond=0)
    # ************************************************************************ #
    # MapResults #
    # ************************************************************************ #
    mergeOut = {}
    timeCheck = {}
    RULES = [1, 2, 3, 4, 5, 7]

    def run_rule(ruleID):
        # One thread per rule; results/timings are stored by rule id.
        time_start = datetime.datetime.now().replace(microsecond=0)
        # With SQLITE each rule works on its own copy of the database
        _osmdb = copy_file(
            osm_db, os.path.splitext(osm_db)[0] + '_r{}.sqlite'.format(ruleID)
        ) if roadsAPI == 'SQLITE' else None
        # ******************************************************************** #
        # 1 - Selection Rule #
        # ******************************************************************** #
        if ruleID == 1:
            res, tm = num_selection(
                _osmdb if _osmdb else osm_db, osmTableData['polygons'],
                workspace, CELLSIZE, epsg, refRaster, api=roadsAPI)
        # ******************************************************************** #
        # 2 - Get Information About Roads Location #
        # ******************************************************************** #
        elif ruleID == 2:
            res, tm = num_roads(
                _osmdb, nomenclature, osmTableData['lines'],
                osmTableData['polygons'], workspace, CELLSIZE, epsg,
                refRaster) if _osmdb else pg_num_roads(
                    osm_db, nomenclature, osmTableData['lines'],
                    osmTableData['polygons'], workspace, CELLSIZE, epsg,
                    refRaster)
        # ******************************************************************** #
        # 3 - Area Upper than #
        # ******************************************************************** #
        elif ruleID == 3:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace, CELLSIZE, epsg,
                    refRaster, UPPER=True, api=roadsAPI)
            else:
                return
        # ******************************************************************** #
        # 4 - Area Lower than #
        # ******************************************************************** #
        elif ruleID == 4:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_selbyarea(
                    osm_db if not _osmdb else _osmdb,
                    osmTableData['polygons'], workspace, CELLSIZE, epsg,
                    refRaster, UPPER=False, api=roadsAPI)
            else:
                return
        # ******************************************************************** #
        # 5 - Get data from lines table (railway | waterway) #
        # ******************************************************************** #
        elif ruleID == 5:
            res, tm = num_base_buffer(
                osm_db if not _osmdb else _osmdb, osmTableData['lines'],
                workspace, CELLSIZE, epsg, refRaster, api=roadsAPI)
        # ******************************************************************** #
        # 7 - Assign untagged Buildings to tags #
        # ******************************************************************** #
        elif ruleID == 7:
            if nomenclature != "GLOBE_LAND_30":
                res, tm = num_assign_builds(
                    osm_db if not _osmdb else _osmdb, osmTableData['points'],
                    osmTableData['polygons'], workspace, CELLSIZE, epsg,
                    refRaster, apidb=roadsAPI)
            else:
                return

        time_end = datetime.datetime.now().replace(microsecond=0)
        mergeOut[ruleID] = res
        timeCheck[ruleID] = {'total': time_end - time_start, 'detailed': tm}

    thrds = []
    for r in RULES:
        thrds.append(
            Thread(name="to_{}".format(str(r)), target=run_rule, args=(r, )))

    for t in thrds:
        t.start()
    for t in thrds:
        t.join()

    # Merge all results into one Raster
    compileResults = {}
    for rule in mergeOut:
        for cls in mergeOut[rule]:
            if cls not in compileResults:
                if isinstance(mergeOut[rule][cls], list):
                    compileResults[cls] = mergeOut[rule][cls]
                else:
                    compileResults[cls] = [mergeOut[rule][cls]]
            else:
                if isinstance(mergeOut[rule][cls], list):
                    compileResults[cls] += mergeOut[rule][cls]
                else:
                    compileResults[cls].append(mergeOut[rule][cls])

    time_m = datetime.datetime.now().replace(microsecond=0)

    # All Rasters to Array
    arrayRst = {}
    for cls in compileResults:
        for raster in compileResults[cls]:
            if not raster:
                continue
            array = rst_to_array(raster)
            if cls not in arrayRst:
                arrayRst[cls] = [array.astype(numpy.uint8)]
            else:
                arrayRst[cls].append(array.astype(numpy.uint8))
    time_n = datetime.datetime.now().replace(microsecond=0)

    # Sum Rasters of each class
    for cls in arrayRst:
        if len(arrayRst[cls]) == 1:
            sumArray = arrayRst[cls][0]
        else:
            sumArray = arrayRst[cls][0]
            for i in range(1, len(arrayRst[cls])):
                sumArray = sumArray + arrayRst[cls][i]
        arrayRst[cls] = sumArray
    time_o = datetime.datetime.now().replace(microsecond=0)

    # Apply priority rule
    __priorities = PRIORITIES[nomenclature + "_NUMPY"]

    for lulcCls in __priorities:
        __lulcCls = rstcls_map(lulcCls)
        if __lulcCls not in arrayRst:
            continue
        else:
            numpy.place(arrayRst[__lulcCls], arrayRst[__lulcCls] > 0, lulcCls)
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        if lulc_i not in arrayRst:
            continue
        else:
            # Higher-priority classes blank out lower-priority ones
            for e in range(i + 1, len(__priorities)):
                lulc_e = rstcls_map(__priorities[e])
                if lulc_e not in arrayRst:
                    continue
                else:
                    numpy.place(
                        arrayRst[lulc_e],
                        arrayRst[lulc_i] == __priorities[i], 0)
    time_p = datetime.datetime.now().replace(microsecond=0)

    # Merge all rasters
    startCls = 'None'
    for i in range(len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        if lulc_i in arrayRst:
            resultSum = arrayRst[lulc_i]
            startCls = i
            break
    if startCls == 'None':
        return 'NoResults'
    for i in range(startCls + 1, len(__priorities)):
        lulc_i = rstcls_map(__priorities[i])
        if lulc_i not in arrayRst:
            continue
        resultSum = resultSum + arrayRst[lulc_i]

    # Save Result
    outIsRst = check_isRaster(lulcRst)
    if not outIsRst:
        from glass.pys.oss import fprop
        lulcRst = os.path.join(
            os.path.dirname(lulcRst), fprop(lulcRst, 'fn') + '.tif')
    numpy.place(resultSum, resultSum == 0, 1)
    obj_to_rst(resultSum, lulcRst, refRaster, noData=1)
    osmlulc_rsttbl(
        nomenclature + "_NUMPY",
        os.path.join(os.path.dirname(lulcRst),
                     os.path.basename(lulcRst) + '.vat.dbf'))
    time_q = datetime.datetime.now().replace(microsecond=0)

    # Dump Database if PostGIS was used
    # Drop Database if PostGIS was used
    if roadsAPI == 'POSTGIS':
        dump_db(osm_db, os.path.join(workspace, osm_db + '.sql'), api='psql')
        drop_db(osm_db)

    return lulcRst, {
        0: ('set_settings', time_b - time_a),
        1: ('osm_to_sqdb', time_c - time_b),
        2: ('cls_in_sqdb', time_d - time_c),
        3: ('proj_data', time_e - time_d),
        4: ('rule_1', timeCheck[1]['total'], timeCheck[1]['detailed']),
        5: ('rule_2', timeCheck[2]['total'], timeCheck[2]['detailed']),
        6: None if 3 not in timeCheck else (
            'rule_3', timeCheck[3]['total'], timeCheck[3]['detailed']),
        7: None if 4 not in timeCheck else (
            'rule_4', timeCheck[4]['total'], timeCheck[4]['detailed']),
        8: ('rule_5', timeCheck[5]['total'], timeCheck[5]['detailed']),
        9: None if 7 not in timeCheck else (
            'rule_7', timeCheck[7]['total'], timeCheck[7]['detailed']),
        10: ('rst_to_array', time_n - time_m),
        11: ('sum_cls', time_o - time_n),
        12: ('priority_rule', time_p - time_o),
        13: ('merge_rst', time_q - time_p)
    }