def sample(self, expression):
    """ Sampling the current values of 'expression' at the given locations
    for the current timestep """
    arrayRowPos = self._userModel.currentTimeStep() - self._userModel.firstTimeStep()

    #if isinstance(expression, float):
    #  expression = pcraster.scalar(expression)

    try:
        # store the data type for tss file header
        if self._spatialDatatype is None:
            self._spatialDatatype = str(expression.dataType())
    except AttributeError as e:
        datatype, sep, tail = str(e).partition(" ")
        msg = "Argument must be a PCRaster map, type %s given. If necessary use data conversion functions like scalar()" % (datatype)
        raise AttributeError(msg)

    if self._spatialIdGiven:
        if expression.dataType() == pcraster.Scalar or expression.dataType() == pcraster.Directional:
            tmp = pcraster.areaaverage(pcraster.spatial(expression), pcraster.spatial(self._spatialId))
        else:
            tmp = pcraster.areamajority(pcraster.spatial(expression), pcraster.spatial(self._spatialId))

        col = 0
        for cellIndex in self._sampleAddresses:
            value, valid = pcraster.cellvalue(tmp, cellIndex)
            if not valid:
                value = Decimal("NaN")
            self._sampleValues[arrayRowPos][col] = value
            col += 1
    else:
        if expression.dataType() == pcraster.Scalar or expression.dataType() == pcraster.Directional:
            tmp = pcraster.maptotal(pcraster.spatial(expression)) \
                  / pcraster.maptotal(pcraster.scalar(pcraster.defined(pcraster.spatial(expression))))
        else:
            tmp = pcraster.mapmaximum(pcraster.maptotal(pcraster.areamajority(pcraster.spatial(expression),
                  pcraster.spatial(pcraster.nominal(1)))))

        value, valid = pcraster.cellvalue(tmp, 1)
        if not valid:
            value = Decimal("NaN")
        self._sampleValues[arrayRowPos] = value

    if self._userModel.currentTimeStep() == self._userModel.nrTimeSteps():
        self._writeTssFile()
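# A hypothetical usage of the TimeoutputTimeseries class that owns this sample()
# method (the class definition appears later in this section), assuming a
# pcraster.framework DynamicModel; the clone, sample-location and output file
# names below are illustrative only.
import pcraster
import pcraster.framework as pcrfw

class RunoffModel(pcrfw.DynamicModel):
    def __init__(self):
        pcrfw.DynamicModel.__init__(self)
        pcraster.setclone("clone.map")

    def initial(self):
        # one time series column per sample location in samples.map
        self.runoffTss = TimeoutputTimeseries("runoff", self, idMap="samples.map", noHeader=False)

    def dynamic(self):
        runoff = pcraster.spatial(pcraster.scalar(1.0))  # placeholder expression
        self.runoffTss.sample(runoff)

dynModel = pcrfw.DynamicFramework(RunoffModel(), lastTimeStep=10, firstTimestep=1)
dynModel.run()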
def spatialInterpolation2PCR(fieldArray, pcrType, MV):
    #-interpolates the field array to the full extent
    field = pcr.numpy2pcr(pcrType, fieldArray, MV)
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    if pcrType == pcr.Scalar:
        field = pcr.areaaverage(field, zoneID)
    else:
        field = pcr.areamajority(field, zoneID)
    return field
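# A minimal usage sketch for spatialInterpolation2PCR(), assuming a clone map has
# already been set with pcr.setclone() and that the array has the same number of
# rows and columns as the clone; the array contents and missing value are
# illustrative only.
import numpy as np
import pcraster as pcr

mv = -999.9
field_array = np.full((pcr.clone().nrRows(), pcr.clone().nrCols()), mv, dtype=np.float64)
field_array[0, 0] = 1.0   # a single observed value; all other cells are missing
full_field = spatialInterpolation2PCR(field_array, pcr.Scalar, mv)
pcr.report(full_field, "interpolated.map")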
def main(): # output folder clean_out_folder = True if os.path.exists(out_folder): if clean_out_folder: shutil.rmtree(out_folder) os.makedirs(out_folder) else: os.makedirs(out_folder) os.chdir(out_folder) os.system("pwd") # tmp folder tmp_folder = out_folder + "/tmp/" if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder) os.makedirs(tmp_folder) # set the clone map print("set the clone map") pcr.setclone(global_clone_map_file) # read ldd map print("define the ldd") # ~ ldd_map = pcr.readmap(global_ldd_inp_file) ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(v = global_ldd_inp_file, \ cloneMapFileName = global_clone_map_file, \ tmpDir = tmp_folder, \ absolutePath = None, \ isLddMap = True, \ cover = None, \ isNomMap = False)))) # define the landmask if landmask_map_file == None: print("define the landmask based on the ldd input") # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file)) landmask = pcr.defined(ldd_map) landmask = pcr.ifthen(landmask, landmask) else: print("define the landmask based on the input landmask_map_file") landmask = pcr.readmap(landmask_map_file) ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5))) ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(ldd_map))) landmask = pcr.defined(ldd_map) landmask = pcr.ifthen(landmask, landmask) # save ldd files used # - global ldd cmd = "cp " + str(global_ldd_inp_file) + " ." print(cmd) os.system(cmd) # - ldd map that is used pcr.report(ldd_map, "lddmap_used.map") # make catchment map print("make catchment map") catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map)) # read global subdomain file print("read global subdomain file") global_subdomain_map = vos.readPCRmapClone( v=global_subdomain_file, cloneMapFileName=global_clone_map_file, tmpDir=tmp_folder, absolutePath=None, isLddMap=False, cover=None, isNomMap=True) # set initial subdomain print("assign subdomains to all catchments") subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map) subdomains_initial = pcr.ifthen(landmask, subdomains_initial) pcr.aguila(subdomains_initial) pcr.report(subdomains_initial, "global_subdomains_initial.map") print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]))) print("Checking all subdomains, avoid too large subdomains") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]) # clone code that will be assigned assigned_number = 0 subdomains_final = pcr.ifthen( pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0)) for nr in range(1, num_of_masks + 1, 1): msg = "Processing the landmask %s" % (str(nr)) print(msg) mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0)) process_this_clone = False if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1, 1)[0] > 0: process_this_clone = True # ~ if nr == 1: pcr.aguila(mask_selected_boolean) # - initial check value check_ok = True if process_this_clone: xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) # ~ print(str(area_in_degree2)) # check whether the size of bounding box is ok reference_area_in_degree2 = 2500. if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False # ~ # ignore checking # ~ check_ok = True if check_ok == True and process_this_clone == True: msg = "Clump is not needed." 
msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) if check_ok == False and process_this_clone == True: msg = "Clump is needed." msg = "\n\n" + str(msg) + "\n\n" print(msg) # make clump clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean)) # merge clumps that are close together clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0) clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids) # ~ pcr.aguila(clump_ids) # minimimum and maximum values min_clump_id = int( pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0]) max_clump_id = int( pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0]) for clump_id in range(min_clump_id, max_clump_id + 1, 1): msg = "Processing the clump %s of %s from the landmask %s" % ( str(clump_id), str(max_clump_id), str(nr)) msg = "\n\n" + str(msg) + "\n\n" print(msg) # identify mask based on the clump mask_selected_boolean_from_clump = pcr.ifthen( clump_ids == pcr.nominal(clump_id), mask_selected_boolean) mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean_from_clump, mask_selected_boolean_from_clump) # check whether the clump is empty check_mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean, mask_selected_boolean_from_clump) check_if_empty = float( pcr.cellvalue( pcr.mapmaximum( pcr.scalar( pcr.defined( check_mask_selected_boolean_from_clump))), 1)[0]) if check_if_empty == 0.0: msg = "Map is empty !" msg = "\n\n" + str(msg) + "\n\n" print(msg) else: msg = "Map is NOT empty !" msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen( mask_selected_boolean_from_clump, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) # ~ # kill all aguila processes if exist # ~ os.system('killall aguila') pcr.aguila(subdomains_final) print("") print("") print("") print("The subdomain map is READY.") pcr.report(subdomains_final, "global_subdomains_final.map") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) print(num_of_masks) print("") print("") print("") print("Making the clone and landmask maps for all subdomains") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) # clone and mask folders clone_folder = out_folder + "/clone/" if os.path.exists(clone_folder): shutil.rmtree(clone_folder) os.makedirs(clone_folder) mask_folder = out_folder + "/mask/" if os.path.exists(mask_folder): shutil.rmtree(mask_folder) os.makedirs(mask_folder) print("") print("") for nr in range(1, num_of_masks + 1, 1): msg = "Processing the subdomain %s" % (str(nr)) print(msg) # set the global clone pcr.setclone(global_clone_map_file) mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0)) mask_selected_nominal = pcr.ifthen(subdomains_final == nr, pcr.nominal(nr)) mask_file = "mask/mask_%s.map" % (str(nr)) pcr.report(mask_selected_nominal, mask_file) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) print( str(nr) + " ; " + str(area_in_degree2) + " ; " + str((xmax - xmin)) + " ; " + str((ymax - ymin))) # cellsize in arcdegree cellsize = cellsize_in_arcmin / 60. 
# number of rows and cols num_rows = int(round(ymax - ymin) / cellsize) num_cols = int(round(xmax - xmin) / cellsize) # make the clone map using mapattr clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr)) cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % ( str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize), clonemap_mask_file) print(cmd) os.system(cmd) # set the local landmask for the clump pcr.setclone(clonemap_mask_file) local_mask = vos.readPCRmapClone(v = mask_file, \ cloneMapFileName = clonemap_mask_file, tmpDir = tmp_folder, \ absolutePath = None, isLddMap = False, cover = None, isNomMap = True) local_mask_boolean = pcr.defined(local_mask) local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean) pcr.report(local_mask_boolean, mask_file) print("") print("") print("") print(num_of_masks)
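# boundingBox() is called in the script above but not defined in this section.
# A minimal sketch of what such a helper could look like, assuming the input is
# a boolean PCRaster map on a geographic (lon/lat) clone and that the bounding
# box is expressed in cell-centre coordinates; the real helper may differ.
import pcraster as pcr

def boundingBox(boolean_map):
    # x/y coordinates of the cells that are True in the input map
    x = pcr.ifthen(boolean_map, pcr.xcoordinate(boolean_map))
    y = pcr.ifthen(boolean_map, pcr.ycoordinate(boolean_map))
    xmin = pcr.cellvalue(pcr.mapminimum(x), 1)[0]
    xmax = pcr.cellvalue(pcr.mapmaximum(x), 1)[0]
    ymin = pcr.cellvalue(pcr.mapminimum(y), 1)[0]
    ymax = pcr.cellvalue(pcr.mapmaximum(y), 1)[0]
    return xmin, ymin, xmax, ymax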
def getParameterFiles(self,currTimeStep,cellArea,ldd,\ initial_condition_dictionary = None,\ currTimeStepInDateTimeFormat = False): # parameters for Water Bodies: fracWat # waterBodyIds # waterBodyOut # waterBodyArea # waterBodyTyp # waterBodyCap # cell surface area (m2) and ldd self.cellArea = cellArea ldd = pcr.ifthen(self.landmask, ldd) # date used for accessing/extracting water body information if currTimeStepInDateTimeFormat: date_used = currTimeStep year_used = currTimeStep.year else: date_used = currTimeStep.fulldate year_used = currTimeStep.year if self.onlyNaturalWaterBodies == True: date_used = self.dateForNaturalCondition year_used = self.dateForNaturalCondition[0:4] # fracWat = fraction of surface water bodies (dimensionless) self.fracWat = pcr.scalar(0.0) if self.useNetCDF: self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.fracWat = vos.readPCRmapClone(\ self.fracWaterInp+str(year_used)+".map", self.cloneMap,self.tmpDir,self.inputDir) self.fracWat = pcr.cover(self.fracWat, 0.0) self.fracWat = pcr.max(0.0, self.fracWat) self.fracWat = pcr.min(1.0, self.fracWat) self.waterBodyIds = pcr.nominal(0) # waterBody ids self.waterBodyOut = pcr.boolean(0) # waterBody outlets self.waterBodyArea = pcr.scalar(0.) # waterBody surface areas # water body ids if self.useNetCDF: self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.waterBodyIds = vos.readPCRmapClone(\ self.waterBodyIdsInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir,False,None,True) # self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.nominal(self.waterBodyIds)) # water body outlets (correcting outlet positions) wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd) self.waterBodyOut = pcr.ifthen(wbCatchment ==\ pcr.areamaximum(wbCatchment, \ self.waterBodyIds),\ self.waterBodyIds) # = outlet ids # This may give more than two outlets, particularly if there are more than one cells that have largest upstream areas # - make sure that there is only one outlet for each water body self.waterBodyOut = pcr.ifthen(\ pcr.areaorder(pcr.scalar(self.waterBodyOut), \ self.waterBodyOut) == 1., self.waterBodyOut) self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # TODO: Please also consider endorheic lakes! # correcting water body ids self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.subcatchment(ldd,self.waterBodyOut)) # boolean map for water body outlets: self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyOut) > 0.,\ pcr.boolean(1)) # reservoir surface area (m2): if self.useNetCDF: resSfArea = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: resSfArea = 1000. * 1000. * vos.readPCRmapClone( self.resSfAreaInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir) resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds) resSfArea = pcr.cover(resSfArea, 0.) 
# water body surface area (m2): (lakes and reservoirs) self.waterBodyArea = pcr.max(pcr.areatotal(\ pcr.cover(\ self.fracWat*self.cellArea, 0.0), self.waterBodyIds), pcr.areaaverage(\ pcr.cover(resSfArea, 0.0) , self.waterBodyIds)) self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyArea) # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0) self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0., self.waterBodyIds) self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyOut) # water body types: # - 2 = reservoirs (regulated discharge) # - 1 = lakes (weirFormula) # - 0 = non lakes or reservoirs (e.g. wetland) self.waterBodyTyp = pcr.nominal(0) if self.useNetCDF: self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.waterBodyTyp = vos.readPCRmapClone( self.waterBodyTypInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir,False,None,True) # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs # self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\ self.waterBodyIds) # choose only one type: either lake or reservoir self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyTyp) # correcting lakes and reservoirs ids and outlets self.waterBodyIds = pcr.ifthen( pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds) self.waterBodyOut = pcr.ifthen( pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut) # reservoir maximum capacity (m3): self.resMaxCap = pcr.scalar(0.0) self.waterBodyCap = pcr.scalar(0.0) if self.useNetCDF: self.resMaxCap = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.resMaxCap = 1000. * 1000. * vos.readPCRmapClone(\ self.resMaxCapInp+str(year_used)+".map", \ self.cloneMap,self.tmpDir,self.inputDir) self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\ self.resMaxCap) self.resMaxCap = pcr.areaaverage(self.resMaxCap,\ self.waterBodyIds) # water body capacity (m3): (lakes and reservoirs) self.waterBodyCap = pcr.cover( self.resMaxCap, 0.0) # Note: Most of lakes have capacities > 0. self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyCap) # correcting water body types: # Reservoirs that have zero capacities will be assumed as lakes. 
self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\ self.waterBodyTyp,\ pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\ pcr.nominal(1),\ self.waterBodyTyp)) # final corrections: self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyTyp) # make sure that all lakes and/or reservoirs have surface areas self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) # make sure that only types 1 and 2 will be considered in lake/reservoir functions self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyIds) # make sure that all lakes and/or reservoirs have ids self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # make sure that all lakes and/or reservoirs have outlets # for a natural run (self.onlyNaturalWaterBodies == True) # which uses only the year 1900, assume all reservoirs are lakes if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition: logger.info( "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes." ) self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ pcr.nominal(1)) # check that all lakes and/or reservoirs have types, ids, surface areas and outlets: test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\ pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds)) a, b, c = vos.getMinMaxMean( pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0)) threshold = 1e-3 if abs(a) > threshold or abs(b) > threshold: logger.warning( "Missing information in some lakes and/or reservoirs.") # at the beginning of simulation period (timeStepPCR = 1) # - we have to define/get the initial conditions # if initial_condition_dictionary != None and currTimeStep.timeStepPCR == 1: self.getICs(initial_condition_dictionary) # For each new reservoir (introduced at the beginning of the year) # initiating storage, average inflow and outflow # PS: THIS IS NOT NEEDED FOR OFFLINE MODFLOW RUN! # try: self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0) self.avgInflow = pcr.cover(self.avgInflow, 0.0) self.avgOutflow = pcr.cover(self.avgOutflow, 0.0) self.waterBodyStorage = pcr.ifthen(self.landmask, self.waterBodyStorage) self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow) self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow) except: # PS: FOR OFFLINE MODFLOW RUN! pass # TODO: Remove try and except # cropping only in the landmask region: self.fracWat = pcr.ifthen(self.landmask, self.fracWat) self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds) self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut) self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea) self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp) self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
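# The outlet correction above keeps exactly one outlet per water body by ranking
# candidate outlet cells with pcr.areaorder and keeping only the first-ranked cell.
# A standalone sketch of that idiom; `candidate_outlets` is a hypothetical nominal
# map whose cell values are the ids of the water bodies the candidates belong to.
import pcraster as pcr

def keep_one_outlet_per_body(candidate_outlets):
    # rank the candidate cells within each water body (cells sharing the same id)
    # and keep only the cell ranked first, so every id ends up with a single outlet
    return pcr.ifthen(
        pcr.areaorder(pcr.scalar(candidate_outlets), candidate_outlets) == 1.,
        candidate_outlets)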
def main(): # output folder (and tmp folder) clean_out_folder = True if os.path.exists(out_folder): if clean_out_folder: shutil.rmtree(out_folder) os.makedirs(out_folder) else: os.makedirs(out_folder) os.chdir(out_folder) os.system("pwd") # set the clone map print("set the clone") pcr.setclone(global_ldd_30min_inp_file) # define the landmask print("define the landmask") # - based on the 30min input landmask_30min = define_landmask(input_file = global_landmask_30min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_30min_only.map") # - based on the 05min input landmask_05min = define_landmask(input_file = global_landmask_05min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_05min_only.map") # - based on the 06min input landmask_06min = define_landmask(input_file = global_landmask_06min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_06min_only.map") # - based on the 30sec input landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_30sec_only.map") # - based on the 30sec input landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_03sec_only.map") # # - merge all landmasks landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min, landmask_30sec, landmask_03sec) pcr.report(landmask, "global_landmask_extended_30min.map") # ~ pcr.aguila(landmask) # extend ldd print("extend/define the ldd") ldd_map = pcr.readmap(global_ldd_30min_inp_file) ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5))) pcr.report(ldd_map, "global_ldd_extended_30min.map") # ~ pcr.aguila(ldd_map) # catchment map and size catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map)) catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # ~ pcr.aguila(catchment_size) # identify small islands print("identify small islands") # - maps of islands smaller than 15000 cells (at half arc degree resolution) island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map))) island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map) island_map = pcr.ifthen(island_size < 15000., island_map) # ~ # - use catchments (instead of islands) # ~ island_map = catchment_map # ~ island_size = catchment_size # ~ island_map = pcr.ifthen(island_size < 10000., island_map) # - sort from the largest island # -- take one cell per island as a representative island_map_rep_size = pcr.ifthen( pcr.areaorder(island_size, island_map) == 1.0, island_size) # -- sort from the largest island island_map_rep_ids = pcr.areaorder( island_map_rep_size * -1.00, pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0))) # -- map of smaller islands, sorted from the largest one island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map) # identify the biggest island for every group of small islands within a certain window (arcdeg cells) print("the biggest island for every group of small islands") large_island_map = pcr.ifthen( pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map), 15.), island_map) # ~ pcr.aguila(large_island_map) # identify big catchments print("identify large catchments") catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map)) catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x 
(50^2) km2 = 125000 km2 # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map) # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map) # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map) # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map) # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin) large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map) # - give the codes that are different than islands large_catchment_map = pcr.nominal( pcr.scalar(large_catchment_map) + 10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1]) # merge biggest islands and big catchments print("merge large catchments and islands") large_catchment_and_island_map = pcr.cover(large_catchment_map, large_island_map) # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map) large_catchment_and_island_map_size = pcr.areatotal( pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map) # - sort from the largest one # -- take one cell per island as a representative large_catchment_and_island_map_rep_size = pcr.ifthen( pcr.areaorder(large_catchment_and_island_map_size, large_catchment_and_island_map) == 1.0, large_catchment_and_island_map_size) # -- sort from the largest large_catchment_and_island_map_rep_ids = pcr.areaorder( large_catchment_and_island_map_rep_size * -1.00, pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size), pcr.nominal(1.0))) # -- map of largest catchments and islands, sorted from the largest one large_catchment_and_island_map = pcr.areamajority( pcr.nominal(large_catchment_and_island_map_rep_ids), large_catchment_and_island_map) # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map") # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains") # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc" # ~ print(cmd); os.system(cmd) # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc" # ~ print(cmd); os.system(cmd) # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map" # ~ print(cmd); os.system(cmd) # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map" # ~ print(cmd); os.system(cmd) # ~ # - initial subdomains # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map")) # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map) # ~ pcr.aguila(subdomains_initial) # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments print("spatial interpolation/extrapolation to get initial subdomains") field = large_catchment_and_island_map cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field))) zoneID = pcr.spreadzone(cellID, 0, 1) field = pcr.areamajority(field, zoneID) subdomains_initial = field subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map) pcr.aguila(subdomains_initial) 
pcr.report(subdomains_initial, "global_subdomains_30min_initial.map") print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]))) # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0]))) # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1]))) print("Checking all subdomains, avoid too large subdomains") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]) # clone code that will be assigned assigned_number = 0 subdomains_final = pcr.ifthen( pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0)) for nr in range(1, num_of_masks + 1, 1): msg = "Processing the landmask %s" % (str(nr)) print(msg) mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0)) # ~ if nr == 1: pcr.aguila(mask_selected_boolean) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) # ~ print(str(area_in_degree2)) # check whether the size of bounding box is ok # - initial check value check_ok = True reference_area_in_degree2 = 2500. if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False if check_ok == True: msg = "Clump is not needed." msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) if check_ok == False: msg = "Clump is needed." msg = "\n\n" + str(msg) + "\n\n" print(msg) # make clump clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean)) # merge clumps that are close together clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0) clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids) # ~ pcr.aguila(clump_ids) # minimimum and maximum values min_clump_id = int( pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0]) max_clump_id = int( pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0]) for clump_id in range(min_clump_id, max_clump_id + 1, 1): msg = "Processing the clump %s of %s from the landmask %s" % ( str(clump_id), str(max_clump_id), str(nr)) msg = "\n\n" + str(msg) + "\n\n" print(msg) # identify mask based on the clump mask_selected_boolean_from_clump = pcr.ifthen( clump_ids == pcr.nominal(clump_id), mask_selected_boolean) mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean_from_clump, mask_selected_boolean_from_clump) # check whether the clump is empty check_mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean, mask_selected_boolean_from_clump) check_if_empty = float( pcr.cellvalue( pcr.mapmaximum( pcr.scalar( pcr.defined( check_mask_selected_boolean_from_clump))), 1)[0]) if check_if_empty == 0.0: msg = "Map is empty !" msg = "\n\n" + str(msg) + "\n\n" print(msg) else: msg = "Map is NOT empty !" 
msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen( mask_selected_boolean_from_clump, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) # ~ # kill all aguila processes if exist # ~ os.system('killall aguila') pcr.aguila(subdomains_final) print("") print("") print("") print("The subdomain map is READY.") pcr.report(subdomains_final, "global_subdomains_30min_final.map") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) print(num_of_masks) print("") print("") print("") for nr in range(1, num_of_masks + 1, 1): mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0)) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) print( str(nr) + " ; " + str(area_in_degree2) + " ; " + str((xmax - xmin)) + " ; " + str((ymax - ymin))) print("") print("") print("") print("Number of subdomains: " + str(num_of_masks)) print("") print("") print("") # spatial extrapolation in order to cover the entire map print("spatial interpolation/extrapolation to cover the entire map") field = subdomains_final cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field))) zoneID = pcr.spreadzone(cellID, 0, 1) field = pcr.areamajority(field, zoneID) subdomains_final_filled = field pcr.aguila(subdomains_final_filled) pcr.report(subdomains_final_filled, "global_subdomains_30min_final_filled.map")
def getParameterFiles(self, date_given, cellArea, ldd): # parameters for Water Bodies: fracWat # waterBodyIds # waterBodyOut # waterBodyArea # waterBodyTyp # waterBodyCap # cell surface area (m2) and ldd self.cellArea = cellArea ldd = pcr.ifthen(self.landmask, ldd) # date and year used for accessing/extracting water body information date_used = date_given if self.onlyNaturalWaterBodies == True: date_used = self.dateForNaturalCondition year_used = date_used[0:4] # fracWat = fraction of surface water bodies (dimensionless) self.fracWat = pcr.scalar(0.0) if self.useNetCDF: self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) self.fracWat = pcr.cover(self.fracWat, 0.0) self.fracWat = pcr.max(0.0, self.fracWat) self.fracWat = pcr.min(1.0, self.fracWat) self.waterBodyIds = pcr.nominal(0) # waterBody ids self.waterBodyOut = pcr.boolean(0) # waterBody outlets self.waterBodyArea = pcr.scalar(0.) # waterBody surface areas # water body ids if self.useNetCDF: self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.nominal(self.waterBodyIds)) # water body outlets (correcting outlet positions) wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd) self.waterBodyOut = pcr.ifthen(wbCatchment ==\ pcr.areamaximum(wbCatchment, \ self.waterBodyIds),\ self.waterBodyIds) # = outlet ids self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # TODO: Please also consider endorheic lakes! # correcting water body ids self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.subcatchment(ldd,self.waterBodyOut)) # boolean map for water body outlets: self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyOut) > 0.,\ pcr.boolean(1)) # reservoir surface area (m2): if self.useNetCDF: resSfArea = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds) resSfArea = pcr.cover(resSfArea, 0.) # water body surface area (m2): (lakes and reservoirs) self.waterBodyArea = pcr.max(pcr.areatotal(\ pcr.cover(\ self.fracWat*self.cellArea, 0.0), self.waterBodyIds), pcr.areaaverage(\ pcr.cover(resSfArea, 0.0) , self.waterBodyIds)) self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyArea) # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0) self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0., self.waterBodyIds) self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyOut) # water body types: # - 2 = reservoirs (regulated discharge) # - 1 = lakes (weirFormula) # - 0 = non lakes or reservoirs (e.g. 
wetland) self.waterBodyTyp = pcr.nominal(0) if self.useNetCDF: self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs # self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\ self.waterBodyIds) # choose only one type: either lake or reservoir self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyTyp) # correcting lakes and reservoirs ids and outlets self.waterBodyIds = pcr.ifthen( pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds) self.waterBodyOut = pcr.ifthen( pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut) # reservoir maximum capacity (m3): self.resMaxCap = pcr.scalar(0.0) self.waterBodyCap = pcr.scalar(0.0) if self.useNetCDF: self.resMaxCap = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\ self.resMaxCap) self.resMaxCap = pcr.areaaverage(self.resMaxCap,\ self.waterBodyIds) # water body capacity (m3): (lakes and reservoirs) self.waterBodyCap = pcr.cover( self.resMaxCap, 0.0) # Note: Most of lakes have capacities > 0. self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyCap) # correcting water body types: # Reservoirs that have zero capacities will be assumed as lakes. self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\ self.waterBodyTyp,\ pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\ pcr.nominal(1),\ self.waterBodyTyp)) # final corrections: self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyTyp) # make sure that all lakes and/or reservoirs have surface areas self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) # make sure that only types 1 and 2 will be considered in lake/reservoir functions self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyIds) # make sure that all lakes and/or reservoirs have ids self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # make sure that all lakes and/or reservoirs have outlets # for a natural run (self.onlyNaturalWaterBodies == True) # which uses only the year 1900, assume all reservoirs are lakes if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition: logger.info( "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes." 
) self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ pcr.nominal(1)) # check that all lakes and/or reservoirs have types, ids, surface areas and outlets: test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\ pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds)) a, b, c = vos.getMinMaxMean( pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0)) threshold = 1e-3 if abs(a) > threshold or abs(b) > threshold: logger.warning( "Missing information in some lakes and/or reservoirs.") # cropping only in the landmask region: self.fracWat = pcr.ifthen(self.landmask, self.fracWat) self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds) self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut) self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea) self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp) self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
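# A hypothetical call to getParameterFiles(), assuming `water_bodies` is an
# instance of the class that owns it and that its landmask, cloneMap, ncFileInp,
# useNetCDF, tmpDir and inputDir attributes are already set; the map file names
# and the date string are illustrative only.
cell_area = pcr.readmap("cellarea.map")
ldd = pcr.readmap("lddsound.map")
water_bodies.getParameterFiles(date_given="2000-01-01",
                               cellArea=cell_area,
                               ldd=ldd)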
def main(): clone_map = "mask\mask.map" clone_shp = "mask\mask.shp" clone_prj = "mask\mask.prj" workdir = "work\\" resultdir = "staticmaps\\" ''' read commandline arguments ''' argv = sys.argv clone_EPSG = False try: opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA') except getopt.error: print 'fout' Usage() sys.exit(1) inifile = None rivshp = None catchshp = None dem_in = None landuse = None soiltype = None clean = False gaugeshp = None alltouching = False for o, a in opts: if o == '-i': inifile = a if o == '-p': clone_EPSG = 'EPSG:' + a if o == '-r': rivshp = a if o == '-c': catchshp = a if o == '-d': dem_in = a if o == '-l': landuse = a if o == '-s': soiltype = a if o == '-C': clean = True if o == '-g': gaugeshp = a if o == '-A': alltouching = True if inifile == None or rivshp == None or catchshp == None or dem_in == None: print 'the following files are compulsory:' print ' - ini-file' print ' - DEM (raster)' print ' - river (shape)' print ' - catchment (shape)' Usage() sys.exit(1) if landuse == None: print 'no raster with landuse classifications is specified. 1 class will be applied for the entire domain' if soiltype == None: print 'no raster with soil classifications is specified. 1 class will be applied for the entire domain' ''' read mask ''' if not os.path.exists(clone_map): print 'Mask not found. Make sure the file mask\mask.map exists' print 'This file is usually created with the CreateGrid script' sys.exit(1) else: pcr.setclone(clone_map) ds = gdal.Open(clone_map, GA_ReadOnly) clone_trans = ds.GetGeoTransform() cellsize = clone_trans[1] clone_rows = ds.RasterYSize clone_columns = ds.RasterXSize extent_mask = [ clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize, clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3] ] xmin, ymin, xmax, ymax = map(str, extent_mask) ds = None ones = pcr.scalar(pcr.readmap(clone_map)) zeros = ones * 0 empty = pcr.ifthen(ones == 0, pcr.scalar(0)) ''' read projection from mask.shp ''' # TODO: check how to deal with projections (add .prj to mask.shp in creategrid) if not os.path.exists(clone_prj): print 'please add prj-file to mask.shp' sys.exit(1) if os.path.exists(clone_shp): ds = ogr.Open(clone_shp) file_att = os.path.splitext(os.path.basename(clone_shp))[0] lyr = ds.GetLayerByName(file_att) spatialref = lyr.GetSpatialRef() if not spatialref == None: srs_clone = osr.SpatialReference() srs_clone.ImportFromWkt(spatialref.ExportToWkt()) srs_clone.AutoIdentifyEPSG() unit_clone = False unit_clone = srs_clone.GetAttrValue('UNIT').lower() #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1) # TODO: fix hard EPSG code below clone_EPSG = 'EPSG:' + '4167' print 'EPSG-code is read from mask.shp: ' + clone_EPSG spatialref == None if not clone_EPSG: print 'EPSG-code cannot be read from mask.shp' print 'please add prj-file to mask.shp or specify on command line' print 'e.g. 
-p EPSG:4326 (for WGS84 lat lon projection)' ds = None clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)]) ''' open config-file ''' config = wt.OpenConf(inifile) ''' read settings ''' snapgaugestoriver = bool( int(wt.configget(config, "settings", "snapgaugestoriver", "1"))) burnalltouching = bool( int(wt.configget(config, "settings", "burncatchalltouching", "1"))) burninorder = bool( int(wt.configget(config, "settings", "burncatchalltouching", "0"))) verticetollerance = float( wt.configget(config, "settings", "vertice_tollerance", "0.0001")) ''' read parameters ''' burn_outlets = int( wt.configget(config, "parameters", "burn_outlets", 10000)) burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200)) burn_connections = int( wt.configget(config, "parameters", "burn_connections", 100)) burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100)) minorder = int(wt.configget(config, "parameters", "riverorder_min", 3)) exec "percentile=tr.array(" + wt.configget(config, "parameters", "statisticmaps", [0, 100]) + ")" if not unit_clone: print 'failed to read unit (meter or degree) from mask projection' unit_clone = str(wt.configget(config, "settings", "unit", 'meter')) print 'unit read from settings: ' + unit_clone if unit_clone == 'degree': cellsize_hr = float( wt.configget(config, "parameters", "highres_degree", 0.0005)) elif (unit_clone == 'metre') or (unit_clone == 'meter'): cellsize_hr = float( wt.configget(config, "parameters", "highres_metre", 50)) cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2) rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2) hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0, -cellsize_hr) ''' read staticmap locations ''' catchment_map = wt.configget(config, "staticmaps", "catchment", "wflow_catchment.map") dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map") demmax_map = wt.configget(config, "staticmaps", "demmax", "wflow_demmax.map") demmin_map = wt.configget(config, "staticmaps", "demmin", "wflow_demmin.map") gauges_map = wt.configget(config, "staticmaps", "gauges", "wflow_gauges.map") landuse_map = wt.configget(config, "staticmaps", "landuse", "wflow_landuse.map") ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map") river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map") outlet_map = wt.configget(config, "staticmaps", "outlet", "wflow_outlet.map") riverlength_fact_map = wt.configget(config, "staticmaps", "riverlength_fact", "wflow_riverlength_fact.map") soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map") streamorder_map = wt.configget(config, "staticmaps", "streamorder", "wflow_streamorder.map") subcatch_map = wt.configget(config, "staticmaps", "subcatch", "wflow_subcatch.map") ''' read mask location (optional) ''' masklayer = wt.configget(config, "mask", "masklayer", catchshp) ''' create directories ''' if os.path.isdir(workdir): shutil.rmtree(workdir) os.makedirs(workdir) if os.path.isdir(resultdir): shutil.rmtree(resultdir) os.makedirs(resultdir) ''' Preperation steps ''' zero_map = workdir + "zero.map" zero_tif = workdir + "zero.tif" pcr.report(zeros, zero_map) # TODO: replace gdal_translate call call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot', 'Float32', zero_map, zero_tif)) pcr.setglobaloption("lddin") ''' resample DEM ''' dem_resample = workdir + "dem_resampled.tif" ds = gdal.Open(dem_in, GA_ReadOnly) band = ds.GetRasterBand(1) nodata = band.GetNoDataValue() proj = ds.GetGeoTransform() cellsize_dem = proj[1] 
''' read DEM projection ''' spatialref == None spatialref = ds.GetProjection() if not spatialref == None: srs = osr.SpatialReference() srs.ImportFromWkt(spatialref) srs.AutoIdentifyEPSG() dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1) print 'EPSG-code is read from ' + os.path.basename( dem_in) + ': ' + dem_EPSG spatialref == None dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)]) srs_DEM = osr.SpatialReference() srs_DEM.ImportFromEPSG(dem_EPSG_int) clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM) else: dem_EPSG = clone_EPSG print 'No projection defined for ' + os.path.basename(dem_in) print 'Assumed to be the same as model projection (' + clone_EPSG + ')' ds = None print 'Resampling DEM...' if nodata == None: call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata', str(-9999), '-r', 'cubic', dem_in, dem_resample)) else: call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-srcnodata', str(nodata), '-dstnodata', str(nodata), '-r', 'cubic', dem_in, dem_resample)) ''' create dem.map and statistic maps ''' dem_resample_map = resultdir + dem_map call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot', 'Float32', dem_resample, dem_resample_map)) print 'Computing DEM statistics ....' stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans, srs_clone, resultdir, percentile) ''' burn DEM ''' ds = ogr.Open(rivshp) file_att = os.path.splitext(os.path.basename(rivshp))[0] lyr = ds.GetLayerByName(file_att) spatialref = lyr.GetSpatialRef() # if not spatialref == None: # srs = osr.SpatialReference() # srs.ImportFromWkt(spatialref.ExportToWkt()) # srs.AutoIdentifyEPSG() # rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1) # spatialref == None # else: rivshp_EPSG = clone_EPSG print 'No projection defined for ' + file_att + '.shp' print 'Assumed to be the same as model projection (' + clone_EPSG + ')' # strip rivers to nodes xminc = str(float(xmin) + 0.5 * cellsize) yminc = str(float(ymin) + 0.5 * cellsize) xmaxc = str(float(xmax) - 0.5 * cellsize) ymaxc = str(float(ymax) - 0.5 * cellsize) if rivshp_EPSG == clone_EPSG: rivclipshp = workdir + 'rivshape_clip.shp' call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc, rivclipshp, rivshp)) else: rivprojshp = workdir + 'rivshape_proj.shp' rivclipshp = workdir + 'rivshape_clip.shp' call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, rivprojshp, rivshp)) call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat', xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc, rivclipshp, rivprojshp)) rivshp = rivclipshp #### BURNING BELOW #### # TODO: check if extraction can be done within memory and retun a burn layer shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int, cellsize * verticetollerance, workdir) outlets = shapes[1] connections = shapes[2] outlets_att = os.path.splitext(os.path.basename(outlets))[0] connections_att = os.path.splitext(os.path.basename(connections))[0] dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0] connections_tif = workdir + connections_att + ".tif" outlets_tif = workdir + outlets_att + ".tif" # TODO: make the burning in memory call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot', 'Float32', zero_map, connections_tif)) call(('gdal_translate', '-of', 'GTiff', '-a_srs', 
clone_EPSG, '-ot', 'Float32', zero_map, outlets_tif))
call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets, outlets_tif))
call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections, connections_tif))

# convert rivers to order
rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
rivers_tif = workdir + rivshp_att + ".tif"
call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG,
      '-ot', 'Float32', zero_map, rivers_tif))
if burninorder:  # make river shape with an order attribute
    OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                              cellsize * verticetollerance, workdir)
    wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
else:
    call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp, rivers_tif))

# convert to PCRaster maps
connections_map = workdir + connections_att + ".map"
rivers_map = workdir + rivshp_att + ".map"
outlets_map = workdir + outlets_att + ".map"
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
      '-ot', 'Float32', connections_tif, connections_map))
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
      '-ot', 'Float32', rivers_tif, rivers_map))
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
      '-ot', 'Float32', outlets_tif, outlets_map))

# burn the layers into the DEM
outletsburn = pcr.scalar(pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
connectionsburn = pcr.scalar(pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
ldddem = pcr.cover(dem_resample_map, pcr.ifthen(riverburn > 0, pcr.scalar(0)))
ldddem = ldddem - outletsburn - connectionsburn - riverburn
ldddem = pcr.cover(ldddem, pcr.scalar(0))
pcr.report(ldddem, workdir + "dem_burn.map")

''' create ldd for multi-catchments '''
ldd = pcr.ldd(empty)

# reproject catchment shapefile
ds = ogr.Open(catchshp)
file_att = os.path.splitext(os.path.basename(catchshp))[0]
lyr = ds.GetLayerByName(file_att)
spatialref = lyr.GetSpatialRef()
# if not spatialref == None:
#     srs = osr.SpatialReference()
#     srs.ImportFromWkt(spatialref.ExportToWkt())
#     srs.AutoIdentifyEPSG()
#     catchshp_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
#     spatialref == None
# else:
catchshp_EPSG = clone_EPSG
print('No projection defined for ' + file_att + '.shp')
print('Assumed to be the same as model projection (' + clone_EPSG + ')')

# reproject catchment shape if necessary
if not catchshp_EPSG == clone_EPSG:
    catchprojshp = workdir + 'catchshape_proj.shp'
    call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
          catchprojshp, catchshp))
    catchshp = catchprojshp
ds.Destroy()

ds = ogr.Open(catchshp)
file_att = os.path.splitext(os.path.basename(catchshp))[0]
lyr = ds.GetLayerByName(file_att)

fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
fieldDef.SetWidth(12)
TEMP_out = Driver.CreateDataSource(workdir + "temp.shp")
if srs is not None:
    TEMP_LYR = TEMP_out.CreateLayer("temp", srs, geom_type=ogr.wkbMultiPolygon)
else:
    TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
TEMP_LYR.CreateField(fieldDef)

for i in range(lyr.GetFeatureCount()):
    orgfeature = lyr.GetFeature(i)
    geometry = orgfeature.geometry()
    feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
    feature.SetGeometry(geometry)
    feature.SetField("ID", str(i + 1))
    TEMP_LYR.CreateFeature(feature)
TEMP_out.Destroy()
ds.Destroy()

# rasterize catchment map
catchments_tif = workdir + "catchments.tif"
catchments_map = workdir + "catchments.map"
call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG,
      zero_map, catchments_tif))
if alltouching:
    call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
          workdir + 'temp.shp', catchments_tif))
else:
    call(('gdal_rasterize', '-a', 'ID', '-l', "temp",
          workdir + 'temp.shp', catchments_tif))
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
      catchments_tif, catchments_map))

catchments = pcr.readmap(catchments_map)
riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
# catchments = pcr.cover(pcr.ordinal(rivercatch), pcr.ordinal(pcr.ifthen(catchments > 0, catchments)), pcr.ordinal(0))
catchments = pcr.cover(
    pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
    pcr.ifthen(
        riverburn > 0,
        pcr.ordinal(
            pcr.spreadzone(pcr.nominal(catchments),
                           pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
rivercatch_map = workdir + "catchments_river.map"
catchclip_map = workdir + "catchments_clip.map"
pcr.report(rivercatch, rivercatch_map)
pcr.report(catchments, catchclip_map)

ds = ogr.Open(workdir + "temp.shp")
lyr = ds.GetLayerByName("temp")

print('calculating ldd')
for i in range(lyr.GetFeatureCount()):
    feature = lyr.GetFeature(i)
    catch = int(feature.GetField("ID"))
    print("calculating ldd for catchment: " + str(i + 1) + "/" +
          str(lyr.GetFeatureCount()) + "....")
    ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch, catchments)) * 0 + 1 * ldddem
    ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                               float("1E35"), float("1E35"))
    ldd = pcr.cover(ldd, ldd_select)
pcr.report(ldd, resultdir + ldd_map)
ds.Destroy()

''' report stream order, river and dem '''
streamorder = pcr.ordinal(pcr.streamorder(ldd))
river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
dem_resample_map = pcr.cover(dem_resample_map, pcr.scalar(river) * 0 + mindem)
pcr.report(dem_resample_map, resultdir + dem_map)
pcr.report(streamorder, resultdir + streamorder_map)
pcr.report(river, resultdir + river_map)

''' deal with your catchments '''
if gaugeshp is None:
    print('No gauges defined, using outlets instead')
    gauges = pcr.ordinal(
        pcr.uniqueid(
            pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
    pcr.report(gauges, resultdir + gauges_map)
# ds = ogr.Open(gaugeshp)
# file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
# lyr = ds.GetLayerByName(file_att)
# spatialref = lyr.GetSpatialRef()
## if not spatialref == None:
##     srs = osr.SpatialReference()
##     srs.ImportFromWkt(spatialref.ExportToWkt())
##     srs.AutoIdentifyEPSG()
##     gaugeshp_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
##     spatialref == None
# #else:
# gaugeshp_EPSG = clone_EPSG
# print('No projection defined for ' + file_att + '.shp')
# print('Assumed to be the same as model projection (' + clone_EPSG + ')')
#
# # reproject gauge shape if necessary
# if not gaugeshp_EPSG == clone_EPSG:
#     gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#     call(('ogr2ogr', '-s_srs', gaugeshp_EPSG, '-t_srs', clone_EPSG, gaugeprojshp, gaugeshp))
#     gaugeshp = gaugeprojshp
#
# file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
# gaugestif = workdir + file_att + '.tif'
# gaugesmap = workdir + file_att + '.map'
# call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map, gaugestif))
# call(('gdal_rasterize', '-burn', '1', '-l', file_att, gaugeshp, gaugestif))
# call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, gaugestif, gaugesmap))
# gaugelocs = pcr.readmap(gaugesmap)
# snapgaugestoriver = True
#
# if snapgaugestoriver:
#     print("Snapping gauges to river")
#     gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#     gauges = wt.snaptomap(pcr.ordinal(gauges), river)
#
# gaugesmap = pcr.ifthen(gauges > 0, gauges)

''' report riverlengthfrac '''
riv_hr = workdir + 'river_highres.tif'
wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
file_att = os.path.splitext(os.path.basename(rivshp))[0]
call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
print('Computing river length...')
# riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns, clone_trans, srs_clone, resultdir, 'frac', clone2dem_transform)
riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns, clone_trans,
                             srs_clone, resultdir, 'frac')

''' report outlet map '''
pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
           resultdir + outlet_map)

''' report map '''
catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
pcr.report(catchment, resultdir + catchment_map)

''' report subcatchment map '''
subcatchment = pcr.subcatchment(ldd, gauges)
pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)

''' report landuse map '''
if landuse is None:
    pcr.report(pcr.nominal(ones), resultdir + landuse_map)
else:
    landuse_resample = workdir + 'landuse.tif'
    landuse_map = resultdir + landuse_map
    transform = wt.GetRasterTranform(landuse, srs_clone)
    if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize),
              '-r', 'mode', landuse, landuse_resample))
    else:
        call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs', clone_EPSG,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize),
              '-r', 'mode', landuse, landuse_resample))
    call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
          landuse_resample, landuse_map))
    landuse_work = pcr.readmap(landuse_map)
    pcr.report(pcr.nominal(landuse_work), landuse_map)

''' report soil map '''
if soiltype is None:
    pcr.report(pcr.nominal(ones), resultdir + soil_map)
else:
    soiltype_resample = workdir + 'soiltype.tif'
    soil_map = resultdir + soil_map
    # transform = wt.GetRasterTranform(soiltype, srs_clone)
    # if not transform[0]:
    call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG,
          '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize),
          '-r', 'mode', soiltype, soiltype_resample))
    # else:
    #     call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs', clone_EPSG,
    #           '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize), str(-cellsize),
    #           '-r', 'mode', soiltype, soiltype_resample))
    call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
          soiltype_resample, soil_map))
    soiltype_work = pcr.readmap(soil_map)
    pcr.report(pcr.nominal(soiltype_work), soil_map)

if clean:
    wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
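# The script above "burns" vector rivers into the DEM before deriving the
# local drain direction (ldd) map. Below is a minimal, self-contained sketch
# of just that core pattern, using only standard PCRaster operations; the
# function name, map filenames and the burn depth are hypothetical, not part
# of the original script.

import pcraster as pcr

def burn_rivers_and_derive_ldd(dem_file, rivers_file, burn_depth=100.0):
    """Lower the DEM along rasterized rivers, then derive an ldd from it."""
    dem = pcr.readmap(dem_file)        # elevation (scalar)
    rivers = pcr.readmap(rivers_file)  # rasterized rivers (> 0 on river cells)
    riverburn = pcr.scalar(rivers) * pcr.scalar(burn_depth)
    # fill missing DEM cells under rivers with 0 so the burn always applies
    ldddem = pcr.cover(dem, pcr.ifthen(riverburn > 0, pcr.scalar(0))) - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    # the 1E35 thresholds disable pit removal, as in the script above
    return pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)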
class TimeoutputTimeseries(object):
    """
    Class to create pcrcalc timeoutput style timeseries
    """

    def __init__(self, tssFilename, model, idMap=None, noHeader=False):
        """ """
        if not isinstance(tssFilename, str):
            raise Exception("timeseries output filename must be of type string")

        self._outputFilename = tssFilename
        self._maxId = 1
        self._spatialId = None
        self._spatialDatatype = None
        self._spatialIdGiven = False
        self._userModel = model
        self._writeHeader = not noHeader
        # array to store the timestep values
        self._sampleValues = None

        _idMap = False
        if isinstance(idMap, str) or isinstance(idMap, pcraster._pcraster.Field):
            _idMap = True

        nrRows = self._userModel.nrTimeSteps() - self._userModel.firstTimeStep() + 1

        if _idMap:
            self._spatialId = idMap
            if isinstance(idMap, str):
                self._spatialId = pcraster.readmap(idMap)

            _allowdDataTypes = [pcraster.Nominal, pcraster.Ordinal, pcraster.Boolean]
            if self._spatialId.dataType() not in _allowdDataTypes:
                raise Exception("idMap must be of type Nominal, Ordinal or Boolean")

            if self._spatialId.isSpatial():
                self._maxId, valid = pcraster.cellvalue(
                    pcraster.mapmaximum(pcraster.ordinal(self._spatialId)), 1)
            else:
                self._maxId = 1

            # cell indices of the sample locations
            self._sampleAddresses = []
            for cellId in range(1, self._maxId + 1):
                self._sampleAddresses.append(self._getIndex(cellId))

            self._spatialIdGiven = True

            nrCols = self._maxId
            self._sampleValues = [[Decimal("NaN")] * nrCols for _ in [0] * nrRows]
        else:
            self._sampleValues = [[Decimal("NaN")] * 1 for _ in [0] * nrRows]

    def _getIndex(self, cellId):
        """ returns the cell index of a sample location """
        nrCells = pcraster.clone().nrRows() * pcraster.clone().nrCols()
        found = False
        cell = 1
        index = 0

        while not found:
            if (pcraster.cellvalue(self._spatialId, cell)[1] == True and
                    pcraster.cellvalue(self._spatialId, cell)[0] == cellId):
                index = cell
                found = True
            cell += 1
            if cell > nrCells and not found:
                raise RuntimeError(
                    "could not find a cell with the index number %d" % (cellId))

        return index

    def sample(self, expression):
        """
        Sampling the current values of 'expression' at the given locations for
        the current timestep
        """
        arrayRowPos = self._userModel.currentTimeStep() - self._userModel.firstTimeStep()

        # if isinstance(expression, float):
        #     expression = pcraster.scalar(expression)

        try:
            # store the data type for tss file header
            if self._spatialDatatype is None:
                self._spatialDatatype = str(expression.dataType())
        except AttributeError as e:
            datatype, sep, tail = str(e).partition(" ")
            msg = "Argument must be a PCRaster map, type %s given. If necessary use data conversion functions like scalar()" % (datatype)
            raise AttributeError(msg)

        if self._spatialIdGiven:
            if (expression.dataType() == pcraster.Scalar or
                    expression.dataType() == pcraster.Directional):
                tmp = pcraster.areaaverage(pcraster.spatial(expression),
                                           pcraster.spatial(self._spatialId))
            else:
                tmp = pcraster.areamajority(pcraster.spatial(expression),
                                            pcraster.spatial(self._spatialId))

            col = 0
            for cellIndex in self._sampleAddresses:
                value, valid = pcraster.cellvalue(tmp, cellIndex)
                if not valid:
                    value = Decimal("NaN")
                self._sampleValues[arrayRowPos][col] = value
                col += 1
        else:
            if (expression.dataType() == pcraster.Scalar or
                    expression.dataType() == pcraster.Directional):
                tmp = pcraster.maptotal(pcraster.spatial(expression)) \
                    / pcraster.maptotal(pcraster.scalar(pcraster.defined(pcraster.spatial(expression))))
            else:
                tmp = pcraster.mapmaximum(pcraster.maptotal(
                    pcraster.areamajority(pcraster.spatial(expression),
                                          pcraster.spatial(pcraster.nominal(1)))))

            value, valid = pcraster.cellvalue(tmp, 1)
            if not valid:
                value = Decimal("NaN")
            self._sampleValues[arrayRowPos] = value

        if self._userModel.currentTimeStep() == self._userModel.nrTimeSteps():
            self._writeTssFile()
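# A minimal usage sketch for TimeoutputTimeseries, assuming the standard
# pcraster.framework dynamic model classes. The clone map ("clone.map"),
# the id map ("samples.map", which must be Nominal, Ordinal or Boolean)
# and the sampled expression are all hypothetical stand-ins. The model
# object passed in must provide firstTimeStep(), currentTimeStep() and
# nrTimeSteps(), which DynamicModel does once the framework has run.

import pcraster
from pcraster.framework import DynamicModel, DynamicFramework

class ExampleModel(DynamicModel):
    def __init__(self):
        DynamicModel.__init__(self)
        pcraster.setclone("clone.map")  # hypothetical clone map

    def initial(self):
        # one column per unique id in the (hypothetical) sample location map
        self.tss = TimeoutputTimeseries("discharge.tss", self, idMap="samples.map")

    def dynamic(self):
        discharge = pcraster.spatial(pcraster.scalar(1.0))  # stand-in expression
        # appends one row per timestep; sample() itself writes the tss file
        # (via _writeTssFile) when the final timestep is reached
        self.tss.sample(discharge)

dynModel = DynamicFramework(ExampleModel(), lastTimeStep=10, firstTimestep=1)
dynModel.run()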
def getParameterFiles(self, currTimeStep, cellArea, ldd,
                      initial_condition_dictionary=None):

    # parameters for water bodies: fracWat
    #                              waterBodyIds
    #                              waterBodyOut
    #                              waterBodyArea
    #                              waterBodyTyp
    #                              waterBodyCap

    # cell surface area (m2) and ldd
    self.cellArea = cellArea
    ldd = pcr.ifthen(self.landmask, ldd)

    # date used for accessing/extracting water body information
    date_used = currTimeStep.fulldate
    year_used = currTimeStep.year
    if self.onlyNaturalWaterBodies == True:
        date_used = self.dateForNaturalCondition
        year_used = self.dateForNaturalCondition[0:4]

    # fracWat = fraction of surface water bodies (dimensionless)
    self.fracWat = pcr.scalar(0.0)

    if self.useNetCDF:
        self.fracWat = vos.netcdf2PCRobjClone(
            self.ncFileInp, "fracWaterInp", date_used,
            useDoy="yearly", cloneMapFileName=self.cloneMap)
    else:
        self.fracWat = vos.readPCRmapClone(
            self.fracWaterInp + str(year_used) + ".map",
            self.cloneMap, self.tmpDir, self.inputDir)

    self.fracWat = pcr.cover(self.fracWat, 0.0)
    self.fracWat = pcr.max(0.0, self.fracWat)
    self.fracWat = pcr.min(1.0, self.fracWat)

    self.waterBodyIds = pcr.nominal(0)    # waterBody ids
    self.waterBodyOut = pcr.boolean(0)    # waterBody outlets
    self.waterBodyArea = pcr.scalar(0.0)  # waterBody surface areas

    # water body ids
    if self.useNetCDF:
        self.waterBodyIds = vos.netcdf2PCRobjClone(
            self.ncFileInp, "waterBodyIds", date_used,
            useDoy="yearly", cloneMapFileName=self.cloneMap)
    else:
        self.waterBodyIds = vos.readPCRmapClone(
            self.waterBodyIdsInp + str(year_used) + ".map",
            self.cloneMap, self.tmpDir, self.inputDir,
            False, None, True)
    #
    self.waterBodyIds = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0.0, pcr.nominal(self.waterBodyIds))

    # water body outlets (correcting outlet positions)
    wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd)
    self.waterBodyOut = pcr.ifthen(
        wbCatchment == pcr.areamaximum(wbCatchment, self.waterBodyIds),
        self.waterBodyIds)  # = outlet ids
    self.waterBodyOut = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0.0, self.waterBodyOut)
    # TODO: Please also consider endorheic lakes!

    # correcting water body ids
    self.waterBodyIds = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0.0,
        pcr.subcatchment(ldd, self.waterBodyOut))

    # boolean map for water body outlets:
    self.waterBodyOut = pcr.ifthen(
        pcr.scalar(self.waterBodyOut) > 0.0, pcr.boolean(1))

    # reservoir surface area (m2):
    if self.useNetCDF:
        resSfArea = 1000.0 * 1000.0 * vos.netcdf2PCRobjClone(
            self.ncFileInp, "resSfAreaInp", date_used,
            useDoy="yearly", cloneMapFileName=self.cloneMap)
    else:
        resSfArea = 1000.0 * 1000.0 * vos.readPCRmapClone(
            self.resSfAreaInp + str(year_used) + ".map",
            self.cloneMap, self.tmpDir, self.inputDir)
    resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds)
    resSfArea = pcr.cover(resSfArea, 0.0)

    # water body surface area (m2): (lakes and reservoirs)
    self.waterBodyArea = pcr.max(
        pcr.areatotal(pcr.cover(self.fracWat * self.cellArea, 0.0),
                      self.waterBodyIds),
        pcr.areaaverage(pcr.cover(resSfArea, 0.0), self.waterBodyIds))
    self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.0,
                                    self.waterBodyArea)

    # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
    self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.0, self.waterBodyIds)
    self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                   self.waterBodyOut)

    # water body types:
    # - 2 = reservoirs (regulated discharge)
    # - 1 = lakes (weirFormula)
    # - 0 = non lakes or reservoirs (e.g. wetland)
    self.waterBodyTyp = pcr.nominal(0)

    if self.useNetCDF:
        self.waterBodyTyp = vos.netcdf2PCRobjClone(
            self.ncFileInp, "waterBodyTyp", date_used,
            useDoy="yearly", cloneMapFileName=self.cloneMap)
    else:
        self.waterBodyTyp = vos.readPCRmapClone(
            self.waterBodyTypInp + str(year_used) + ".map",
            self.cloneMap, self.tmpDir, self.inputDir,
            False, None, True)

    # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs
    #
    self.waterBodyTyp = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0, pcr.nominal(self.waterBodyTyp))
    self.waterBodyTyp = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0, pcr.nominal(self.waterBodyTyp))
    self.waterBodyTyp = pcr.areamajority(
        self.waterBodyTyp, self.waterBodyIds)  # choose only one type: either lake or reservoir
    self.waterBodyTyp = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0, pcr.nominal(self.waterBodyTyp))
    self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                   self.waterBodyTyp)

    # correcting lakes and reservoirs ids and outlets
    self.waterBodyIds = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds)
    self.waterBodyOut = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut)

    # reservoir maximum capacity (m3):
    self.resMaxCap = pcr.scalar(0.0)
    self.waterBodyCap = pcr.scalar(0.0)

    if self.useNetCDF:
        self.resMaxCap = 1000.0 * 1000.0 * vos.netcdf2PCRobjClone(
            self.ncFileInp, "resMaxCapInp", date_used,
            useDoy="yearly", cloneMapFileName=self.cloneMap)
    else:
        self.resMaxCap = 1000.0 * 1000.0 * vos.readPCRmapClone(
            self.resMaxCapInp + str(year_used) + ".map",
            self.cloneMap, self.tmpDir, self.inputDir)

    self.resMaxCap = pcr.ifthen(self.resMaxCap > 0, self.resMaxCap)
    self.resMaxCap = pcr.areaaverage(self.resMaxCap, self.waterBodyIds)

    # water body capacity (m3): (lakes and reservoirs)
    self.waterBodyCap = pcr.cover(self.resMaxCap, 0.0)  # Note: Most of lakes have capacities > 0.
    self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                   self.waterBodyCap)

    # correcting water body types:
    # Reservoirs that have zero capacities will be assumed as lakes.
    self.waterBodyTyp = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyTyp)
    self.waterBodyTyp = pcr.ifthenelse(
        self.waterBodyCap > 0.0, self.waterBodyTyp,
        pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,
                       pcr.nominal(1), self.waterBodyTyp))

    # final corrections:
    self.waterBodyTyp = pcr.ifthen(
        self.waterBodyArea > 0.0, self.waterBodyTyp)  # make sure that all lakes and/or reservoirs have surface areas
    self.waterBodyTyp = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyTyp)  # make sure that only types 1 and 2 will be considered in lake/reservoir functions
    self.waterBodyIds = pcr.ifthen(
        pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyIds)  # make sure that all lakes and/or reservoirs have ids
    self.waterBodyOut = pcr.ifthen(
        pcr.scalar(self.waterBodyIds) > 0.0, self.waterBodyOut)  # make sure that all lakes and/or reservoirs have outlets

    # for a natural run (self.onlyNaturalWaterBodies == True)
    # which uses only the year 1900, assume all reservoirs are lakes
    if (self.onlyNaturalWaterBodies == True
            and date_used == self.dateForNaturalCondition):
        logger.info("Using only natural water bodies identified in the year 1900. "
                    "All reservoirs in 1900 are assumed as lakes.")
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0.0, pcr.nominal(1))

    # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
    test = (pcr.defined(self.waterBodyTyp)
            & pcr.defined(self.waterBodyArea)
            & pcr.defined(self.waterBodyIds)
            & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut),
                                          self.waterBodyIds)))
    a, b, c = vos.getMinMaxMean(pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
    threshold = 1e-3
    if abs(a) > threshold or abs(b) > threshold:
        logger.warning("Missing information in some lakes and/or reservoirs.")

    # at the beginning of simulation period (timeStepPCR = 1)
    # - we have to define/get the initial conditions
    #
    if currTimeStep.timeStepPCR == 1:
        self.getICs(initial_condition_dictionary)

    # for each new reservoir (introduced at the beginning of the year)
    # initiating storage, average inflow and outflow
    #
    self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0)
    self.avgInflow = pcr.cover(self.avgInflow, 0.0)
    self.avgOutflow = pcr.cover(self.avgOutflow, 0.0)

    # cropping only in the landmask region:
    self.fracWat = pcr.ifthen(self.landmask, self.fracWat)
    self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds)
    self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut)
    self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea)
    self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp)
    self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
    self.waterBodyStorage = pcr.ifthen(self.landmask, self.waterBodyStorage)
    self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow)
    self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow)
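# The outlet correction above rests on one idea: within each water body, the
# cell with the largest upstream catchment total is the outlet. A condensed
# sketch of just that step, assuming an ldd and a nominal id map are already
# available (function and argument names are hypothetical):

import pcraster as pcr

def water_body_outlets(ldd, water_body_ids):
    """Return a boolean map marking the outlet cell of each water body."""
    # number of upstream cells draining through each cell
    upstream_cells = pcr.catchmenttotal(pcr.scalar(1), ldd)
    # a cell is the outlet of its water body if it carries the zonal maximum;
    # note that ties would mark more than one cell per body
    outlet_ids = pcr.ifthen(
        upstream_cells == pcr.areamaximum(upstream_cells, water_body_ids),
        water_body_ids)
    return pcr.ifthen(pcr.scalar(outlet_ids) > 0.0, pcr.boolean(1))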
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file=global_landmask_30min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30min_only")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file=global_landmask_05min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_05min_only")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file=global_landmask_30sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30sec_only")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file=global_landmask_03sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_03sec_only")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_30min_final.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_final.map")
    # ~ pcr.aguila(ldd_map)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 10000 cells (at half arc degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 10000., island_map)

    # make catchment and island map
    print("make catchment and island map")

    # - catchment map
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    pcr.report(catchment_map, "global_catchment_not_sorted.map")
    os.system("mapattr -p global_catchment_not_sorted.map")
    num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1])

    # - maps of islands smaller than 10000 cells
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 10000., island_map)
    island_map = pcr.nominal(
        pcr.scalar(pcr.ifthen(landmask, pcr.clump(island_map))) +
        pcr.scalar(num_of_catchments) * 100.)
    pcr.aguila(island_map)

    island_size = pcr.ifthen(pcr.defined(island_map), island_size)
    # - identify the biggest island for every group of small islands
    #   within a window of 10x10 arcdeg cells
    island_map = pcr.ifthen(island_size == pcr.windowmaximum(island_size, 10.),
                            island_map)
    pcr.aguila(island_map)

    # - merge biggest islands and catchments
    catchment_map = pcr.cover(island_map, catchment_map)

    # - calculate the size
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)

    # - sort from the largest catchment
    catchment_pits_boolean = pcr.ifthen(
        pcr.scalar(pcr.pit(ldd_map)) > 0.0, pcr.boolean(1.0))
    catchment_pits_boolean = pcr.ifthen(catchment_pits_boolean,
                                        catchment_pits_boolean)
    pcr.aguila(catchment_pits_boolean)
    catchment_map = pcr.nominal(
        pcr.areaorder(catchment_size * -1.0,
                      pcr.nominal(catchment_pits_boolean)))
    catchment_map = pcr.catchment(ldd_map, catchment_map)
    pcr.report(catchment_map, "global_catchment_final.map")
    os.system("mapattr -p global_catchment_final.map")

    # - calculate the size
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    pcr.report(catchment_size, "global_catchment_size_in_number_of_cells.map")

    # number of catchments
    num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1])

    # size of the largest catchment
    catchment_size_max = vos.getMinMaxMean(catchment_size)[1]
    print("")
    print(str(float(catchment_size_max)))
    print("")

    # identify all large catchments with size >= 50 cells
    # (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    print("identify catchments with the minimum size of 50 cells")
    catchment_map_ge_50 = pcr.ifthen(catchment_size >= 50, catchment_map)
    pcr.report(catchment_map_ge_50, "global_catchment_ge_50_cells.map")

    # include the island
    catchment_map_ge_50 = pcr.cover(island_map, catchment_map_ge_50)

    # perform cdo fillmiss2 in order to merge the small catchments
    # into the nearest large catchments
    cmd = "gdal_translate -of NETCDF global_catchment_ge_50_cells.map global_catchment_ge_50_cells.nc"
    print(cmd)
    os.system(cmd)
    cmd = "cdo fillmiss2 global_catchment_ge_50_cells.nc global_catchment_ge_50_cells_filled.nc"
    print(cmd)
    os.system(cmd)
    cmd = "cdo fillmiss2 global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.nc"
    print(cmd)
    os.system(cmd)
    cmd = "gdal_translate -of PCRaster global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.map"
    print(cmd)
    os.system(cmd)
    cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "global_catchment_ge_50_cells_filled.map"
    print(cmd)
    os.system(cmd)

    # - initial subdomains
    subdomains_initial = pcr.nominal(
        pcr.readmap("global_catchment_ge_50_cells_filled.map"))
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)

    # - initial subdomains clump
    subdomains_initial_clump = pcr.clump(subdomains_initial)
    pcr.aguila(subdomains_initial_clump)

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    # - remove temporary files (not used)
    cmd = "rm global_catchment_ge_50_cells_filled*"
    print(cmd)
    os.system(cmd)

    # clone code that will be assigned
    assigned_number = 0
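# main() twice clumps the landmask into islands and then keeps, per 10x10
# arc-degree window, only the locally largest small island. A condensed
# sketch of that selection as a reusable function; the function name and
# defaults are hypothetical, and the window length is in map units
# (degrees for a geographic clone):

import pcraster as pcr

def largest_island_per_window(landmask, ldd_map, max_cells=10000., window=10.):
    """Clump contiguous cells into islands and keep the locally largest ones."""
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    # keep small islands only
    island_map = pcr.ifthen(island_size < max_cells, island_map)
    island_size = pcr.ifthen(pcr.defined(island_map), island_size)
    # keep an island only where its size equals the moving-window maximum
    return pcr.ifthen(island_size == pcr.windowmaximum(island_size, window),
                      island_map)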