def getParameterFiles(self,currTimeStep,cellArea,ldd,\ initial_condition_dictionary = None,\ currTimeStepInDateTimeFormat = False): # parameters for Water Bodies: fracWat # waterBodyIds # waterBodyOut # waterBodyArea # waterBodyTyp # waterBodyCap # cell surface area (m2) and ldd self.cellArea = cellArea ldd = pcr.ifthen(self.landmask, ldd) # date used for accessing/extracting water body information if currTimeStepInDateTimeFormat: date_used = currTimeStep year_used = currTimeStep.year else: date_used = currTimeStep.fulldate year_used = currTimeStep.year if self.onlyNaturalWaterBodies == True: date_used = self.dateForNaturalCondition year_used = self.dateForNaturalCondition[0:4] # fracWat = fraction of surface water bodies (dimensionless) self.fracWat = pcr.scalar(0.0) if self.useNetCDF: self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.fracWat = vos.readPCRmapClone(\ self.fracWaterInp+str(year_used)+".map", self.cloneMap,self.tmpDir,self.inputDir) self.fracWat = pcr.cover(self.fracWat, 0.0) self.fracWat = pcr.max(0.0, self.fracWat) self.fracWat = pcr.min(1.0, self.fracWat) self.waterBodyIds = pcr.nominal(0) # waterBody ids self.waterBodyOut = pcr.boolean(0) # waterBody outlets self.waterBodyArea = pcr.scalar(0.) # waterBody surface areas # water body ids if self.useNetCDF: self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.waterBodyIds = vos.readPCRmapClone(\ self.waterBodyIdsInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir,False,None,True) # self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.nominal(self.waterBodyIds)) # water body outlets (correcting outlet positions) wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd) self.waterBodyOut = pcr.ifthen(wbCatchment ==\ pcr.areamaximum(wbCatchment, \ self.waterBodyIds),\ self.waterBodyIds) # = outlet ids # This may give more than two outlets, particularly if there are more than one cells that have largest upstream areas # - make sure that there is only one outlet for each water body self.waterBodyOut = pcr.ifthen(\ pcr.areaorder(pcr.scalar(self.waterBodyOut), \ self.waterBodyOut) == 1., self.waterBodyOut) self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # TODO: Please also consider endorheic lakes! # correcting water body ids self.waterBodyIds = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0.,\ pcr.subcatchment(ldd,self.waterBodyOut)) # boolean map for water body outlets: self.waterBodyOut = pcr.ifthen(\ pcr.scalar(self.waterBodyOut) > 0.,\ pcr.boolean(1)) # reservoir surface area (m2): if self.useNetCDF: resSfArea = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: resSfArea = 1000. * 1000. * vos.readPCRmapClone( self.resSfAreaInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir) resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds) resSfArea = pcr.cover(resSfArea, 0.) 
# water body surface area (m2): (lakes and reservoirs) self.waterBodyArea = pcr.max(pcr.areatotal(\ pcr.cover(\ self.fracWat*self.cellArea, 0.0), self.waterBodyIds), pcr.areaaverage(\ pcr.cover(resSfArea, 0.0) , self.waterBodyIds)) self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyArea) # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0) self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0., self.waterBodyIds) self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyOut) # water body types: # - 2 = reservoirs (regulated discharge) # - 1 = lakes (weirFormula) # - 0 = non lakes or reservoirs (e.g. wetland) self.waterBodyTyp = pcr.nominal(0) if self.useNetCDF: self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.waterBodyTyp = vos.readPCRmapClone( self.waterBodyTypInp+str(year_used)+".map",\ self.cloneMap,self.tmpDir,self.inputDir,False,None,True) # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs # self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyIds) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\ self.waterBodyIds) # choose only one type: either lake or reservoir self.waterBodyTyp = pcr.ifthen(\ pcr.scalar(self.waterBodyTyp) > 0,\ pcr.nominal(self.waterBodyTyp)) self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyTyp) # correcting lakes and reservoirs ids and outlets self.waterBodyIds = pcr.ifthen( pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds) self.waterBodyOut = pcr.ifthen( pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut) # reservoir maximum capacity (m3): self.resMaxCap = pcr.scalar(0.0) self.waterBodyCap = pcr.scalar(0.0) if self.useNetCDF: self.resMaxCap = 1000. * 1000. * \ vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \ date_used, useDoy = 'yearly',\ cloneMapFileName = self.cloneMap) else: self.resMaxCap = 1000. * 1000. * vos.readPCRmapClone(\ self.resMaxCapInp+str(year_used)+".map", \ self.cloneMap,self.tmpDir,self.inputDir) self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\ self.resMaxCap) self.resMaxCap = pcr.areaaverage(self.resMaxCap,\ self.waterBodyIds) # water body capacity (m3): (lakes and reservoirs) self.waterBodyCap = pcr.cover( self.resMaxCap, 0.0) # Note: Most of lakes have capacities > 0. self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds), self.waterBodyCap) # correcting water body types: # Reservoirs that have zero capacities will be assumed as lakes. 
self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\ self.waterBodyTyp,\ pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\ pcr.nominal(1),\ self.waterBodyTyp)) # final corrections: self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\ self.waterBodyTyp) # make sure that all lakes and/or reservoirs have surface areas self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyTyp) # make sure that only types 1 and 2 will be considered in lake/reservoir functions self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ self.waterBodyIds) # make sure that all lakes and/or reservoirs have ids self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\ self.waterBodyOut) # make sure that all lakes and/or reservoirs have outlets # for a natural run (self.onlyNaturalWaterBodies == True) # which uses only the year 1900, assume all reservoirs are lakes if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition: logger.info( "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes." ) self.waterBodyTyp = \ pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\ pcr.nominal(1)) # check that all lakes and/or reservoirs have types, ids, surface areas and outlets: test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\ pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds)) a, b, c = vos.getMinMaxMean( pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0)) threshold = 1e-3 if abs(a) > threshold or abs(b) > threshold: logger.warning( "Missing information in some lakes and/or reservoirs.") # at the beginning of simulation period (timeStepPCR = 1) # - we have to define/get the initial conditions # if initial_condition_dictionary != None and currTimeStep.timeStepPCR == 1: self.getICs(initial_condition_dictionary) # For each new reservoir (introduced at the beginning of the year) # initiating storage, average inflow and outflow # PS: THIS IS NOT NEEDED FOR OFFLINE MODFLOW RUN! # try: self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0) self.avgInflow = pcr.cover(self.avgInflow, 0.0) self.avgOutflow = pcr.cover(self.avgOutflow, 0.0) self.waterBodyStorage = pcr.ifthen(self.landmask, self.waterBodyStorage) self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow) self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow) except: # PS: FOR OFFLINE MODFLOW RUN! pass # TODO: Remove try and except # cropping only in the landmask region: self.fracWat = pcr.ifthen(self.landmask, self.fracWat) self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds) self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut) self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea) self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp) self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
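# The outlet correction in getParameterFiles() above is a generic PCRaster pattern:
# within each water body, the cell with the largest upstream cell count
# (pcr.catchmenttotal of 1) is the outlet, and pcr.areaorder breaks ties so that
# exactly one cell per id remains. A minimal stand-alone sketch of that pattern;
# the function name and arguments are illustrative only (it relies on the
# `pcraster as pcr` import already used throughout this module):

def select_single_outlet_per_water_body(ldd, water_body_ids):
    """Return a map with exactly one outlet cell (carrying the water body id) per water body."""
    ups_cells = pcr.catchmenttotal(pcr.scalar(1), ldd)               # upstream cell count
    outlets   = pcr.ifthen(ups_cells == pcr.areamaximum(ups_cells, water_body_ids),
                           water_body_ids)
    # several cells may share the maximum upstream area; keep only the first one per water body
    outlets   = pcr.ifthen(pcr.areaorder(pcr.scalar(outlets), outlets) == 1.0, outlets)
    return outlets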
def main(): """ :ivar masterdem: digital elevation model :ivar dem: digital elevation model :ivar river: optional river map """ # Default values strRiver = 8 masterdem = "dem.map" step1dir = "step1" step2dir = "step2" workdir = "." inifile = "wflow_prepare.ini" recreate = False snapgaugestoriver = False try: opts, args = getopt.getopt(sys.argv[1:], "W:hI:f") except getopt.error as msg: usage(msg) for o, a in opts: if o == "-W": workdir = a if o == "-I": inifile = a if o == "-h": usage() if o == "-f": recreate = True pcr.setglobaloption("unitcell") os.chdir(workdir) config = OpenConf(workdir + "/" + inifile) masterdem = configget(config, "files", "masterdem", "dem.map") pcr.setclone(masterdem) strRiver = int(configget(config, "settings", "riverorder", "4")) try: gauges_x = config.get("settings", "gauges_x") gauges_y = config.get("settings", "gauges_y") except: print("gauges_x and gauges_y are required entries in the ini file") sys.exit(1) step1dir = configget(config, "directories", "step1dir", "step1") step2dir = configget(config, "directories", "step2dir", "step2") # upscalefactor = float(config.get("settings","upscalefactor")) corevolume = float(configget(config, "settings", "corevolume", "1E35")) catchmentprecipitation = float( configget(config, "settings", "catchmentprecipitation", "1E35")) corearea = float(configget(config, "settings", "corearea", "1E35")) outflowdepth = float( configget(config, "settings", "lddoutflowdepth", "1E35")) initialscale = int(configget(config, "settings", "initialscale", "1")) csize = float(configget(config, "settings", "cellsize", "1")) snapgaugestoriver = bool( int(configget(config, "settings", "snapgaugestoriver", "1"))) lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout") pcr.setglobaloption(lddglobaloption) lu_water = configget(config, "files", "lu_water", "") lu_paved = configget(config, "files", "lu_paved", "") # X/Y coordinates of the gauges the system exec("X=tr.array(" + gauges_x + ")") exec("Y=tr.array(" + gauges_y + ")") tr.Verbose = 1 # make the directories to save results in mkoutputdirs(step1dir, step2dir) ldddem = readdem(initialscale, masterdem, step1dir) dem = ldddem try: catchmask = config.get("files", "catchment_mask") except: print("No catchment mask...") else: print("clipping DEM with mask.....") mask = pcr.readmap(catchmask) ldddem = pcr.ifthen(pcr.boolean(mask), ldddem) dem = pcr.ifthen(pcr.boolean(mask), dem) # See if there is a shape file of the river to burn in try: rivshp = config.get("files", "river") except: print("no river file specified") outletpointX = float( configget(config, "settings", "outflowpointX", "0.0")) outletpointY = float( configget(config, "settings", "outflowpointY", "0.0")) else: print("river file specified.....") try: outletpointX = float( configget(config, "settings", "outflowpointX", "0.0")) outletpointY = float( configget(config, "settings", "outflowpointY", "0.0")) except: print( "Need to specify the river outletpoint (a point at the end of the river within the current map)" ) exit(1) outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5) pcr.report(outletpointmap, step1dir + "/outletpoint.map") rivshpattr = config.get("files", "riverattr") pcr.report(dem * 0.0, step1dir + "/nilmap.map") thestr = ("gdal_translate -of GTiff " + step1dir + "/nilmap.map " + step1dir + "/riverburn.tif") os.system(thestr) os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp + " " + step1dir + "/riverburn.tif") thestr = ("gdal_translate -of PCRaster " + step1dir + 
"/riverburn.tif " + step1dir + "/riverburn.map") os.system(thestr) riverburn = pcr.readmap(step1dir + "/riverburn.map") # Determine regional slope assuming that is the way the river should run pcr.setglobaloption("unitcell") demregional = pcr.windowaverage(dem, 100) ldddem = pcr.ifthenelse(riverburn >= 1.0, demregional - 1000, dem) pcr.setglobaloption("unittrue") upscalefactor = int(csize / pcr.celllength()) print("Creating ldd...") ldd = tr.lddcreate_save( step1dir + "/ldd.map", ldddem, recreate, outflowdepth=outflowdepth, corevolume=corevolume, catchmentprecipitation=catchmentprecipitation, corearea=corearea, ) print("Determining streamorder...") stro = pcr.streamorder(ldd) pcr.report(stro, step1dir + "/streamorder.map") strdir = pcr.ifthen(stro >= strRiver, stro) pcr.report(strdir, step1dir + "/streamorderrive.map") pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)), step1dir + "/rivers.map") pcr.setglobaloption("unittrue") # outlet (and other gauges if given) # TODO: check is x/y set if not skip this print("Outlet...") outlmap = tr.points_to_map(dem, X, Y, 0.5) if snapgaugestoriver: print("Snapping gauges to nearest river cells...") pcr.report(outlmap, step1dir + "/orggauges.map") outlmap = tr.snaptomap(outlmap, strdir) # noutletmap = tr.points_to_map(dem,XX,YY,0.5) # pcr.report(noutletmap,'noutlet.map') pcr.report(outlmap, step1dir + "/gauges.map") # check if there is a pre-define catchment map try: catchmask = config.get("files", "catchment_mask") except: print("No catchment mask, finding outlet") # Find catchment (overall) outlet = tr.find_outlet(ldd) sub = pcr.subcatch(ldd, outlet) pcr.report(sub, step1dir + "/catchment_overall.map") else: print("reading and converting catchment mask.....") os.system("resample -r " + str(initialscale) + " " + catchmask + " " + step1dir + "/catchment_overall.map") sub = pcr.readmap(step1dir + "/catchment_overall.map") print("Scatch...") sd = pcr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap)) pcr.report(sd, step1dir + "/scatch.map") pcr.setglobaloption("unitcell") print("Upscalefactor: " + str(upscalefactor)) if upscalefactor > 1: gc.collect() print("upscale river length1 (checkerboard map)...") ck = tr.checkerboard(dem, upscalefactor) pcr.report(ck, step1dir + "/ck.map") pcr.report(dem, step1dir + "/demck.map") print("upscale river length2...") fact = tr.area_riverlength_factor(ldd, ck, upscalefactor) pcr.report(fact, step1dir + "/riverlength_fact.map") # print("make dem statistics...") dem_ = pcr.areaaverage(dem, ck) pcr.report(dem_, step1dir + "/demavg.map") print("Create DEM statistics...") dem_ = pcr.areaminimum(dem, ck) pcr.report(dem_, step1dir + "/demmin.map") dem_ = pcr.areamaximum(dem, ck) pcr.report(dem_, step1dir + "/demmax.map") # calculate percentiles order = pcr.areaorder(dem, ck) n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck) #: calculate 25 percentile perc = tr.area_percentile(dem, ck, n, order, 25.0) pcr.report(perc, step1dir + "/dem25.map") perc = tr.area_percentile(dem, ck, n, order, 10.0) pcr.report(perc, step1dir + "/dem10.map") perc = tr.area_percentile(dem, ck, n, order, 50.0) pcr.report(perc, step1dir + "/dem50.map") perc = tr.area_percentile(dem, ck, n, order, 33.0) pcr.report(perc, step1dir + "/dem33.map") perc = tr.area_percentile(dem, ck, n, order, 66.0) pcr.report(perc, step1dir + "/dem66.map") perc = tr.area_percentile(dem, ck, n, order, 75.0) pcr.report(perc, step1dir + "/dem75.map") perc = tr.area_percentile(dem, ck, n, order, 90.0) pcr.report(perc, step1dir + "/dem90.map") else: print("No fancy scaling 
done. Going strait to step2....") pcr.report(dem, step1dir + "/demavg.map") Xul = float(config.get("settings", "Xul")) Yul = float(config.get("settings", "Yul")) Xlr = float(config.get("settings", "Xlr")) Ylr = float(config.get("settings", "Ylr")) gdalstr = ("gdal_translate -projwin " + str(Xul) + " " + str(Yul) + " " + str(Xlr) + " " + str(Ylr) + " -of PCRaster ") # gdalstr = "gdal_translate -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster " print(gdalstr) pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map") # Now us gdat tp convert the maps os.system(gdalstr + step1dir + "/wflow_riverlength_fact.map" + " " + step2dir + "/wflow_riverlength_fact.map") os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_dem.map") os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmin.map") os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmax.map") os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_gauges.map") os.system(gdalstr + step1dir + "/rivers.map" + " " + step2dir + "/wflow_river.map") os.system(gdalstr + step1dir + "/streamorder.map" + " " + step2dir + "/wflow_streamorder.map") os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_outlet.map") os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_catchment.map") os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir + "/wflow_ldd.map") os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_subcatch.map") if lu_water: os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map") if lu_paved: os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map") try: lumap = config.get("files", "landuse") except: print("no landuse map...creating uniform map") # clone=pcr.readmap(step2dir + "/wflow_dem.map") pcr.setclone(step2dir + "/wflow_dem.map") pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map") else: os.system("resample --clone " + step2dir + "/wflow_dem.map " + lumap + " " + step2dir + "/wflow_landuse.map") try: soilmap = config.get("files", "soil") except: print("no soil map..., creating uniform map") pcr.setclone(step2dir + "/wflow_dem.map") pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map") else: os.system("resample --clone " + step2dir + "/wflow_dem.map " + soilmap + " " + step2dir + "/wflow_soil.map") ################################## # Step 2 starts here ################################## pcr.setclone(step2dir + "/cutout.map") strRiver = int(configget(config, "settings", "riverorder_step2", "4")) corevolume = float(configget(config, "settings", "corevolume", "1E35")) catchmentprecipitation = float( configget(config, "settings", "catchmentprecipitation", "1E35")) corearea = float(configget(config, "settings", "corearea", "1E35")) outflowdepth = float( configget(config, "settings", "lddoutflowdepth", "1E35")) lddmethod = configget(config, "settings", "lddmethod", "dem") lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout") pcr.setglobaloption(lddglobaloption) nrrow = round(abs(Yul - Ylr) / csize) nrcol = round(abs(Xlr - Xul) / csize) mapstr = ("mapattr -s -S -R " + str(nrrow) + " -C " + str(nrcol) + " -l " + str(csize) + " -x " + str(Xul) + " -y " + str(Yul) + " -P yb2t " + step2dir + "/cutout.map") os.system(mapstr) pcr.setclone(step2dir + "/cutout.map") lu_water = configget(config, "files", "lu_water", "") lu_paved = configget(config, "files", "lu_paved", "") if lu_water: os.system("resample --clone " + 
step2dir + "/cutout.map " + lu_water + " " + step2dir + "/wflow_waterfrac.map") if lu_paved: os.system("resample --clone " + step2dir + "/cutout.map " + lu_paved + " " + step2dir + "/PathFrac.map") # try: lumap = config.get("files", "landuse") except: print("no landuse map...creating uniform map") clone = pcr.readmap(step2dir + "/cutout.map") pcr.report(pcr.nominal(clone), step2dir + "/wflow_landuse.map") else: os.system("resample --clone " + step2dir + "/cutout.map " + lumap + " " + step2dir + "/wflow_landuse.map") try: soilmap = config.get("files", "soil") except: print("no soil map..., creating uniform map") clone = pcr.readmap(step2dir + "/cutout.map") pcr.report(pcr.nominal(clone), step2dir + "/wflow_soil.map") else: os.system("resample --clone " + step2dir + "/cutout.map " + soilmap + " " + step2dir + "/wflow_soil.map") resamplemaps(step1dir, step2dir) dem = pcr.readmap(step2dir + "/wflow_dem.map") demmin = pcr.readmap(step2dir + "/wflow_demmin.map") demmax = pcr.readmap(step2dir + "/wflow_demmax.map") catchcut = pcr.readmap(step2dir + "/catchment_cut.map") # now apply the area of interest (catchcut) to the DEM # dem=pcr.ifthen(catchcut >=1 , dem) # # See if there is a shape file of the river to burn in try: rivshp = config.get("files", "river") except: print("no river file specified") riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map") else: print("river file speficied.....") rivshpattr = config.get("files", "riverattr") pcr.report(dem * 0.0, step2dir + "/nilmap.map") thestr = ("gdal_translate -of GTiff " + step2dir + "/nilmap.map " + step2dir + "/wflow_riverburnin.tif") os.system(thestr) os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp + " " + step2dir + "/wflow_riverburnin.tif") thestr = ("gdal_translate -of PCRaster " + step2dir + "/wflow_riverburnin.tif " + step2dir + "/wflow_riverburnin.map") os.system(thestr) riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map") # ldddem = pcr.ifthenelse(riverburn >= 1.0, dem -1000 , dem) # Only burn within the original catchment riverburn = pcr.ifthen(pcr.scalar(catchcut) >= 1, riverburn) # Now setup a very high wall around the catchment that is scale # based on the distance to the catchment so that it slopes away from the # catchment if lddmethod != "river": print("Burning in highres-river ...") disttocatch = pcr.spread(pcr.nominal(catchcut), 0.0, 1.0) demmax = pcr.ifthenelse( pcr.scalar(catchcut) >= 1.0, demmax, demmax + (pcr.celllength() * 100.0) / disttocatch, ) pcr.setglobaloption("unitcell") demregional = pcr.windowaverage(demmin, 100) demburn = pcr.cover( pcr.ifthen(pcr.boolean(riverburn), demregional - 100.0), demmax) else: print("using average dem..") demburn = dem ldd = tr.lddcreate_save( step2dir + "/ldd.map", demburn, True, outflowdepth=outflowdepth, corevolume=corevolume, catchmentprecipitation=catchmentprecipitation, corearea=corearea, ) # Find catchment (overall) outlet = tr.find_outlet(ldd) sub = pcr.subcatch(ldd, outlet) pcr.report(sub, step2dir + "/wflow_catchment.map") pcr.report(outlet, step2dir + "/wflow_outlet.map") # make river map strorder = pcr.streamorder(ldd) pcr.report(strorder, step2dir + "/wflow_streamorder.map") river = pcr.ifthen(pcr.boolean(strorder >= strRiver), strorder) pcr.report(river, step2dir + "/wflow_river.map") # make subcatchments # os.system("col2map --clone " + step2dir + "/cutout.map gauges.col " + step2dir + "/wflow_gauges.map") exec("X=tr.array(" + gauges_x + ")") exec("Y=tr.array(" + gauges_y + ")") pcr.setglobaloption("unittrue") outlmap = 
tr.points_to_map(dem, X, Y, 0.5)
pcr.report(outlmap, step2dir + "/wflow_gauges_.map")

if snapgaugestoriver:
    print("Snapping gauges to river")
    pcr.report(outlmap, step2dir + "/wflow_orggauges.map")
    outlmap = tr.snaptomap(outlmap, river)

outlmap = pcr.ifthen(outlmap > 0, outlmap)
pcr.report(outlmap, step2dir + "/wflow_gauges.map")
scatch = pcr.subcatch(ldd, outlmap)
pcr.report(scatch, step2dir + "/wflow_subcatch.map")
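# Condensed, the gauge handling above is: rasterise the gauge coordinates, optionally
# snap them onto the nearest river cell, then derive one subcatchment per gauge.
# A sketch using the same tr.* helpers called above; the wrapper function itself is
# illustrative, and pcr.subcatchment is the standard PCRaster operation (the script
# above uses a subcatch call for the same purpose):

def gauges_to_subcatchments(dem, ldd, river, gauge_x, gauge_y, snap=True):
    gauges = tr.points_to_map(dem, gauge_x, gauge_y, 0.5)   # gauge points on the dem grid
    if snap:
        gauges = tr.snaptomap(gauges, river)                # move gauges onto the river network
        gauges = pcr.ifthen(gauges > 0, gauges)
    subcatchments = pcr.subcatchment(ldd, pcr.ordinal(gauges))
    return gauges, subcatchments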
def main(): # output folder (and tmp folder) clean_out_folder = True if os.path.exists(out_folder): if clean_out_folder: shutil.rmtree(out_folder) os.makedirs(out_folder) else: os.makedirs(out_folder) os.chdir(out_folder) os.system("pwd") # set the clone map print("set the clone") pcr.setclone(global_ldd_30min_inp_file) # define the landmask print("define the landmask") # - based on the 30min input landmask_30min = define_landmask(input_file = global_landmask_30min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_30min_only.map") # - based on the 05min input landmask_05min = define_landmask(input_file = global_landmask_05min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_05min_only.map") # - based on the 06min input landmask_06min = define_landmask(input_file = global_landmask_06min_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_06min_only.map") # - based on the 30sec input landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_30sec_only.map") # - based on the 30sec input landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\ clone_map_file = global_ldd_30min_inp_file,\ output_map_file = "landmask_03sec_only.map") # # - merge all landmasks landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min, landmask_30sec, landmask_03sec) pcr.report(landmask, "global_landmask_extended_30min.map") # ~ pcr.aguila(landmask) # extend ldd print("extend/define the ldd") ldd_map = pcr.readmap(global_ldd_30min_inp_file) ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5))) pcr.report(ldd_map, "global_ldd_extended_30min.map") # ~ pcr.aguila(ldd_map) # catchment map and size catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map)) catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # ~ pcr.aguila(catchment_size) # identify small islands print("identify small islands") # - maps of islands smaller than 15000 cells (at half arc degree resolution) island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map))) island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map) island_map = pcr.ifthen(island_size < 15000., island_map) # ~ # - use catchments (instead of islands) # ~ island_map = catchment_map # ~ island_size = catchment_size # ~ island_map = pcr.ifthen(island_size < 10000., island_map) # - sort from the largest island # -- take one cell per island as a representative island_map_rep_size = pcr.ifthen( pcr.areaorder(island_size, island_map) == 1.0, island_size) # -- sort from the largest island island_map_rep_ids = pcr.areaorder( island_map_rep_size * -1.00, pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0))) # -- map of smaller islands, sorted from the largest one island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map) # identify the biggest island for every group of small islands within a certain window (arcdeg cells) print("the biggest island for every group of small islands") large_island_map = pcr.ifthen( pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map), 15.), island_map) # ~ pcr.aguila(large_island_map) # identify big catchments print("identify large catchments") catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map)) catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x 
(50^2) km2 = 125000 km2 # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map) # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map) # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map) # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin) # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map) # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin) large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map) # - give the codes that are different than islands large_catchment_map = pcr.nominal( pcr.scalar(large_catchment_map) + 10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1]) # merge biggest islands and big catchments print("merge large catchments and islands") large_catchment_and_island_map = pcr.cover(large_catchment_map, large_island_map) # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map) large_catchment_and_island_map_size = pcr.areatotal( pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map) # - sort from the largest one # -- take one cell per island as a representative large_catchment_and_island_map_rep_size = pcr.ifthen( pcr.areaorder(large_catchment_and_island_map_size, large_catchment_and_island_map) == 1.0, large_catchment_and_island_map_size) # -- sort from the largest large_catchment_and_island_map_rep_ids = pcr.areaorder( large_catchment_and_island_map_rep_size * -1.00, pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size), pcr.nominal(1.0))) # -- map of largest catchments and islands, sorted from the largest one large_catchment_and_island_map = pcr.areamajority( pcr.nominal(large_catchment_and_island_map_rep_ids), large_catchment_and_island_map) # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map") # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains") # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc" # ~ print(cmd); os.system(cmd) # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc" # ~ print(cmd); os.system(cmd) # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map" # ~ print(cmd); os.system(cmd) # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map" # ~ print(cmd); os.system(cmd) # ~ # - initial subdomains # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map")) # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map) # ~ pcr.aguila(subdomains_initial) # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments print("spatial interpolation/extrapolation to get initial subdomains") field = large_catchment_and_island_map cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field))) zoneID = pcr.spreadzone(cellID, 0, 1) field = pcr.areamajority(field, zoneID) subdomains_initial = field subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map) pcr.aguila(subdomains_initial) 
pcr.report(subdomains_initial, "global_subdomains_30min_initial.map") print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]))) # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0]))) # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1]))) print("Checking all subdomains, avoid too large subdomains") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]) # clone code that will be assigned assigned_number = 0 subdomains_final = pcr.ifthen( pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0)) for nr in range(1, num_of_masks + 1, 1): msg = "Processing the landmask %s" % (str(nr)) print(msg) mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0)) # ~ if nr == 1: pcr.aguila(mask_selected_boolean) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) # ~ print(str(area_in_degree2)) # check whether the size of bounding box is ok # - initial check value check_ok = True reference_area_in_degree2 = 2500. if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False if check_ok == True: msg = "Clump is not needed." msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) if check_ok == False: msg = "Clump is needed." msg = "\n\n" + str(msg) + "\n\n" print(msg) # make clump clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean)) # merge clumps that are close together clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0) clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids) # ~ pcr.aguila(clump_ids) # minimimum and maximum values min_clump_id = int( pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0]) max_clump_id = int( pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0]) for clump_id in range(min_clump_id, max_clump_id + 1, 1): msg = "Processing the clump %s of %s from the landmask %s" % ( str(clump_id), str(max_clump_id), str(nr)) msg = "\n\n" + str(msg) + "\n\n" print(msg) # identify mask based on the clump mask_selected_boolean_from_clump = pcr.ifthen( clump_ids == pcr.nominal(clump_id), mask_selected_boolean) mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean_from_clump, mask_selected_boolean_from_clump) # check whether the clump is empty check_mask_selected_boolean_from_clump = pcr.ifthen( mask_selected_boolean, mask_selected_boolean_from_clump) check_if_empty = float( pcr.cellvalue( pcr.mapmaximum( pcr.scalar( pcr.defined( check_mask_selected_boolean_from_clump))), 1)[0]) if check_if_empty == 0.0: msg = "Map is empty !" msg = "\n\n" + str(msg) + "\n\n" print(msg) else: msg = "Map is NOT empty !" 
msg = "\n\n" + str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen( mask_selected_boolean_from_clump, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) # ~ # kill all aguila processes if exist # ~ os.system('killall aguila') pcr.aguila(subdomains_final) print("") print("") print("") print("The subdomain map is READY.") pcr.report(subdomains_final, "global_subdomains_30min_final.map") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) print(num_of_masks) print("") print("") print("") for nr in range(1, num_of_masks + 1, 1): mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0)) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) print( str(nr) + " ; " + str(area_in_degree2) + " ; " + str((xmax - xmin)) + " ; " + str((ymax - ymin))) print("") print("") print("") print("Number of subdomains: " + str(num_of_masks)) print("") print("") print("") # spatial extrapolation in order to cover the entire map print("spatial interpolation/extrapolation to cover the entire map") field = subdomains_final cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field))) zoneID = pcr.spreadzone(cellID, 0, 1) field = pcr.areamajority(field, zoneID) subdomains_final_filled = field pcr.aguila(subdomains_final_filled) pcr.report(subdomains_final_filled, "global_subdomains_30min_final_filled.map")
                                    clone_map_file, \
                                    tmp_folder, \
                                    None, False, None, True)
water_body_id = pcr.ifthen(pcr.scalar(water_body_id) > 0.00, water_body_id)
water_body_id = pcr.ifthen(landmask, water_body_id)
#
# water body outlet
wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd_map_low_resolution)
water_body_outlet = pcr.ifthen(wbCatchment == \
                               pcr.areamaximum(wbCatchment, \
                                               water_body_id), \
                               water_body_id)     # = outlet ids
# This may give more than one outlet per water body, in particular if more than one cell shares the largest upstream area.
# - make sure that there is only one outlet for each water body
water_body_outlet = pcr.ifthen(\
                    pcr.areaorder(pcr.scalar(water_body_outlet), \
                                  water_body_outlet) == 1., water_body_outlet)
water_body_outlet = pcr.ifthen(\
                    pcr.scalar(water_body_outlet) > 0., water_body_outlet)

# calculate overbank volume from reservoirs (and lakes)
lake_reservoir_volume          = pcr.areatotal(extreme_value_map, water_body_id)
lake_reservoir_overbank_volume = pcr.cover(
    pcr.max(0.0, lake_reservoir_volume - reservoir_capacity), 0.0)
#~ pcr.aguila(lake_reservoir_overbank_volume)
#
# transfer a fraction (0.50) of the overbank volume to the downstream (several cells downstream)
transfer_to_downstream = pcr.cover(\
                         pcr.ifthen(pcr.scalar(water_body_outlet) > 0.,
                                    lake_reservoir_overbank_volume * 0.50), 0.0)
transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
transfer_to_downstream = pcr.upstream(ldd_map_low_resolution,
                                      edwin_code_pcrglobwb_catchment_area_km2)

# make correction if required
abs_diff_value      = pcr.cellvalue(pcr.mapmaximum(abs_diff), 1)[0]
usgs_drain_area_km2 = pcr.cellvalue(pcr.mapmaximum(usgs_drain_area_km2), 1)[0]
if (usgs_drain_area_km2 > 1000.0) and \
   (abs_diff_value > 0.10 * usgs_drain_area_km2):
    # class within 0.1 arc degree windows
    edwin_code = pcr.windowmajority(edwin_code, 0.1)
    # find the most accurate cell:
    areaorder = pcr.areaorder(
        pcr.windowmaximum(pcr.spatial(pcr.scalar(usgs_drain_area_km2)), 0.1) -
        pcrglobwb_catchment_area_km2, edwin_code)
    # select pixel
    edwin_code = pcr.ifthen(areaorder == 1., edwin_code)
    # pcrglobwb catchment area
    edwin_code_pcrglobwb_catchment_area_km2 = pcr.ifthen(
        areaorder == 1., pcrglobwb_catchment_area_km2)

# save using map2col
pcr.report(edwin_code, "edwin_code.map")
pcr.report(edwin_code_pcrglobwb_catchment_area_km2,
           "edwin_code_pcrglobwb_catchment_area_km2.map")
cmd = "map2col edwin_code.map edwin_code_pcrglobwb_catchment_area_km2.map edwin_code_pcrglobwb_catchment_area_km2.tmp"
print(cmd)
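# pcr.cellvalue(pcr.mapmaximum(...), 1)[0] is the recurring idiom in these scripts
# for pulling a single Python number out of a PCRaster map (cellvalue returns a
# (value, is_valid) tuple, here read at cell index 1). A tiny helper sketch; the
# function name is illustrative:

def map_maximum_value(pcr_map):
    """Return the map maximum of a PCRaster field as a plain Python float (or None)."""
    value, valid = pcr.cellvalue(pcr.mapmaximum(pcr.scalar(pcr_map)), 1)
    return float(value) if valid else None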
def main(): """ :ivar masterdem: digital elevation model :ivar dem: digital elevation model :ivar river: optional river map """ # Default values strRiver = 8 masterdem = "dem.map" step1dir = "step1" step2dir = "step2" workdir = "." inifile = "wflow_prepare.ini" recreate = False snapgaugestoriver = False try: opts, args = getopt.getopt(sys.argv[1:], "W:hI:f",['version']) except getopt.error as msg: usage(msg) for o, a in opts: if o == "-W": workdir = a if o == "-I": inifile = a if o == "-h": usage() if o == "-f": recreate = True if o == "--version": import wflow print("wflow version: ", wflow.__version__) sys.exit(0) pcr.setglobaloption("unitcell") os.chdir(workdir) config = OpenConf(workdir + "/" + inifile) masterdem = configget(config, "files", "masterdem", "dem.map") pcr.setclone(masterdem) strRiver = int(configget(config, "settings", "riverorder", "4")) try: gauges_x = config.get("settings", "gauges_x") gauges_y = config.get("settings", "gauges_y") except: print("gauges_x and gauges_y are required entries in the ini file") sys.exit(1) step1dir = configget(config, "directories", "step1dir", "step1") step2dir = configget(config, "directories", "step2dir", "step2") # upscalefactor = float(config.get("settings","upscalefactor")) corevolume = float(configget(config, "settings", "corevolume", "1E35")) catchmentprecipitation = float( configget(config, "settings", "catchmentprecipitation", "1E35") ) corearea = float(configget(config, "settings", "corearea", "1E35")) outflowdepth = float(configget(config, "settings", "lddoutflowdepth", "1E35")) initialscale = int(configget(config, "settings", "initialscale", "1")) csize = float(configget(config, "settings", "cellsize", "1")) snapgaugestoriver = bool( int(configget(config, "settings", "snapgaugestoriver", "1")) ) lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout") pcr.setglobaloption(lddglobaloption) lu_water = configget(config, "files", "lu_water", "") lu_paved = configget(config, "files", "lu_paved", "") # X/Y coordinates of the gauges the system X = np.fromstring(gauges_x, sep=',') Y = np.fromstring(gauges_y, sep=',') tr.Verbose = 1 # make the directories to save results in if not os.path.isdir(step1dir + "/"): os.makedirs(step1dir) if not os.path.isdir(step2dir): os.makedirs(step2dir) if initialscale > 1: print("Initial scaling of DEM...") os.system( "resample -r " + str(initialscale) + " " + masterdem + " " + step1dir + "/dem_scaled.map" ) print("Reading dem...") dem = pcr.readmap(step1dir + "/dem_scaled.map") ldddem = dem else: print ("Reading dem...") dem = pcr.readmap(masterdem) ldddem = dem try: catchmask = config.get("files", "catchment_mask") except: print("No catchment mask...") else: print("clipping DEM with mask.....") mask = pcr.readmap(catchmask) ldddem = pcr.ifthen(pcr.boolean(mask), ldddem) dem = pcr.ifthen(pcr.boolean(mask), dem) # See if there is a shape file of the river to burn in try: rivshp = config.get("files", "river") except: print("no river file specified") outletpointX = float(configget(config, "settings", "outflowpointX", "0.0")) outletpointY = float(configget(config, "settings", "outflowpointY", "0.0")) else: print("river file specified.....") try: outletpointX = float(configget(config, "settings", "outflowpointX", "0.0")) outletpointY = float(configget(config, "settings", "outflowpointY", "0.0")) except: print( "Need to specify the river outletpoint (a point at the end of the river within the current map)" ) exit(1) outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5) 
pcr.report(outletpointmap, step1dir + "/outletpoint.map") # rivshpattr = config.get("files","riverattr") pcr.report(dem * 0.0, step1dir + "/nilmap.map") thestr = ( "gdal_translate -of GTiff " + step1dir + "/nilmap.map " + step1dir + "/riverburn.tif" ) os.system(thestr) rivshpattr = os.path.splitext(os.path.basename(rivshp))[0] os.system( "gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp + " " + step1dir + "/riverburn.tif" ) thestr = ( "gdal_translate -of PCRaster " + step1dir + "/riverburn.tif " + step1dir + "/riverburn.map" ) os.system(thestr) riverburn = pcr.readmap(step1dir + "/riverburn.map") # Determine regional slope assuming that is the way the river should run # Determine regional slope assuming that is the way the river should run # pcr.setglobaloption("unitcell") # demregional=pcr.windowaverage(dem,100) ldddem = pcr.ifthenelse(riverburn >= 1.0, dem - 1000, dem) pcr.setglobaloption("unittrue") upscalefactor = int(csize / pcr.celllength()) print("Creating ldd...") ldd = tr.lddcreate_save( step1dir + "/ldd.map", ldddem, recreate, outflowdepth=outflowdepth, corevolume=corevolume, catchmentprecipitation=catchmentprecipitation, corearea=corearea, ) print("Determining streamorder...") stro = pcr.streamorder(ldd) pcr.report(stro, step1dir + "/streamorder.map") strdir = pcr.ifthen(stro >= strRiver, stro) pcr.report(strdir, step1dir + "/streamorderrive.map") pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)), step1dir + "/rivers.map") pcr.setglobaloption("unittrue") # outlet (and other gauges if given) # TODO: check is x/y set if not skip this print("Outlet...") outlmap = tr.points_to_map(dem, X, Y, 0.5) if snapgaugestoriver: print("Snapping gauges to nearest river cells...") pcr.report(outlmap, step1dir + "/orggauges.map") outlmap = tr.snaptomap(outlmap, strdir) # noutletmap = tr.points_to_map(dem,XX,YY,0.5) # pcr.report(noutletmap,'noutlet.map') pcr.report(outlmap, step1dir + "/gauges.map") # check if there is a pre-define catchment map try: catchmask = config.get("files", "catchment_mask") except: print("No catchment mask, finding outlet") # Find catchment (overall) outlet = tr.find_outlet(ldd) sub = tr.subcatch(ldd, outlet) pcr.report(sub, step1dir + "/catchment_overall.map") else: print("reading and converting catchment mask.....") os.system( "resample -r " + str(initialscale) + " " + catchmask + " " + step1dir + "/catchment_overall.map" ) sub = pcr.readmap(step1dir + "/catchment_overall.map") print("Scatch...") sd = tr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap)) pcr.report(sd, step1dir + "/scatch.map") pcr.setglobaloption("unitcell") print("Upscalefactor: " + str(upscalefactor)) if upscalefactor > 1: gc.collect() print("upscale river length1 (checkerboard map)...") ck = tr.checkerboard(dem, upscalefactor) pcr.report(ck, step1dir + "/ck.map") pcr.report(dem, step1dir + "/demck.map") print("upscale river length2...") fact = tr.area_riverlength_factor(ldd, ck, upscalefactor) pcr.report(fact, step1dir + "/riverlength_fact.map") # print("make dem statistics...") dem_ = pcr.areaaverage(dem, ck) pcr.report(dem_, step1dir + "/demavg.map") print("Create DEM statistics...") dem_ = pcr.areaminimum(dem, ck) pcr.report(dem_, step1dir + "/demmin.map") dem_ = pcr.areamaximum(dem, ck) pcr.report(dem_, step1dir + "/demmax.map") # calculate percentiles order = pcr.areaorder(dem, ck) n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck) #: calculate 25 percentile perc = tr.area_percentile(dem, ck, n, order, 25.0) pcr.report(perc, step1dir + "/dem25.map") perc = tr.area_percentile(dem, 
ck, n, order, 10.0) pcr.report(perc, step1dir + "/dem10.map") perc = tr.area_percentile(dem, ck, n, order, 50.0) pcr.report(perc, step1dir + "/dem50.map") perc = tr.area_percentile(dem, ck, n, order, 33.0) pcr.report(perc, step1dir + "/dem33.map") perc = tr.area_percentile(dem, ck, n, order, 66.0) pcr.report(perc, step1dir + "/dem66.map") perc = tr.area_percentile(dem, ck, n, order, 75.0) pcr.report(perc, step1dir + "/dem75.map") perc = tr.area_percentile(dem, ck, n, order, 90.0) pcr.report(perc, step1dir + "/dem90.map") else: print("No fancy scaling done. Going strait to step2....") pcr.report(dem, step1dir + "/demavg.map") Xul = float(config.get("settings", "Xul")) Yul = float(config.get("settings", "Yul")) Xlr = float(config.get("settings", "Xlr")) Ylr = float(config.get("settings", "Ylr")) gdalstr = ( "gdal_translate -projwin " + str(Xul) + " " + str(Yul) + " " + str(Xlr) + " " + str(Ylr) + " -of PCRaster " ) # gdalstr = "gdal_translate -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster " print(gdalstr) pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map") # Now us gdat tp convert the maps os.system( gdalstr + step1dir + "/wflow_riverlength_fact.map" + " " + step2dir + "/wflow_riverlength_fact.map" ) os.system( gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_dem.map" ) os.system( gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmin.map" ) os.system( gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmax.map" ) os.system( gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_gauges.map" ) os.system( gdalstr + step1dir + "/rivers.map" + " " + step2dir + "/wflow_river.map" ) os.system( gdalstr + step1dir + "/streamorder.map" + " " + step2dir + "/wflow_streamorder.map" ) os.system( gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_outlet.map" ) os.system( gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_catchment.map" ) os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir + "/wflow_ldd.map") os.system( gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_subcatch.map" ) if lu_water: os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map") if lu_paved: os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map") try: lumap = config.get("files", "landuse") except: print("no landuse map...creating uniform map") # clone=pcr.readmap(step2dir + "/wflow_dem.map") pcr.setclone(step2dir + "/wflow_dem.map") pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map") else: os.system( "resample --clone " + step2dir + "/wflow_dem.map " + lumap + " " + step2dir + "/wflow_landuse.map" ) try: soilmap = config.get("files", "soil") except: print("no soil map..., creating uniform map") pcr.setclone(step2dir + "/wflow_dem.map") pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map") else: os.system( "resample --clone " + step2dir + "/wflow_dem.map " + soilmap + " " + step2dir + "/wflow_soil.map" )
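# The step1 -> step2 clipping above always follows the same recipe: build one
# "gdal_translate -projwin <ulx> <uly> <lrx> <lry> -of PCRaster" prefix from the
# Xul/Yul/Xlr/Ylr settings and apply it to each step1 map in turn. A small helper
# sketch of that pattern (the function name is illustrative; -projwin and -of are
# the standard gdal_translate options already used above):

def clip_to_step2(xul, yul, xlr, ylr, src_map, dst_map):
    gdalstr = ("gdal_translate -projwin " + str(xul) + " " + str(yul) + " " +
               str(xlr) + " " + str(ylr) + " -of PCRaster ")
    os.system(gdalstr + src_map + " " + dst_map)

# e.g. clip_to_step2(Xul, Yul, Xlr, Ylr, step1dir + "/demavg.map", step2dir + "/wflow_dem.map")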
water_body_id = vos.readPCRmapClone(water_body_id_file, \
                                    clone_map_file, \
                                    tmp_folder, \
                                    None, False, None, True)
water_body_id = pcr.ifthen(pcr.scalar(water_body_id) > 0.00, water_body_id)
water_body_id = pcr.ifthen(landmask, water_body_id)
#
# water body outlet
wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd_map_low_resolution)
water_body_outlet = pcr.ifthen(wbCatchment == \
                               pcr.areamaximum(wbCatchment, \
                                               water_body_id), \
                               water_body_id)     # = outlet ids
# This may give more than one outlet per water body, in particular if more than one cell shares the largest upstream area.
# - make sure that there is only one outlet for each water body
water_body_outlet = pcr.ifthen(\
                    pcr.areaorder(pcr.scalar(water_body_outlet), \
                                  water_body_outlet) == 1., water_body_outlet)
water_body_outlet = pcr.ifthen(\
                    pcr.scalar(water_body_outlet) > 0., water_body_outlet)

# calculate overbank volume from reservoirs (and lakes)
lake_reservoir_volume          = pcr.areatotal(extreme_value_map, water_body_id)
lake_reservoir_overbank_volume = pcr.cover(
    pcr.max(0.0, lake_reservoir_volume - reservoir_capacity), 0.0)
#~ pcr.aguila(lake_reservoir_overbank_volume)
#
# transfer a fraction (0.50) of the overbank volume to the downstream (several cells downstream)
transfer_to_downstream = pcr.cover(\
                         pcr.ifthen(pcr.scalar(water_body_outlet) > 0.,
                                    lake_reservoir_overbank_volume * 0.50), 0.0)
transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
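# Each pcr.upstream(ldd, x) call above shifts the released volume one cell down the
# flow network (every cell receives the sum of its upstream neighbours), so the three
# calls push it three cells downstream. The same thing written as a loop, purely as
# an illustrative sketch (function name is not part of the original code):

def move_downstream(ldd, volume, n_cells=3):
    """Shift a volume map n_cells steps down the local drain directions."""
    for _ in range(n_cells):
        volume = pcr.upstream(ldd, volume)
    return volume

# transfer_to_downstream = move_downstream(ldd_map_low_resolution, transfer_to_downstream)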
def __init__(self, netcdf_input_file, \
                   areaMapFileName,\
                   areaPointMapFileName,\
                   netcdf_input_clone_map_file, \
                   output_folder, \
                   unit_conversion_factor, \
                   unit_conversion_offset, \
                   modelTime, \
                   inputProjection, \
                   outputProjection, \
                   resample_method, \
                   tss_daily_output_file, \
                   tss_10day_output_file, \
                   report_10day_pcr_files = False):
    DynamicModel.__init__(self)

    # netcdf input file
    self.netcdf_input_file = netcdf_input_file

    # clone maps
    self.inputClone  = netcdf_input_clone_map_file
    self.outputClone = areaMapFileName

    # time variable/object
    self.modelTime = modelTime

    # output folder
    self.output_folder = output_folder

    # prepare temporary directory
    logger.debug('Preparing tmp directory.')
    self.tmpDir = self.output_folder + "/tmp/"
    if os.path.exists(self.tmpDir):
        shutil.rmtree(self.tmpDir)
    os.makedirs(self.tmpDir)

    # prepare maps directory (to store 10-day pcraster files)
    self.report_10day_pcr_files = report_10day_pcr_files
    if self.report_10day_pcr_files:
        logger.info('Preparing map directory (for saving 10 day pcraster files).')
        self.mapDir = self.output_folder + "/map/"
        if os.path.exists(self.mapDir):
            shutil.rmtree(self.mapDir)
        os.makedirs(self.mapDir)

    # unit conversion variables
    self.unit_conversion_factor = unit_conversion_factor
    self.unit_conversion_offset = unit_conversion_offset

    # input and output projection/coordinate systems
    self.inputProjection  = inputProjection
    self.outputProjection = outputProjection

    # resample method
    self.resample_method = resample_method

    # get the properties of the output clone map (as strings, needed for resampling with the gdalwarp command)
    pcr.setclone(self.outputClone)
    self.cell_length  = str(pcr.clone().cellSize())
    self.x_min_output = str(pcr.clone().west())
    self.x_max_output = str(pcr.clone().west()  + pcr.clone().nrCols() * pcr.clone().cellSize())
    self.y_min_output = str(pcr.clone().north() - pcr.clone().nrRows() * pcr.clone().cellSize())
    self.y_max_output = str(pcr.clone().north())

    # pcraster area/class map
    self.area_class = pcr.readmap(areaMapFileName)
    # - landmask
    self.landmask = pcr.defined(self.area_class)
    self.landmask = pcr.ifthen(self.landmask, self.landmask)
    # - choose a point for each area/class as its station representative (needed for tss reporting)
    if areaPointMapFileName == None:
        logger.info("Get stations/representatives for all classes")
        self.point_area_class = pcr.nominal(
            pcr.ifthen(
                pcr.areaorder(pcr.scalar(self.area_class), self.area_class) == 1,
                self.area_class))
        #~ pcr.aguila(self.point_area_class)
        #~ raw_input("Press Enter to continue...")
    else:
        self.point_area_class = pcr.readmap(areaPointMapFileName)
    self.point_area_class = pcr.ifthen(self.landmask, self.point_area_class)

    # output tss files
    self.tss_daily_output_file = tss_daily_output_file
    self.tss_10day_output_file = tss_10day_output_file
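# The clone properties collected above (cell_length and the x/y min/max strings) are
# the ingredients of a gdalwarp call. How they are combined is not shown in this
# excerpt, so the method below is only a sketch of a plausible command builder;
# -s_srs, -t_srs, -r, -te and -tr are standard gdalwarp options:

def build_gdalwarp_command(self, input_file, output_file):
    cmd = "gdalwarp -overwrite" + \
          " -s_srs '" + self.inputProjection  + "'" + \
          " -t_srs '" + self.outputProjection + "'" + \
          " -r "   + self.resample_method + \
          " -te "  + self.x_min_output + " " + self.y_min_output + \
          " "      + self.x_max_output + " " + self.y_max_output + \
          " -tr "  + self.cell_length  + " " + self.cell_length + \
          " " + input_file + " " + output_file
    return cmd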
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file = global_landmask_30min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30min_only")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file = global_landmask_05min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_05min_only")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30sec_only")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_03sec_only")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_30min_final.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_final.map")
    # ~ pcr.aguila(ldd_map)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 10000 cells (at half arc degree resolution)
    island_map  = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map  = pcr.ifthen(island_size < 10000., island_map)

    # identify the biggest island for every group of small islands within the windows 10x10 arcdeg cells
    # - sort from the largest catchment
    catchment_pits_boolean = pcr.ifthen(
        pcr.scalar(pcr.pit(ldd_map)) > 0.0, pcr.boolean(1.0))
    catchment_pits_boolean = pcr.ifthen(catchment_pits_boolean,
                                        catchment_pits_boolean)
    pcr.aguila(catchment_pits_boolean)
    catchment_map = pcr.nominal(
        pcr.areaorder(catchment_size * -1.0,
                      pcr.nominal(catchment_pits_boolean)))
    island_map = pcr.ifthen(island_size == pcr.windowmaximum(island_size, 10.),
                            island_map)
    pcr.aguila(island_map)

    # identify big catchments
    # merge biggest islands and big catchments

    # make catchment and island map
    print("make catchment and island map")
    # - catchment map
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    pcr.report(catchment_map, "global_catchment_not_sorted.map")
    os.system("mapattr -p global_catchment_not_sorted.map")
    num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1])

    # - maps of islands smaller than 10000 cells
    island_map  = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map  = pcr.ifthen(island_size < 10000., island_map)
    island_map  = pcr.nominal(
        pcr.scalar(pcr.ifthen(landmask, pcr.clump(island_map))) +
        pcr.scalar(num_of_catchments) * 100.)
pcr.aguila(island_map) island_size = pcr.ifthen(pcr.defined(island_map), island_size) island_map = pcr.ifthen(island_size == pcr.windowmaximum(island_size, 10.), island_map) pcr.aguila(island_map) catchment_map = pcr.cover(island_map, catchment_map) catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # - calculate the size catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) # - sort from the largest catchment catchment_pits_boolean = pcr.ifthen( pcr.scalar(pcr.pit(ldd_map)) > 0.0, pcr.boolean(1.0)) catchment_pits_boolean = pcr.ifthen(catchment_pits_boolean, catchment_pits_boolean) pcr.aguila(catchment_pits_boolean) catchment_map = pcr.nominal( pcr.areaorder(catchment_size * -1.0, pcr.nominal(catchment_pits_boolean))) catchment_map = pcr.catchment(ldd_map, catchment_map) pcr.report(catchment_map, "global_catchment_final.map") os.system("mapattr -p global_catchment_final.map") # - calculate the size catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map) pcr.report(catchment_size, "global_catchment_size_in_number_of_cells.map") # number of catchments num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1]) # size of the largest catchment catchment_size_max = vos.getMinMaxMean(catchment_size)[1] print("") print(str(float(catchment_size_max))) print("") # identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2 print("identify catchments with the minimum size of 50 cells") catchment_map_ge_50 = pcr.ifthen(catchment_size >= 50, catchment_map) pcr.report(catchment_map_ge_50, "global_catchment_ge_50_cells.map") # include the island catchment_map_ge_50 = pcr.cover(island_map, catchment_map_ge_50) # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments cmd = "gdal_translate -of NETCDF global_catchment_ge_50_cells.map global_catchment_ge_50_cells.nc" print(cmd) os.system(cmd) cmd = "cdo fillmiss2 global_catchment_ge_50_cells.nc global_catchment_ge_50_cells_filled.nc" print(cmd) os.system(cmd) cmd = "cdo fillmiss2 global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.nc" print(cmd) os.system(cmd) cmd = "gdal_translate -of PCRaster global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.map" print(cmd) os.system(cmd) cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "global_catchment_ge_50_cells_filled.map" print(cmd) os.system(cmd) # - initial subdomains subdomains_initial = pcr.nominal( pcr.readmap("global_catchment_ge_50_cells_filled.map")) subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map) pcr.aguila(subdomains_initial) # - initial subdomains clump subdomains_initial_clump = pcr.clump(subdomains_initial) pcr.aguila(subdomains_initial_clump) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0]))) print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1]))) # - remove temporay files (not used) cmd = "rm global_catchment_ge_50_cells_filled*" print(cmd) os.system(cmd) # clone code that will be assigned assigned_number = 0