# Assumed imports for these snippets (each originates from a script that
# imports these at module level; project helpers such as vos, wt, MaskInfo,
# LisSettings, loadmap, pcrObject and spatialAttributes are defined elsewhere).
import os
import shutil
import subprocess
import datetime
import calendar
from subprocess import call
import numpy as np
import pcraster as pcr


def selectCatchments(ldd, pitsMap, pitsDict, continentList, cloneMap):
    """Assign each pit's catchment the number of the continent it belongs to."""
    catchments = pcr.ifthen(pcr.boolean(pcr.readmap(cloneMap)) == 0, pcr.scalar(1))
    for continent in pitsDict:
        continentNr = pitsDict[continent][0]
        pitsContinent = pitsDict[continent][1:]
        print('updating catchments for', continent)
        for pitNr in pitsContinent:
            pitMap = pcr.ifthen(pitsMap == pitNr, pcr.scalar(continentNr))
            catchment = pcr.catchment(ldd, pcr.nominal(pitMap))
            catchment = pcr.ifthen(pcr.boolean(catchment) == 1, catchment)
            catchments = pcr.cover(catchments, pcr.scalar(catchment))
    return catchments
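# Minimal usage sketch for selectCatchments (hypothetical inputs: the map names
# and pit ids below are placeholders, not files shipped with this script).
# pitsDict maps a continent name to a list [continentNr, pitId1, pitId2, ...]:
#
#   pitsDict = {'africa': [1, 101, 102], 'australia': [2, 201]}
#   ldd = pcr.readmap('ldd5min.map')
#   pitsMap = pcr.pit(ldd)
#   catchments = selectCatchments(ldd, pitsMap, pitsDict,
#                                 list(pitsDict.keys()), 'clone5min.map')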
        # continuation of an `if` branch (its condition precedes this excerpt):
        # burn rivers/lines into the DEM before deriving the ldd
        pointscover = pcr.ifthen(pcr.scalar(points) == 1, pcr.scalar(0))
        # pcr.report(linescover, 'lines.map')
        # pcr.report(pointscover, 'points.map')
        dem = pcr.cover(dem, linescover, pointscover)
        # pcr.report(dem, 'dem1.map')
        dem = dem + burn
        # pcr.report(dem, 'dem2.map')
        ldd = pcr.lddcreate(dem, float("1E35"), float("1E35"), float("1E35"), float("1E35"))
    else:
        ldd = pcr.lddcreate(dem, burnvalue / 2, float("1E35"), float("1E35"), float("1E35"))

    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.boolean(pcr.ifthen(
        streamorder >= int(min(np.max(pcr.pcr2numpy(streamorder, -9999)), minorder)),
        streamorder))

    # one unique id per pit (ldd value 5), then the catchment draining to each pit
    outlets = pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.boolean(1))
    outlets = pcr.nominal(pcr.uniqueid(outlets))
    catchments = pcr.nominal(pcr.catchment(ldd, outlets))
    if not keepall:
        # keep only the largest catchment (by cell count)
        catchments = pcr.nominal(pcr.ifthen(
            pcr.mapmaximum(pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments))) ==
            pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments)),
            catchments))

    pcr.report(ldd, ldd_map)
    pcr.report(streamorder, streamorder_map)
    pcr.report(river, river_map)
    pcr.report(catchments, catchments_map)
    if EPSG is not None:
        call(('gdal_translate', '-of', 'GTiff', '-stats', '-a_srs', EPSG,
              '-ot', 'Float32', catchments_map, catchments_tif))
    else:
        call(('gdal_translate', '-of', 'GTiff', '-stats',
              '-ot', 'Float32', catchments_map, catchments_tif))
    wt.Raster2Pol(catchments_tif, catchshp, srs)

    riversid_map = workdir + 'riverid.map'
    drain_map = workdir + 'drain.map'
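    # Note on the pcr.lddcreate thresholds above (a sketch of the semantics, not
    # project-specific logic): the positional arguments after the DEM are
    # outflowdepth, corevolume, corearea and catchmentprecipitation. Depressions
    # whose characteristics fall below these thresholds are removed, so huge
    # values such as 1E35 yield a fully connected, pit-free network, while a
    # finite outflowdepth (burnvalue / 2 above) roughly lets depressions deeper
    # than that threshold survive as real pits:
    #
    #   ldd_no_pits   = pcr.lddcreate(dem, 1e35, 1e35, 1e35, 1e35)
    #   ldd_keep_deep = pcr.lddcreate(dem, 5.0, 1e35, 1e35, 1e35)  # keep pits deeper than ~5 m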
def main():

    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # tmp folder
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder):
        shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)

    # set the clone map
    print("set the clone map")
    pcr.setclone(global_clone_map_file)

    # read ldd map
    print("define the ldd")
    # ~ ldd_map = pcr.readmap(global_ldd_inp_file)
    ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(
        v=global_ldd_inp_file,
        cloneMapFileName=global_clone_map_file,
        tmpDir=tmp_folder,
        absolutePath=None,
        isLddMap=True,
        cover=None,
        isNomMap=False))))

    # define the landmask
    if landmask_map_file is None:
        print("define the landmask based on the ldd input")
        # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)
    else:
        print("define the landmask based on the input landmask_map_file")
        landmask = pcr.readmap(landmask_map_file)
        ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
        ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(ldd_map)))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)

    # save ldd files used
    # - global ldd
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd)
    os.system(cmd)
    # - ldd map that is used
    pcr.report(ldd_map, "lddmap_used.map")

    # make catchment map
    print("make catchment map")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))

    # read global subdomain file
    print("read global subdomain file")
    global_subdomain_map = vos.readPCRmapClone(
        v=global_subdomain_file,
        cloneMapFileName=global_clone_map_file,
        tmpDir=tmp_folder,
        absolutePath=None,
        isLddMap=False,
        cover=None,
        isNomMap=True)

    # set initial subdomain
    print("assign subdomains to all catchments")
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    # start from an empty (all missing values) nominal map
    subdomains_final = pcr.ifthen(pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0))

        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1, 1)[0] > 0:
            process_this_clone = True

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        # - initial check value
        check_ok = True

        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)
            # ~ print(str(area_in_degree2))

            # check whether the size of the bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2:
                check_ok = False
            if (xmax - xmin) > 10 * (ymax - ymin):
                check_ok = False

        # ~ # ignore checking
        # ~ check_ok = True

        if check_ok == True and process_this_clone == True:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

        if check_ok == False and process_this_clone == True:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum clump ids
            min_clump_id = int(pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump, mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(pcr.cellvalue(
                    pcr.mapmaximum(pcr.scalar(pcr.defined(
                        check_mask_selected_boolean_from_clump))), 1)[0])

                if check_if_empty == 0.0:
                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                else:
                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump, pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

    # ~ # kill all aguila processes if they exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")
    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")
    print("Making the clone and landmask maps for all subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])

    # clone and mask folders
    clone_folder = out_folder + "/clone/"
    if os.path.exists(clone_folder):
        shutil.rmtree(clone_folder)
    os.makedirs(clone_folder)
    mask_folder = out_folder + "/mask/"
    if os.path.exists(mask_folder):
        shutil.rmtree(mask_folder)
    os.makedirs(mask_folder)

    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the subdomain %s" % (str(nr))
        print(msg)

        # set the global clone
        pcr.setclone(global_clone_map_file)

        mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0))
        mask_selected_nominal = pcr.ifthen(subdomains_final == nr, pcr.nominal(nr))
        mask_file = "mask/mask_%s.map" % (str(nr))
        pcr.report(mask_selected_nominal, mask_file)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        print(str(nr) + " ; " + str(area_in_degree2) + " ; " +
              str((xmax - xmin)) + " ; " + str((ymax - ymin)))

        # cellsize in arcdegree
        cellsize = cellsize_in_arcmin / 60.
        # number of rows and cols (extent divided by cellsize)
        num_rows = int(round((ymax - ymin) / cellsize))
        num_cols = int(round((xmax - xmin) / cellsize))

        # make the clone map using mapattr
        clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % (
            str(num_rows), str(num_cols), str(xmin), str(ymax),
            str(cellsize), clonemap_mask_file)
        print(cmd)
        os.system(cmd)

        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(
            v=mask_file,
            cloneMapFileName=clonemap_mask_file,
            tmpDir=tmp_folder,
            absolutePath=None,
            isLddMap=False,
            cover=None,
            isNomMap=True)
        local_mask_boolean = pcr.defined(local_mask)
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)

    print("")
    print("")
    print("")

    print(num_of_masks)
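# boundingBox() is called above but defined elsewhere in this repository; the
# sketch below is a hypothetical stand-in that derives the extent of the True
# cells from their cell-centre coordinates (assumption: a lat/lon clone).
#
# def boundingBox(boolean_map):
#     x = pcr.ifthen(boolean_map, pcr.xcoordinate(boolean_map))
#     y = pcr.ifthen(boolean_map, pcr.ycoordinate(boolean_map))
#     xmin = pcr.cellvalue(pcr.mapminimum(x), 1)[0]
#     xmax = pcr.cellvalue(pcr.mapmaximum(x), 1)[0]
#     ymin = pcr.cellvalue(pcr.mapminimum(y), 1)[0]
#     ymax = pcr.cellvalue(pcr.mapmaximum(y), 1)[0]
#     return xmin, ymin, xmax, ymax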
def generate_hydro_datasets(path, output_dir, step):
    print(path)

    file_name = os.path.splitext(os.path.basename(path))[0]
    map_path = output_dir + '/' + file_name + '.map'
    path_prefix = map_path[:-14]

    if step == 'ldd':
        cmd = u'gdal_translate -a_nodata -9999 -of PCRaster -ot Float32 ' + path + ' ' + map_path
        print(cmd)
        subprocess.call(cmd, shell=True)

    # slope = pcr.slope(dem)
    # pcr.report(slope, path_prefix + '_slope.map')

    # pcr.setglobaloption("lddin")

    if step == 'ldd':
        dem = pcr.readmap(map_path)
        print("Computing LDD ...")
        # enable pit filling
        ldd = pcr.lddcreate(dem, 9999999, 9999999, 9999999, 9999999)
        pcr.report(ldd, path_prefix + '_ldd.map')
        return
    elif step == 'ldddem':
        dem = pcr.readmap(map_path)
        print("Computing LDD DEM ...")
        dem_pitfilled = pcr.lddcreatedem(dem, 9999999, 9999999, 9999999, 9999999)
        dem_diff = dem_pitfilled - dem
        pcr.report(dem_diff, path_prefix + '_dem_pits_diff.map')
        return

    # print("Computing LDD without pit filling ...")
    # ldd_pits = pcr.lddcreate(dem, 0, 0, 0, 0)
    # pcr.report(ldd_pits, path_prefix + '_ldd_with_pits.map')

    # print("Computing pits ...")
    # pits = pcr.pit(ldd_pits)
    # pcr.report(pits, path_prefix + '_pits.map')

    if step == 'fa':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        print("Computing flow accumulation ...")
        fa = pcr.accuflux(ldd, 1)
        pcr.report(fa, path_prefix + '_fa.map')
        return

    if step == 'catchments':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        print("Delineating catchments ...")
        catchments = pcr.catchment(ldd, pcr.pit(ldd))
        pcr.report(catchments, path_prefix + '_catchments.map')
        return

    if step == 'stream_order':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        print("Computing stream order ...")
        stream_order = pcr.streamorder(ldd)
        pcr.report(stream_order, path_prefix + '_streamorder.map')
        return

    if step == 'stream':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        accuThreshold = 100
        print("Computing stream ...")
        stream = pcr.ifthenelse(pcr.accuflux(ldd, 1) >= accuThreshold,
                                pcr.boolean(1), pcr.boolean(0))
        pcr.report(stream, path_prefix + '_stream.map')
        return

    if step == 'height_river':
        print("Computing height_river ...")
        stream = pcr.readmap(path_prefix + '_stream.map')
        dem = pcr.readmap(map_path)
        height_river = pcr.ifthenelse(stream, pcr.ordinal(dem), 0)
        pcr.report(height_river, path_prefix + '_height_river.map')
        return

    if step == 'up_elevation':
        print("Computing up_elevation ...")
        height_river = pcr.readmap(path_prefix + '_height_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
        pcr.report(up_elevation, path_prefix + '_up_elevation.map')
        return

    if step == 'hand':
        print("Computing HAND ...")
        dem = pcr.readmap(map_path)
        up_elevation = pcr.readmap(path_prefix + '_up_elevation.map')
        hand = pcr.max(dem - up_elevation, 0)
        pcr.report(hand, path_prefix + '_hand.map')
        return

    if step == 'dand':
        print("Computing DAND ...")
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        dist = pcr.ldddist(ldd, stream, 1)
        pcr.report(dist, path_prefix + '_dist.map')
        return

    if step == 'fa_river':
        print("Computing FA river ...")
        fa = pcr.readmap(path_prefix + '_fa.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        fa_river = pcr.ifthenelse(stream, pcr.ordinal(fa), 0)
        pcr.report(fa_river, path_prefix + '_fa_river.map')
        return

    if step == 'faand':
        print("Computing FAAND ...")
        fa_river = pcr.readmap(path_prefix + '_fa_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_fa = pcr.scalar(pcr.subcatchment(ldd, fa_river))
        pcr.report(up_fa, path_prefix + '_faand.map')
        return
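# Hypothetical driver sketch (not part of the original script): the steps form
# a dependency chain (ldd -> fa/stream -> height_river -> up_elevation -> hand,
# etc.), so for a fresh DEM they would be run roughly in this order. Paths are
# placeholders.
#
#   for s in ('ldd', 'fa', 'stream', 'catchments', 'height_river',
#             'up_elevation', 'hand', 'dand', 'fa_river', 'faand'):
#       generate_hydro_datasets('/tmp/dem.tif', '/tmp/out', s)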
def initial(self):
    """ initial part of the routing module
    """
    maskinfo = MaskInfo.instance()
    self.var.avgdis = maskinfo.in_zero()
    self.var.Beta = loadmap('beta')
    self.var.InvBeta = 1 / self.var.Beta
    # Inverse of beta for kinematic wave
    self.var.ChanLength = loadmap('ChanLength').astype(float)
    self.var.InvChanLength = 1 / self.var.ChanLength
    # Inverse of channel length [1/m]

    self.var.NoRoutSteps = int(np.maximum(1, round(self.var.DtSec / self.var.DtSecChannel, 0)))
    # Number of sub-steps based on value of DtSecChannel,
    # or 1 if DtSec is smaller than DtSecChannel
    settings = LisSettings.instance()
    option = settings.options
    if option['InitLisflood']:
        self.var.NoRoutSteps = 1
        # InitLisflood is used!
        # so channel routing step is the same as the general time step
    self.var.DtRouting = self.var.DtSec / self.var.NoRoutSteps
    # Corresponding sub-timestep (seconds)
    self.var.InvDtRouting = 1 / self.var.DtRouting
    self.var.InvNoRoutSteps = 1 / float(self.var.NoRoutSteps)
    # inverse for faster calculation inside the dynamic section

    # -------------------------- LDD

    self.var.Ldd = lddmask(loadmap('Ldd', pcr=True, lddflag=True), self.var.MaskMap)
    # Cut ldd to size of MaskMap (NEW, 29/9/2004)
    # Prevents 'unsound' ldd if MaskMap covers sub-area of ldd

    # Count (inverse of) upstream area for each pixel
    # Needed if we want to calculate average values of variables
    # upstream of gauge locations
    self.var.UpArea = accuflux(self.var.Ldd, self.var.PixelAreaPcr)
    # Upstream contributing area for each pixel
    # Note that you might expect that values of UpArea would be identical to
    # those of variable CatchArea (see below) at the outflow points.
    # This is NOT actually the case, because outflow points are shifted 1
    # cell in upstream direction in the calculation of CatchArea!
    self.var.InvUpArea = 1 / self.var.UpArea
    # Calculate inverse, so we can multiply in dynamic (faster than divide)

    self.var.IsChannelPcr = boolean(loadmap('Channels', pcr=True))
    self.var.IsChannel = np.bool8(compressArray(self.var.IsChannelPcr))
    # Identify channel pixels
    self.var.IsChannelKinematic = self.var.IsChannel.copy()
    # Identify kinematic wave channel pixels
    # (identical to IsChannel, unless dynamic wave is used, see below)
    # self.var.IsStructureKinematic = pcraster.boolean(0)
    self.var.IsStructureKinematic = np.bool8(maskinfo.in_zero())
    # Map that identifies special inflow/outflow structures (reservoirs, lakes)
    # within the kinematic wave channel routing. Set to (dummy) value of zero,
    # modified in reservoir and lake routines (if those are used)
    LddChan = lddmask(self.var.Ldd, self.var.IsChannelPcr)
    # ldd for Channel network
    self.var.MaskMap = boolean(self.var.Ldd)
    # Use boolean version of Ldd as calculation mask
    # (important for correct mass balance check:
    # any water generated outside of Ldd won't reach
    # channel anyway)
    self.var.LddToChan = lddrepair(ifthenelse(self.var.IsChannelPcr, 5, self.var.Ldd))
    # Routing of runoff (incl. ground water)
    AtOutflow = boolean(pit(self.var.Ldd))
    # find outlet points...
    if option['dynamicWave']:
        IsChannelDynamic = boolean(loadmap('ChannelsDynamic', pcr=True))
        # Identify channel pixels where dynamic wave is used
        self.var.IsChannelKinematic = (self.var.IsChannelPcr == 1) & (IsChannelDynamic == 0)
        # Identify (update) channel pixels where kinematic wave is used
        self.var.LddKinematic = lddmask(self.var.Ldd, self.var.IsChannelKinematic)
        # Ldd for kinematic wave: ends (pit) just before dynamic stretch
        LddDynamic = lddmask(self.var.Ldd, IsChannelDynamic)
        # Ldd for dynamic wave

        # Following statements produce an ldd network that connects the pits in
        # LddKinematic to the nearest downstream dynamic wave pixel
        LddToDyn = lddrepair(ifthenelse(IsChannelDynamic, 5, self.var.Ldd))
        # Temporary ldd: flow paths end in dynamic pixels
        PitsKinematic = cover(boolean(pit(self.var.LddKinematic)), 0)
        # Define start of each flow path at pit on LddKinematic
        PathKinToDyn = path(LddToDyn, PitsKinematic)
        # Identify paths that connect pits in LddKinematic to dynamic wave pixels
        LddKinToDyn = lddmask(LddToDyn, PathKinToDyn)
        # Create ldd
        DynWaveBoundaryCondition = boolean(pit(LddDynamic))
        # NEW 12-7-2005 (experimental)
        # Location of boundary condition dynamic wave

        self.var.AtLastPoint = (downstream(self.var.Ldd, AtOutflow) == 1) & \
            (AtOutflow != 1) & self.var.IsChannelPcr
        # NEW 23-6-2005
        # Dynamic wave routine gives no outflow out of pits, so we calculate this
        # one cell upstream (WvD)
        # (implies that most downstream cell is not taken into account in mass balance
        # calculations, even if dyn wave is not used)
        # Only include points that are on a channel (otherwise some small 'micro-catchments'
        # are included, for which the mass balance cannot be calculated properly)
    else:
        self.var.LddKinematic = LddChan
        # No dynamic wave, so kinematic ldd equals channel ldd
        self.var.AtLastPoint = AtOutflow
    self.var.AtLastPointC = np.bool8(compressArray(self.var.AtLastPoint))
    # assign unique identifier to each of them

    maskinfo = MaskInfo.instance()
    lddC = compressArray(self.var.LddKinematic)
    inAr = decompress(np.arange(maskinfo.info.mapC[0], dtype="int32"))
    # giving a number to each non missing pixel as id
    self.var.downstruct = (compressArray(downstream(self.var.LddKinematic, inAr))).astype("int32")
    # each upstream pixel gets the id of the downstream pixel
    self.var.downstruct[lddC == 5] = maskinfo.info.mapC[0]
    # all pits get a high number
    # d3 = np.bincount(self.var.down, weights=loadmap('AvgDis'))[:-1]
    # upstream function in numpy

    OutflowPoints = nominal(uniqueid(self.var.AtLastPoint))
    # and assign unique identifier to each of them
    self.var.Catchments = (compressArray(catchment(self.var.Ldd, OutflowPoints))).astype(np.int32)
    CatchArea = np.bincount(self.var.Catchments, weights=self.var.PixelArea)[self.var.Catchments]
    # CatchArea = CatchArea[self.var.Catchments]
    # define catchment for each outflow point
    # CatchArea = areatotal(self.var.PixelArea, self.var.Catchments)
    # Compute area of each catchment [m2]
    # Note: in earlier versions this was calculated using the "areaarea" function,
    # changed to "areatotal" in order to enable handling of grids with spatially
    # variable cell areas (e.g. lat/lon grids)
    self.var.InvCatchArea = 1 / CatchArea
    # inverse of catchment area [1/m2]

    # ************************************************************
    # ***** CHANNEL GEOMETRY *************************************
    # ************************************************************

    self.var.ChanGrad = np.maximum(loadmap('ChanGrad'), loadmap('ChanGradMin'))
    # avoid calculation of Alpha using ChanGrad=0: this creates MV!
    self.var.CalChanMan = loadmap('CalChanMan')
    self.var.ChanMan = self.var.CalChanMan * loadmap('ChanMan')
    # Manning's n is multiplied by ChanManCal
    # enables calibration for peak timing
    self.var.ChanBottomWidth = loadmap('ChanBottomWidth')
    ChanDepthThreshold = loadmap('ChanDepthThreshold')
    ChanSdXdY = loadmap('ChanSdXdY')
    self.var.ChanUpperWidth = self.var.ChanBottomWidth + 2 * ChanSdXdY * ChanDepthThreshold
    # Channel upper width [m]
    self.var.TotalCrossSectionAreaBankFull = 0.5 * ChanDepthThreshold * \
        (self.var.ChanUpperWidth + self.var.ChanBottomWidth)
    # Area of bank full discharge cross section [m2]
    # (trapezoid area equation)
    TotalCrossSectionAreaHalfBankFull = 0.5 * self.var.TotalCrossSectionAreaBankFull
    # Cross-sectional area at half bankfull [m2]
    # This can be used to initialise channel flow (see below)

    TotalCrossSectionAreaInitValue = loadmap('TotalCrossSectionAreaInitValue')
    self.var.TotalCrossSectionArea = np.where(TotalCrossSectionAreaInitValue == -9999,
                                              TotalCrossSectionAreaHalfBankFull,
                                              TotalCrossSectionAreaInitValue)
    # Total cross-sectional area [m2]: if initial value in binding equals -9999 the
    # value at half bankfull is used, otherwise TotalCrossSectionAreaInitValue
    # (typically end map from previous simulation)

    if option['SplitRouting']:
        # in_zero = maskinfo.in_zero()
        CrossSection2AreaInitValue = loadmap('CrossSection2AreaInitValue')
        self.var.CrossSection2Area = np.where(CrossSection2AreaInitValue == -9999,
                                              maskinfo.in_zero(),
                                              CrossSection2AreaInitValue)
        # cross-sectional area [m2] for 2nd line of routing: if initial value in
        # binding equals -9999 the value is set to 0, otherwise
        # CrossSection2AreaInitValue (typically end map from previous simulation)

        PrevSideflowInitValue = loadmap('PrevSideflowInitValue')
        self.var.Sideflow1Chan = np.where(PrevSideflowInitValue == -9999,
                                          maskinfo.in_zero(),
                                          PrevSideflowInitValue)
        # sideflow from previous run for 1st line of routing: if initial value in
        # binding equals -9999 the value is set to 0, otherwise
        # PrevSideflowInitValue (typically end map from previous simulation)

    # ************************************************************
    # ***** CHANNEL ALPHA (KIN. WAVE) ****************************
    # ************************************************************
    # Following calculations are needed to calculate Alpha parameter in kinematic
    # wave. Alpha currently fixed at half of bankfull depth (this may change in
    # future versions!)
    ChanWaterDepthAlpha = np.where(self.var.IsChannel, 0.5 * ChanDepthThreshold, 0.0)
    # Reference water depth for calculation of Alpha: half of bankfull
    self.var.ChanWettedPerimeterAlpha = self.var.ChanBottomWidth + 2 * \
        np.sqrt(np.square(ChanWaterDepthAlpha) + np.square(ChanWaterDepthAlpha * ChanSdXdY))
    # Channel wetted perimeter [m] (Pythagoras)
    AlpTermChan = (self.var.ChanMan / (np.sqrt(self.var.ChanGrad))) ** self.var.Beta
    self.var.AlpPow = 2.0 / 3.0 * self.var.Beta
    self.var.ChannelAlpha = (AlpTermChan *
                             (self.var.ChanWettedPerimeterAlpha ** self.var.AlpPow)).astype(float)
    self.var.InvChannelAlpha = 1 / self.var.ChannelAlpha
    # ChannelAlpha for kinematic wave

    # ************************************************************
    # ***** CHANNEL INITIAL DISCHARGE ****************************
    # ************************************************************

    self.var.ChanM3 = self.var.TotalCrossSectionArea * self.var.ChanLength
    # channel water volume [m3]
    self.var.ChanIniM3 = self.var.ChanM3.copy()
    self.var.ChanM3Kin = self.var.ChanIniM3.copy().astype(float)
    # Initialise water volume in kinematic wave channels [m3]
    self.var.ChanQKin = np.where(self.var.ChannelAlpha > 0,
                                 (self.var.TotalCrossSectionArea / self.var.ChannelAlpha) ** self.var.InvBeta,
                                 0).astype(float)
    # Initialise discharge at kinematic wave pixels (note that InvBeta is
    # simply 1/beta, computational efficiency!)

    self.var.CumQ = maskinfo.in_zero()
    # initialise sum of discharge to calculate average

    # ************************************************************
    # ***** CHANNEL INITIAL DYNAMIC WAVE *************************
    # ************************************************************
    if option['dynamicWave']:
        pass
        # TODO !!!!!!!!!!!!!!!!!!!!

        # lookchan = lookupstate(TabCrossSections, ChanCrossSections, ChanBottomLevel,
        #                        self.var.ChanLength,
        #                        DynWaveConstantHeadBoundary + ChanBottomLevel)
        # ChanIniM3 = ifthenelse(AtOutflow, lookchan, ChanIniM3)
        # Correct ChanIniM3 for constant head boundary in pit (only if
        # dynamic wave is used)
        # ChanM3Dyn = ChanIniM3
        # Set volume of water in dynamic wave channel to initial value
        # (note that initial condition is expressed as a state in [m3] for the dynamic wave,
        # and as a rate [m3/s] for the kinematic wave (a bit confusing)

        # Estimate number of iterations needed in first time step (based on Courant criterion)
        # TO DO !!!!!!!!!!!!!!!!!!!!
        # Potential = lookuppotential(
        #     TabCrossSections, ChanCrossSections, ChanBottomLevel, self.var.ChanLength, ChanM3Dyn)
        # Potential
        # WaterLevelDyn = Potential - ChanBottomLevel
        # Water level [m above bottom level]
        # WaveCelerityDyn = pcraster.sqrt(9.81 * WaterLevelDyn)
        # Dynamic wave celerity [m/s]
        # CourantDynamic = self.var.DtSec * (WaveCelerityDyn + 2) / self.var.ChanLength
        # Courant number for dynamic wave
        # We don't know the water velocity at this time so
        # we just guess it's 2 m/s (Odra tests show that flow velocity
        # is typically much lower than wave celerity, and 2 m/s is quite
        # high already so this gives a pretty conservative/safe estimate
        # for DynWaveIterations)
        # DynWaveIterationsTemp = max(1, roundup(CourantDynamic / CourantDynamicCrit))
        # DynWaveIterations = ordinal(mapmaximum(DynWaveIterationsTemp))
        # Number of sub-steps needed for required numerical
        # accuracy. Always greater than or equal to 1
        # (otherwise division by zero!)

        # TEST
        # If polder option is used, we need an estimate of the initial channel
        # discharge, but we don't know this for the dynamic wave pixels
        # (since only initial state is known)!
        # Try if this works (dyn wave flux based on zero inflow 1 iteration)
        # Note that resulting ChanQ is ONLY used in the polder routine!!!
        # Since we need instantaneous estimate at start of time step, a
        # ChanQM3Dyn is calculated for one single one-second time step!!!
        # ChanQDyn = dynwaveflux(TabCrossSections,
        #                        ChanCrossSections,
        #                        LddDynamic,
        #                        ChanIniM3,
        #                        0.0,
        #                        ChanBottomLevel,
        #                        self.var.ChanMan,
        #                        self.var.ChanLength,
        #                        1,
        #                        1,
        #                        DynWaveBoundaryCondition)
        # Compute volume and discharge in channel after dynamic wave
        # ChanM3Dyn in [cu m]
        # ChanQDyn in [cu m / s]
        # self.var.ChanQ = ifthenelse(IsChannelDynamic, ChanQDyn, self.var.ChanQKin)
        # Channel discharge: combine results of kinematic and dynamic wave
    else:
        # ***** NO DYNAMIC WAVE *************************
        # Dummy code if dynamic wave is not used, in which case ChanQ equals
        # ChanQKin (needed only for polder routine)
        PrevDischarge = loadmap('PrevDischarge')
        self.var.ChanQ = np.where(PrevDischarge == -9999, self.var.ChanQKin, PrevDischarge)
        # initialise channel discharge: cold start: equal to ChanQKin [m3/s]

    # Initialising cumulative output variables
    # These are all needed to compute the cumulative mass balance error
    self.var.DischargeM3Out = maskinfo.in_zero()
    # cumulative discharge at outlet [m3]
    self.var.TotalQInM3 = maskinfo.in_zero()
    # cumulative inflow from inflow hydrographs [m3]
    # self.var.sumDis = maskinfo.in_zero()
    self.var.sumDis = maskinfo.in_zero()
    self.var.sumIn = maskinfo.in_zero()
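# Worked check of the kinematic-wave rating computed in initial() above (a
# sketch; n, S, P and beta are illustrative numbers, not model defaults):
# Alpha = (n / sqrt(S))**Beta * P**(2/3 * Beta), A = Alpha * Q**Beta, and
# ChanQKin inverts this as Q = (A / Alpha)**(1/Beta).
#
#   >>> import numpy as np
#   >>> n, S, P, beta = 0.04, 0.001, 12.0, 0.6
#   >>> alpha = (n / np.sqrt(S)) ** beta * P ** (2.0 / 3.0 * beta)
#   >>> A = alpha * 10.0 ** beta          # cross-section area for Q = 10 m3/s
#   >>> (A / alpha) ** (1.0 / beta)       # recovers 10.0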
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file=global_landmask_30min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30min_only.map")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file=global_landmask_05min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_05min_only.map")
    # - based on the 06min input
    landmask_06min = define_landmask(input_file=global_landmask_06min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_06min_only.map")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file=global_landmask_30sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30sec_only.map")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file=global_landmask_03sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_03sec_only.map")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_extended_30min.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_extended_30min.map")
    # ~ pcr.aguila(ldd_map)

    # catchment map and size
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ pcr.aguila(catchment_size)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 15000 cells (at half arc degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 15000., island_map)

    # ~ # - use catchments (instead of islands)
    # ~ island_map = catchment_map
    # ~ island_size = catchment_size
    # ~ island_map = pcr.ifthen(island_size < 10000., island_map)

    # - sort from the largest island
    # -- take one cell per island as a representative
    island_map_rep_size = pcr.ifthen(pcr.areaorder(island_size, island_map) == 1.0,
                                     island_size)
    # -- sort from the largest island
    island_map_rep_ids = pcr.areaorder(
        island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0)))
    # -- map of smaller islands, sorted from the largest one
    island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map)

    # identify the biggest island for every group of small islands within a certain window (arcdeg cells)
    print("the biggest island for every group of small islands")
    large_island_map = pcr.ifthen(
        pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map), 15.),
        island_map)
    # ~ pcr.aguila(large_island_map)

    # identify big catchments
    print("identify large catchments")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map)
    # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map)
    # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map)
    # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map)

    # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin)
    large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map)
    # - give them codes that differ from the island codes
    large_catchment_map = pcr.nominal(
        pcr.scalar(large_catchment_map) +
        10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1])

    # merge biggest islands and big catchments
    print("merge large catchments and islands")
    large_catchment_and_island_map = pcr.cover(large_catchment_map, large_island_map)
    # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map)
    large_catchment_and_island_map_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)),
                                                        large_catchment_and_island_map)

    # - sort from the largest one
    # -- take one cell per unit as a representative
    large_catchment_and_island_map_rep_size = pcr.ifthen(
        pcr.areaorder(large_catchment_and_island_map_size,
                      large_catchment_and_island_map) == 1.0,
        large_catchment_and_island_map_size)
    # -- sort from the largest
    large_catchment_and_island_map_rep_ids = pcr.areaorder(
        large_catchment_and_island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size), pcr.nominal(1.0)))
    # -- map of largest catchments and islands, sorted from the largest one
    large_catchment_and_island_map = pcr.areamajority(
        pcr.nominal(large_catchment_and_island_map_rep_ids), large_catchment_and_island_map)
    # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map")

    # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains")
    # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ # - initial subdomains
    # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map"))
    # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    # ~ pcr.aguila(subdomains_initial)

    # spatial interpolation/extrapolation in order to merge the small catchments
    # to the nearest large catchments
    print("spatial interpolation/extrapolation to get initial subdomains")
    field = large_catchment_and_island_map
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_initial = field
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_30min_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    # start from an empty (all missing values) nominal map
    subdomains_final = pcr.ifthen(pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0))
        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        # ~ print(str(area_in_degree2))

        # check whether the size of the bounding box is ok
        # - initial check value
        check_ok = True

        reference_area_in_degree2 = 2500.
        if area_in_degree2 > 1.50 * reference_area_in_degree2:
            check_ok = False
        if (xmax - xmin) > 10 * (ymax - ymin):
            check_ok = False

        if check_ok == True:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

        if check_ok == False:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum clump ids
            min_clump_id = int(pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump, mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(pcr.cellvalue(
                    pcr.mapmaximum(pcr.scalar(pcr.defined(
                        check_mask_selected_boolean_from_clump))), 1)[0])

                if check_if_empty == 0.0:
                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                else:
                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump, pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

    # ~ # kill all aguila processes if they exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")
    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_30min_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):
        mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0))
        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        print(str(nr) + " ; " + str(area_in_degree2) + " ; " +
              str((xmax - xmin)) + " ; " + str((ymax - ymin)))

    print("")
    print("")
    print("")
    print("Number of subdomains: " + str(num_of_masks))
    print("")
    print("")
    print("")

    # spatial extrapolation in order to cover the entire map
    print("spatial interpolation/extrapolation to cover the entire map")
    field = subdomains_final
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_final_filled = field
    pcr.aguila(subdomains_final_filled)

    pcr.report(subdomains_final_filled, "global_subdomains_30min_final_filled.map")
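# The nearest-neighbour fill used twice above, isolated as a reusable sketch
# (the helper name fill_with_nearest is ours, not from the original script):
# give every defined cell a unique id, let spreadzone grow those ids outward
# into nearest-cell zones, then stamp each zone with its source value, so
# undefined cells inherit the value of the nearest defined cell.
def fill_with_nearest(field):
    cell_ids = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zone_ids = pcr.spreadzone(cell_ids, 0, 1)
    return pcr.areamajority(field, zone_ids)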
def main():
    #-initialization
    # MVs
    MV = -999.
    # minimum catchment size to process
    catchmentSizeLimit = 0.0
    # period of interest, start and end year
    startYear = 1961
    endYear = 2010
    # maps
    cloneMapFileName = '/data/hydroworld/PCRGLOBWB20/input30min/global/Global_CloneMap_30min.map'
    lddFileName = '/data/hydroworld/PCRGLOBWB20/input30min/routing/lddsound_30min.map'
    cellAreaFileName = '/data/hydroworld/PCRGLOBWB20/input30min/routing/cellarea30min.map'
    # set clone
    pcr.setclone(cloneMapFileName)
    # output
    outputPath = '/scratch/rens/reservedrecharge'
    percentileMapFileName = os.path.join(outputPath, 'q%03d_cumsec.map')
    textFileName = os.path.join(outputPath, 'groundwater_environmentalflow_%d.txt')
    fractionReservedRechargeMapFileName = os.path.join(outputPath, 'fraction_reserved_recharge%d.map')
    fractionMinimumReservedRechargeMapFileName = os.path.join(outputPath, 'minimum_fraction_reserved_recharge%d.map')
    # input
    inputPath = '/nfsarchive/edwin-emergency-backup-DO-NOT-DELETE/rapid/edwin/05min_runs_results/2015_04_27/non_natural_2015_04_27/global/netcdf/'
    # define data to be read from netCDF files
    ncData = {}
    variableName = 'totalRunoff'
    ncData[variableName] = {}
    ncData[variableName]['fileName'] = os.path.join(inputPath, 'totalRunoff_monthTot_output.nc')
    ncData[variableName]['fileRoot'] = os.path.join(outputPath, 'qloc')
    ncData[variableName]['annualAverage'] = pcr.scalar(0)
    variableName = 'gwRecharge'
    ncData[variableName] = {}
    ncData[variableName]['fileName'] = os.path.join(inputPath, 'gwRecharge_monthTot_output.nc')
    ncData[variableName]['fileRoot'] = os.path.join(outputPath, 'gwrec')
    ncData[variableName]['annualAverage'] = pcr.scalar(0)
    variableName = 'discharge'
    ncData[variableName] = {}
    ncData[variableName]['fileName'] = os.path.join(inputPath, 'totalRunoff_monthTot_output.nc')
    ncData[variableName]['fileRoot'] = os.path.join(outputPath, 'qc')
    ncData[variableName]['annualAverage'] = pcr.scalar(0)
    ncData[variableName]['mapStack'] = np.array([])
    # percents and environmental flow condition set as percentile
    percents = list(range(10, 110, 10))
    environmentalFlowPercent = 10
    if environmentalFlowPercent not in percents:
        percents.append(environmentalFlowPercent)
        percents.sort()
    #-start
    # obtain attributes
    pcr.setclone(cloneMapFileName)
    cloneSpatialAttributes = spatialAttributes(cloneMapFileName)
    years = range(startYear, endYear + 1)
    # output path
    if not os.path.isdir(outputPath):
        os.makedirs(outputPath)
    os.chdir(outputPath)
    # compute catchments
    ldd = pcr.readmap(lddFileName)
    cellArea = pcr.readmap(cellAreaFileName)
    catchments = pcr.catchment(ldd, pcr.pit(ldd))
    fractionWater = pcr.scalar(0.0)  # temporary!
    lakeMask = pcr.boolean(0)  # temporary!
    pcr.report(catchments, os.path.join(outputPath, 'catchments.map'))
    maximumCatchmentID = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(catchments)), 1)[0])
    # iterate over years
    weight = float(len(years))**-1
    for year in years:
        #-echo year
        print(' - processing year %d' % year)
        #-process data
        startDate = datetime.datetime(year, 1, 1)
        endDate = datetime.datetime(year, 12, 31)
        timeSteps = endDate.toordinal() - startDate.toordinal() + 1
        dynamicIncrement = 1
        for variableName in ncData.keys():
            print(' extracting %s' % variableName, end='')
            ncFileIn = ncData[variableName]['fileName']
            #-process data
            pcrDataSet = pcrObject(variableName, ncData[variableName]['fileRoot'],
                                   ncFileIn, cloneSpatialAttributes,
                                   pcrVALUESCALE=pcr.Scalar, resamplingAllowed=True,
                                   dynamic=True, dynamicStart=startDate, dynamicEnd=endDate,
                                   dynamicIncrement=dynamicIncrement, ncDynamicDimension='time')
            pcrDataSet.initializeFileInfo()
            pcrDataSet.processFileInfo()
            for fileInfo in list(pcrDataSet.fileProcessInfo.values())[0]:
                tempFileName = fileInfo[1]
                variableField = pcr.readmap(tempFileName)
                variableField = pcr.ifthen(pcr.defined(ldd), pcr.cover(variableField, 0))
                if variableName == 'discharge':
                    dayNumber = int(os.path.splitext(tempFileName)[1].strip('.'))
                    date = datetime.date(year, 1, 1) + datetime.timedelta(dayNumber - 1)
                    numberDays = calendar.monthrange(year, date.month)[1]
                    variableField = pcr.max(0, pcr.catchmenttotal(variableField * cellArea, ldd) /
                                            (numberDays * 24 * 3600))
                ncData[variableName]['annualAverage'] += weight * variableField
                if 'mapStack' in ncData[variableName].keys():
                    tempArray = pcr2numpy(variableField, MV)
                    mask = tempArray != MV
                    if ncData[variableName]['mapStack'].size != 0:
                        ncData[variableName]['mapStack'] = np.vstack(
                            (ncData[variableName]['mapStack'], tempArray[mask]))
                    else:
                        ncData[variableName]['mapStack'] = tempArray[mask]
                        coordinates = np.zeros((ncData[variableName]['mapStack'].size, 2))
                        pcr.setglobaloption('unitcell')
                        tempArray = pcr2numpy(pcr.ycoordinate(pcr.boolean(1)) + 0.5, MV)
                        coordinates[:, 0] = tempArray[mask]
                        tempArray = pcr2numpy(pcr.xcoordinate(pcr.boolean(1)) + 0.5, MV)
                        coordinates[:, 1] = tempArray[mask]
                os.remove(tempFileName)
            # delete object
            pcrDataSet = None
            del pcrDataSet
        # close line on screen
        print()
    # report annual averages
    key = 'annualAverage'
    ncData['discharge'][key] /= 12
    for variableName in ncData.keys():
        ncData[variableName][key] = pcr.max(0, ncData[variableName][key])
        pcr.report(ncData[variableName][key],
                   os.path.join(outputPath, '%s_%s.map' % (variableName, key)))
    # remove aux.xml
    for tempFileName in os.listdir(outputPath):
        if 'aux.xml' in tempFileName:
            os.remove(tempFileName)
    # sort data
    print('sorting discharge data')
    variableName = 'discharge'
    key = 'mapStack'
    indices = np.zeros((ncData[variableName][key].shape), np.uint)
    for iCnt in range(ncData[variableName][key].shape[1]):
        indices[:, iCnt] = ncData[variableName][key][:, iCnt].argsort(kind='mergesort')
        ncData[variableName][key][:, iCnt] = ncData[variableName][key][:, iCnt][indices[:, iCnt]]
    # extract values for percentiles
    print('returning maps')
    for percent in percents:
        percentile = 0.01 * percent
        index0 = min(ncData[variableName][key].shape[0] - 1,
                     int(percentile * ncData[variableName][key].shape[0]))
        index1 = min(ncData[variableName][key].shape[0] - 1,
                     int(percentile * ncData[variableName][key].shape[0]) + 1)
        x0 = float(index0) / ncData[variableName][key].shape[0]
        x1 = float(index1) / ncData[variableName][key].shape[0]
        if x0 != x1:
            y = ncData[variableName][key][index0, :] + (percentile - x0) * \
                (ncData[variableName][key][index1, :] - ncData[variableName][key][index0, :]) / (x1 - x0)
        else:
            y = ncData[variableName][key][index0, :]
        # convert a slice of the stack into an array
        tempArray = np.ones((cloneSpatialAttributes.numberRows,
                             cloneSpatialAttributes.numberCols)) * MV
        for iCnt in range(coordinates.shape[0]):
            row = int(coordinates[iCnt, 0]) - 1
            col = int(coordinates[iCnt, 1]) - 1
            tempArray[row, col] = y[iCnt]
        variableField = numpy2pcr(pcr.Scalar, tempArray, MV)
        pcr.report(variableField, percentileMapFileName % percent)
        if percent == environmentalFlowPercent:
            ncData[variableName]['environmentalFlow'] = variableField
        tempArray = None
        variableField = None
        del tempArray, variableField
    # process environmental flow
    # initialize map of reserved recharge fraction
    fractionReservedRechargeMap = pcr.ifthen(
        ncData[variableName]['environmentalFlow'] < 0, pcr.scalar(0))
    fractionMinimumReservedRechargeMap = pcr.ifthen(
        ncData[variableName]['environmentalFlow'] < 0, pcr.scalar(0))
    textFile = open(textFileName % environmentalFlowPercent, 'w')
    hStr = 'Environmental flow analysis per basin, resulting in a map of renewable, exploitable recharge, for the %d%s quantile of discharge\n' % (environmentalFlowPercent, '%')
    hStr += 'Returns Q_%d/R, the fraction of reserved recharge needed to sustain fully the environmental flow requirement defined as the %d percentile,\n' % (environmentalFlowPercent, environmentalFlowPercent)
    hStr += 'and Q*_%d/R, a reduced fraction that takes the availability of surface water into account\n' % environmentalFlowPercent
    textFile.write(hStr)
    print(hStr)
    # create header to display on screen and write to file
    # reported are: 1: ID, 2: Area, 3: average discharge, 4: environmental flow, 5: average recharge,
    # 6: Q_%d/Q, 7: Q_%d/R_Avg, 8: R_Avg/Q_Avg, 9: Q*_%d/R_Avg
    hStr = '%6s,%15s,%15s,%15s,%15s,%15s,%15s,%15s,%15s\n' % \
        ('ID', 'Area [km2]', 'Q_Avg [m3]', 'Q_%d [m3]' % environmentalFlowPercent,
         'R_Avg [m3]', 'Q_%d/Q_Avg [-]' % environmentalFlowPercent,
         'Q_%d/Q_Avg [-]' % environmentalFlowPercent, 'R_Avg/Q_Avg [-]',
         'Q*_%d/Q_Avg [-]' % environmentalFlowPercent)
    textFile.write(hStr)
    print(hStr)
    for catchment in range(1, maximumCatchmentID + 1):
        # create catchment mask and check whether it does not coincide with a lake
        catchmentMask = catchments == catchment
        catchmentSize = pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask, cellArea * 1.e-6)), 1)[0]
        #~ ##~ if pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask, pcr.scalar(lakeMask))), 1) != \
        #~ ##~         pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask, pcr.scalar(catchmentMask))), 1)[0] and \
        #~ ##~         catchmentSize > catchmentSizeLimit:
        key = 'annualAverage'
        variableName = 'discharge'
        if bool(pcr.cellvalue(pcr.maptotal(pcr.ifthen(
                (ldd == 5) & catchmentMask,
                pcr.scalar(ncData[variableName][key] > 0))), 1)[0]) and \
                catchmentSize >= catchmentSizeLimit:
            # valid catchment, process
            # all volumes are in m3 per year
            key = 'annualAverage'
            catchmentAverageDischarge = pcr.cellvalue(pcr.mapmaximum(pcr.ifthen(
                catchmentMask & (ldd == 5),
                ncData[variableName][key])), 1)[0] * 365.25 * 3600 * 24
            variableName = 'gwRecharge'
            catchmentRecharge = pcr.cellvalue(pcr.maptotal(pcr.ifthen(
                catchmentMask,
                ncData[variableName][key] * (1. - fractionWater) * cellArea)), 1)[0]
            variableName = 'totalRunoff'
            catchmentRunoff = pcr.cellvalue(pcr.maptotal(pcr.ifthen(
                catchmentMask, ncData[variableName][key] * cellArea)), 1)[0]
            key = 'environmentalFlow'
            variableName = 'discharge'
            catchmentEnvironmentalFlow = pcr.cellvalue(pcr.mapmaximum(pcr.ifthen(
                catchmentMask & (ldd == 5),
                ncData[variableName][key])), 1)[0] * 365.25 * 3600 * 24
            catchmentRunoff = max(catchmentRunoff, catchmentEnvironmentalFlow)
            if catchmentAverageDischarge > 0.:
                fractionEnvironmentalFlow = catchmentEnvironmentalFlow / catchmentAverageDischarge
                fractionGroundWaterContribution = catchmentRecharge / catchmentAverageDischarge
            else:
                fractionEnvironmentalFlow = 0.
                fractionGroundWaterContribution = 0.
            if catchmentRecharge > 0:
                fractionReservedRecharge = min(1, catchmentEnvironmentalFlow / catchmentRecharge)
            else:
                fractionReservedRecharge = 1.0
            fractionMinimumReservedRecharge = (fractionReservedRecharge +
                fractionGroundWaterContribution -
                fractionReservedRecharge * fractionGroundWaterContribution) * fractionReservedRecharge
            #~ # echo to screen, and write to file and map
            wStr = '%6s,%15.1f,%15.6g,%15.6g,%15.6g,%15.6f,%15.6f,%15.6f,%15.6f\n' % \
                (catchment, catchmentSize, catchmentAverageDischarge, catchmentEnvironmentalFlow,
                 catchmentRecharge, fractionEnvironmentalFlow, fractionReservedRecharge,
                 fractionGroundWaterContribution, fractionMinimumReservedRecharge)
            print(wStr)
            textFile.write(wStr)
            # update maps
            fractionReservedRechargeMap = pcr.ifthenelse(
                catchmentMask, pcr.scalar(fractionReservedRecharge),
                fractionReservedRechargeMap)
            fractionMinimumReservedRechargeMap = pcr.ifthenelse(
                catchmentMask, pcr.scalar(fractionMinimumReservedRecharge),
                fractionMinimumReservedRechargeMap)
    #-report maps and close text file
    pcr.report(fractionReservedRechargeMap,
               fractionReservedRechargeMapFileName % environmentalFlowPercent)
    pcr.report(fractionMinimumReservedRechargeMap,
               fractionMinimumReservedRechargeMapFileName % environmentalFlowPercent)
    # close text file
    textFile.close()
    # finished
    print('all done!')
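# Toy check of the percentile interpolation used above (a sketch; 'vals' stands
# in for one already-sorted column of mapStack):
#
#   >>> import numpy as np
#   >>> vals = np.array([1., 3., 5., 7., 9.])
#   >>> percentile = 0.10
#   >>> i0 = min(vals.size - 1, int(percentile * vals.size))
#   >>> i1 = min(vals.size - 1, i0 + 1)
#   >>> x0, x1 = i0 / vals.size, i1 / vals.size
#   >>> vals[i0] + (percentile - x0) * (vals[i1] - vals[i0]) / (x1 - x0)
#   2.0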
ldd = fillMVBoundingBox(ldd, 1, -108, -101, 31, 40)
ldd = pcr.lddrepair(pcr.ldd(ldd))
pits = pcr.pit(ldd)
pcr.setglobaloption('unitcell')
continentMasks = pcr.cover(mask48, pcr.windowmajority(mask48, 5))

#- adjust area masks with 5 min ldd
continentMasksNew = pcr.ifthen(pcr.boolean(pcr.readmap(cloneMap)) == 0, pcr.scalar(1))
for i in areas[0:]:
    print('area mask =', i)
    mask = pcr.ifthen(continentMasks == i, pcr.boolean(1))
    pitsContinent = pcr.pcrand(mask, pcr.boolean(pits))
    pitsContinent = pcr.nominal(pcr.uniqueid(pcr.ifthen(pitsContinent == 1, pcr.boolean(1))))
    catchments = pcr.catchment(ldd, pitsContinent)
    newMask = pcr.ifthen(pcr.boolean(catchments) == 1, pcr.scalar(i))
    continentMasksNew = pcr.cover(continentMasksNew, newMask)

updateCatchments = selectCatchments(ldd, pits, pitsDict, areas, cloneMap)
outMask = pcr.cover(updateCatchments, continentMasksNew)
pcr.report(pcr.nominal(outMask), 'mask5min.map')
pcr.report(ldd, 'ldd5min.map')

#- create clone, ldd, and mask map for each area
outMask = pcr.readmap('mask5min.map')
ldd = pcr.readmap('ldd5min.map')
ldd = pcr.lddrepair(pcr.ldd(pcr.ifthen(pcr.boolean(outMask) == 1, pcr.scalar(ldd))))
clone = pcr.cover(pcr.boolean(outMask), pcr.boolean(1))
pcr.report(clone, 'globalClone5min.map')
pcr.setglobaloption('unittrue')
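# fillMVBoundingBox() is defined elsewhere in this repository; a hypothetical
# stand-in consistent with the call above (fill value, then lon range, then lat
# range), filling missing ldd cells inside the box with a constant code:
#
# def fillMVBoundingBox(ldd_in, value, xmin, xmax, ymin, ymax):
#     x = pcr.xcoordinate(pcr.spatial(pcr.boolean(1)))
#     y = pcr.ycoordinate(pcr.spatial(pcr.boolean(1)))
#     box = (x > xmin) & (x < xmax) & (y > ymin) & (y < ymax)
#     return pcr.ifthenelse(box,
#                           pcr.cover(pcr.scalar(ldd_in), pcr.scalar(value)),
#                           pcr.scalar(ldd_in))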
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file=global_landmask_30min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30min_only")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file=global_landmask_05min_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_05min_only")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file=global_landmask_30sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_30sec_only")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file=global_landmask_03sec_file,
                                     clone_map_file=global_ldd_30min_inp_file,
                                     output_map_file="landmask_03sec_only")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_30min_final.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_final.map")
    # ~ pcr.aguila(ldd_map)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 10000 cells (at half arc degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 10000., island_map)

    # make catchment and island map
    print("make catchment and island map")

    # - catchment map
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    pcr.report(catchment_map, "global_catchment_not_sorted.map")
    os.system("mapattr -p global_catchment_not_sorted.map")
    num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1])

    # - maps of islands smaller than 10000 cells
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 10000., island_map)
    island_map = pcr.nominal(
        pcr.scalar(pcr.ifthen(landmask, pcr.clump(island_map))) +
        pcr.scalar(num_of_catchments) * 100.)
    pcr.aguila(island_map)

    island_size = pcr.ifthen(pcr.defined(island_map), island_size)
    island_map = pcr.ifthen(island_size == pcr.windowmaximum(island_size, 10.), island_map)
    pcr.aguila(island_map)

    catchment_map = pcr.cover(island_map, catchment_map)
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)

    # - calculate the size
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)

    # - sort from the largest catchment
    catchment_pits_boolean = pcr.ifthen(pcr.scalar(pcr.pit(ldd_map)) > 0.0, pcr.boolean(1.0))
    catchment_pits_boolean = pcr.ifthen(catchment_pits_boolean, catchment_pits_boolean)
    pcr.aguila(catchment_pits_boolean)
    catchment_map = pcr.nominal(pcr.areaorder(catchment_size * -1.0,
                                              pcr.nominal(catchment_pits_boolean)))
    catchment_map = pcr.catchment(ldd_map, catchment_map)
    pcr.report(catchment_map, "global_catchment_final.map")
    os.system("mapattr -p global_catchment_final.map")

    # - calculate the size
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    pcr.report(catchment_size, "global_catchment_size_in_number_of_cells.map")

    # number of catchments
    num_of_catchments = int(vos.getMinMaxMean(pcr.scalar(catchment_map))[1])

    # size of the largest catchment
    catchment_size_max = vos.getMinMaxMean(catchment_size)[1]
    print("")
    print(str(float(catchment_size_max)))
    print("")

    # identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    print("identify catchments with the minimum size of 50 cells")
    catchment_map_ge_50 = pcr.ifthen(catchment_size >= 50, catchment_map)
    pcr.report(catchment_map_ge_50, "global_catchment_ge_50_cells.map")

    # include the islands
    catchment_map_ge_50 = pcr.cover(island_map, catchment_map_ge_50)

    # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    cmd = "gdal_translate -of NETCDF global_catchment_ge_50_cells.map global_catchment_ge_50_cells.nc"
    print(cmd)
    os.system(cmd)
    cmd = "cdo fillmiss2 global_catchment_ge_50_cells.nc global_catchment_ge_50_cells_filled.nc"
    print(cmd)
    os.system(cmd)
    cmd = "cdo fillmiss2 global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.nc"
    print(cmd)
    os.system(cmd)
    cmd = "gdal_translate -of PCRaster global_catchment_ge_50_cells_filled.nc global_catchment_ge_50_cells_filled.map"
    print(cmd)
    os.system(cmd)
    cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "global_catchment_ge_50_cells_filled.map"
    print(cmd)
    os.system(cmd)

    # - initial subdomains
    subdomains_initial = pcr.nominal(pcr.readmap("global_catchment_ge_50_cells_filled.map"))
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)

    # - initial subdomains clump
    subdomains_initial_clump = pcr.clump(subdomains_initial)
    pcr.aguila(subdomains_initial_clump)

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    # - remove temporary files (not used)
    cmd = "rm global_catchment_ge_50_cells_filled*"
    print(cmd)
    os.system(cmd)

    # clone code that will be assigned
    assigned_number = 0