# ------------------------------------------------------------------
# Basin-map preparation at 5 arc-minute resolution: read the cell
# area and basin-id maps, extrapolate basin ids outward, clip them to
# the landmask, and locate every basin outlet so that the basins can
# be rederived consistently with the PCR-GLOBWB ldd.
# ------------------------------------------------------------------

# cell area at 5 arc-minute resolution
cell_area = vos.readPCRmapClone(input_files['cell_area_05min'],
                                clone_map_file,
                                output_files['tmp_folder'])

# set the basin map
msg = "Setting the basin map" + ":"
logger.info(msg)
basin_map = pcr.nominal(
    vos.readPCRmapClone(input_files['basin_map_05min'],
                        input_files['clone_map_05min'],
                        output_files['tmp_folder'],
                        None, False, None, True))
#~ pcr.aguila(basin_map)

# extend/extrapolate the basin ids with progressively larger
# majority windows (window lengths in degrees), then keep only the
# cells inside the landmask
for window_length in (0.5, 0.5, 0.5, 1.0, 1.5):
    basin_map = pcr.cover(basin_map,
                          pcr.windowmajority(basin_map, window_length))
basin_map = pcr.ifthen(landmask, basin_map)
#~ pcr.aguila(basin_map)

msg = "Redefining the basin map (so that it is consistent with the ldd map used in PCR-GLOBWB):"
logger.info(msg)
# upstream (accumulated) area of every pixel along the ldd
upstream_area = pcr.catchmenttotal(cell_area, ldd)
# catchment area of every basin = maximum upstream area within the basin
upstream_area_maximum = pcr.areamaximum(upstream_area, basin_map)
# outlet of every basin: the pixel whose upstream area equals the basin
# maximum (used to rederive basins consistent with the ldd)
outlet = pcr.nominal(
    pcr.uniqueid(
        pcr.ifthen(upstream_area == upstream_area_maximum, pcr.boolean(1.0))))
def identifyModelPixel(self,tmpDir,\
                       catchmentAreaAll,\
                       landMaskClass,\
                       xCoordinate,yCoordinate,id):
    """Identify the model pixel that best matches a GRDC station.

    Locates the model cell nearest to the GRDC station coordinates,
    expands the search to a small window, writes the model/GRDC
    differences to temporary PCRaster maps, converts them to a column
    file with ``map2col``, ranks the candidates with the external R
    script ``saveIdentifiedPixels.R``, and stores the selected pixel's
    longitude, latitude, catchment area and landmask class back into
    ``self.attributeGRDC``.

    Parameters
    ----------
    tmpDir : str
        Parent directory in which a random temporary directory is made.
    catchmentAreaAll : pcraster map
        Modelled catchment area per cell (comment below says km2).
    landMaskClass : pcraster map
        Landmask class/id map sampled at the selected pixel.
    xCoordinate, yCoordinate : pcraster maps
        Cell-centre longitudes and latitudes (arc degrees).
    id : GRDC station identifier (used as key into self.attributeGRDC).
        NOTE(review): the name shadows the ``id`` builtin.
    """
    # TODO: Include an option to consider average discharge.
    logger.info("Identify model pixel for the grdc station "+str(id)+".")
    # make a temporary directory:
    randomDir = self.makeRandomDir(tmpDir)
    # coordinate of grdc station
    xCoord  = float(self.attributeGRDC["grdc_longitude_in_arc_degree"][str(id)])
    yCoord  = float(self.attributeGRDC["grdc_latitude_in_arc_degree"][str(id)])
    # identify the point at pcraster model: the cell whose centre is
    # closest to the station in both x and y
    point = pcr.ifthen((pcr.abs(xCoordinate - xCoord) == pcr.mapminimum(pcr.abs(xCoordinate - xCoord))) &\
                       (pcr.abs(yCoordinate - yCoord) == pcr.mapminimum(pcr.abs(yCoordinate - yCoord))), \
                       pcr.boolean(1))
    # expanding the point to a 5x5-cell window of candidates
    point = pcr.windowmajority(point, self.cell_size_in_arc_degree * 5.0)
    # keep only candidates that have a positive modelled catchment area
    point = pcr.ifthen(catchmentAreaAll > 0, point)
    point = pcr.boolean(point)
    # values based on the model;
    modelCatchmentArea = pcr.ifthen(point, catchmentAreaAll)     # unit: km2
    model_x_ccordinate = pcr.ifthen(point, xCoordinate)          # unit: arc degree
    model_y_ccordinate = pcr.ifthen(point, yCoordinate)          # unit: arc degree
    # calculate (absolute) difference with GRDC data
    # - initiating all of them with the values of MV
    diffCatchArea = pcr.abs(pcr.scalar(vos.MV))  # difference between the model and grdc catchment area (unit: km2)
    diffDistance  = pcr.abs(pcr.scalar(vos.MV))  # distance between the model pixel and grdc catchment station (unit: arc degree)
    diffLongitude = pcr.abs(pcr.scalar(vos.MV))  # longitude difference (unit: arc degree)
    diffLatitude  = pcr.abs(pcr.scalar(vos.MV))  # latitude difference (unit: arc degree)
    #
    # - calculate (absolute) difference with GRDC data
    #   NOTE(review): the bare excepts below swallow every error (not
    #   just missing GRDC attributes); the defaults then stay at MV.
    try:
        diffCatchArea = pcr.abs(modelCatchmentArea-\
                        float(self.attributeGRDC["grdc_catchment_area_in_km2"][str(id)]))
    except:
        logger.info("The difference in the model and grdc catchment area cannot be calculated.")
    try:
        diffLongitude = pcr.abs(model_x_ccordinate - xCoord)
    except:
        logger.info("The difference in longitude cannot be calculated.")
    try:
        diffLatitude = pcr.abs(model_y_ccordinate - yCoord)
    except:
        logger.info("The difference in latitude cannot be calculated.")
    try:
        diffDistance = (diffLongitude**(2) + \
                        diffLatitude**(2))**(0.5)  # TODO: calculate distance in meter
    except:
        logger.info("Distance cannot be calculated.")
    # identify masks
    masks = pcr.ifthen(pcr.boolean(point), landMaskClass)
    # export the difference to temporary files: maps and txt
    catchmentAreaMap = randomDir+"/"+vos.get_random_word()+".area.map"
    diffCatchAreaMap = randomDir+"/"+vos.get_random_word()+".dare.map"
    diffDistanceMap  = randomDir+"/"+vos.get_random_word()+".dist.map"
    diffLatitudeMap  = randomDir+"/"+vos.get_random_word()+".dlat.map"
    diffLongitudeMap = randomDir+"/"+vos.get_random_word()+".dlon.map"
    # NOTE(review): diffLatitudeMap is assigned a second time here,
    # discarding the filename generated three lines above — looks like
    # a copy-paste slip; confirm which name is intended.
    diffLatitudeMap  = randomDir+"/"+vos.get_random_word()+".dlat.map"
    #
    maskMap = randomDir+"/"+vos.get_random_word()+".mask.map"
    diffColumnFile = randomDir+"/"+vos.get_random_word()+".cols.txt"  # output
    #
    # NOTE(review): the next two reports appear swapped — diffLatitude
    # is written to diffLongitudeMap and diffLongitude to
    # diffLatitudeMap. Verify against the column order expected by
    # saveIdentifiedPixels.R before changing.
    pcr.report(pcr.ifthen(point,modelCatchmentArea), catchmentAreaMap)
    pcr.report(pcr.ifthen(point,diffCatchArea    ), diffCatchAreaMap)
    pcr.report(pcr.ifthen(point,diffDistance     ), diffDistanceMap )
    pcr.report(pcr.ifthen(point,diffLatitude     ), diffLongitudeMap)
    pcr.report(pcr.ifthen(point,diffLongitude    ), diffLatitudeMap )
    pcr.report(pcr.ifthen(point,masks            ), maskMap)
    #
    # convert the candidate maps to one semicolon-free column file
    cmd = 'map2col '+catchmentAreaMap +' '+\
                     diffCatchAreaMap +' '+\
                     diffDistanceMap  +' '+\
                     diffLongitudeMap +' '+\
                     diffLatitudeMap  +' '+\
                     maskMap+' '+diffColumnFile
    print(cmd); os.system(cmd)
    # use R to sort the file
    cmd = 'R -f saveIdentifiedPixels.R '+diffColumnFile
    print(cmd); os.system(cmd)
    try:
        # read the output file (from R); the first line of the ".sel"
        # file is the selected pixel, fields separated by ";"
        f = open(diffColumnFile+".sel") ; allLines = f.read() ; f.close()
        # split the content of the file into several lines
        allLines = allLines.replace("\r",""); allLines = allLines.split("\n")
        selectedPixel = allLines[0].split(";")
        model_longitude_in_arc_degree = float(selectedPixel[0])
        model_latitude_in_arc_degree  = float(selectedPixel[1])
        model_catchment_area_in_km2   = float(selectedPixel[2])
        # field 7 is presumably the landmask class column — confirm
        # against the R script's output layout
        model_landmask                = str(selectedPixel[7])
        log_message  = "Model pixel for grdc station "+str(id)+" is identified (lat/lon in arc degree): "
        log_message += str(model_latitude_in_arc_degree) + " ; " + str(model_longitude_in_arc_degree)
        logger.info(log_message)
        # store the selection back on the station attributes
        self.attributeGRDC["model_longitude_in_arc_degree"][str(id)] = model_longitude_in_arc_degree
        self.attributeGRDC["model_latitude_in_arc_degree"][str(id)]  = model_latitude_in_arc_degree
        self.attributeGRDC["model_catchment_area_in_km2"][str(id)]   = model_catchment_area_in_km2
        self.attributeGRDC["model_landmask"][str(id)]                = model_landmask
    except:
        logger.info("Model pixel for grdc station "+str(id)+" can NOT be identified.")
    self.cleanRandomDir(randomDir)
landmask = pcr.ifthen(pcr.defined(ldd), pcr.boolean(1.0)) # - cell area cell_area = vos.readPCRmapClone(input_files['cell_area_05min'], clone_map_file, output_files['tmp_folder']) # set the basin map msg = "Setting the basin map" + ":" logger.info(msg) basin_map = pcr.nominal(\ vos.readPCRmapClone(input_files['basin_map_05min'], input_files['clone_map_05min'], output_files['tmp_folder'], None, False, None, True)) #~ pcr.aguila(basin_map) # - extend/extrapolate the basin basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5)) basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5)) basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5)) basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.0)) basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.5)) basin_map = pcr.ifthen(landmask, basin_map) #~ pcr.aguila(basin_map) msg = "Redefining the basin map (so that it is consistent with the ldd map used in PCR-GLOBWB):" logger.info(msg) # - calculate the upstream area of every pixel: upstream_area = pcr.catchmenttotal(cell_area, ldd) # - calculate the catchment area of every basin: upstream_area_maximum = pcr.areamaximum(upstream_area, basin_map) # - identify the outlet of every basin (in order to rederive the basin so that it is consistent with the ldd) outlet = pcr.nominal(
def main():
    """Build clone and landmask maps for all model subdomains.

    Workflow: prepare a clean output/tmp folder, read the global ldd
    (and optionally a landmask), assign every catchment to a subdomain
    from ``global_subdomain_file``, split subdomains whose bounding box
    is too large into clumps, then write per-subdomain mask and clone
    maps. Relies on module-level configuration (``out_folder``,
    ``global_clone_map_file``, ``global_ldd_inp_file``,
    ``landmask_map_file``, ``global_subdomain_file``,
    ``cellsize_in_arcmin``) and shells out to PCRaster's ``mapattr``.
    """
    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # tmp folder (always recreated empty)
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)

    # set the clone map
    print("set the clone map")
    pcr.setclone(global_clone_map_file)

    # read ldd map; double lddrepair to make sure the ldd is sound
    print("define the ldd")
    # ~ ldd_map = pcr.readmap(global_ldd_inp_file)
    ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(v = global_ldd_inp_file, \
                                                                     cloneMapFileName = global_clone_map_file, \
                                                                     tmpDir = tmp_folder, \
                                                                     absolutePath = None, \
                                                                     isLddMap = True, \
                                                                     cover = None, \
                                                                     isNomMap = False))))

    # define the landmask
    if landmask_map_file == None:
        print("define the landmask based on the ldd input")
        # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)
    else:
        print("define the landmask based on the input landmask_map_file")
        landmask = pcr.readmap(landmask_map_file)
        # cover missing ldd cells inside the landmask with pits (code 5)
        ldd_map  = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
        ldd_map  = pcr.lddrepair(pcr.lddrepair(pcr.ldd(ldd_map)))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)

    # save ldd files used
    # - global ldd
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd)
    os.system(cmd)
    # - ldd map that is used
    pcr.report(ldd_map, "lddmap_used.map")

    # make catchment map
    print("make catchment map")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))

    # read global subdomain file
    print("read global subdomain file")
    global_subdomain_map = vos.readPCRmapClone(
        v=global_subdomain_file,
        cloneMapFileName=global_clone_map_file,
        tmpDir=tmp_folder,
        absolutePath=None,
        isLddMap=False,
        cover=None,
        isNomMap=True)

    # set initial subdomain: every catchment gets the majority
    # subdomain id of its cells
    print("assign subdomains to all catchments")
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_initial.map")
    # indices 0 and 1 of getMinMaxMean are presumably min and max —
    # consistent with [1] being used as the number of masks below
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0
    # start from an all-missing nominal map (condition is never true)
    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):
        msg = "Processing the landmask %s" % (str(nr))
        print(msg)
        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))
        # skip subdomain ids with no cells at all
        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1, 1)[0] > 0:
            process_this_clone = True
        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)
        # - initial check value
        check_ok = True
        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)
            # ~ print(str(area_in_degree2))
            # check whether the size of bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2:
                check_ok = False
            # also reject very elongated bounding boxes
            if (xmax - xmin) > 10 * (ymax - ymin):
                check_ok = False
        # ~ # ignore checking
        # ~ check_ok = True

        if check_ok == True and process_this_clone == True:
            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)
            # assign the clone code
            assigned_number = assigned_number + 1
            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if check_ok == False and process_this_clone == True:
            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)
            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))
            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)
            # minimimum and maximum values
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])
            for clump_id in range(min_clump_id, max_clump_id + 1, 1):
                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)
                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)
                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])
                if check_if_empty == 0.0:
                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                else:
                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                    # assign the clone code
                    assigned_number = assigned_number + 1
                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")
    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")
    print("Making the clone and landmask maps for all subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])

    # clone and mask folders (always recreated empty)
    clone_folder = out_folder + "/clone/"
    if os.path.exists(clone_folder): shutil.rmtree(clone_folder)
    os.makedirs(clone_folder)
    mask_folder = out_folder + "/mask/"
    if os.path.exists(mask_folder): shutil.rmtree(mask_folder)
    os.makedirs(mask_folder)

    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):
        msg = "Processing the subdomain %s" % (str(nr))
        print(msg)
        # set the global clone
        pcr.setclone(global_clone_map_file)
        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))
        mask_selected_nominal = pcr.ifthen(subdomains_final == nr,
                                           pcr.nominal(nr))
        mask_file = "mask/mask_%s.map" % (str(nr))
        pcr.report(mask_selected_nominal, mask_file)
        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))
        # cellsize in arcdegree
        cellsize = cellsize_in_arcmin / 60.
        # number of rows and cols
        # NOTE(review): round() is applied to the extent before
        # dividing by cellsize — probably int(round((ymax - ymin) /
        # cellsize)) was intended; confirm before changing.
        num_rows = int(round(ymax - ymin) / cellsize)
        num_cols = int(round(xmax - xmin) / cellsize)
        # make the clone map using mapattr
        clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % (
            str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize),
            clonemap_mask_file)
        print(cmd)
        os.system(cmd)
        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(v = mask_file, \
                                         cloneMapFileName = clonemap_mask_file,
                                         tmpDir = tmp_folder, \
                                         absolutePath = None, isLddMap = False,
                                         cover = None, isNomMap = True)
        local_mask_boolean = pcr.defined(local_mask)
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)

    print("")
    print("")
    print("")
    print(num_of_masks)
def main():
    """Derive 30-arcmin model subdomains from landmasks and the ldd.

    Workflow: merge landmasks at several resolutions, extend the
    30-arcmin ldd to that merged landmask, group small islands and
    large catchments into initial subdomains, split subdomains whose
    bounding box is too large into clumps, and finally extrapolate the
    subdomain ids to cover the whole map. Relies on module-level
    configuration (``out_folder``, ``global_ldd_30min_inp_file``, the
    ``global_landmask_*_file`` names) and the module-level helpers
    ``define_landmask`` and ``boundingBox``.
    """
    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file = global_landmask_30min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30min_only.map")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file = global_landmask_05min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_05min_only.map")
    # - based on the 06min input
    landmask_06min = define_landmask(input_file = global_landmask_06min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_06min_only.map")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30sec_only.map")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_03sec_only.map")
    #
    # - merge all landmasks (first defined value wins)
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_extended_30min.map")
    # ~ pcr.aguila(landmask)

    # extend ldd: cover missing cells inside the landmask with pits (code 5)
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_extended_30min.map")
    # ~ pcr.aguila(ldd_map)

    # catchment map and size (size = number of cells per catchment)
    catchment_map  = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ pcr.aguila(catchment_size)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 15000 cells (at half arc degree resolution)
    island_map  = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map  = pcr.ifthen(island_size < 15000., island_map)
    # ~ # - use catchments (instead of islands)
    # ~ island_map  = catchment_map
    # ~ island_size = catchment_size
    # ~ island_map  = pcr.ifthen(island_size < 10000., island_map)
    # - sort from the largest island
    # -- take one cell per island as a representative
    island_map_rep_size = pcr.ifthen(
        pcr.areaorder(island_size, island_map) == 1.0, island_size)
    # -- sort from the largest island (negate sizes so order 1 = largest)
    island_map_rep_ids = pcr.areaorder(
        island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0)))
    # -- map of smaller islands, sorted from the largest one
    island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map)

    # identify the biggest island for every group of small islands within a certain window (arcdeg cells)
    print("the biggest island for every group of small islands")
    large_island_map = pcr.ifthen(
        pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map),
                                                    15.), island_map)
    # ~ pcr.aguila(large_island_map)

    # identify big catchments
    print("identify large catchments")
    catchment_map  = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map)
    # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map)
    # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map)
    # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map)
    # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin)
    large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map)
    # - give the codes that are different than islands (offset past the
    #   highest island id)
    large_catchment_map = pcr.nominal(
        pcr.scalar(large_catchment_map) +
        10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1])

    # merge biggest islands and big catchments
    print("merge large catchments and islands")
    large_catchment_and_island_map = pcr.cover(large_catchment_map,
                                               large_island_map)
    # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map)
    large_catchment_and_island_map_size = pcr.areatotal(
        pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map)

    # - sort from the largest one
    # -- take one cell per area as a representative
    large_catchment_and_island_map_rep_size = pcr.ifthen(
        pcr.areaorder(large_catchment_and_island_map_size,
                      large_catchment_and_island_map) == 1.0,
        large_catchment_and_island_map_size)
    # -- sort from the largest
    large_catchment_and_island_map_rep_ids = pcr.areaorder(
        large_catchment_and_island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size),
                   pcr.nominal(1.0)))
    # -- map of largest catchments and islands, sorted from the largest one
    large_catchment_and_island_map = pcr.areamajority(
        pcr.nominal(large_catchment_and_island_map_rep_ids),
        large_catchment_and_island_map)
    # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map")

    # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains")
    # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ # - initial subdomains
    # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map"))
    # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    # ~ pcr.aguila(subdomains_initial)

    # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments
    # (spreadzone assigns every cell to its nearest seeded zone)
    print("spatial interpolation/extrapolation to get initial subdomains")
    field  = large_catchment_and_island_map
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field  = pcr.areamajority(field, zoneID)
    subdomains_initial = field
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_30min_initial.map")
    # indices 0 and 1 of getMinMaxMean are presumably min and max —
    # consistent with [1] being used as the number of masks below
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0
    # start from an all-missing nominal map (condition is never true)
    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):
        msg = "Processing the landmask %s" % (str(nr))
        print(msg)
        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))
        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)
        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        # ~ print(str(area_in_degree2))
        # check whether the size of bounding box is ok
        # - initial check value
        check_ok = True
        reference_area_in_degree2 = 2500.
        if area_in_degree2 > 1.50 * reference_area_in_degree2:
            check_ok = False
        # also reject very elongated bounding boxes
        if (xmax - xmin) > 10 * (ymax - ymin):
            check_ok = False

        if check_ok == True:
            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)
            # assign the clone code
            assigned_number = assigned_number + 1
            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if check_ok == False:
            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)
            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))
            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)
            # minimimum and maximum values
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])
            for clump_id in range(min_clump_id, max_clump_id + 1, 1):
                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)
                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)
                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])
                if check_if_empty == 0.0:
                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                else:
                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                    # assign the clone code
                    assigned_number = assigned_number + 1
                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")
    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_30min_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    # report the bounding box of every final subdomain
    for nr in range(1, num_of_masks + 1, 1):
        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))
        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))

    print("")
    print("")
    print("")
    print("Number of subdomains: " + str(num_of_masks))
    print("")
    print("")
    print("")

    # spatial extrapolation in order to cover the entire map
    print("spatial interpolation/extrapolation to cover the entire map")
    field  = subdomains_final
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field  = pcr.areamajority(field, zoneID)
    subdomains_final_filled = field
    pcr.aguila(subdomains_final_filled)

    pcr.report(subdomains_final_filled,
               "global_subdomains_30min_final_filled.map")
None, False, None, True)) uniqueIDs = pcr.readmap(uniqueIDsFile) uniqueIDs = pcr.ifthen(pcr.scalar(uniqueIDs) >= 0.0, uniqueIDs) # landmask landmask = pcr.defined(pcr.readmap(landmask05minFile)) landmask = pcr.ifthen(landmask, landmask) # - extending landmask with uniqueIDs landmask = pcr.cover(landmask, pcr.defined(uniqueIDs)) # extending class (country) ids max_step = 5 for i in range(1, max_step+1, 1): cmd = "Extending class: step "+str(i)+" from " + str(max_step) print(cmd) uniqueIDs = pcr.cover(uniqueIDs, pcr.windowmajority(uniqueIDs, 0.5)) # - use only cells within the landmask uniqueIDs = pcr.ifthen(landmask, uniqueIDs) pcr.report(uniqueIDs, "class_ids.map") # cell area at 5 arc min resolution cellArea = vos.readPCRmapClone(cellArea05minFile, cloneMapFileName, tmp_directory) cellArea = pcr.ifthen(landmask, cellArea) # get a sample cell for every id x_min_for_each_id = pcr.areaminimum(pcr.xcoordinate(pcr.boolean(1.0)), uniqueIDs) sample_cells = pcr.xcoordinate(pcr.boolean(1.0)) == x_min_for_each_id y_min_for_each_id = pcr.areaminimum(pcr.ycoordinate(sample_cells), uniqueIDs) sample_cells = pcr.ycoordinate(sample_cells) == y_min_for_each_id uniqueIDs_sample = pcr.ifthen(sample_cells, uniqueIDs)
pcr.defined(edwin_code), pcrglobwb_catchment_area_km2) # calculate the absolute difference abs_diff = pcr.abs(usgs_drain_area_km2 - edwin_code_pcrglobwb_catchment_area_km2) # make correction if required abs_diff_value = pcr.cellvalue(pcr.mapmaximum(abs_diff), 1)[0] usgs_drain_area_km2 = pcr.cellvalue(pcr.mapmaximum(usgs_drain_area_km2), 1)[0] if (usgs_drain_area_km2 > 1000.0) and \ (abs_diff_value > 0.10 * usgs_drain_area_km2): # class within 0.1 arc degree windows edwin_code = pcr.windowmajority(edwin_code, 0.1) # find the most accurate cell: areaorder = pcr.areaorder( pcr.windowmaximum(pcr.spatial(pcr.scalar(usgs_drain_area_km2)), 0.1) - pcrglobwb_catchment_area_km2, edwin_code) # select pixel edwin_code = pcr.ifthen(areaorder == 1., edwin_code) # pcrglobwb catchment area edwin_code_pcrglobwb_catchment_area_km2 = pcr.ifthen( areaorder == 1., pcrglobwb_catchment_area_km2) # save using map2col pcr.report(edwin_code, "edwin_code.map")
def __init__(self, clone_map_file,\
                   input_thickness_netcdf_file,\
                   input_thickness_var_name ,\
                   margat_aquifers,\
                   tmp_directory,
                   landmask = None,
                   arcdegree = True):
    """Build an aquifer-thickness map from Margat & van der Gun data.

    Rasterizes the Margat aquifer shapefile onto the clone grid (via
    ``gdal_rasterize``/``pcrcalc``/``mapattr`` shell commands), assigns
    per-aquifer thickness from a lookup table, rescales each aquifer
    with ``self.correction_per_aquifer``, merges the result with the
    netCDF thickness approximation through ``self.mapFilling`` (in log
    space), and clips to the landmask.

    Parameters
    ----------
    clone_map_file : str
        Clone map defining the model grid.
    input_thickness_netcdf_file : str
        NetCDF file with an approximate thickness field (unit: m).
    input_thickness_var_name : str
        Variable name inside the netCDF file.
    margat_aquifers : dict
        Must provide keys 'shapefile' and 'txt_table'.
    tmp_directory : str
        Working directory for the rasterization step (cleaned at the end).
    landmask : str or None
        Landmask map file; defaults to the clone map when None.
    arcdegree : bool
        When True, snap the cellsize to a multiple of 1/360000 degree.
    """
    object.__init__(self)

    # aquifer table from Margat and van der Gun
    self.margat_aquifers = margat_aquifers

    # clone map and its bounding box
    self.clone_map_file = clone_map_file
    self.clone_map_attr = vos.getMapAttributesALL(self.clone_map_file)
    if arcdegree == True:
        # snap the cellsize to a multiple of 1/360000 arc degree to
        # avoid floating-point drift in the gdal_rasterize extent
        self.clone_map_attr['cellsize'] = round(self.clone_map_attr['cellsize'] * 360000.)/360000.
    xmin = self.clone_map_attr['xUL']
    xmax = xmin + self.clone_map_attr['cols'] * self.clone_map_attr['cellsize']
    ymax = self.clone_map_attr['yUL']
    ymin = ymax - self.clone_map_attr['rows'] * self.clone_map_attr['cellsize']
    pcr.setclone(self.clone_map_file)

    # temporary directory
    self.tmp_directory = tmp_directory

    # thickness approximation (unit: m, file in netcdf with variable name = average)
    self.approx_thick = vos.netcdf2PCRobjCloneWithoutTime(input_thickness_netcdf_file,\
                                                          input_thickness_var_name,\
                                                          self.clone_map_file)
    # set minimum value to 0.1 mm
    self.approx_thick = pcr.max(0.0001, self.approx_thick)

    # rasterize the shape file
    # -
    # save current directory and move to temporary directory
    current_dir = str(os.getcwd()+"/")
    os.chdir(str(self.tmp_directory))
    #
    cmd_line  = 'gdal_rasterize -a MARGAT '                              # layer name = MARGAT
    cmd_line += '-te '+str(xmin)+' '+str(ymin)+' '+str(xmax)+' '+str(ymax)+ ' '
    cmd_line += '-tr '+str(self.clone_map_attr['cellsize'])+' '+str(self.clone_map_attr['cellsize'])+' '
    cmd_line += str(margat_aquifers['shapefile'])+' '
    cmd_line += 'tmp.tif'
    print(cmd_line); os.system(cmd_line)
    #
    # make it nominal
    cmd_line = 'pcrcalc tmp.map = "nominal(tmp.tif)"'
    print(cmd_line); os.system(cmd_line)
    #
    # make sure that the clone map is correct
    cmd_line = 'mapattr -c '+str(self.clone_map_file)+' tmp.map'
    print(cmd_line); os.system(cmd_line)
    #
    # read the map
    self.margat_aquifer_map = pcr.nominal(pcr.readmap("tmp.map"))
    #
    # clean temporary directory and return to the original directory
    vos.clean_tmp_dir(self.tmp_directory)
    os.chdir(current_dir)

    # extend the extent of each aquifer (1.25 degree majority window)
    self.margat_aquifer_map = pcr.cover(self.margat_aquifer_map,
                                        pcr.windowmajority(self.margat_aquifer_map, 1.25))

    # assign aquifer thickness, unit: m (lookuptable operation)
    self.margat_aquifer_thickness = pcr.lookupscalar(margat_aquifers['txt_table'], self.margat_aquifer_map)
    self.margat_aquifer_thickness = pcr.ifthen(self.margat_aquifer_thickness > 0., \
                                               self.margat_aquifer_thickness)
    #~ pcr.report(self.margat_aquifer_thickness,"thick.map"); os.system("aguila thick.map")

    # aquifer map: keep only aquifers with positive thickness
    self.margat_aquifer_map = pcr.ifthen(self.margat_aquifer_thickness > 0., self.margat_aquifer_map)

    # looping per aquifer: correcting or rescaling
    # NOTE(review): ``id`` shadows the builtin; the try/except below is
    # used as a first-iteration idiom (cover fails while
    # self.rescaled_thickness is still None) but, being bare, it would
    # also hide real pcr.cover errors.
    aquifer_ids = np.unique(pcr.pcr2numpy(pcr.scalar(self.margat_aquifer_map), vos.MV))
    aquifer_ids = aquifer_ids[aquifer_ids > 0]
    aquifer_ids = aquifer_ids[aquifer_ids < 10000]
    self.rescaled_thickness = None
    for id in aquifer_ids:
        rescaled_thickness = self.correction_per_aquifer(id)
        try:
            self.rescaled_thickness = pcr.cover(self.rescaled_thickness,
                                                rescaled_thickness)
        except:
            self.rescaled_thickness = rescaled_thickness

    # integrating: fill gaps in the rescaled thickness with the netCDF
    # approximation, working in log space
    ln_aquifer_thickness = self.mapFilling( pcr.ln(self.rescaled_thickness), \
                                            pcr.ln(self.approx_thick) )
    self.aquifer_thickness = pcr.exp(ln_aquifer_thickness)
    #~ pcr.report(self.aquifer_thickness,"thick.map"); os.system("aguila thick.map")

    # cropping only in the landmask region
    if landmask == None: landmask = self.clone_map_file
    self.landmask = pcr.defined(vos.readPCRmapClone(landmask,self.clone_map_file,self.tmp_directory))
    #~ pcr.report(self.landmask,"test.map"); os.system("aguila test.map")
    self.aquifer_thickness = pcr.ifthen(self.landmask, self.aquifer_thickness)
def __init__(self, clone_map_file, \
             dem_average_netcdf, dem_floodplain_netcdf, ldd_netcdf, \
             lookup_table_average_thickness, lookup_table_zscore, \
             number_of_samples, include_percentile = True,\
             threshold_sedimentary_basin = 50.0, elevation_F_min = 0.0, elevation_F_max = 50.0):
    # default threshold/min/max values defined in de Graaf et al. (2014)
    """Set up the Monte-Carlo aquifer-thickness model following de Graaf et al. (2014).

    Steps performed here:
      1. identify cells belonging to the sedimentary basin region (from the
         difference between average and floodplain DEMs and continuity along
         the river network);
      2. convert that elevation difference to a relative value and look up
         the corresponding z-score;
      3. store the average-thickness lookup table and the lnCV constant.

    Parameters (dicts provide 'file_name' and 'variable_name' keys):
      clone_map_file                  -- PCRaster clone map defining the grid
      dem_average_netcdf              -- average surface elevation (netcdf)
      dem_floodplain_netcdf           -- floodplain elevation (netcdf)
      ldd_netcdf                      -- local drain direction map (netcdf)
      lookup_table_average_thickness  -- table: z-score -> average thickness
      lookup_table_zscore             -- table: relative elevation -> z-score
      number_of_samples               -- number of Monte-Carlo samples
      include_percentile              -- whether to report percentiles
      threshold_sedimentary_basin     -- max elevation_F (m) still counted as basin
      elevation_F_min, elevation_F_max -- range (m) used to normalize elevation_F
    """
    DynamicModel.__init__(self)
    MonteCarloModel.__init__(self)

    msg  = "\n"
    msg += "\n"
    msg += 'For each step, please refer to de Graaf et al. (2014).'
    msg += "\n"
    logger.info(msg)

    # set clone
    self.clone_map_file = clone_map_file
    pcr.setclone(clone_map_file)

    # an option to include_percentile or not
    self.include_percentile = include_percentile

    # number of samples (as a pcr scalar field)
    self.number_of_samples = pcr.scalar(number_of_samples)

    logger.info("Step 1: Identify the cells belonging to the sedimentary basin region.")
    # average surface elevation, clipped to >= 0 and covered with 0.0
    dem_average = vos.netcdf2PCRobjCloneWithoutTime(dem_average_netcdf['file_name'],\
                                                    dem_average_netcdf['variable_name'],\
                                                    clone_map_file)
    dem_average = pcr.max(0.0, dem_average)
    self.dem_average = pcr.cover(dem_average, 0.0)

    # floodplain elevation, clipped to >= 0
    dem_floodplain = vos.netcdf2PCRobjCloneWithoutTime(dem_floodplain_netcdf['file_name'],
                                                       dem_floodplain_netcdf['variable_name'],
                                                       clone_map_file)
    dem_floodplain = pcr.max(0.0, dem_floodplain)

    # drainage direction map; lddrepair applied twice, landmask = cells with a defined ldd
    lddMap = vos.netcdf2PCRobjCloneWithoutTime(ldd_netcdf['file_name'],
                                               ldd_netcdf['variable_name'],
                                               clone_map_file)
    self.lddMap = pcr.lddrepair(pcr.lddrepair(pcr.ldd(lddMap)))
    self.landmask = pcr.defined(self.lddMap)

    # elevation_F: height of the average DEM above the floodplain (m);
    # cells below the threshold are flagged as sedimentary basin
    elevation_F = dem_average - dem_floodplain
    sedimentary_basin_extent = pcr.ifthen(elevation_F < pcr.scalar(threshold_sedimentary_basin), pcr.boolean(1))

    # include the continuity along the river network:
    # smooth the extent over a 3-cell window, then extend it along
    # downstream flow paths of the ldd
    sedimentary_basin_extent = pcr.windowmajority(sedimentary_basin_extent, 3.00 * vos.getMapAttributes(clone_map_file, "cellsize"))
    sedimentary_basin_extent = pcr.cover(sedimentary_basin_extent, \
                               pcr.path(self.lddMap, pcr.defined(sedimentary_basin_extent)))
    # TODO: We should also include the extent of major aquifer basins and
    # unconsolidated sediments in the GLiM map.

    # restrict elevation_F to the basin extent and clamp to [0, 50] m
    elevation_F = pcr.ifthen(sedimentary_basin_extent, elevation_F)
    elevation_F = pcr.max(0.0, elevation_F)
    elevation_F = pcr.min(50., elevation_F)

    logger.info("Step 2: Calculate relative difference and associate z_score.")
    # relative elevation in [0, 1]: 1 at elevation_F_min, 0 at elevation_F_max
    relative_elevation_F = pcr.scalar(1.0) - (elevation_F - elevation_F_min) / (elevation_F_max - elevation_F_min)
    z_score_relat_elev_F = pcr.lookupscalar(lookup_table_zscore, relative_elevation_F)
    self.F = z_score_relat_elev_F     # zscore (varying over the map)
    # maximum and minimum z_score
    self.F = pcr.min(3.75, self.F)
    self.F = pcr.max(-10.00, self.F)

    logger.info("Step 3: Assign average and variation of aquifer thickness.")
    self.lookup_table_average_thickness = lookup_table_average_thickness
    self.lnCV = pcr.scalar(0.1)   # According to Inge, this lnCV value corresponds to the table "lookup_table_average_thickness".
# NOTE(review): the two lddDiff statements below look like the tail of a
# function whose 'def' line is outside this chunk ('lddOld' is not defined
# in the visible main section) -- confirm their enclosing scope before use.
lddDiff = pcr.scalar(ldd) - pcr.scalar(lddOld)
pcr.report(pcr.boolean(lddDiff), 'lddDiff.map')

############################### main ###################################
# Build a 5-arc-minute ldd and continent/area mask:
#  1. fill missing ldd values in three known bounding boxes and repair it;
#  2. derive per-continent catchment masks from the pits of the ldd;
#  3. merge with selected catchments and report the final maps.
# (A stray lone 'p' token that sat between the statements above -- a
# leftover debugging artifact -- has been removed; it would raise a
# NameError at run time.)

ldd = pcr.nominal(ldd)
# fill missing values in known problem regions (west, east, north bounds, lat range)
ldd = fillMVBoundingBox(ldd, 1, 57, 76, 26, 47)
ldd = fillMVBoundingBox(ldd, 1, 58, 62, 51, 60)
ldd = fillMVBoundingBox(ldd, 1, -108, -101, 31, 40)
ldd = pcr.lddrepair(pcr.ldd(ldd))
pits = pcr.pit(ldd)

pcr.setglobaloption('unitcell')

# extend the 48-area continent masks slightly with a majority filter
continentMasks = pcr.cover(mask48, pcr.windowmajority(mask48, 5))

# - adjust area masks with 5 min ldd
continentMasksNew = pcr.ifthen(pcr.boolean(pcr.readmap(cloneMap)) == 0, pcr.scalar(1))
for i in areas[0:]:
    # print() used for consistency with the Python-3 style calls elsewhere
    # in this file (the original used a Python-2 print statement)
    print('area mask = ', i)
    mask = pcr.ifthen(continentMasks == i, pcr.boolean(1))
    # pits that fall inside this continent, renumbered with unique ids
    pitsContinent = pcr.pcrand(mask, pcr.boolean(pits))
    pitsContinent = pcr.nominal(pcr.uniqueid(pcr.ifthen(pitsContinent == 1, pcr.boolean(1))))
    # catchments draining to those pits become part of area i
    catchments = pcr.catchment(ldd, pitsContinent)
    newMask = pcr.ifthen(pcr.boolean(catchments) == 1, pcr.scalar(i))
    continentMasksNew = pcr.cover(continentMasksNew, newMask)

# merge manually selected catchments over the recomputed masks and report
updateCatchments = selectCatchments(ldd, pits, pitsDict, areas, cloneMap)
outMask = pcr.cover(updateCatchments, continentMasksNew)
pcr.report(pcr.nominal(outMask), 'mask5min.map')
pcr.report(ldd, 'ldd5min.map')