def readPCRmapClone(v, cloneMapFileName, tmpDir, absolutePath=None,
                    isLddMap=False, cover=None, isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    print(v)
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$", v):
        if absolutePath != None:
            v = getFullPath(v, absolutePath)
        # print(v)
        sameClone = isSameClone(v, cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir + 'temp.map'
            warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir, isLddMap, isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True:
                PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
            if isNomMap == True:
                PCRmap = pcr.nominal(PCRmap)
            co = 'rm ' + str(tmpDir) + '*.*'
            cOut, err = subprocess.Popen(co, stdout=subprocess.PIPE,
                                         stderr=open('/dev/null', 'w'),  # must be opened for writing
                                         shell=True).communicate()
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap
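# Illustrative use of readPCRmapClone above; everything here is an assumption
# for the sketch (file names, tmp directory), and the helpers it calls
# (getFullPath, isSameClone, gdalwarpPCR) must come from the same utility module.
pcr.setclone("clone.map")                               # hypothetical clone map
dem = readPCRmapClone("dem.map", "clone.map", "/tmp/")  # resampled if clones differ
val = readPCRmapClone("0.0", "clone.map", "/tmp/",
                      cover=pcr.scalar(0.0))            # numeric string -> scalar, covered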
def testDalException(self):
    exceptionThrown = False
    try:
        # Trying to read a map that is not there should fail.
        pcraster.readmap("notThere.map")
    except RuntimeError as exception:
        message = str(exception)
        self.assertTrue(message.find("Raster notThere.map: can not be opened") != -1)
        exceptionThrown = True
def testNonZero(self):
    # behaviour of __nonzero__ changes in Python version 3.
    assert sys.version_info[0] < 3
    raster1 = pcraster.readmap("abs_Expr.map")
    raster2 = pcraster.readmap("abs_Expr.map")
    exceptionThrown = False
    ambiguousSpatialMsg = "The truth value for PCRaster spatial data types is ambiguous. "
    try:
        bool(raster1)
    except Exception as exception:
        message = str(exception)
        self.assertTrue(message.find(ambiguousSpatialMsg) != -1)
def testReportNonSpatial(self):
    raster = pcraster.readmap("abs_Expr.map")
    max1 = pcraster.mapmaximum(raster)
    value, isValid = pcraster.cellvalue(max1, 1)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(isValid, True)
    self.assertEqual(value, 14.0)
    pcraster.report(max1, "maximum.map")
    max2 = pcraster.readmap("maximum.map")
    for i in range(1, 8):
        value, isValid = pcraster.cellvalue(max2, i)
        self.assertEqual(isValid, True)
        self.assertTrue(isinstance(value, float))
        self.assertEqual(value, 14.0)
def readPCRmap(v):
    # v: fileName or floating values
    if not re.match(r"[0-9.-]*$", v):
        PCRmap = pcr.readmap(v)
    else:
        PCRmap = pcr.scalar(float(v))
    return PCRmap
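# Minimal demonstration of readPCRmap's two input modes (illustrative; assumes
# "import re", "import pcraster as pcr", and an existing clone and "dem.map",
# all of which are assumptions, not part of the source):
pcr.setclone("clone.map")
dem = readPCRmap("dem.map")  # non-numeric string: read as a PCRaster map
one = readPCRmap("1.0")      # numeric string: non-spatial pcr.scalar(1.0)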
def test_01(self):
    """ divide float by field """
    raster = pcraster.readmap("abs_Expr.map")
    result = 1 / raster
    value, isValid = pcraster.cellvalue(result, 1)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, 0.5)
    value, isValid = pcraster.cellvalue(result, 2)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, -0.142857, 6)
    value, isValid = pcraster.cellvalue(result, 3)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, 0.285714, 6)
    value, isValid = pcraster.cellvalue(result, 4)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, -0.117647, 6)
    value, isValid = pcraster.cellvalue(result, 5)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, 0.277778, 6)
    value, isValid = pcraster.cellvalue(result, 6)
    self.assertEqual(isValid, False)
    value, isValid = pcraster.cellvalue(result, 7)
    self.assertEqual(isValid, False)
    value, isValid = pcraster.cellvalue(result, 8)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, 0.0714286, 6)
    value, isValid = pcraster.cellvalue(result, 9)
    self.assertEqual(isValid, True)
    self.assertAlmostEqual(value, -1.25)
def testLddRaster2Array(self):
    raster = pcraster.readmap("accu_Ldd.map")
    mv = 99
    array = pcraster.pcr2numpy(raster, mv)
    self.assertTrue(isinstance(array[0][0], numpy.uint8))
    self.assertEqual(array[0][0], 2)
    self.assertEqual(array[0][1], 2)
    self.assertEqual(array[0][2], 2)
    self.assertEqual(array[0][3], 1)
    self.assertEqual(array[0][4], 1)
    self.assertEqual(array[1][0], 2)
    self.assertEqual(array[1][1], 2)
    self.assertEqual(array[1][2], 1)
    self.assertEqual(array[1][3], 1)
    self.assertEqual(array[1][4], 1)
    self.assertEqual(array[2][0], 3)
    self.assertEqual(array[2][1], 2)
    self.assertEqual(array[2][2], 1)
    self.assertEqual(array[2][3], 4)
    self.assertEqual(array[2][4], 1)
    self.assertEqual(array[3][0], 3)
    self.assertEqual(array[3][1], 2)
    self.assertEqual(array[3][2], 1)
    self.assertEqual(array[3][3], 4)
    self.assertEqual(array[3][4], 4)
    self.assertEqual(array[4][0], 6)
    self.assertEqual(array[4][1], 5)
    self.assertEqual(array[4][2], 4)
    self.assertEqual(array[4][3], 4)
    self.assertEqual(array[4][4], 4)
def testNominalRaster2Array(self):
    raster = pcraster.readmap("areaarea_Class.map")
    mv = 99
    array = pcraster.pcr2numpy(raster, mv)
    self.assertTrue(isinstance(array[0][0], numpy.int32))
    self.assertEqual(array[0][0], 2)
    self.assertEqual(array[0][1], 6)
    self.assertEqual(array[0][2], 2)
    self.assertEqual(array[0][3], 2)
    self.assertEqual(array[0][4], mv)
    self.assertEqual(array[1][0], 6)
    self.assertEqual(array[1][1], 6)
    self.assertEqual(array[1][2], 2)
    self.assertEqual(array[1][3], 2)
    self.assertEqual(array[1][4], 2)
    self.assertEqual(array[2][0], 6)
    self.assertEqual(array[2][1], 6)
    self.assertEqual(array[2][2], 0)
    self.assertEqual(array[2][3], 0)
    self.assertEqual(array[2][4], 0)
    self.assertEqual(array[3][0], 6)
    self.assertEqual(array[3][1], 6)
    self.assertEqual(array[3][2], 0)
    self.assertEqual(array[3][3], 0)
    self.assertEqual(array[3][4], 0)
    self.assertEqual(array[4][0], 6)
    self.assertEqual(array[4][1], 3)
    self.assertEqual(array[4][2], 3)
    self.assertEqual(array[4][3], 4)
    self.assertEqual(array[4][4], 4)
def __init__(self, fileName, variable, typeStr, outputFormat,
             x0, y0, x1, y1, dx, dy, resampleRatio=1., attribute=None):
    #-requires as input ...
    #-read in the source file name and decide on processing
    tempFileName = os.path.split(fileName)[1].lower()
    tempFileName = 'temp_%s' % os.path.splitext(tempFileName)[0]
    #-set output format
    outputFormat = 'PCRASTER_VALUESCALE=VS_%s' % outputFormat.upper()
    if '.shp' in fileName:
        #-shape file: create empty geotiff and rasterize prior to further processing
        shutil.copy('empty.tif', '%s.tif' % tempFileName)
        ## co = 'gdal_rasterize -a %s -ot %s -tr %f %f -te %f %f %f %f -l %s %s %s.tif -quiet' %\
        ##     (attribute, typeStr, dx, dy, x0, y0, x1, y1,
        ##      os.path.split(fileName)[1].replace('.shp', ''), fileName, tempFileName)
        co = 'gdal_rasterize -at -a %s -l %s %s %s.tif -quiet ' %\
            (attribute, os.path.split(fileName)[1].replace('.shp', ''), fileName, tempFileName)
        subprocess.check_call(co, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        fileName = '%s.tif' % tempFileName
    #-process grid
    co = 'gdal_translate -ot %s -of PCRaster -mo %s %s %s.map -projwin %f %f %f %f -outsize %s %s -quiet' %\
        (typeStr, outputFormat, fileName, tempFileName,
         x0, y1, x1, y0,
         '%.3f%s' % (100. * resampleRatio, '%'),
         '%.3f%s' % (100. * resampleRatio, '%'))
    # print(co)
    subprocess.check_call(co, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    #-read resulting map
    setattr(self, variable, pcr.readmap('%s.map' % tempFileName))
    for suffix in ['map', 'tif']:
        try:
            os.remove('%s.%s' % (tempFileName, suffix))
            os.remove('%s.%s.aux.xml' % (tempFileName, suffix))
        except OSError:
            pass
def lddcreate_save(
    lddname,
    dem,
    force,
    corevolume=1e35,
    catchmentprecipitation=1e35,
    corearea=1e35,
    outflowdepth=1e35,
):
    """
    Creates an ldd if the file does not exist or if the force flag is used

    Input:
        - lddname (name of the ldd to create)
        - dem (actual dem)
        - force (boolean to force recreation of the ldd)
        - outflowdepth (set to 10.0E35 normally but smaller if needed)

    Output:
        - the LDD
    """
    if os.path.exists(lddname) and not force:
        if Verbose:
            print(("Returning existing ldd", lddname))
        return pcr.readmap(lddname)
    else:
        if Verbose:
            print(("Creating ldd", lddname))
        LDD = pcr.lddcreate(dem, 10.0e35, outflowdepth, 10.0e35, 10.0e35)
        pcr.report(LDD, lddname)
        return LDD
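# Example call of lddcreate_save (a sketch: "dem.map", the output name, and the
# module-level Verbose flag are assumptions; force=False reuses an existing ldd):
dem = pcr.readmap("dem.map")
ldd = lddcreate_save("wflow_ldd.map", dem, force=False)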
def __init__(self, input_netcdf,
             output_netcdf,
             modelTime,
             tmpDir="/dev/shm/"):
    DynamicModel.__init__(self)

    self.input_netcdf = input_netcdf
    self.output_netcdf = output_netcdf
    self.tmpDir = tmpDir
    self.modelTime = modelTime

    # set clone
    self.clone_map_file = self.input_netcdf['cell_area']
    pcr.setclone(self.clone_map_file)
    self.clone = {}
    self.clone['cellsize'] = pcr.clone().cellSize(); print(self.clone['cellsize'])
    self.clone['rows'] = int(pcr.clone().nrRows())
    self.clone['cols'] = int(pcr.clone().nrCols())
    self.clone['xUL'] = round(pcr.clone().west(), 2)
    self.clone['yUL'] = round(pcr.clone().north(), 2)

    # cell area (unit: m2)
    self.cell_area = pcr.readmap(self.input_netcdf['cell_area'])

    # an object for netcdf reporting
    self.output = OutputNetcdf(self.clone, self.output_netcdf)

    # preparing the netcdf file and make variable:
    self.output.createNetCDF(self.output_netcdf['file_name'],
                             self.output_netcdf['gross_variable_name'],
                             self.output_netcdf['variable_unit'])
    self.output.addNewVariable(self.output_netcdf['file_name'],
                               self.output_netcdf['netto_variable_name'],
                               self.output_netcdf['variable_unit'])
def testCellValueScalar(self):
    pcraster.setclone("abs_Expr.map")
    raster = scalar(pcraster.readmap("abs_Expr.map"))
    value, isValid = pcraster.cellvalue(raster, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 2.0)
    value, isValid = pcraster.cellvalue(raster, 2)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, -7.0)
    value, isValid = pcraster.cellvalue(raster, 3)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 3.5)
    value, isValid = pcraster.cellvalue(raster, 6)
    self.assertEqual(isValid, False)
    value, isValid = pcraster.cellvalue(raster, 7)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 0.0)
    value, isValid = pcraster.cellvalue(raster, 8)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 14.0)
def testNonSpatialConversions(self):
    pcraster.setclone("map2asc_PCRmap.map")
    nonSpatialValue = mapmaximum(pcraster.readmap("map2asc_PCRmap.map"))
    # Ordinal.
    nonSpatial = ordinal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124.0)
    # Nominal.
    nonSpatial = nominal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124)
    # Boolean.
    nonSpatial = boolean(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 1)
    self.assertEqual(float(nonSpatial), 1.0)
    # Scalar.
    nonSpatial = scalar(mapmaximum("abs_Expr.map"))
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 14)
    self.assertEqual(float(nonSpatial), 14.0)
def set_latlon_based_on_cloneMapFileName(self, cloneMapFileName):
    # cloneMap
    cloneMap = pcr.boolean(pcr.readmap(cloneMapFileName))
    cloneMap = pcr.boolean(pcr.scalar(1.0))

    # properties of the clone maps
    # - numbers of rows and columns
    rows = pcr.clone().nrRows()
    cols = pcr.clone().nrCols()
    # - cell size in arc minutes rounded to one value behind the decimal
    cellSizeInArcMin = round(pcr.clone().cellSize() * 60.0, 1)
    # - cell sizes in arc degrees for longitude and latitude direction
    deltaLon = cellSizeInArcMin / 60.0
    deltaLat = deltaLon
    # - coordinates of the upper left corner - rounded to two values behind the
    #   decimal in order to avoid rounding errors during (future) resampling
    x_min = round(pcr.clone().west(), 2)
    y_max = round(pcr.clone().north(), 2)
    # - coordinates of the lower right corner - rounded for the same reason
    x_max = round(x_min + cols * deltaLon, 2)
    y_min = round(y_max - rows * deltaLat, 2)

    # cell centres coordinates
    longitudes = np.arange(x_min + deltaLon / 2.0, x_max, deltaLon)
    latitudes = np.arange(y_max - deltaLat / 2.0, y_min, -deltaLat)

    # ~ # cell centres coordinates
    # ~ longitudes = np.linspace(x_min + deltaLon/2., x_max - deltaLon/2., cols)
    # ~ latitudes = np.linspace(y_max - deltaLat/2., y_min + deltaLat/2., rows)

    # ~ # cell centres coordinates (latitudes and longitudes, directly from the clone maps)
    # ~ longitudes = np.unique(pcr.pcr2numpy(pcr.xcoordinate(cloneMap), vos.MV))
    # ~ latitudes = np.unique(pcr.pcr2numpy(pcr.ycoordinate(cloneMap), vos.MV))[::-1]

    return longitudes, latitudes, cellSizeInArcMin
def test_8(self):
    """ test lookupscalar in dynamic model [bugzilla 269] """
    class DummyModel(pcraster.framework.dynamicPCRasterBase.DynamicModel):
        def __init__(self, cloneMap):
            pcraster.framework.dynamicPCRasterBase.DynamicModel.__init__(self)
            pcraster.setclone(cloneMap)

        def initial(self):
            pass

        def dynamic(self):
            # rewrite input each timestep
            filename = "in.tbl"
            f = open(filename, "w")
            f.write("1 %f\n" % (2.5 * self.currentTimeStep()))
            f.write("2 %f\n" % (3.5 * self.currentTimeStep()))
            f.write("3 %f\n" % (5.5 * self.currentTimeStep()))
            f.close()

            tmp = pcraster.lookupscalar(filename, "soil.map")
            self.report(tmp, "tmp")

    myModel = DummyModel("mask.map")
    dynModelFw = df.DynamicFramework(myModel, lastTimeStep=5, firstTimestep=1)
    dynModelFw.run()

    for i in range(1, 6):
        filename = pcraster.framework.frameworkBase.generateNameT("tmp", i)
        tmp = pcraster.readmap(filename)
def test_3(self):
    """ test non-spatial nominals in != with scalar raster """
    raster = pcraster.readmap("abs_Expr.map")
    result = raster != -7
    value, isValid = pcraster.cellvalue(result, 1)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 2)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 0)
    value, isValid = pcraster.cellvalue(result, 3)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 4)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 5)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 6)
    self.assertEqual(isValid, False)
    value, isValid = pcraster.cellvalue(result, 7)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 8)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
    value, isValid = pcraster.cellvalue(result, 9)
    self.assertEqual(isValid, True)
    self.assertEqual(value, 1)
def readDeterministic(self, name):
    if self._userModel()._inPremc() or self._userModel()._inPostmc() or self._userModel()._inInitial():
        newName = name + ".map"
    else:
        newName = generateNameT(name, self._userModel().currentTimeStep())
    import pcraster
    return pcraster.readmap(newName)
def readmapSave(pathtomap, default):
    """
    Adaptation of readmap that returns a default map if the map cannot be found
    """
    if os.path.isfile(pathtomap):
        return pcr.readmap(pathtomap)
    else:
        return pcr.scalar(default)
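# Example use of readmapSave (sketch; "soil_depth.map" is a hypothetical file):
# a missing map silently falls back to a non-spatial default value.
soil_depth = readmapSave("soil_depth.map", 0.5)  # pcr.scalar(0.5) if the file is absent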
def testCopyRaster(self):
    raster = pcraster.readmap(os.path.join("validated", "boolean_Result.map"))
    exceptionThrown = False
    try:
        tmp = copy.copy(raster)
    except Exception as e:
        self.assertEqual(str(e), "Shallow copy of PCRaster objects not supported\n")
        exceptionThrown = True
def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):
    line = re.sub("\n", "", line)
    line = re.sub("\t", " ", line)
    result = None

    # read until first comment
    content = ""
    content, sep, comment = line.partition("#")
    if len(content) > 1:
        collectionVariableName, sep, tail = content.partition(" ")
        if collectionVariableName == self._varName:
            tail = tail.strip()
            key, sep, variableValue = tail.rpartition(" ")

            if len(key.split()) != nrColumns:
                tmp = re.sub(r"\(|\)|,", "", str(key))
                msg = "Error reading %s line %d, order of columns given (%s columns) does not match expected order of %s columns" % (
                    self._fileName, lineNumber, len(key.split()) + 2, int(nrColumns) + 2)
                raise ValueError(msg)

            variableValue = re.sub('\"', "", variableValue)

            tmp = None
            try:
                tmp = int(variableValue)
                if self._dataType == pcraster.Boolean:
                    tmp = pcraster.boolean(tmp)
                elif self._dataType == pcraster.Nominal:
                    tmp = pcraster.nominal(tmp)
                elif self._dataType == pcraster.Ordinal:
                    tmp = pcraster.ordinal(tmp)
                elif self._dataType == pcraster.Ldd:
                    tmp = pcraster.ldd(tmp)
                else:
                    msg = "Conversion to %s failed" % (self._dataType)
                    raise Exception(msg)
            except ValueError as e:
                try:
                    tmp = float(variableValue)
                    if self._dataType == pcraster.Scalar:
                        tmp = pcraster.scalar(tmp)
                    elif self._dataType == pcraster.Directional:
                        tmp = pcraster.directional(tmp)
                    else:
                        msg = "Conversion to %s failed" % (self._dataType)
                        raise Exception(msg)
                except ValueError as e:
                    variableValue = re.sub("\\\\", "/", variableValue)
                    variableValue = variableValue.strip()
                    path = os.path.normpath(variableValue)
                    try:
                        tmp = pcraster.readmap(path)
                    except RuntimeError as e:
                        msg = "Error reading %s line %d, %s" % (self._fileName, lineNumber, e)
                        raise ValueError(msg)
def test_4(self):
    """test self.readmap/self.report functionality"""
    myModel = staticTestModels.ReadmapReport()
    staticModelFw = sf.StaticFramework(myModel)
    staticModelFw.run()

    try:
        result = pcraster.readmap("static.map")
        self.failUnless(self.mapEquals(result, "plus.Result.map"),
                        "test_4: %s" % ("Result and validated result are not the same"))
    except Exception as exception:
        self.failUnless(False, "test4: %s" % (str(exception)))
def testCellValueNonSpatial(self):
    raster = pcraster.readmap("abs_Expr.map")
    value, isValid = pcraster.cellvalue(pcraster.mapmaximum(raster), 1, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 14.0)
    value, isValid = pcraster.cellvalue(pcraster.mapmaximum(raster), 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 14.0)
def dynamic(self):
    # Write state, using unique names.
    names = [pcraster.framework.generateNameST("dummy{}x".format(i),
                                               self.currentSampleNumber(),
                                               self.currentTimeStep())
             for i in range(1, self.nr_state_variables + 1)]

    # Write state.
    for name in names:
        pcraster.report(self.dummy, name)

    # Read state.
    for name in names:
        pcraster.readmap(name)

    # Remove state.
    for name in names:
        os.remove(name)

    self.print_memory_used()
def test_7(self):
    """test report/readmap"""
    myModel = dynamicTestModels.TestReadmapReport()
    dynModelFw = df.DynamicFramework(myModel, 5)
    dynModelFw.setQuiet(True)
    dynModelFw.run()

    try:
        result = pcraster.readmap("static.map")
        self.failUnless(self.mapEquals(result, "plus.Result.map"),
                        "test_04: %s" % ("Result and validated result are not the same"))
    except Exception as exception:
        self.failUnless(False, "test1: %s" % (str(exception)))

    for timestep in myModel.timeSteps():
        try:
            name = pcraster.framework.frameworkBase.generateNameT("dyna", timestep)
            result = pcraster.readmap(name)
            self.failUnless(self.mapEquals(result, "plus.Result.map"),
                            "test04: %s" % ("Result and validated result are not the same"))
        except Exception as exception:
            self.failUnless(False, "test1: %s" % (str(exception)))
def testCellValueNonSpatial(self):
    pcraster.setclone("abs_Expr.map")
    raster = pcraster.readmap("abs_Expr.map")
    value, isValid = pcraster.cellvalue(mapmaximum(raster), 1, 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 14.0)
    value, isValid = pcraster.cellvalue(mapmaximum(raster), 1)
    self.assertEqual(isValid, True)
    self.assertTrue(isinstance(value, float))
    self.assertEqual(value, 14.0)
def test_7(self):
    """test report/readmap"""
    myModel = dynamicTestModels.TestReadmapReport()
    dynModelFw = df.DynamicFramework(myModel, 5)
    dynModelFw.setQuiet(True)
    dynModelFw.run()

    try:
        result = pcraster.readmap("static.map")
        self.failUnless(self.mapEquals(result, "plus.Result.map"),
                        "test_04: %s" % ("Result and validated result are not the same"))
    except Exception as exception:
        self.failUnless(False, "test1: %s" % (str(exception)))
def readState(self, variableName):
    """
    Read a state variable map.
    """
    sample = str(self.currentSampleNumber())
    if re.search(".map", variableName):
        filename = variableName
    else:
        timestep = self.firstTimeStep() - 1
        filename = frameworkBase.generateNameT(variableName, timestep)
    name = os.path.join(sample, "stateVar", filename)
    return pcraster.readmap(name)
def testCatchNoneInput(self):
    # all PCRasterPython bindings with an input argument should check on None object
    raster = pcraster.readmap("abs_Expr.map")
    exceptionThrown = False
    try:
        raster += None
    except Exception as e:
        msg = "right operand of operator '+': type is Python None, legal type is scalar"
        self.assertTrue(str(e).find(msg) != -1, str(e))
        exceptionThrown = True
def set_up_class(cls):
    format_ = "pcraster"
    cls.bool8_random_1_raster = pcr.readmap(
        cls.data.dataset_pathname(format_, numpy.bool8, "random_1"))
    cls.int32_random_1_raster = pcr.readmap(
        cls.data.dataset_pathname(format_, numpy.int32, "random_1"))
    cls.float32_random_1_raster = pcr.readmap(
        cls.data.dataset_pathname(format_, numpy.float32, "random_1"))
    cls.float32_random_2_raster = pcr.readmap(
        cls.data.dataset_pathname(format_, numpy.float32, "random_2"))
    cls.raster_by_value_scale = {
        pcr.VALUESCALE.Boolean: [
            cls.bool8_random_1_raster],
        pcr.VALUESCALE.Nominal: [
            cls.int32_random_1_raster],
        pcr.VALUESCALE.Scalar: [
            cls.float32_random_1_raster,
            cls.float32_random_2_raster]
    }
def readDeterministic(self, name):
    """
    Read deterministic data from disk.

    Returns the map of the current time step from the current working directory.
    """
    if self._userModel()._inPremc() or self._inPostmc() or self._inInitial():
        newName = name + ".map"
    else:
        newName = frameworkBase.generateNameT(name, self._userModel().currentTimeStep())
    import pcraster
    return pcraster.readmap(newName)
def readPCRmapClone(v, cloneMapFileName, tmpDir, absolutePath=None,
                    isLddMap=False, cover=None, isNomMap=False,
                    inputEPSG="EPSG:4326", outputEPSG="EPSG:4326", method="near"):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: ' + str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$", v):
        if absolutePath != None:
            v = getFullPath(v, absolutePath)
        # print(v)
        sameClone = isSameClone(v, cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir + 'temp.map'
            # if no re-projection needed:
            if inputEPSG == outputEPSG or outputEPSG == None:
                warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir, isLddMap, isNomMap)
            else:
                warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir, isLddMap, isNomMap,
                                   inputEPSG, outputEPSG, method)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True:
                PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
            if isNomMap == True:
                PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
                os.makedirs(tmpDir)
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None
    cOut = None
    err = None
    warp = None
    del co
    del cOut
    del err
    del warp
    stdout = None
    del stdout
    stderr = None
    del stderr
    return PCRmap
def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "fhC:N:I:s:M:", ['version'])
    except getopt.error as msg:
        usage(msg)

    factor = 1
    Verbose = 1
    inmaps = True
    force = False
    caseName = "thecase"
    caseNameNew = "thecase_resamp"
    maxcpu = 4

    for o, a in opts:
        if o == "-C":
            caseName = a
        if o == "-N":
            caseNameNew = a
        if o == "-s":
            subcatch = int(a)
        if o == "-I":
            inmaps = False
        if o == "-h":
            usage()
        if o == "-f":
            force = True
        if o == "-M":
            maxcpu = int(a)
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    dirs = [
        "/intbl/",
        "/staticmaps/",
        "/intss/",
        "/instate/",
        "/outstate/",
        "/inmaps/",
        "/inmaps/clim/",
        "/intbl/clim/",
    ]
    ext_to_copy = ["*.tss", "*.tbl", "*.col", "*.xml"]
    if os.path.isdir(caseNameNew) and not force:
        print("Refusing to write into an existing directory:" + caseNameNew)
        exit()

    # ddir = []
    dirs = []
    for (path, thedirs, files) in os.walk(caseName):
        print(path)
        dirs.append(path)

    if not os.path.isdir(caseNameNew):
        for ddir in dirs:
            os.makedirs(ddir.replace(caseName, caseNameNew))
    for inifile in glob.glob(caseName + "/*.ini"):
        shutil.copy(inifile, inifile.replace(caseName, caseNameNew))

    # read subcatchment map
    x, y, subcatchmap, FillVal = readMap(
        os.path.join(caseName, "staticmaps", "wflow_subcatch.map"), "PCRaster")

    for ddir in dirs:
        print(ddir)
        allcmd = []
        for mfile in glob.glob(ddir + "/*.map"):
            if not os.path.exists(mfile.replace(caseName, caseNameNew)):
                x, y, data, FillVal = readMap(mfile, "PCRaster")
                try:
                    good = 1
                    xn, yn, datan = cutMapById(data, subcatchmap, subcatch, x, y, FillVal)
                except Exception as e:
                    good = 0
                    print("Skipping: " + mfile + " exception: " + str(e))

                # only inspect the cut result if the cut itself succeeded
                if good and xn.size == 0:
                    good = 0
                    print("Skipping: " + mfile + " size does not match...")

                if good:
                    ofile = mfile.replace(caseName, caseNameNew)
                    if data.dtype == np.int32 or data.dtype == np.uint8:
                        writeMap(ofile, "PCRaster", xn, yn, datan.astype(np.int32), FillVal)
                    else:
                        writeMap(ofile, "PCRaster", xn, yn, datan, FillVal)
                    # Assume ldd and repair
                    if data.dtype == np.uint8 and 'wflow_ldd.map' in mfile:
                        myldd = pcr.ldd(pcr.readmap(ofile))
                        myldd = pcr.lddrepair(myldd)
                        pcr.report(myldd, ofile)

        for mfile in glob.glob(ddir + "/*.[0-9][0-9][0-9]"):
            if not os.path.exists(mfile.replace(caseName, caseNameNew)):
                x, y, data, FillVal = readMap(mfile, "PCRaster")
                try:
                    good = 1
                    xn, yn, datan = cutMapById(data, subcatchmap, subcatch, x, y, FillVal)
                except Exception as e:
                    good = 0
                    print("Skipping: " + mfile + " exception: " + str(e))

                if good and xn.size == 0:
                    good = 0
                    print("Skipping: " + mfile + " size does not match...")

                if good:
                    ofile = mfile.replace(caseName, caseNameNew)
                    if data.dtype == np.int32 or data.dtype == np.uint8:
                        writeMap(ofile, "PCRaster", xn, yn, datan.astype(np.int32), FillVal)
                    else:
                        writeMap(ofile, "PCRaster", xn, yn, datan, FillVal)

        for ext in ext_to_copy:
            for mfile in glob.glob(os.path.join(ddir, ext)):
                shutil.copy(mfile, mfile.replace(caseName, caseNameNew))

    # Copy ini files
    for mfile in glob.glob(os.path.join(caseName, "*.ini")):
        shutil.copy(mfile, mfile.replace(caseName, caseNameNew))
output_dir = os.path.join(root_dir, 'output/waal_XL')
ens_dir = os.path.join(output_dir, 'measures_ensemble03')
ens_map_dir = os.path.join(ens_dir, 'maps')
ens_FM_dir = os.path.join(ens_dir, 'hydro')
ens_overview_dir = os.path.join(ens_dir, 'overview')
scratch_dir = os.path.join(root_dir, 'scratch')
clone_file = os.path.join(ref_map_dir, 'clone.map')
pcr.setclone(clone_file)
pcr.setglobaloption('unittrue')
os.chdir(scratch_dir)
ppp  # undefined name: deliberately raises NameError to halt the script here

#%% Initialize BIOSAFE
ndff_species = pd.read_pickle(os.path.join(bio_dir, 'ndff_sub_BS_13.pkl'))
flpl_sections = pcr.readmap(os.path.join(bio_dir, 'flpl_sections.map'))
ecotopes = measures.read_map_with_legend(os.path.join(bio_dir, 'ecotopes.map'))
legalWeights, linksLaw, linksEco = bsIO.from_csv(bio_dir)
speciesPresence = pd.DataFrame(np.random.randint(2, size=len(linksLaw)),
                               columns=['speciesPresence'],
                               index=linksLaw.index)
ecotopeArea = pd.DataFrame(np.ones(82) * 1e5,
                           columns=['area_m2'],
                           index=linksEco.columns.values[0:-1])

bs = biosafe.biosafe(legalWeights, linksLaw, linksEco, speciesPresence, ecotopeArea)
excel_file = os.path.join(bio_dir, 'BIOSAFE_20150629.xlsx')
lut1 = pd.read_excel(excel_file, sheetname='lut_RWES').fillna(method='ffill')
# this lookup table has:
# ecotope codes of BIOSAFE in the first column: oldEcotope
                                  varUnit=variable_unit,
                                  longName=var_long_name,
                                  comment=varDict.comment[var_name])

# store the pcraster map to netcdf files:
for return_period in return_periods:
    # variable name
    variable_name = str(return_period) + "_of_" + varDict.netcdf_short_name[var_name]
    msg = "Writing " + str(variable_name)
    logger.info(msg)
    # read from pcraster files
    inundation_file_name = inputDirRoot + "/global/maps/" + "inun_" + str(
        return_period) + "_of_flood_inundation_volume_catch_04.tif.map"
    if map_type_name == "channel_storage.map":
        inundation_file_name = inputDirRoot + "/global/maps/" + "inun_" + str(
            return_period) + "_of_channel_storage_catch_04.tif.map"
    inundation_map = pcr.readmap(inundation_file_name)
    # put it in a data dictionary
    netcdf_report.data_to_netcdf(netcdf_file[var_name]['file_name'],
                                 variable_name,
                                 pcr.pcr2numpy(inundation_map, vos.MV),
                                 timeBounds,
                                 timeStamp=None,
                                 posCnt=0)
    print('This file is usually created with the CreateGrid script')
    sys.exit(1)
else:
    pcr.setclone(clone_map)
    ds = gdal.Open(clone_map, GA_ReadOnly)
    clone_trans = ds.GetGeoTransform()
    cellsize = clone_trans[1]
    clone_rows = ds.RasterYSize
    clone_columns = ds.RasterXSize
    extent_mask = [
        clone_trans[0],
        clone_trans[3] - ds.RasterYSize * cellsize,
        clone_trans[0] + ds.RasterXSize * cellsize,
        clone_trans[3],
    ]
    xmin, ymin, xmax, ymax = map(str, extent_mask)
    ds = None
    ones = pcr.scalar(pcr.readmap(clone_map))
    zeros = ones * 0
    empty = pcr.ifthen(ones == 0, pcr.scalar(0))

''' read projection from mask.shp '''
# TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
if not os.path.exists(clone_prj):
    print('please add prj-file to mask.shp')
    sys.exit(1)
if os.path.exists(clone_shp):
    ds = ogr.Open(clone_shp)
    file_att = os.path.splitext(os.path.basename(clone_shp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    if not spatialref == None:
        srs_clone = osr.SpatialReference()
        srs_clone.ImportFromWkt(spatialref.ExportToWkt())
def aggregate_return_flow_non_irrigation(pointer_array):
    '''
    Parameters
    ----------
    basin : TYPE scalar array
        DESCRIPTION. spatial unit IDs
    pointer_array : TYPE array (spatialunit, indexes)
        DESCRIPTION. for each spatial unit (rows) there is an index filter
        pointing at the coordinates of the spatial unit

    Returns
    -------
    Time series of the return flows from industry and domestic uses, summed
    over the grid cells of each spatial unit. Negative values are filtered.
    '''
    d3 = Dataset('D:/Fate/data/nonIrrWaterConsumption_annualTot_1960to2010b.nc')
    consumption_non_irrigation = d3.variables['consumptive_water_use_for_non_irrigation_demand']
    time3 = d3.variables['time'][:]
    d4 = Dataset('D:/Fate/data/industry_Withdrawal_annualTot_out_1960to2010_b.nc')
    withdrawal_industry = d4.variables['industry_water_withdrawal'][:]
    # time4 = d4.variables['time'][:]
    d5 = Dataset('D:/Fate/data/domesticWithdrawal_annualTot_out_1960to2010_b.nc')
    withdrawal_domestic = d5.variables['domestic_water_withdrawal'][:]
    # time5 = d5.variables['time'][:]
    area = pcr2numpy(readmap(Input.inputDir + '/' + Input.fn_area_map), mv=1e20)

    # time3 == time4 and time4 == time5, ok
    n_spatial_unit = pointer_array.shape[0]
    return_flow_non_irrigation = np.full((n_spatial_unit, time3.shape[0]), 1e20)
    for t in range(len(time3)):
        temp = (- consumption_non_irrigation[t, :, :]
                + withdrawal_industry[t, :, :]
                + withdrawal_domestic[t, :, :]) * area
        for k in range(n_spatial_unit):
            return_flow_non_irrigation[k, t] = np.sum(temp[pointer_array[k][0], pointer_array[k][1]])

    return_flow_non_irrigation = ma.masked_where(isnan(return_flow_non_irrigation),
                                                 return_flow_non_irrigation)
    return_flow_non_irrigation_filtered = np.where(return_flow_non_irrigation >= 0,
                                                   return_flow_non_irrigation, 0)
    return_flow_non_irrigation_filtered = ma.masked_where(isnan(return_flow_non_irrigation_filtered),
                                                          return_flow_non_irrigation_filtered)

    world = np.sum(return_flow_non_irrigation_filtered, axis=0)
    fig, ax = plt.subplots()  # Create a figure and an axes.
    ax.plot(world / 1e9, label='return flow>0')  # Plot some data on the axes.
    ax.plot(np.sum(return_flow_non_irrigation, axis=0) / 1e9, label='all values')
    ax.set_xlabel('year')  # Add an x-label to the axes.
    ax.set_ylabel('km3')  # Add a y-label to the axes.
    ax.set_title("World return flows from households and industry " + Input.name_timeperiod)
    ax.legend()  # Add a legend.
    return return_flow_non_irrigation, return_flow_non_irrigation_filtered
str_year = 2000
end_year = 2012

# calculate the average value/map of the local model
msg = "Calculate the average value/map of the local model!"
print(msg)
# - using the top layer of the local model
local_model_folder = "/scratch-shared/edwinhs/colombia_model_results/head/l1_top/"
i_month = 0
cum_map = pcr.scalar(0.0)
for year in range(str_year, end_year + 1, 1):
    for month in range(1, 12 + 1, 1):
        i_month = i_month + 1
        file_name = local_model_folder + "/head_%04i%02i01_l1.idf.map" % (year, month)
        print(file_name)
        cum_map = cum_map + pcr.readmap(file_name)
average_local = cum_map / i_month
pcr.aguila(average_local)

#~ # calculate the average value/map of the global model
#~ # - using the upper layer of the global model
#~ global_model_folder = "/scratch-shared/edwinhs/modflow_results_in_pcraster/upper_layer/regional/"
#~ i_month = 0
#~ for year in range(str_year, end_year + 1, 1):
#~     for month in range(1, 12 + 1, 1):
#~         average_global =
#~
#~ # calculate the anomaly value of the local model
#~ anomaly_local = {}
#~
#~
def read_dd_pcraster(fn, transform, nodata=255):
    import pcraster as pcr
    lddmap = pcr.readmap(str(fn))
    ldd_data = pcr.pcr2numpy(lddmap, nodata)
    ldd = LDD(ldd_data, transform, nodata=nodata)
    return ldd
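# A hedged usage sketch for read_dd_pcraster: LDD is this project's own
# drainage-direction wrapper class and "transform" a GDAL-style geotransform
# tuple -- both inferred from the signature above, not a confirmed API.
transform = (4.0, 0.00833, 0.0, 52.0, 0.0, -0.00833)  # hypothetical geotransform
dd = read_dd_pcraster("ldd.map", transform, nodata=255)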
vos.cmd_line(cmd, using_subprocess=False)

# change the working directory to the output folder
os.chdir(output_folder)

# clone and landmask files at low resolution (e.g. 5 arc-minutes)
# - set clone map
clone_map_file = landmask_map
msg = "Set the pcraster clone map to : " + str(clone_map_file)
logger.info(msg)
pcr.setclone(clone_map_file)
# - set the landmask
landmask_map_file = landmask_map
msg = "Set the landmask to : " + str(landmask_map_file)
logger.info(msg)
landmask = pcr.readmap(landmask_map_file)

# read the event map (low resolution), resample, and save to the output folder
msg = "Resampling the event map."
logger.info(msg)
date_used = chosen_date.split('-')
date_time_used = datetime.date(int(date_used[0]), int(date_used[1]), int(date_used[2]))
extreme_value_map = vos.netcdf2PCRobjClonePCRGLOBWB(ncFile=input_netcdf_file,
                                                    varName=nc_variable_name,
                                                    dateInput=date_time_used,
                                                    useDoy=None,
                                                    cloneMapFileName=clone_map_file,
                                                    LatitudeLongitude=True,
                                                    specificFillValue=None)
# - focus only on the landmask area; we have to do this so that only floods
#   within the landmask will be downscaled/routed.
extreme_value_map = pcr.ifthen(landmask, extreme_value_map)
rivshp = removeshp(rivshp, resultdir)
catchshp = removeshp(catchshp, resultdir)

''' convert and read DEM '''
if not skipldd:
    dem_map = workdir + 'dem.map'
    if not scalefactor == None:
        cellsizescaled = float(cellsize) * float(scalefactor)
        dem_scaled = workdir + 'dem_scaled.tif'
        call(('gdalwarp', '-overwrite', '-s_srs', EPSG, '-t_srs', EPSG,
              '-tr', str(cellsizescaled), str(-cellsizescaled),
              '-dstnodata', str(-9999), '-r', 'cubic', dem_in, dem_scaled))
        dem_in = dem_scaled
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', EPSG,
          '-ot', 'Float32', dem_in, dem_map))
    dem = pcr.readmap(dem_map)
    lines = dem * 0
    points = dem * 0

    # create mask (if needed)
    burndem = False
    if not (lineshp == None and areashp == None and pointshp == None):
        clone_map = workdir + 'clone.map'
        clone = dem * 0
        burn = pcr.cover(dem * 0, pcr.scalar(0))
        # pcr.report(burn, 'burn1.map')
        pcr.report(clone, clone_map)
        burndem = True

    # burn lines
    if not lineshp == None:
        file_att = os.path.splitext(os.path.basename(lineshp))[0]
        line_tif = workdir + 'line.tif'
def __init__(self):
    mcPCRasterBase.MonteCarloModel.__init__(self)
    staticPCRasterBase.StaticModel.__init__(self)
    pcraster.setclone("clone.map")
    self.newmap = pcraster.readmap("clone.map")
def __init__(self, clone_map_file,
             input_thickness_netcdf_file,
             input_thickness_var_name,
             margat_aquifers,
             tmp_directory,
             landmask=None,
             arcdegree=True):
    object.__init__(self)

    # aquifer table from Margat and van der Gun
    self.margat_aquifers = margat_aquifers

    # clone map
    self.clone_map_file = clone_map_file
    self.clone_map_attr = vos.getMapAttributesALL(self.clone_map_file)
    if arcdegree == True:
        self.clone_map_attr['cellsize'] = round(self.clone_map_attr['cellsize'] * 360000.) / 360000.
    xmin = self.clone_map_attr['xUL']
    xmax = xmin + self.clone_map_attr['cols'] * self.clone_map_attr['cellsize']
    ymax = self.clone_map_attr['yUL']
    ymin = ymax - self.clone_map_attr['rows'] * self.clone_map_attr['cellsize']
    pcr.setclone(self.clone_map_file)

    # temporary directory
    self.tmp_directory = tmp_directory

    # thickness approximation (unit: m, netcdf file with variable name = average)
    self.approx_thick = vos.netcdf2PCRobjCloneWithoutTime(input_thickness_netcdf_file,
                                                          input_thickness_var_name,
                                                          self.clone_map_file)
    # set minimum value to 0.1 mm
    self.approx_thick = pcr.max(0.0001, self.approx_thick)

    # rasterize the shape file
    # -
    # save the current directory and move to the temporary directory
    current_dir = str(os.getcwd() + "/")
    os.chdir(str(self.tmp_directory))
    #
    cmd_line = 'gdal_rasterize -a MARGAT '  # layer name = MARGAT
    cmd_line += '-te ' + str(xmin) + ' ' + str(ymin) + ' ' + str(xmax) + ' ' + str(ymax) + ' '
    cmd_line += '-tr ' + str(self.clone_map_attr['cellsize']) + ' ' + str(self.clone_map_attr['cellsize']) + ' '
    cmd_line += str(margat_aquifers['shapefile']) + ' '
    cmd_line += 'tmp.tif'
    print(cmd_line); os.system(cmd_line)
    #
    # make it nominal
    cmd_line = 'pcrcalc tmp.map = "nominal(tmp.tif)"'
    print(cmd_line); os.system(cmd_line)
    #
    # make sure that the clone map is correct
    cmd_line = 'mapattr -c ' + str(self.clone_map_file) + ' tmp.map'
    print(cmd_line); os.system(cmd_line)
    #
    # read the map
    self.margat_aquifer_map = pcr.nominal(pcr.readmap("tmp.map"))
    #
    # clean the temporary directory and return to the original directory
    vos.clean_tmp_dir(self.tmp_directory)
    os.chdir(current_dir)

    # extend the extent of each aquifer
    self.margat_aquifer_map = pcr.cover(self.margat_aquifer_map,
                                        pcr.windowmajority(self.margat_aquifer_map, 1.25))

    # assign aquifer thickness, unit: m (lookuptable operation)
    self.margat_aquifer_thickness = pcr.lookupscalar(margat_aquifers['txt_table'],
                                                     self.margat_aquifer_map)
    self.margat_aquifer_thickness = pcr.ifthen(self.margat_aquifer_thickness > 0.,
                                               self.margat_aquifer_thickness)
    #~ pcr.report(self.margat_aquifer_thickness, "thick.map"); os.system("aguila thick.map")

    # aquifer map
    self.margat_aquifer_map = pcr.ifthen(self.margat_aquifer_thickness > 0.,
                                         self.margat_aquifer_map)

    # looping per aquifer: correcting or rescaling
    aquifer_ids = np.unique(pcr.pcr2numpy(pcr.scalar(self.margat_aquifer_map), vos.MV))
    aquifer_ids = aquifer_ids[aquifer_ids > 0]
    aquifer_ids = aquifer_ids[aquifer_ids < 10000]
    self.rescaled_thickness = None
    for id in aquifer_ids:
        rescaled_thickness = self.correction_per_aquifer(id)
        try:
            self.rescaled_thickness = pcr.cover(self.rescaled_thickness, rescaled_thickness)
        except:
            self.rescaled_thickness = rescaled_thickness

    # integrating
    ln_aquifer_thickness = self.mapFilling(pcr.ln(self.rescaled_thickness),
                                           pcr.ln(self.approx_thick))
    self.aquifer_thickness = pcr.exp(ln_aquifer_thickness)
    #~ pcr.report(self.aquifer_thickness, "thick.map"); os.system("aguila thick.map")

    # cropping only in the landmask region
    if landmask == None:
        landmask = self.clone_map_file
    self.landmask = pcr.defined(vos.readPCRmapClone(landmask, self.clone_map_file, self.tmp_directory))
    #~ pcr.report(self.landmask, "test.map"); os.system("aguila test.map")
    self.aquifer_thickness = pcr.ifthen(self.landmask, self.aquifer_thickness)
def main():

    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # tmp folder
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder):
        shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)

    # set the clone map
    print("set the clone map")
    pcr.setclone(global_clone_map_file)

    # read ldd map
    print("define the ldd")
    # ~ ldd_map = pcr.readmap(global_ldd_inp_file)
    ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(v=global_ldd_inp_file,
                                                                      cloneMapFileName=global_clone_map_file,
                                                                      tmpDir=tmp_folder,
                                                                      absolutePath=None,
                                                                      isLddMap=True,
                                                                      cover=None,
                                                                      isNomMap=False))))

    # define the landmask
    if landmask_map_file == None:
        print("define the landmask based on the ldd input")
        # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)
    else:
        print("define the landmask based on the input landmask_map_file")
        landmask = pcr.readmap(landmask_map_file)
        ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
        ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(ldd_map)))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)

    # save ldd files used
    # - global ldd
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd); os.system(cmd)
    # - ldd map that is used
    pcr.report(ldd_map, "lddmap_used.map")

    # make catchment map
    print("make catchment map")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))

    # read global subdomain file
    print("read global subdomain file")
    global_subdomain_map = vos.readPCRmapClone(v=global_subdomain_file,
                                               cloneMapFileName=global_clone_map_file,
                                               tmpDir=tmp_folder,
                                               absolutePath=None,
                                               isLddMap=False,
                                               cover=None,
                                               isNomMap=True)

    # set initial subdomain
    print("assign subdomains to all catchments")
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_initial.map")
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0
    subdomains_final = pcr.ifthen(pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0))

        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1, 1)[0] > 0:
            process_this_clone = True

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        # - initial check value
        check_ok = True

        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)
            # ~ print(str(area_in_degree2))

            # check whether the size of the bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2:
                check_ok = False
            if (xmax - xmin) > 10 * (ymax - ymin):
                check_ok = False

        # ~ # ignore checking
        # ~ check_ok = True

        if check_ok == True and process_this_clone == True:

            msg = "Clump is not needed."
msg = "\n\n" +str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) if check_ok == False and process_this_clone == True: msg = "Clump is needed." msg = "\n\n" +str(msg) + "\n\n" print(msg) # make clump clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean)) # merge clumps that are close together clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0) clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids) # ~ pcr.aguila(clump_ids) # minimimum and maximum values min_clump_id = int(pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)),1)[0]) max_clump_id = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)),1)[0]) for clump_id in range(min_clump_id, max_clump_id + 1, 1): msg = "Processing the clump %s of %s from the landmask %s" %(str(clump_id), str(max_clump_id), str(nr)) msg = "\n\n" +str(msg) + "\n\n" print(msg) # identify mask based on the clump mask_selected_boolean_from_clump = pcr.ifthen(clump_ids == pcr.nominal(clump_id), mask_selected_boolean) mask_selected_boolean_from_clump = pcr.ifthen(mask_selected_boolean_from_clump, mask_selected_boolean_from_clump) # check whether the clump is empty check_mask_selected_boolean_from_clump = pcr.ifthen(mask_selected_boolean, mask_selected_boolean_from_clump) check_if_empty = float(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(pcr.defined(check_mask_selected_boolean_from_clump))),1)[0]) if check_if_empty == 0.0: msg = "Map is empty !" msg = "\n\n" +str(msg) + "\n\n" print(msg) else: msg = "Map is NOT empty !" msg = "\n\n" +str(msg) + "\n\n" print(msg) # assign the clone code assigned_number = assigned_number + 1 # update global landmask for river and land mask_selected_nominal = pcr.ifthen(mask_selected_boolean_from_clump, pcr.nominal(assigned_number)) subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) # ~ # kill all aguila processes if exist # ~ os.system('killall aguila') pcr.aguila(subdomains_final) print("") print("") print("") print("The subdomain map is READY.") pcr.report(subdomains_final, "global_subdomains_final.map") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) print(num_of_masks) print("") print("") print("") print("Making the clone and landmask maps for all subdomains") num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1]) # clone and mask folders clone_folder = out_folder + "/clone/" if os.path.exists(clone_folder): shutil.rmtree(clone_folder) os.makedirs(clone_folder) mask_folder = out_folder + "/mask/" if os.path.exists(mask_folder): shutil.rmtree(mask_folder) os.makedirs(mask_folder) print("") print("") for nr in range(1, num_of_masks + 1, 1): msg = "Processing the subdomain %s" %(str(nr)) print(msg) # set the global clone pcr.setclone(global_clone_map_file) mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0)) mask_selected_nominal = pcr.ifthen(subdomains_final == nr, pcr.nominal(nr)) mask_file = "mask/mask_%s.map" %(str(nr)) pcr.report(mask_selected_nominal, mask_file) xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean) area_in_degree2 = (xmax - xmin) * (ymax - ymin) print(str(nr) + " ; " + str(area_in_degree2) + " ; " + str((xmax - xmin)) + " ; " + str((ymax - ymin))) # cellsize in arcdegree cellsize = cellsize_in_arcmin / 60. 
        # number of rows and cols
        num_rows = int(round(ymax - ymin) / cellsize)
        num_cols = int(round(xmax - xmin) / cellsize)

        # make the clone map using mapattr
        clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % (
            str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize),
            clonemap_mask_file)
        print(cmd); os.system(cmd)

        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(v=mask_file,
                                         cloneMapFileName=clonemap_mask_file,
                                         tmpDir=tmp_folder,
                                         absolutePath=None,
                                         isLddMap=False,
                                         cover=None,
                                         isNomMap=True)
        local_mask_boolean = pcr.defined(local_mask)
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)

    print("")
    print("")
    print("")

    print(num_of_masks)
#Last update - March 2016

import os
import pcraster as pcr
import numpy as np
from pcraster import pcr2numpy, numpy2pcr

#-time management
staYear = 2000
endYear = 2001
staMonth = 1
endMonth = 13

#-input maps
country = pcr.readmap('F:\\Dropbox\\Data\\AQURA\\maps\\country.map')
cellArea = pcr.readmap('F:\\Dropbox\\Data\\AQURA\\maps\\cellarea30.map')
countryLocations = pcr.readmap('F:\\Dropbox\\Data\\AQURA\\maps\\country_lat_lon.map')
conv = 1e3
cntTot = int(255)
MV = -9999.

def valueReport(val, valName, outDir):
    #-return global and country values
    #-open text files
    valNameFile = '%s\\%s.txt' % (outDir, valName)
    if year == staYear and month == staMonth:
        valName = open(valNameFile, 'w')
    else:
msr_map_dir = r'D:\Projecten\RiverScapeWaal2\output\waal_XL\measures_ensemble04\maps'
pcr.setclone(os.path.join(ref_map_dir, 'clone.map'))
scratch_dir = r'D:\Projecten\RiverScapeWaal2\scratch'
os.chdir(scratch_dir)

msr_names = next(os.walk(msr_map_dir))[1]
print(msr_names)
ppp  # undefined name: deliberately raises NameError to halt the script here

#%% update fm based on directory with measures
start_time = time.time()
batch_FM(msr_fm_root, msr_names, 6)
copy_fm_dirs(msr_names, ref_fm_dir, msr_fm_root)
fm = read_FM(ref_fm_dir, 'ref.mdu')
winbed_file = os.path.join(ref_map_dir, 'winter_bed.map')
winter_bed = pcr.readmap(winbed_file)
for msr_name in msr_names[:]:
    print(msr_name)
    msr = read_measure(os.path.join(msr_map_dir, msr_name))
    fm_new = update_FM(fm, msr, winter_bed)
    fm_msr_dir = os.path.join(msr_fm_root, msr_name)
    write_FM(fm_new, fm_msr_dir, 'msr.mdu')
    # FM2GIS(os.path.join(msr_fm_root, msr_name), 'msr.mdu', winbed_file)
end_time = time.time()
print('Processing duration in seconds: ', end_time - start_time)
ppp  # second deliberate stop

#%% extract water level lowering at the river axis for all measures
def rkm_to_float(in_arr):
    return np.array([float(rkm.split('_')[0]) for rkm in in_arr])
def flood_tile(terrain, x_terrain, y_terrain, itile,
               boundaries, x_boundaries, y_boundaries, ldd=None,
               resistance=0.05, water_perc=None, zero_resistance_waterp=1.0,
               tempdir=r'd:/temp/inun', test=False, nodatavalue=-9999,
               dist_method='ldd'):
    """
    function for rapid coast flood mapping based on topography and resistance

    :param terrain: masked 2d numpy array with elevation data
    :param x_terrain: 1d numpy array with x coordinates
    :param y_terrain: 1d numpy array with y coordinates
    :param boundaries: 1d numpy array water level boundary conditions
    :param x_boundaries: 1d numpy array with x coordinates boundary conditions
    :param y_boundaries: 1d numpy array with y coordinates boundary conditions
    :param ldd: 2d numpy array with ldd data; if None an ldd is calculated based on the DEM file (default=None)
    :param resistance: resistance to flooding (decrease in water depth over ldd distance) [km-1]
    :param tempdir: directory to save temporary pcRaster clone file
    :param test: if True, some intermediate steps are saved and clone map is not removed
    :return: 2d numpy array with flood depth [m]
    """
    # create geo-transform and get pixel coordinates of boundaries
    if dist_method == 'ldd':
        create_ldd = True
    else:
        create_ldd = False
    # import pdb; pdb.set_trace()
    boundaries = np.array(boundaries).flatten()
    gt = cl.get_gt(x_terrain, y_terrain)
    ids = np.arange(1, np.size(boundaries) + 1)  # ids start at one!
    locs_px = [cl.MapToPixel(x_b, y_b, gt) for x_b, y_b in zip(x_boundaries, y_boundaries)]

    # pcr set clone
    # and calc ldd if ldd is None OR translate to pcr map if a 2d array is given
    print('Preprocessing dem')
    dem, ldd = pcr_preprocess(terrain, x_terrain, y_terrain, itile, tempdir,
                              ldd_in=ldd, test=test, create_ldd=create_ldd)
    points, _ = val2pcrmap(np.shape(terrain), locs_px, ids)

    # cell resolution in km at latitude degree
    cellres_array = cl.distance_on_unit_sphere(np.array(y_terrain),
                                               np.abs(y_terrain[1] - y_terrain[0]))
    cellres_np = np.tile(np.array(cellres_array), (len(x_terrain), 1)).T
    # cellres = pcr.numpy2pcr(pcr.Scalar, cellres_np, 0)  # x and y axis get mixed up using this function??
    # work around -> save map to disk using gdal_write and read back...
    # TODO: fix work around for mixing up axis by numpy2pcr
    fn = os.path.join(tempdir, '_{:03d}_cellres1.map'.format(itile))
    gdal_writemap(fn, 'PCRaster', x_terrain, y_terrain, cellres_np, 0)
    cellres = pcr.readmap(fn)
    if not test:
        os.unlink(fn)
        if os.path.isfile(fn + '.aux.xml'):
            os.unlink(fn + '.aux.xml')

    # water_perc
    if water_perc is not None:
        fn_wperc = os.path.join(tempdir, '_{:03d}_wperc.map'.format(itile))
        water_perc_np = np.copy(water_perc)
        gdal_writemap(fn_wperc, 'PCRaster', x_terrain, y_terrain, water_perc, -9999)
        water_perc = pcr.readmap(fn_wperc)
        # cleanup
        if not test:
            os.unlink(fn_wperc)
            if os.path.isfile(fn_wperc + '.aux.xml'):
                os.unlink(fn_wperc + '.aux.xml')

    # find coastal pixels
    ids_coastline, ids_sea = pcr_coast(dem, points)  # returns 2D array with ones at coastal cells

    # find points that have been mapped
    ids_coastline_np = pcr.pcr2numpy(pcr.cover(ids_coastline, 0), 0)
    ids_projected = np.unique(ids_coastline_np).astype(np.int).tolist()
    idx_mapped = np.asarray([idx for idx, i in enumerate(ids) if i in ids_projected])
    if idx_mapped.size > 0:
        h_bounds = boundaries[idx_mapped]
        ids0 = ids[idx_mapped]
        print('Inundating tile')
        # calculate flood depth with resistance along ldd from coastal pixel with boundary
        flood_depth, dist2coast, dem_adjust = pcr_inun(dem, ids0, h_bounds, ids_coastline,
                                                       resistance, water_perc,
                                                       zero_resistance_waterp,
                                                       cellres, dist_method, ldd)
    else:
        print('Unable to map boundary conditions. check intermediate outputs')
        flood_depth = pcr.scalar(-9999)
        dem_adjust, dist2coast = None, None
        test = True

    # if in test mode report some intermediate steps
    if test:
        if idx_mapped.size > 0:
            np.savetxt(os.path.join(tempdir, '_{:03d}_bounds.csv'.format(itile)),
                       np.stack([idx_mapped + 1, x_boundaries[idx_mapped],
                                 y_boundaries[idx_mapped], h_bounds], axis=1),
                       delimiter=",", fmt='%.4f')
        if dist2coast is not None:
            pcr.report(dist2coast * pcr.scalar(resistance),
                       os.path.join(tempdir, '_{:03d}_resistance.map'.format(itile)))
        if dem_adjust is not None:
            pcr.report(dem_adjust,
                       os.path.join(tempdir, '_{:03d}_dem_adjust.map'.format(itile)))
        if ldd is not None:
            pcr.report(ldd, os.path.join(tempdir, '_{:03d}_ldd.map'.format(itile)))
        pcr.report(pcr.ifthen(flood_depth > 0, flood_depth),
                   os.path.join(tempdir, '_{:03d}_flood_depth.map'.format(itile)))
        pcr.report(pcr.nominal(pcr.ifthen(ids_coastline > 0, ids_coastline)),
                   os.path.join(tempdir, '_{:03d}_ids_coastline.map'.format(itile)))
        pcr.report(pcr.nominal(pcr.ifthen(pcr.scalar(ids_sea) > 0, ids_sea)),
                   os.path.join(tempdir, '_{:03d}_ids_sea.map'.format(itile)))
        pcr.report(pcr.nominal(pcr.ifthen(pcr.scalar(points) > 0, points)),
                   os.path.join(tempdir, '_{:03d}_ids.map'.format(itile)))

    # translate flood depth pcraster map to numpy 2d array and return
    flood_depth_np = pcr.pcr2numpy(flood_depth, terrain.fill_value.item()).astype(np.float32)
    # set dem nodata and permanent water to nodata values in output
    # flood_depth_np[np.logical_or(flood_depth_np == terrain.fill_value.item(),
    #                              water_perc_np >= float(zero_resistance_waterp))] = nodatavalue
    flood_depth_np[flood_depth_np == terrain.fill_value.item()] = nodatavalue
    flood_depth_np = np.ma.masked_equal(flood_depth_np, nodatavalue, copy=True)
    return flood_depth_np
cmd = "cp " + str(ini_file) + " downscaling.ini" vos.cmd_line(cmd, using_subprocess=False) # clone and landmask files at low resolution (using 5 arc-minutes) # - set clone map clone_map_file = "/projects/0/dfguu/data/hydroworld/others/05ArcMinCloneMaps/new_masks_from_top/clone_" + str( mask_code) + ".map" msg = "Set the pcraster clone map to : " + str(clone_map_file) logger.info(msg) pcr.setclone(clone_map_file) # - set the landmask landmask_map_file = "/projects/0/dfguu/data/hydroworld/others/05ArcMinCloneMaps/new_masks_from_top/mask_" + str( mask_code) + ".map" msg = "Set the landmask to : " + str(landmask_map_file) logger.info(msg) landmask = pcr.readmap(landmask_map_file) # resampling low resolution ldd map msg = "Resample the low resolution ldd map." logger.info(msg) ldd_map_low_resolution_file_name = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/lddsound_05min.map" ldd_map_low_resolution = vos.readPCRmapClone(ldd_map_low_resolution_file_name, \ clone_map_file, \ tmp_folder, \ None, True, None, False) ldd_map_low_resolution = pcr.ifthen( landmask, ldd_map_low_resolution) # NOTE THAT YOU MAY NOT HAVE TO MASK-OUT THE LDD. ldd_map_low_resolution = pcr.lddrepair(pcr.ldd(ldd_map_low_resolution)) ldd_map_low_resolution = pcr.lddrepair(ldd_map_low_resolution) pcr.report(ldd_map_low_resolution, "resampled_low_resolution_ldd.map")
def testNominal2Ordinal(self):
    pcraster.setclone("areaarea_Class.map")
    nominalMap = pcraster.readmap("areaarea_Class.map")
    self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
    ordinalMap = pcraster.ordinal(nominalMap)
    self.assertEqual(ordinalMap.dataType(), pcraster.VALUESCALE.Ordinal)
def __init__(self):
    self.val = 5.3
    self.field = pcraster.readmap(os.path.join("validated", "sin_Result.map"))
def testOrdinal2Nominal(self):
    ordinalMap = ordinal(pcraster.readmap("areaarea_Class.map"))
    self.assertEqual(ordinalMap.dataType(), pcraster.VALUESCALE.Ordinal)
    nominalMap = nominal(ordinalMap)
    pcraster.report(nominalMap, "nominal.map")
    self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
if historical_results:
    return_periods = ["2-year_", "5-year_", "10-year_", "25-year_", "50-year_",
                      "100-year_", "250-year_", "500-year_", "1000-year_"]

for i_return_period in range(0, len(return_periods)):

    return_period = return_periods[i_return_period]

    # loop the file list to get the correct file
    for pcraster_file in input_pcraster_files:
        if historical_results:
            if os.path.basename(pcraster_file).startswith(return_period):
                selected_pcraster_file = pcraster_file
                map_for_this_return_period = pcr.readmap(selected_pcraster_file)
        else:
            if return_period in pcraster_file:
                selected_pcraster_file = pcraster_file
                map_for_this_return_period = pcr.readmap(selected_pcraster_file)
    print(selected_pcraster_file)

    map_for_this_return_period = pcr.cover(map_for_this_return_period, 0.0)

    if i_return_period > 0:
        check_map = pcr.ifthenelse(map_for_this_return_period >= previous_map,
                                   pcr.scalar(0.0), pcr.scalar(-1.0))
        minimum_value, maximum_value, average_value = vos.getMinMaxMean(check_map)
def testNominal2Scalar(self): pcraster.setclone("areaarea_Class.map") nominalMap = pcraster.readmap("areaarea_Class.map") self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal) scalarMap = scalar(nominalMap) self.assertEqual(scalarMap.dataType(), pcraster.VALUESCALE.Scalar)
# Imports needed by this function (Input and module are project-local modules
# assumed to be available in the surrounding code base).
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from pcraster import readmap, pcr2numpy

def aggregate_return_flow_irrigation(pointer_array):
    '''
    Parameters
    ----------
    pointer_array : array (spatial unit, indexes)
        For each spatial unit (row) there is an index filter pointing at the
        coordinates of the grid cells belonging to that spatial unit.

    Returns
    -------
    Time series of the return flows from irrigation, summed over the grid
    cells of each spatial unit. Negative values are filtered out.
    '''
    # irrigation return flows calculation --------------------------------------
    # computed per time step, because the gwd file is too big to load into memory
    d1 = Dataset('D:/Fate/data/gwRecharge_monthTot_output_1960to2010_human.nc')
    recharge_human = d1.variables['groundwater_recharge']  # monthly
    time_human = ma.getdata(d1.variables['time'][:])
    d2 = Dataset('D:/Fate/data/gwRecharge_monthTot_output_1960to2010_natural.nc')
    recharge_natural = d2.variables['groundwater_recharge']  # monthly, repeated times
    time_natural = ma.getdata(d2.variables['time'][:])
    time_unique = ma.getdata(np.unique(time_natural, return_index=1))
    area = pcr2numpy(readmap(Input.inputDir + '/' + Input.fn_area_map), mv=1e20)
    # aggregate
    ntime = time_human.shape[0]
    n_spatial_unit = pointer_array.shape[0]
    return_flow_irrigation = np.full((n_spatial_unit, ntime), 1e20)
    for t in range(len(time_unique[1])):
        temp = (recharge_human[t, :, :] - recharge_natural[time_unique[1][t], :, :]) * area
        for k in range(n_spatial_unit):
            # TODO: should negative values be excluded here already?
            return_flow_irrigation[k, t] = np.sum(temp[pointer_array[k][0], pointer_array[k][1]])
    return_flow_irrigation = ma.masked_where(np.isnan(return_flow_irrigation), return_flow_irrigation)
    return_flow_irrigation_yr = module.convert_month_to_year_avg(return_flow_irrigation)
    return_flow_irrigation_yr_filtered = np.where(return_flow_irrigation_yr >= 0, return_flow_irrigation_yr, 0)
    # module.new_stressor_out_netcdf(Input.outputDir + '/'+'test_net_return_flow_irrigation_'+ Input.name_timeperiod + '_' + Input.name_scale, return_flow_irrigation_yr_filtered[:-1,:] , ID, time3, 'total return flows irrigation and non irrigation', 'year', 'm3')
    fig, ax = plt.subplots()  # create a figure and an axes
    ax.plot(np.sum(return_flow_irrigation_yr_filtered / 1e9, axis=0), label='filter>0')
    ax.plot(np.sum(return_flow_irrigation_yr / 1e9, axis=0), label='all values')
    ax.set_xlabel('year')
    ax.set_ylabel('km3')
    ax.set_title("World return flows from irrigation " + Input.name_timeperiod)
    ax.legend()
    return return_flow_irrigation, return_flow_irrigation_yr_filtered
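# pointer_array above holds, per spatial unit, the (rows, cols) indices of the
# grid cells belonging to that unit. A sketch (hypothetical IDs and fluxes) of
# how such an index filter can be built with np.where and used for per-unit sums:
import numpy as np

# a 4x4 grid of spatial-unit IDs (0 = outside any unit)
unit_ids = np.array([[1, 1, 2, 2],
                     [1, 1, 2, 2],
                     [3, 3, 0, 0],
                     [3, 3, 0, 0]])

# one (rows, cols) pair per unit: pointer_array[k][0] are rows, [k][1] columns
ids = np.unique(unit_ids[unit_ids > 0])
pointer_array = [np.where(unit_ids == i) for i in ids]

flux = np.arange(16.0).reshape(4, 4)  # stand-in for (recharge difference * area)
unit_totals = [flux[pointer_array[k][0], pointer_array[k][1]].sum()
               for k in range(len(ids))]
print(dict(zip(ids.tolist(), unit_totals)))  # {1: 10.0, 2: 18.0, 3: 42.0}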
def testScalar2Nominal(self): pcraster.setclone("abs_Expr.map") scalarMap = pcraster.readmap("abs_Expr.map") self.assertEqual(scalarMap.dataType(), pcraster.VALUESCALE.Scalar) nominalMap = nominal(scalarMap) self.assertEqual(nominalMap.dataType(), pcraster.VALUESCALE.Nominal)
msg = ' all done'
logger.info(msg)
print("")
print("")

# set the global clone map
clone_map_file = "/projects/0/dfguu/users/edwinhs/data/HydroSHEDS/hydro_basin_without_lakes/integrating_ldd/version_9_december_2016/merged_ldd.map"
pcr.setclone(clone_map_file)

# set the landmask
# - using the following landmask (defined to exclude river basins with limited output of PCR-GLOBWB / limited output of extreme value analyses)
landmask_30sec_file = "/projects/0/aqueduct/users/edwinsut/data/landmasks_for_extreme_value_analysis_and_downscaling/landmask_downscaling/landmask_downscaling_30sec.map"
msg = "Set the (high resolution) landmask based on the file: " + str(landmask_30sec_file)
logger.info(msg)
landmask_30sec = pcr.defined(pcr.readmap(landmask_30sec_file))
landmask_used = pcr.ifthen(landmask_30sec, landmask_30sec)

# boolean maps to mask out permanent water bodies (lakes and reservoirs):
reservoirs_30sec_file = "/projects/0/aqueduct/users/edwinsut/data/reservoirs_and_lakes_30sec/grand_reservoirs_v1_1.boolean.map"
msg = "Set the (high resolution) reservoirs based on the file: " + str(reservoirs_30sec_file)
logger.info(msg)
reservoirs_30sec = pcr.cover(pcr.readmap(reservoirs_30sec_file), pcr.boolean(0.0))
lakes_30sec_file = "/projects/0/aqueduct/users/edwinsut/data/reservoirs_and_lakes_30sec/glwd1_lakes.boolean.map"
msg = "Set the (high resolution) lakes based on the file: " + str(lakes_30sec_file)
logger.info(msg)
lakes_30sec = pcr.cover(pcr.readmap(lakes_30sec_file), pcr.boolean(0.0))
#
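# The pcr.cover(..., pcr.boolean(0.0)) calls above turn missing values in the
# reservoir and lake rasters into "not water". A self-contained sketch (with
# synthetic 2x2 masks) of combining them into one permanent-water mask and
# excluding it from a landmask:
import numpy as np
import pcraster as pcr

pcr.setclone(2, 2, 1.0, 0.0, 2.0)
mv = 255  # missing-value code in the synthetic uint8 inputs

reservoirs = pcr.cover(
    pcr.numpy2pcr(pcr.Boolean, np.array([[1, 0], [mv, 0]], dtype=np.uint8), mv),
    pcr.boolean(0.0))
lakes = pcr.cover(
    pcr.numpy2pcr(pcr.Boolean, np.array([[0, 1], [0, 0]], dtype=np.uint8), mv),
    pcr.boolean(0.0))

permanent_water = pcr.pcror(reservoirs, lakes)
landmask = pcr.boolean(1.0)  # stand-in for landmask_used
land_without_water = pcr.ifthen(pcr.pcrnot(permanent_water), landmask)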
def joinMaps(inputTuple):
    '''Merges maps starting from an input tuple that specifies the output map name,
    the number of rows and columns, the upper-left X and Y coordinates, the cell
    length, the missing value identifier, a list of input maps and the clone map.'''
    outputFileName = inputTuple[0]
    nrRows = inputTuple[1]
    nrCols = inputTuple[2]
    xMin = inputTuple[3]
    yMax = inputTuple[4]
    cellLength = inputTuple[5]
    MV = inputTuple[6]
    fileNames = inputTuple[7]
    cloneFileName = inputTuple[8]
    #-echo to screen/logger
    msg = 'combining files for %s' % outputFileName
    logger.info(msg)
    #-get extent
    xMax = xMin + nrCols * cellLength
    yMin = yMax - nrRows * cellLength
    xCoordinates = xMin + np.arange(nrCols + 1) * cellLength
    yCoordinates = yMin + np.arange(nrRows + 1) * cellLength
    yCoordinates = np.flipud(yCoordinates)
    msg = 'between %.2f, %.2f and %.2f, %.2f' % (xMin, yMin, xMax, yMax)
    logger.info(msg)
    #~ #-set output array
    #~ variableArray= np.ones((nrRows,nrCols))*MV
    #-set initial output array to zero
    variableArray = np.zeros((nrRows, nrCols)) * MV
    #-iterate over maps
    for fileName in fileNames:
        print(fileName)
        attributeClone = getMapAttributesALL(fileName)
        cellLengthClone = attributeClone['cellsize']
        rowsClone = attributeClone['rows']
        colsClone = attributeClone['cols']
        xULClone = attributeClone['xUL']
        yULClone = attributeClone['yUL']
        # check whether both maps have the same attributes and process
        process, nd = checkResolution(cellLength, cellLengthClone)
        if process:
            #-get coordinates and locations
            sampleXMin = xULClone
            sampleXMax = xULClone + colsClone * cellLengthClone
            sampleYMin = yULClone - rowsClone * cellLengthClone
            sampleYMax = yULClone
            sampleXCoordinates = sampleXMin + np.arange(colsClone + 1) * cellLengthClone
            sampleYCoordinates = sampleYMin + np.arange(rowsClone + 1) * cellLengthClone
            sampleYCoordinates = np.flipud(sampleYCoordinates)
            sampleXMin = getMax(xMin, sampleXMin)
            sampleXMax = getMin(xMax, sampleXMax)
            sampleYMin = getMax(yMin, sampleYMin)
            sampleYMax = getMin(yMax, sampleYMax)
            sampleRow0 = getPosition(sampleYMin, sampleYCoordinates, nd)
            sampleRow1 = getPosition(sampleYMax, sampleYCoordinates, nd)
            sampleCol0 = getPosition(sampleXMin, sampleXCoordinates, nd)
            sampleCol1 = getPosition(sampleXMax, sampleXCoordinates, nd)
            sampleRow0, sampleRow1 = checkRowPosition(sampleRow0, sampleRow1)
            variableRow0 = getPosition(sampleYMin, yCoordinates, nd)
            variableRow1 = getPosition(sampleYMax, yCoordinates, nd)
            variableCol0 = getPosition(sampleXMin, xCoordinates, nd)
            variableCol1 = getPosition(sampleXMax, xCoordinates, nd)
            variableRow0, variableRow1 = checkRowPosition(variableRow0, variableRow1)
            #-read sample array
            setclone(fileName)
            sampleArray = pcr2numpy(readmap(fileName), MV)
            print(sampleArray)
            sampleNrRows, sampleNrCols = sampleArray.shape
            #-create mask of cells holding valid values in both arrays
            #~ mask= (variableArray[variableRow0:variableRow1,variableCol0:variableCol1] == MV) &\
                #~ (sampleArray[sampleRow0:sampleRow1,sampleCol0:sampleCol1] != MV)
            mask = (variableArray[variableRow0:variableRow1, variableCol0:variableCol1] != MV) &\
                (sampleArray[sampleRow0:sampleRow1, sampleCol0:sampleCol1] != MV)
            #-add values
            msg = ' adding values in %d, %d rows, columns from (x, y) %.3f, %.3f and %.3f, %.3f to position (row, col) %d, %d and %d, %d' %\
                (sampleNrRows, sampleNrCols, sampleXMin, sampleYMin, sampleXMax, sampleYMax, variableRow0, variableCol0, variableRow1, variableCol1)
            logger.info(msg)
            #~ variableArray[variableRow0:variableRow1,variableCol0:variableCol1][mask]= \
                #~ sampleArray[sampleRow0:sampleRow1,sampleCol0:sampleCol1][mask]
            variableArray[variableRow0:variableRow1,
                          variableCol0:variableCol1][mask] += sampleArray[
                              sampleRow0:sampleRow1, sampleCol0:sampleCol1][mask]
        else:
            msg = '%s does not match resolution and is not processed' % fileName
            logger.warning(msg)
    #-report output map
    setclone(cloneFileName)
    report(numpy2pcr(Scalar, variableArray, MV), outputFileName)
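# joinMaps takes everything through a single 9-element tuple, which makes call
# sites easy to get wrong. A hypothetical call (file names and extent invented
# for illustration) showing the expected layout:
inputTuple = (
    "merged.map",                   # output map name
    20, 20,                         # nrRows, nrCols
    -10.0, 60.0,                    # xMin (upper-left X), yMax (upper-left Y)
    0.5,                            # cell length
    -9999.0,                        # missing value
    ["tile_a.map", "tile_b.map"],   # input maps to merge
    "clone.map",                    # clone map
)
joinMaps(inputTuple)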
def _read_set_clone(self, filename): pcraster.setclone(filename) return pcraster.readmap(filename)
#~ [email protected]:/scratch/depfg/sutan101/data/pcrglobwb2_input_release/version_2019_11_beta_extended/pcrglobwb2_input/global_05min/routing/ldd_and_cell_area$ ls -lah
#~ total 188M
#~ drwxr-xr-x 2 sutan101 depfg    7 Nov 12  2019 .
#~ drwxr-xr-x 5 sutan101 depfg    3 Nov 11  2019 ..
#~ -rwxr-xr-x 1 sutan101 depfg  36M Nov 11  2019 cellsize05min.correct.map
#~ -rw-r--r-- 1 sutan101 depfg  36M Nov 14  2019 cellsize05min_correct.nc
#~ -rw-r--r-- 1 sutan101 depfg  36M Nov 14  2019 cellsize05min.correct.nc
#~ -rw-r--r-- 1 sutan101 depfg  129 Nov 14  2019 hydroworld_source.txt
#~ -rwxr-xr-x 1 sutan101 depfg 8.9M Nov 11  2019 lddsound_05min.map
#~ -rw-r--r-- 1 sutan101 depfg  36M Nov 14  2019 lddsound_05min.nc
#~ -rw-r--r-- 1 sutan101 depfg  36M Nov 14  2019 lddsound_05min_unmask.nc

# calculate pcrglobwb catchment area
# - reading input files
ldd_file_name = "/scratch/depfg/sutan101/data/pcrglobwb2_input_release/version_2019_11_beta_extended/pcrglobwb2_input/global_05min/routing/ldd_and_cell_area/lddsound_05min.map"
ldd = pcr.readmap(ldd_file_name)
cell_area_file_name = "/scratch/depfg/sutan101/data/pcrglobwb2_input_release/version_2019_11_beta_extended/pcrglobwb2_input/global_05min/routing/ldd_and_cell_area/cellsize05min.correct.map"
cellarea = pcr.readmap(cell_area_file_name)
# - calculate pcrglobwb catchment area (km2)
pcrglobwb_catchment_area_km2 = pcr.catchmenttotal(cellarea, ldd) / 1e6

# loop through the table, skipping the header line
for table_line in table_lines[1:]:
#~ for table_line in table_lines[0:len(table_lines) + 1]:
#~ for table_line in table_lines[1:3]:

    # select one line (representing each station) and save it to a tmp file
    tmp_file_name = "one_line.tmp"
    if os.path.exists(tmp_file_name):
        os.remove(tmp_file_name)
    one_line_txt_file = open(tmp_file_name, "w")
    one_line_txt_file.write(table_line)
    one_line_txt_file.close()
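# pcr.catchmenttotal accumulates the cell areas downstream along the ldd, so the
# value at any cell is its upstream catchment area. A self-contained sketch with
# a synthetic 3x3 ldd (unit cell areas; the pit collects all nine cells):
import numpy as np
import pcraster as pcr

pcr.setclone(3, 3, 1.0, 0.0, 3.0)

# rows 1-2 drain south (code 2); in the last row, cells drain west (code 4)
# into the pit (code 5) at row 3, column 1
ldd_np = np.array([[2, 2, 2],
                   [2, 2, 2],
                   [5, 4, 4]], dtype=np.int32)
ldd = pcr.lddrepair(pcr.numpy2pcr(pcr.Ldd, ldd_np, 0))

cellarea = pcr.spatial(pcr.scalar(1.0))
upstream_area = pcr.catchmenttotal(cellarea, ldd)

value, valid = pcr.cellvalue(upstream_area, 3, 1)  # (row, col), 1-based
print(value, valid)  # 9.0 True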
def __init__(self): dynamicPCRasterBase.DynamicModel.__init__(self) mcPCRasterBase.MonteCarloModel.__init__(self) pfPCRasterBase.ParticleFilterModel.__init__(self) pcraster.setclone("clone.map") self.newmap = pcraster.readmap("clone.map")
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file = global_landmask_30min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30min_only.map")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file = global_landmask_05min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_05min_only.map")
    # - based on the 06min input
    landmask_06min = define_landmask(input_file = global_landmask_06min_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_06min_only.map")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_30sec_only.map")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\
                                     clone_map_file = global_ldd_30min_inp_file,\
                                     output_map_file = "landmask_03sec_only.map")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_extended_30min.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_extended_30min.map")
    # ~ pcr.aguila(ldd_map)

    # catchment map and size
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ pcr.aguila(catchment_size)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 15000 cells (at half arc degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 15000., island_map)
    # ~ # - use catchments (instead of islands)
    # ~ island_map = catchment_map
    # ~ island_size = catchment_size
    # ~ island_map = pcr.ifthen(island_size < 10000., island_map)
    # - sort from the largest island
    # -- take one cell per island as a representative
    island_map_rep_size = pcr.ifthen(
        pcr.areaorder(island_size, island_map) == 1.0, island_size)
    # -- sort from the largest island
    island_map_rep_ids = pcr.areaorder(
        island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0)))
    # -- map of smaller islands, sorted from the largest one
    island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map)

    # identify the biggest island for every group of small islands within a certain window (arcdeg cells)
    print("the biggest island for every group of small islands")
    large_island_map = pcr.ifthen(
        pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map), 15.),
        island_map)
    # ~ pcr.aguila(large_island_map)

    # identify big catchments
    print("identify large catchments")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map)
    # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map)
    # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map)
    # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map)
    # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin)
    large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map)
    # - give these catchments codes that differ from the island codes
    large_catchment_map = pcr.nominal(
        pcr.scalar(large_catchment_map) +
        10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1])

    # merge biggest islands and big catchments
    print("merge large catchments and islands")
    large_catchment_and_island_map = pcr.cover(large_catchment_map, large_island_map)
    # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map)
    large_catchment_and_island_map_size = pcr.areatotal(
        pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map)
    # - sort from the largest one
    # -- take one cell per island as a representative
    large_catchment_and_island_map_rep_size = pcr.ifthen(
        pcr.areaorder(large_catchment_and_island_map_size,
                      large_catchment_and_island_map) == 1.0,
        large_catchment_and_island_map_size)
    # -- sort from the largest
    large_catchment_and_island_map_rep_ids = pcr.areaorder(
        large_catchment_and_island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size),
                   pcr.nominal(1.0)))
    # -- map of largest catchments and islands, sorted from the largest one
    large_catchment_and_island_map = pcr.areamajority(
        pcr.nominal(large_catchment_and_island_map_rep_ids),
        large_catchment_and_island_map)
    # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map")

    # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains")
    # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ # - initial subdomains
    # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map"))
    # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    # ~ pcr.aguila(subdomains_initial)

    # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments
    print("spatial interpolation/extrapolation to get initial subdomains")
    field = large_catchment_and_island_map
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_initial = field
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)
    pcr.report(subdomains_initial, "global_subdomains_30min_initial.map")
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    print("Checking all subdomains, avoid too large subdomains")
    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    # start with an empty (all missing value) nominal map
    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0))
        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        # ~ print(str(area_in_degree2))

        # check whether the size of the bounding box is ok
        # - initial check value
        check_ok = True
        reference_area_in_degree2 = 2500.
        if area_in_degree2 > 1.50 * reference_area_in_degree2:
            check_ok = False
        if (xmax - xmin) > 10 * (ymax - ymin):
            check_ok = False

        if check_ok == True:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

        if check_ok == False:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum clump ids
            min_clump_id = int(pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump, mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(pcr.cellvalue(
                    pcr.mapmaximum(pcr.scalar(pcr.defined(
                        check_mask_selected_boolean_from_clump))), 1)[0])

                if check_if_empty == 0.0:
                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)
                else:
                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump, pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)

    # ~ # kill all aguila processes if they exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")
    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_30min_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):
        mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0))
        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        print(str(nr) + " ; " + str(area_in_degree2) + " ; " +
              str((xmax - xmin)) + " ; " + str((ymax - ymin)))

    print("")
    print("")
    print("")
    print("Number of subdomains: " + str(num_of_masks))

    print("")
    print("")
    print("")

    # spatial extrapolation in order to cover the entire map
    print("spatial interpolation/extrapolation to cover the entire map")
    field = subdomains_final
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_final_filled = field
    pcr.aguila(subdomains_final_filled)

    pcr.report(subdomains_final_filled, "global_subdomains_30min_final_filled.map")
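# The cellID/zoneID/areamajority idiom used twice in main() assigns every
# undefined cell the value of its nearest defined cell. Wrapped as a helper,
# with a small synthetic demonstration:
import numpy as np
import pcraster as pcr

def fill_with_nearest(field):
    # one unique id per defined cell
    cell_id = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    # zones: each cell belongs to its nearest defined cell
    zone_id = pcr.spreadzone(cell_id, 0, 1)
    # per zone, take the (single) defined value of `field`
    return pcr.areamajority(field, zone_id)

pcr.setclone(4, 4, 1.0, 0.0, 4.0)
mv = 0
labels_np = np.array([[1, mv, mv, mv],
                      [mv, mv, mv, mv],
                      [mv, mv, mv, mv],
                      [mv, mv, mv, 2]], dtype=np.int32)
labels = pcr.numpy2pcr(pcr.Nominal, labels_np, mv)
filled = fill_with_nearest(labels)
print(pcr.pcr2numpy(filled, 0))  # upper-left cells -> 1, lower-right -> 2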