Example #1
    def __init__(self, configuration, currTimeStep, initialState = None):
        self._configuration = configuration
        self._modelTime = currTimeStep
        
        pcr.setclone(configuration.cloneMap)

        # Read the ldd map.
        self.lddMap = vos.readPCRmapClone(\
                  configuration.routingOptions['lddMap'],
                  configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'],True)
        #ensure ldd map is correct, and actually of type "ldd"
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
 
        if configuration.globalOptions['landmask'] != "None":
            self.landmask = vos.readPCRmapClone(\
            configuration.globalOptions['landmask'],
            configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'])
        else:
            self.landmask = pcr.defined(self.lddMap)
        
        # ADDED: variables necessary for 2-way coupling functions
        # ----------------------------------------------------------------------------------------------------------------
        # variable to control activation of 2-way coupling functions (can be changed through BMI)
        self.ActivateCoupling = self._configuration.globalOptions['ActivateCoupling']
        # ----------------------------------------------------------------------------------------------------------------
        
        # defining catchment areas
        self.catchment_class = 1.0
        
        # number of upperSoilLayers:
        self.numberOfSoilLayers = int(configuration.landSurfaceOptions['numberOfUpperSoilLayers'])

        self.createSubmodels(initialState)
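
For reference, a minimal sketch of the configuration object this constructor reads from, inferred solely from the attributes and option keys accessed above; the class name and every literal value below are hypothetical placeholders, not part of PCR-GLOBWB:

class MinimalConfiguration(object):
    """Hypothetical stub exposing only the fields used by the __init__ above."""
    cloneMap = "clone.map"                         # path to the PCRaster clone map (placeholder)
    tmpDir = "tmp/"                                # scratch directory used when resampling inputs
    routingOptions = {'lddMap': "ldd.map"}         # local drainage direction map (placeholder)
    globalOptions = {'inputDir': "input/",
                     'landmask': "None",           # the string "None" derives the mask from the ldd
                     'ActivateCoupling': "False"}  # 2-way coupling switch, togglable through BMI
    landSurfaceOptions = {'numberOfUpperSoilLayers': "2"}  # parsed with int() above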
Example #2
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    #logger.info('read file/values: '+str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath != None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) >  0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            co = 'rm ' + str(tmpDir) + '*.*'
            # discard stderr: the null device must be opened for writing here
            cOut, err = subprocess.Popen(co, stdout=subprocess.PIPE, stderr=open(os.devnull, 'w'), shell=True).communicate()
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
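
A usage sketch of this helper, assuming vos is the module that defines it and that the clone map, temporary directory, and input directory exist (all file names are placeholders); it exercises the three input modes the branches above handle:

# map file: resampled against the clone if the clones differ, cast to ldd (isLddMap=True)
ldd = vos.readPCRmapClone("lddsound.map", "clone.map", "tmp/", "input/", True)
# a purely numeric string short-circuits to a scalar field
porosity = vos.readPCRmapClone("0.35", "clone.map", "tmp/")
# the literal string "None" is passed through unchanged
nothing = vos.readPCRmapClone("None", "clone.map", "tmp/")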
Example #3
    def __init__(self, configuration, currTimeStep, initialState = None):
        self._configuration = configuration
        self._modelTime = currTimeStep
        
        pcr.setclone(configuration.cloneMap)

        # Read the ldd map.
        self.lddMap = vos.readPCRmapClone(\
                  configuration.routingOptions['lddMap'],
                  configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'],True)
        #ensure ldd map is correct, and actually of type "ldd"
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
 
        if configuration.globalOptions['landmask'] != "None":
            self.landmask = vos.readPCRmapClone(\
            configuration.globalOptions['landmask'],
            configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'])
        else:
            self.landmask = pcr.defined(self.lddMap)
        
        # defining catchment areas
        self.catchment_class = 1.0
        
        # number of upperSoilLayers:
        self.numberOfSoilLayers = int(configuration.landSurfaceOptions['numberOfUpperSoilLayers'])

        self.createSubmodels(initialState)
Example #4
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    print(v)
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath != None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) >  0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            co = 'rm ' + str(tmpDir) + '*.*'
            # discard stderr: the null device must be opened for writing here
            cOut, err = subprocess.Popen(co, stdout=subprocess.PIPE, stderr=open(os.devnull, 'w'), shell=True).communicate()
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
Example #5
    def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):

        line = re.sub("\n", "", line)
        line = re.sub("\t", " ", line)
        result = None

        # read until first comment
        content = ""
        content, sep, comment = line.partition("#")
        if len(content) > 1:
            collectionVariableName, sep, tail = content.partition(" ")
            if collectionVariableName == self._varName:
                tail = tail.strip()
                key, sep, variableValue = tail.rpartition(" ")

                if len(key.split()) != nrColumns:
                    tmp = re.sub("\(|\)|,", "", str(key))
                    msg = "Error reading %s line %d, order of columns given (%s columns) does not match expected order of %s columns" % (
                        self._fileName, lineNumber, len(key.split()) + 2,
                        int(nrColumns) + 2)
                    raise ValueError(msg)

                variableValue = re.sub('\"', "", variableValue)

                tmp = None
                try:
                    tmp = int(variableValue)
                    if self._dataType == pcraster.Boolean:
                        tmp = pcraster.boolean(tmp)
                    elif self._dataType == pcraster.Nominal:
                        tmp = pcraster.nominal(tmp)
                    elif self._dataType == pcraster.Ordinal:
                        tmp = pcraster.ordinal(tmp)
                    elif self._dataType == pcraster.Ldd:
                        tmp = pcraster.ldd(tmp)
                    else:
                        msg = "Conversion to %s failed" % (self._dataType)
                        raise Exception(msg)
                except ValueError as e:
                    try:
                        tmp = float(variableValue)
                        if self._dataType == pcraster.Scalar:
                            tmp = pcraster.scalar(tmp)
                        elif self._dataType == pcraster.Directional:
                            tmp = pcraster.directional(tmp)
                        else:
                            msg = "Conversion to %s failed" % (self._dataType)
                            raise Exception(msg)

                    except ValueError as e:
                        variableValue = re.sub("\\\\", "/", variableValue)
                        variableValue = variableValue.strip()
                        path = os.path.normpath(variableValue)
                        try:
                            tmp = pcraster.readmap(path)
                        except RuntimeError as e:
                            msg = "Error reading %s line %d, %s" % (
                                self._fileName, lineNumber, e)
                            raise ValueError(msg)
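
Reading the steps above together, the method expects a line holding the collection variable name, a key of exactly nrColumns tokens, and a final value; a hedged illustration with made-up names follows. The value 0.002 fails int(), succeeds as float(), and is then cast to the collection's data type (pcraster.scalar here); a non-numeric value would instead be normalized to a path and read with pcraster.readmap.

precipitation (1,1) 0.002   # variableName  key (nrColumns tokens)  value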
Example #6
def readPCRmapClone(v,
                    cloneMapFileName,
                    tmpDir,
                    absolutePath=None,
                    isLddMap=False,
                    cover=None,
                    isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: ' + str(v))
    if v == "None":
        #~ PCRmap = str("None")
        PCRmap = None  # 29 July: I made an experiment by changing the type of this object.
    elif not re.match(r"[0-9.-]*$", v):
        if absolutePath != None: v = getFullPath(v, absolutePath)
        # print v
        # print cloneMapFileName
        sameClone = isSameClone(v, cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir + 'temp.map'
            warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir, isLddMap,
                               isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
            os.makedirs(tmpDir)
    else:
        PCRmap = pcr.spatial(pcr.scalar(float(v)))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None
    cOut = None
    err = None
    warp = None
    del co
    del cOut
    del err
    del warp
    stdout = None
    del stdout
    stderr = None
    del stderr

    # SM: revisit this
    PCRmap = pcr.pcr2numpy(PCRmap, np.nan)

    return PCRmap
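
Unlike the other variants of this function, this one converts the result with pcr.pcr2numpy before returning, so callers receive a numpy 2d array with missing values encoded as np.nan rather than a PCRaster field (and "None" now yields the Python None object). A short sketch of what that means for a caller, with placeholder arguments:

arr = readPCRmapClone("0.35", "clone.map", "tmp/")  # numpy array, not a pcr field
mean_value = np.nanmean(arr)                        # nan-aware statistics apply directly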
Example #7
    def __init__(self, iniItems, landmask, Dir, cloneMap, tmpDir):
        object.__init__(self)

        # clone map file names, temporary directory and global/absolute path of input directory
        self.cloneMap = cloneMap  #iniItems.cloneMap
        self.tmpDir = tmpDir  #iniItems.tmpDir
        self.inputDir = Dir  #iniItems.globalOptions['inputDir']
        self.landmask = landmask

        # local drainage direction:
        self.lddMap = vos.readPCRmapClone(
            iniItems.get("routingOptions", "lddMap"), self.cloneMap,
            self.tmpDir, self.inputDir, True)
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
        self.lddMap = pcr.lddrepair(self.lddMap)

        # option to activate water balance check
        self.debugWaterBalance = True
        if configget(iniItems, "routingOptions", "debugWaterBalance",
                     "True") == "False":
            self.debugWaterBalance = False

        # option to perform a run with only natural lakes (without reservoirs)
        self.onlyNaturalWaterBodies = False
        if "onlyNaturalWaterBodies" in iniItems._sections[
                'routingOptions'] and configget(iniItems, "routingOptions",
                                                "onlyNaturalWaterBodies",
                                                "False") == "True":
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.onlyNaturalWaterBodies = True
            self.dateForNaturalCondition = "1900-01-01"  # The run for a natural condition should access only this date.

        # names of files containing water bodies parameters
        if configget(iniItems, "routingOptions", "waterBodyInputNC",
                     "None") == str(None):
            self.useNetCDF = False
            self.fracWaterInp = iniItems.get("routingOptions", "fracWaterInp")
            self.waterBodyIdsInp = iniItems.get("routingOptions",
                                                "waterBodyIds")
            self.waterBodyTypInp = iniItems.get("routingOptions",
                                                "waterBodyTyp")
            self.resMaxCapInp = iniItems.get("routingOptions", "resMaxCapInp")
            self.resSfAreaInp = iniItems.get("routingOptions", "resSfAreaInp")
        else:
            self.useNetCDF = True
            self.ncFileInp       = vos.getFullPath(\
                                   iniItems.get("routingOptions","waterBodyInputNC"),\
                                   self.inputDir)

        # minimum width (m) used in the weir formula  # TODO: define minWeirWidth based on the GLWD, GRanD database and/or bankfull discharge formula
        self.minWeirWidth = 10.

        # lower and upper limits at which reservoir release is terminated and
        #                        at which reservoir release is equal to long-term average outflow
        self.minResvrFrac = 0.10
        self.maxResvrFrac = 0.75
Example #8
 def initial(self):
     #####################
     # * initial section #
     #####################
     #-constants
     # betaQ [-]: constant of kinematic wave momentum equation
     self.betaQ = 0.6
     #-channel LDD
     self.channelLDD= pcr.ifthenelse(self.waterBodies.distribution != 0,\
      pcr.ldd(5),self.LDD)
     #-channel area and storage
     self.channelArea = self.channelWidth * self.channelLength
     self.channelStorageCapacity= pcr.ifthenelse(self.waterBodies.distribution == 0,\
      self.channelArea*self.channelDepth,pcr.scalar(0.))
     #-basin outlets
     self.basinOutlet = pcr.pit(self.LDD) != 0
     #-read initial conditions
     self.Q = clippedRead.get(self.QIniMap)
     self.actualStorage = clippedRead.get(self.actualStorageIniMap)
     self.actualStorage= pcr.ifthenelse(self.waterBodies.distribution != 0,\
      pcr.ifthenelse(self.waterBodies.location != 0,\
       pcr.areatotal(self.actualStorage,self.waterBodies.distribution),0),\
        self.actualStorage)
     self.waterBodies.actualStorage = self.waterBodies.retrieveMapValue(
         self.actualStorage)
     #-update targets of average and bankful discharge
     self.waterBodies.averageQ = self.waterBodies.retrieveMapValue(
         self.averageQ)
     self.waterBodies.bankfulQ = self.waterBodies.retrieveMapValue(
         self.bankfulQ)
     #-return the parameters for the kinematic wave,
     # including alpha, wetted area, flood fraction, flood volume and depth
     # and the corresponding land area
     floodedFraction,floodedDepth,\
      self.wettedArea,self.alphaQ= self.kinAlphaComposite(self.actualStorage,self.floodplainMask)
     self.wettedArea= self.waterBodies.returnMapValue(self.wettedArea,\
      self.waterBodies.channelWidth+2.*self.waterBodies.updateWaterHeight())
     self.waterFraction= pcr.ifthenelse(self.waterBodies.distribution == 0,\
      pcr.max(self.waterFractionMask,floodedFraction),self.waterFractionMask)
     self.landFraction = pcr.max(0., 1. - self.waterFraction)
     #-update on velocity and check on Q - NOTE: does not work in case of reservoirs!
     self.flowVelocity = pcr.ifthenelse(self.wettedArea > 0,
                                        self.Q / self.wettedArea, 0.)
     pcr.report(
         self.flowVelocity,
         pcrm.generateNameT(flowVelocityFileName,
                            0).replace('.000', '.ini'))
     #-setting initial values for specific runoff and surface water extraction
     self.landSurfaceQ = pcr.scalar(0.)
     self.potWaterSurfaceQ = pcr.scalar(0.)
     self.surfaceWaterExtraction = pcr.scalar(0.)
     #-budget check: setting initial values for cumulative discharge and
     # net cumulative input, including initial storage [m3]
     self.totalDischarge = pcr.scalar(0.)
     self.cumulativeDeltaStorage = pcr.catchmenttotal(
         self.actualStorage, self.LDD)
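
For context, betaQ = 0.6 is the usual exponent of the kinematic-wave rating A = alpha * Q**beta, which is why the code can recover flow velocity as Q / wettedArea once alphaQ and the wetted area are known. A minimal scalar sketch of that rating and its inversion (plain floats, standing in for the pcr composite computed by kinAlphaComposite):

def kinematic_discharge(wetted_area, alpha, beta=0.6):
    # invert the kinematic-wave rating A = alpha * Q**beta for Q
    return (wetted_area / alpha) ** (1.0 / beta)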
Example #11
 def testNotEqualsLdd(self):
     pcraster.setclone("accu_Ldd.map")
     ldd = pcraster.readmap("accu_Ldd.map")
     nonSpatial = pcraster._pcraster._newNonSpatialField(5)
     # we need to explicitly cast PODs to ldd (or directional)
     # when using the multicore module
     #raster = mcop.pcrmcNE("accu_Ldd.map", 5)
     raster = mcop.pcrmcNE("accu_Ldd.map", pcraster.ldd(5))
     warnings.warn("Difference between pcraster and multicore module...")
     value, isValid = pcraster.cellvalue(raster, 1)
     self.assertEqual(isValid, True)
     self.assertEqual(value, True)
     value, isValid = pcraster.cellvalue(raster, 22)
     self.assertEqual(isValid, True)
     self.assertEqual(value, False)
     value, isValid = pcraster.cellvalue(raster, 25)
     self.assertEqual(isValid, True)
     self.assertEqual(value, True)
Example #13
    def __init__(self, configuration, currTimeStep, initialState=None):
        self._configuration = configuration
        self._modelTime = currTimeStep

        pcr.setclone(configuration.cloneMap)

        # Read the ldd map.
        self.lddMap = vos.readPCRmapClone(\
                  configuration.routingOptions['lddMap'],
                  configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'],True)
        #ensure ldd map is correct, and actually of type "ldd"
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))

        if configuration.globalOptions['landmask'] != "None":
            self.landmask = vos.readPCRmapClone(\
            configuration.globalOptions['landmask'],
            configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'])
        else:
            self.landmask = pcr.defined(self.lddMap)

        # defining catchment areas
        self.catchment_class = 1.0

        # number of upperSoilLayers:
        self.numberOfSoilLayers = int(
            configuration.landSurfaceOptions['numberOfUpperSoilLayers'])

        # preparing sub-modules
        self.createSubmodels(initialState)

        # option for debugging to PCR-GLOBWB version 1.0
        self.debug_to_version_one = False
        if configuration.debug_to_version_one: self.debug_to_version_one = True
        if self.debug_to_version_one:

            # preparing initial folder directory
            self.directory_for_initial_maps = vos.getFullPath(
                "initials/", self._configuration.mapsDir)
            if os.path.exists(self.directory_for_initial_maps):
                shutil.rmtree(self.directory_for_initial_maps)
            os.makedirs(self.directory_for_initial_maps)

            # dump the initial state
            self.dumpState(self.directory_for_initial_maps, "initial")
Example #14
  def testDeepCopyRasterNonSpatial(self):
    pcraster.setclone("validated/boolean_Result.map")

    raster = pcraster.boolean(1)
    tmp = copy.deepcopy(raster)
    self.assertEqual(True, self.arbitraryMapEquals(raster, tmp))
    raster1 = pcraster.nominal(1)
    tmp1 = copy.deepcopy(raster1)
    self.assertEqual(True, self.arbitraryMapEquals(raster1, tmp1))
    raster2 = pcraster.ordinal(1)
    tmp2 = copy.deepcopy(raster2)
    self.assertEqual(True, self.arbitraryMapEquals(raster2, tmp2))
    raster3 = pcraster.scalar(1)
    tmp3 = copy.deepcopy(raster3)
    self.assertEqual(True, self.arbitraryMapEquals(raster3, tmp3))
    raster4 = pcraster.directional(1)
    tmp4 = copy.deepcopy(raster4)
    self.assertEqual(True, self.arbitraryMapEquals(raster4, tmp4))
    raster5 = pcraster.ldd(1)
    tmp5 = copy.deepcopy(raster5)
    self.assertEqual(True, self.arbitraryMapEquals(raster5, tmp5))
Example #16
def pcr_preprocess(dem_in, x, y, itile, tempdir, ldd_in=None,
                    test=False, create_ldd=True):
    """
    function to set pcr clone and translate DEM (terrain) and ldd numpy 2d arrays to pcr maps

    :param terrain:     masked numpy 2d array with elevation data
    :param x:           numpy 1d array with x coordinates of elevation grid
    :param y:           numpy 1d array with Y coordinates of elevation grid
    :param tempdir:     string with directory to temporary save clone pcrmap
    :param ldd:         numpy 2d array with ldd grid, make sure it uses the pcrmap definition of ldd
    :param test:        if True do not remove clone maps

    :return:            pcr maps for dem and ldd
    """
    # create clone in temp_dir
    fn_clone = os.path.join(tempdir, '_{:03d}_dem.map'.format(itile))
    cl.makeDir(tempdir)  # make dir if not exist
    # DEM
    gdal_writemap(fn_clone, 'PCRaster', x, y, dem_in, -9999)  # note: missing value needs conversion in python item
    pcr.setclone(fn_clone)
    pcr.setglobaloption("unitcell")
    dem = pcr.readmap(fn_clone)
    # cleanup: remove the temporary clone and its GDAL sidecar, unless testing
    if not test:
        os.unlink(fn_clone)  # cleanup clone file
        os.unlink(fn_clone + '.aux.xml')
    # LDD
    if create_ldd:
        if ldd_in is None:
            print('Calculating LDD')
            ldd = pcr.lddcreate(dem, 1E31, 1E31, 1E31, 1E31)
        else:
            # TODO note that np.nan is default NoDataValue for ldd file. check this when reading data
            # TODO: x and y axis got mixed up when translating with numpy2pcr. check if it works here!
            # in process_tile function in coastal_inun.py
            ldd = pcr.lddrepair(pcr.ldd(pcr.numpy2pcr(pcr.Ldd, ldd_in, np.nan)))
    else:
        ldd = None
    return dem, ldd
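
A usage sketch under stated assumptions: the elevation tile is a masked numpy 2d array with matching 1d coordinate axes, and no precomputed ldd is supplied, so lddcreate derives one; the array names and temporary directory are illustrative:

import numpy as np

nx, ny = 120, 100
x = np.linspace(4.0, 5.0, nx)     # hypothetical coordinate axes
y = np.linspace(52.0, 51.0, ny)   # y descending, as in most rasters
dem_tile = np.ma.masked_invalid(np.random.rand(ny, nx) * 50.0)

dem_map, ldd_map = pcr_preprocess(dem_tile, x, y, itile=0, tempdir='tmp_clone',
                                  ldd_in=None, test=True, create_ldd=True)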
Example #17
    def __init__(self, configuration, currTimeStep):
        self._configuration = configuration
        self._modelTime = currTimeStep

        pcr.setclone(configuration.cloneMap)

        # read the ldd map
        self.lddMap = vos.netcdf2PCRobjCloneWithoutTime(configuration.modflowParameterOptions['channelNC'],'lddMap',\
                                                        configuration.cloneMap)
        # ensure ldd map is correct, and actually of type "ldd"
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))

        # defining the landmask map
        if configuration.globalOptions['landmask'] != "None":
            self.landmask = vos.readPCRmapClone(\
            configuration.globalOptions['landmask'],
            configuration.cloneMap,configuration.tmpDir,configuration.globalOptions['inputDir'])
        else:
            self.landmask = pcr.defined(self.lddMap)

        # preparing the sub-model(s)         - Currently, there is only one sub-model.
        self.createSubmodels()
Example #18
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False,inputEPSG="EPSG:4326",outputEPSG="EPSG:4326",method="near"):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: '+str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath != None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            # if no re-projection needed:
            if inputEPSG == outputEPSG or outputEPSG == None: 
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            else:
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap,inputEPSG,outputEPSG,method)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) >  0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
            os.makedirs(tmpDir)
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
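
A sketch of the re-projection path this variant adds, assuming a source raster in UTM and a clone in geographic coordinates; the EPSG codes, resampling method, and file names are illustrative only:

dem = readPCRmapClone("dem_utm.map", "clone.map", "tmp/",
                      inputEPSG="EPSG:32631", outputEPSG="EPSG:4326", method="bilinear")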
Example #20
def main():
    ### Read input arguments #####
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q', '--quiet',
                      dest='verbose', default=True, action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i', '--ini', dest='inifile',
                      default='hand_contour_inun.ini', nargs=1,
                      help='ini configuration file')
    parser.add_option('-f', '--flood_map',
                      nargs=1, dest='flood_map',
                      help='Flood map file (NetCDF point time series file)')
    parser.add_option('-v', '--flood_variable',
                      nargs=1, dest='flood_variable',
                      default='water_level',
                      help='variable name of flood water level')
    parser.add_option('-b', '--bankfull_map',
                      dest='bankfull_map', default='',
                      help='Map containing bank full level (is subtracted from flood map, in NetCDF)')
    parser.add_option('-c', '--catchment',
                      dest='catchment_strahler', default=7, type='int',
                      help='Strahler order threshold >= are selected as catchment boundaries')
    parser.add_option('-s', '--hand_strahler',
                      dest='hand_strahler', default=7, type='int',
                      help='Strahler order threshold >= selected as riverine')
    parser.add_option('-d', '--destination',
                      dest='dest_path', default='inun',
                      help='Destination path')
    (options, args) = parser.parse_args()

    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    options.dest_path = os.path.abspath(options.dest_path)

    if not(os.path.isdir(options.dest_path)):
        os.makedirs(options.dest_path)

    # set up the logger
    flood_name = os.path.split(options.flood_map)[1].split('.')[0]
    case_name = 'inun_{:s}_hand_{:02d}_catch_{:02d}'.format(flood_name, options.hand_strahler, options.catchment_strahler)
    logfilename = os.path.join(options.dest_path, 'hand_contour_inun.log')
    logger, ch = inun_lib.setlogger(logfilename, 'HAND_INUN', options.verbose)
    logger.info('$Id: $')
    logger.info('Flood map: {:s}'.format(options.flood_map))
    logger.info('Bank full map: {:s}'.format(options.bankfull_map))
    logger.info('Destination path: {:s}'.format(options.dest_path))
    # read out ini file
    ### READ CONFIG FILE
    # open config-file
    config = inun_lib.open_conf(options.inifile)
    
    # read settings
    options.dem_file = inun_lib.configget(config, 'maps',
                                  'dem_file',
                                  True)
    options.ldd_file = inun_lib.configget(config, 'maps',
                                'ldd_file',
                                 True)
    options.stream_file = inun_lib.configget(config, 'maps',
                                'stream_file',
                                 True)
    options.riv_length_file = inun_lib.configget(config, 'maps',
                                'riv_length_file',
                                 True)
    options.riv_width_file = inun_lib.configget(config, 'maps',
                                'riv_width_file',
                                 True)
    options.file_format = inun_lib.configget(config, 'maps',
                                'file_format', 0, datatype='int')
    options.x_tile = inun_lib.configget(config, 'tiling',
                                  'x_tile', 10000, datatype='int')
    options.y_tile = inun_lib.configget(config, 'tiling',
                                  'y_tile', 10000, datatype='int')
    options.x_overlap = inun_lib.configget(config, 'tiling',
                                  'x_overlap', 1000, datatype='int')
    options.y_overlap = inun_lib.configget(config, 'tiling',
                                  'y_overlap', 1000, datatype='int')
    options.iterations = inun_lib.configget(config, 'inundation',
                                  'iterations', 20, datatype='int')
    options.initial_level = inun_lib.configget(config, 'inundation',
                                  'initial_level', 32., datatype='float')
    options.area_multiplier = inun_lib.configget(config, 'inundation',
                                  'area_multiplier', 1., datatype='float')
    logger.info('DEM file: {:s}'.format(options.dem_file))
    logger.info('LDD file: {:s}'.format(options.ldd_file))
    logger.info('Columns per tile: {:d}'.format(options.x_tile))
    logger.info('Rows per tile: {:d}'.format(options.y_tile))
    logger.info('Columns overlap: {:d}'.format(options.x_overlap))
    logger.info('Rows overlap: {:d}'.format(options.y_overlap))
    metadata_global = {}
    # add metadata from the section [metadata]
    meta_keys = config.options('metadata_global')
    for key in meta_keys:
        metadata_global[key] = config.get('metadata_global', key)
    # add a number of metadata variables that are mandatory
    metadata_global['config_file'] = os.path.abspath(options.inifile)
    metadata_var = {}
    metadata_var['units'] = 'm'
    metadata_var['standard_name'] = 'water_surface_height_above_reference_datum'
    metadata_var['long_name'] = 'Coastal flooding'
    metadata_var['comment'] = 'water_surface_reference_datum_altitude is given in file {:s}'.format(options.dem_file)
    if not os.path.exists(options.dem_file):
        logger.error('path to dem file {:s} cannot be found'.format(options.dem_file))
        sys.exit(1)
    if not os.path.exists(options.ldd_file):
        logger.error('path to ldd file {:s} cannot be found'.format(options.ldd_file))
        sys.exit(1)

    # Read extent from a GDAL compatible file
    try:
        extent = inun_lib.get_gdal_extent(options.dem_file)
    except:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    try:
        x, y = inun_lib.get_gdal_axes(options.dem_file, logging=logger)
        srs = inun_lib.get_gdal_projection(options.dem_file, logging=logger)
    except:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    # read history from flood file
    if options.file_format == 0:
        a = nc.Dataset(options.flood_map, 'r')
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), a.history)
        a.close()
    else:
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), 'PCRaster file, no history')

    # first write subcatch maps and hand maps
    ############### TODO ######
    # setup a HAND file
    dem_name = os.path.split(options.dem_file)[1].split('.')[0]
    hand_file = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, options.hand_strahler))
    if not(os.path.isfile(hand_file)):
        # hand file does not exist yet! Generate it, otherwise skip!
        logger.info('HAND file {:s} setting up...please wait...'.format(hand_file))
        hand_file_tmp = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif.tmp'.format(dem_name, options.hand_strahler))
        ds_hand = inun_lib.prepare_gdal(hand_file_tmp, x, y, logging=logger, srs=srs)
        band_hand = ds_hand.GetRasterBand(1)

        # Open terrain data for reading
        ds_dem, rasterband_dem = inun_lib.get_gdal_rasterband(options.dem_file)
        ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
        ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)
        n = 0
        for x_loop in range(0, len(x), options.x_tile):
            x_start = np.maximum(x_loop, 0)
            x_end = np.minimum(x_loop + options.x_tile, len(x))
            # determine actual overlap for cutting
            for y_loop in range(0, len(y), options.y_tile):
                x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
                x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
                n += 1
                # print('tile {:001d}:'.format(n))
                y_start = np.maximum(y_loop, 0)
                y_end = np.minimum(y_loop + options.y_tile, len(y))
                y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
                y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
                # cut out DEM
                logger.debug('Computing HAND for xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end,y_start, y_end))
                terrain = rasterband_dem.ReadAsArray(x_start - x_overlap_min,
                                                     y_start - y_overlap_min,
                                                     (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                     (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                     )

                drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                     y_start - y_overlap_min,
                                                     (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                     (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                     )
                stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                       y_start - y_overlap_min,
                                                       (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                       (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                       )
                # write to temporary file
                terrain_temp_file = os.path.join(options.dest_path, 'terrain_temp.map')
                drainage_temp_file = os.path.join(options.dest_path, 'drainage_temp.map')
                stream_temp_file = os.path.join(options.dest_path, 'stream_temp.map')
                if rasterband_dem.GetNoDataValue() is not None:
                    inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      terrain, rasterband_dem.GetNoDataValue(),
                                      gdal_type=gdal.GDT_Float32,
                                      logging=logger)
                else:
                    # in case no nodata value is found
                    logger.warning('No nodata value found in {:s}. assuming -9999'.format(options.dem_file))
                    inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      terrain, -9999.,
                                      gdal_type=gdal.GDT_Float32,
                                      logging=logger)

                inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                                  np.arange(0, terrain.shape[1]),
                                  np.arange(0, terrain.shape[0]),
                                  drainage, rasterband_ldd.GetNoDataValue(),
                                  gdal_type=gdal.GDT_Int32,
                                  logging=logger)
                inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                                  np.arange(0, terrain.shape[1]),
                                  np.arange(0, terrain.shape[0]),
                                  stream, rasterband_ldd.GetNoDataValue(),
                                  gdal_type=gdal.GDT_Int32,
                                  logging=logger)
                # read as pcr objects
                pcr.setclone(terrain_temp_file)
                terrain_pcr = pcr.readmap(terrain_temp_file)
                drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
                stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to scalar type map

                # compute streams
                stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr, stream_pcr, options.hand_strahler) # generate streams

                basin = pcr.boolean(subcatch)
                hand_pcr, dist_pcr = inun_lib.derive_HAND(terrain_pcr, drainage_pcr, 3000, rivers=pcr.boolean(stream_ge), basin=basin)
                # convert to numpy
                hand = pcr.pcr2numpy(hand_pcr, -9999.)
                # cut relevant part
                if y_overlap_max == 0:
                    y_overlap_max = -hand.shape[0]
                if x_overlap_max == 0:
                    x_overlap_max = -hand.shape[1]
                hand_cut = hand[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]

                band_hand.WriteArray(hand_cut, x_start, y_start)
                os.unlink(terrain_temp_file)
                os.unlink(drainage_temp_file)
                band_hand.FlushCache()
        ds_dem = None
        ds_ldd = None
        ds_stream = None
        band_hand.SetNoDataValue(-9999.)
        ds_hand = None
        logger.info('Finalizing {:s}'.format(hand_file))
        # rename temporary file to final hand file
        os.rename(hand_file_tmp, hand_file)
    else:
        logger.info('HAND file {:s} already exists...skipping...'.format(hand_file))

    #####################################################################################
    #  HAND file has now been prepared, moving to flood mapping part                    #
    #####################################################################################
    # load the staticmaps needed to estimate volumes across all
    xax, yax, riv_length, fill_value = inun_lib.gdal_readmap(options.riv_length_file, 'GTiff')
    riv_length = np.ma.masked_where(riv_length==fill_value, riv_length)
    xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(options.riv_width_file, 'GTiff')
    riv_width[riv_width == fill_value] = 0

    x_res = np.abs((xax[-1]-xax[0])/(len(xax)-1))
    y_res = np.abs((yax[-1]-yax[0])/(len(yax)-1))

    flood_folder = os.path.join(options.dest_path, case_name)
    flood_vol_map = os.path.join(flood_folder, '{:s}_vol.tif'.format(os.path.split(options.flood_map)[1].split('.')[0]))
    if not(os.path.isdir(flood_folder)):
        os.makedirs(flood_folder)
    inun_file_tmp = os.path.join(flood_folder, '{:s}.tif.tmp'.format(case_name))
    inun_file = os.path.join(flood_folder, '{:s}.tif'.format(case_name))
    hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
    drainage_temp_file = os.path.join(flood_folder, 'drainage_temp.map')
    stream_temp_file = os.path.join(flood_folder, 'stream_temp.map')
    flood_vol_temp_file = os.path.join(flood_folder, 'flood_warp_temp.tif')
    # load the data with river levels and compute the volumes
    if options.file_format == 0:
        # assume we need the maximum value in a NetCDF time series grid
        a = nc.Dataset(options.flood_map, 'r')
        xax = a.variables['x'][:]
        yax = a.variables['y'][:]

        flood_series = a.variables[options.flood_variable][:]
        flood_data = flood_series.max(axis=0)
        if np.ma.is_masked(flood_data):
            flood = flood_data.data
            flood[flood_data.mask] = 0
        if yax[-1] > yax[0]:
            yax = np.flipud(yax)
            flood = np.flipud(flood)
        a.close()
    elif options.file_format == 1:
        xax, yax, flood, flood_fill_value = inun_lib.gdal_readmap(options.flood_map, 'PCRaster')
        flood[flood==flood_fill_value] = 0.
    #res_x = x[1]-x[0]
    #res_y = y[1]-y[0]

    # load the bankfull depths
    if options.bankfull_map == '':
        bankfull = np.zeros(flood.shape)
    else:
        if options.file_format == 0:
            a = nc.Dataset(options.bankfull_map, 'r')
            xax = a.variables['x'][:]
            yax = a.variables['y'][:]
            bankfull = a.variables[options.flood_variable][0, :, :]
            if yax[-1] > yax[0]:
                yax = np.flipud(yax)
                bankfull = np.flipud(bankfull)
            a.close()
        elif options.file_format == 1:
            xax, yax, bankfull, bankfull_fill_value = inun_lib.gdal_readmap(options.bankfull_map, 'PCRaster')
#     flood = bankfull*2
    # res_x = 2000
    # res_y = 2000
    # subtract the bankfull water level to get flood levels (above bankfull)
    flood_vol = np.maximum(flood-bankfull, 0)
    flood_vol_m = riv_length*riv_width*flood_vol/(x_res * y_res)  # flood volume expressed as meters of water depth spread over the full grid cell
    flood_vol_m_data = flood_vol_m.data
    flood_vol_m_data[flood_vol_m.mask] = -999.
    print('Saving water layer map to {:s}'.format(flood_vol_map))
    # write to a tiff file
    inun_lib.gdal_writemap(flood_vol_map, 'GTiff', xax, yax, np.maximum(flood_vol_m_data, 0), -999.)
    ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
    ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
    ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)

    logger.info('Preparing flood map in {:s} ...please wait...'.format(inun_file))
    ds_inun = inun_lib.prepare_gdal(inun_file_tmp, x, y, logging=logger, srs=srs)
    band_inun = ds_inun.GetRasterBand(1)

    # loop over all the tiles
    n = 0
    for x_loop in range(0, len(x), options.x_tile):
        x_start = np.maximum(x_loop, 0)
        x_end = np.minimum(x_loop + options.x_tile, len(x))
        # determine actual overlap for cutting
        for y_loop in range(0, len(y), options.y_tile):
            x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
            x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
            n += 1
            # print('tile {:001d}:'.format(n))
            y_start = np.maximum(y_loop, 0)
            y_end = np.minimum(y_loop + options.y_tile, len(y))
            y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
            y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
            x_tile_ax = x[x_start - x_overlap_min:x_end + x_overlap_max]
            y_tile_ax = y[y_start - y_overlap_min:y_end + y_overlap_max]

            # cut out DEM
            logger.debug('handling xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end, y_start, y_end))
            hand = rasterband_hand.ReadAsArray(x_start - x_overlap_min,
                                                 y_start - y_overlap_min,
                                                 (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                 (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                 )

            drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                 y_start - y_overlap_min,
                                                 (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                 (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                 )
            stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                   y_start - y_overlap_min,
                                                   (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                   (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                   )
            print('len x-ax: {:d} len y-ax {:d} x-shape {:d} y-shape {:d}'.format(len(x_tile_ax), len(y_tile_ax), hand.shape[1], hand.shape[0]))
            inun_lib.gdal_writemap(hand_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              hand, rasterband_hand.GetNoDataValue(),
                              gdal_type=gdal.GDT_Float32,
                              logging=logger)
            inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              drainage, rasterband_ldd.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)
            inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              stream, rasterband_stream.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)
            # read as pcr objects
            pcr.setclone(hand_temp_file)
            hand_pcr = pcr.readmap(hand_temp_file)
            drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
            stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to scalar type map
            # prepare a subcatchment map

            stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr, stream_pcr, options.catchment_strahler) # generate subcatchments
            drainage_surf = pcr.ifthen(stream_ge > 0, pcr.accuflux(drainage_pcr, 1))  # proxy of drainage surface inaccurate at tile edges
            # compute weights for spreadzone (1/drainage_surf)
            subcatch = pcr.spreadzone(subcatch, 0, 0)

            # TODO check weighting scheme, perhaps not necessary
            # weight = 1./pcr.scalar(pcr.spreadzone(pcr.cover(pcr.ordinal(drainage_surf), 0), 0, 0))
            # subcatch_fill = pcr.scalar(pcr.spreadzone(subcatch, 0, weight))
            # # cover subcatch with subcatch_fill
            # pcr.report(weight, 'weight_{:02d}.map'.format(n))
            # pcr.report(subcatch, 'subcatch_{:02d}.map'.format(n))
            # pcr.report(pcr.nominal(subcatch_fill), 'subcatch_fill_{:02d}.map'.format(n))
            inun_lib.gdal_warp(flood_vol_map, hand_temp_file, flood_vol_temp_file, gdal_interp=gdalconst.GRA_NearestNeighbour) # ,
            x_tile_ax, y_tile_ax, flood_meter, fill_value = inun_lib.gdal_readmap(flood_vol_temp_file, 'GTiff')
            # convert meter depth to volume [m3]
            flood_vol = pcr.numpy2pcr(pcr.Scalar, flood_meter, fill_value)*((x_tile_ax[1] - x_tile_ax[0]) * (y_tile_ax[0] - y_tile_ax[1]))  # resolution of SRTM *1166400000.
            ## now we have some nice volume. Now we need to redistribute!
            inundation_pcr = inun_lib.volume_spread(drainage_pcr, hand_pcr, subcatch, flood_vol,
                                           volume_thres=0., iterations=options.iterations,
                                           area_multiplier=options.area_multiplier) # 1166400000.
            inundation = pcr.pcr2numpy(inundation_pcr, -9999.)
            # cut relevant part
            if y_overlap_max == 0:
                y_overlap_max = -inundation.shape[0]
            if x_overlap_max == 0:
                x_overlap_max = -inundation.shape[1]
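            # note: a zero overlap at the upper edge would make the "-overlap" slice
            # below end at index 0 (empty); replacing it with the negative full axis
            # length makes the slice run to the array edge instead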
            inundation_cut = inundation[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]
            # inundation_cut
            band_inun.WriteArray(inundation_cut, x_start, y_start)
            band_inun.FlushCache()
            # clean up
            os.unlink(flood_vol_temp_file)
            os.unlink(drainage_temp_file)
            os.unlink(hand_temp_file)

            # if n == 35:
            #     band_inun.SetNoDataValue(-9999.)
            #     ds_inun = None
            #     sys.exit(0)
    os.unlink(flood_vol_map)

    logger.info('Finalizing {:s}'.format(inun_file))
    # add the metadata to the file and band
    band_inun.SetNoDataValue(-9999.)
    ds_inun.SetMetadata(metadata_global)
    band_inun.SetMetadata(metadata_var)
    ds_inun = None
    ds_hand = None
    ds_ldd = None
    # rename temporary file to final hand file
    if os.path.isfile(inun_file):
        # remove an old result if available
        os.unlink(inun_file)
    os.rename(inun_file_tmp, inun_file)

    logger.info('Done! Thank you for using hand_contour_inun.py')
    logger, ch = inun_lib.closeLogger(logger, ch)
    del logger, ch
    sys.exit(0)
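A minimal, self-contained sketch of the tiled read-process-trim-write pattern used in the snippet above, with plain numpy arrays standing in for the GDAL raster bands (the `process` function, array sizes, and tile settings are hypothetical stand-ins, not part of the original script):

import numpy as np

def process(block):
    return block * 2.0  # placeholder for the HAND / inundation computation

src = np.arange(100.0).reshape(10, 10)   # stand-in for the input raster band
dst = np.zeros_like(src)                 # stand-in for the output raster band
tile, overlap = 4, 2
for x0 in range(0, src.shape[1], tile):
    x1 = min(x0 + tile, src.shape[1])
    for y0 in range(0, src.shape[0], tile):
        y1 = min(y0 + tile, src.shape[0])
        # actual overlap available on each side of this tile
        xo_min = x0 - max(x0 - overlap, 0)
        xo_max = min(x1 + overlap, src.shape[1]) - x1
        yo_min = y0 - max(y0 - overlap, 0)
        yo_max = min(y1 + overlap, src.shape[0]) - y1
        # read the tile plus its halo and process it
        block = process(src[y0 - yo_min:y1 + yo_max, x0 - xo_min:x1 + xo_max])
        # a zero upper overlap would make the "-overlap" slice empty, so
        # replace it by the (negative) full axis length first
        if yo_max == 0: yo_max = -block.shape[0]
        if xo_max == 0: xo_max = -block.shape[1]
        dst[y0:y1, x0:x1] = block[yo_min:-yo_max, xo_min:-xo_max]
assert np.allclose(dst, src * 2.0)       # the trimmed cores tile the output exactly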
Exemplo n.º 21
0
print("")
print(cmd)
os.system(cmd)
print("")
input_files['averageClimatologyDischargeMonthAvg'] = out_file

# set the pcraster clone, ldd, landmask, and cell area map
msg = "Setting the clone, ldd, landmask, and cell area maps" + ":"
logger.info(msg)
# - clone
clone_map_file = input_files['clone_map_05min']
pcr.setclone(clone_map_file)
# - ldd
ldd = vos.readPCRmapClone(input_files['ldd_map_05min'], clone_map_file,
                          output_files['tmp_folder'], None, True)
ldd = pcr.lddrepair(pcr.ldd(ldd))
ldd = pcr.lddrepair(ldd)
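# note: lddrepair is applied twice: once right after the ldd type cast and once
# more on the result; the second pass looks redundant but recurs in these snippets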
# - landmask
landmask = pcr.ifthen(pcr.defined(ldd), pcr.boolean(1.0))
# - cell area
cell_area = vos.readPCRmapClone(input_files['cell_area_05min'], clone_map_file,
                                output_files['tmp_folder'])

# set the basin map
msg = "Setting the basin map" + ":"
logger.info(msg)
basin_map = pcr.nominal(\
            vos.readPCRmapClone(input_files['basin_map_05min'],
                                input_files['clone_map_05min'],
                                output_files['tmp_folder'],
                                None, False, None, True))
# - set the landmask
landmask_map_file = "/projects/0/dfguu/data/hydroworld/others/05ArcMinCloneMaps/new_masks_from_top/mask_"  + str(mask_code) + ".map"
msg = "Set the landmask to : " + str(landmask_map_file)
logger.info(msg)
landmask = pcr.readmap(landmask_map_file)

# resampling low resolution ldd map
msg = "Resample the low resolution ldd map."
logger.info(msg)
ldd_map_low_resolution_file_name = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/lddsound_05min.map"
ldd_map_low_resolution = vos.readPCRmapClone(ldd_map_low_resolution_file_name, \
                                             clone_map_file, \
                                             tmp_folder, \
                                             None, True, None, False)
ldd_map_low_resolution = pcr.ifthen(landmask, ldd_map_low_resolution)    # NOTE THAT YOU MAY NOT HAVE TO MASK-OUT THE LDD.
ldd_map_low_resolution = pcr.lddrepair(pcr.ldd(ldd_map_low_resolution))
ldd_map_low_resolution = pcr.lddrepair(ldd_map_low_resolution)
pcr.report(ldd_map_low_resolution, "resampled_low_resolution_ldd.map")


# permanent water bodies files (at 5 arc-minute resolution)
reservoir_capacity_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/reservoircapacity_2010.map"
fracwat_file            = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/fracwat_2010.map"
water_body_id_file      = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/waterbodyid_2010.map"


# cell_area_file
cell_area_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/cellsize05min.correct.map"


# bankfull capacity (5 arcmin, volume: m3)
Exemplo n.º 23
0
def main():
    ### Read input arguments #####
    parser = OptionParser()
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option(
        "-q",
        "--quiet",
        dest="verbose",
        default=True,
        action="store_false",
        help="do not print status messages to stdout",
    )
    parser.add_option(
        "-i",
        "--ini",
        dest="inifile",
        default="hand_contour_inun.ini",
        nargs=1,
        help="ini configuration file",
    )
    parser.add_option(
        "-f",
        "--flood_map",
        nargs=1,
        dest="flood_map",
        help="Flood map file (NetCDF point time series file",
    )
    parser.add_option(
        "-v",
        "--flood_variable",
        nargs=1,
        dest="flood_variable",
        default="water_level",
        help="variable name of flood water level",
    )
    parser.add_option(
        "-b",
        "--bankfull_map",
        dest="bankfull_map",
        default="",
        help="Map containing bank full level (is subtracted from flood map, in NetCDF)",
    )
    parser.add_option(
        "-c",
        "--catchment",
        dest="catchment_strahler",
        default=7,
        type="int",
        help="Strahler order threshold >= are selected as catchment boundaries",
    )
    parser.add_option(
        "-t",
        "--time",
        dest="time",
        default="",
        help="time in YYYYMMDDHHMMSS, overrides time in NetCDF input if set",
    )
    # parser.add_option('-s', '--hand_strahler',
    #                   dest='hand_strahler', default=7, type='int',
    #                   help='Strahler order threshold >= selected as riverine')
    parser.add_option(
        "-m",
        "--max_strahler",
        dest="max_strahler",
        default=1000,
        type="int",
        help="Maximum Strahler order to loop over",
    )
    parser.add_option(
        "-d", "--destination", dest="dest_path", default="inun", help="Destination path"
    )
    parser.add_option(
        "-H",
        "--hand_file_prefix",
        dest="hand_file_prefix",
        default="",
        help="optional HAND file prefix of already generated HAND files",
    )
    parser.add_option(
        "-n",
        "--neg_HAND",
        dest="neg_HAND",
        default=0,
        type="int",
        help="if set to 1, allow for negative HAND values in HAND maps",
    )
    (options, args) = parser.parse_args()

    if not os.path.exists(options.inifile):
        print "path to ini file cannot be found"
        sys.exit(1)
    options.dest_path = os.path.abspath(options.dest_path)

    if not (os.path.isdir(options.dest_path)):
        os.makedirs(options.dest_path)

    # set up the logger
    flood_name = os.path.split(options.flood_map)[1].split(".")[0]
    # case_name = 'inun_{:s}_hand_{:02d}_catch_{:02d}'.format(flood_name, options.hand_strahler, options.catchment_strahler)
    case_name = "inun_{:s}_catch_{:02d}".format(flood_name, options.catchment_strahler)
    logfilename = os.path.join(options.dest_path, "hand_contour_inun.log")
    logger, ch = inun_lib.setlogger(logfilename, "HAND_INUN", options.verbose)
    logger.info("$Id: $")
    logger.info("Flood map: {:s}".format(options.flood_map))
    logger.info("Bank full map: {:s}".format(options.bankfull_map))
    logger.info("Destination path: {:s}".format(options.dest_path))
    # read out ini file
    ### READ CONFIG FILE
    # open config-file
    config = inun_lib.open_conf(options.inifile)

    # read settings
    options.dem_file = inun_lib.configget(config, "HighResMaps", "dem_file", True)
    options.ldd_file = inun_lib.configget(config, "HighResMaps", "ldd_file", True)
    options.stream_file = inun_lib.configget(config, "HighResMaps", "stream_file", True)
    options.riv_length_fact_file = inun_lib.configget(
        config, "wflowResMaps", "riv_length_fact_file", True
    )
    options.ldd_wflow = inun_lib.configget(config, "wflowResMaps", "ldd_wflow", True)
    options.riv_width_file = inun_lib.configget(
        config, "wflowResMaps", "riv_width_file", True
    )
    options.file_format = inun_lib.configget(
        config, "file_settings", "file_format", 0, datatype="int"
    )
    options.out_format = inun_lib.configget(
        config, "file_settings", "out_format", 0, datatype="int"
    )
    options.latlon = inun_lib.configget(
        config, "file_settings", "latlon", 0, datatype="int"
    )
    options.x_tile = inun_lib.configget(
        config, "tiling", "x_tile", 10000, datatype="int"
    )
    options.y_tile = inun_lib.configget(
        config, "tiling", "y_tile", 10000, datatype="int"
    )
    options.x_overlap = inun_lib.configget(
        config, "tiling", "x_overlap", 1000, datatype="int"
    )
    options.y_overlap = inun_lib.configget(
        config, "tiling", "y_overlap", 1000, datatype="int"
    )
    options.iterations = inun_lib.configget(
        config, "inundation", "iterations", 20, datatype="int"
    )
    options.initial_level = inun_lib.configget(
        config, "inundation", "initial_level", 32., datatype="float"
    )
    options.flood_volume_type = inun_lib.configget(
        config, "inundation", "flood_volume_type", 0, datatype="int"
    )

    # options.area_multiplier = inun_lib.configget(config, 'inundation',
    #                               'area_multiplier', 1., datatype='float')
    logger.info("DEM file: {:s}".format(options.dem_file))
    logger.info("LDD file: {:s}".format(options.ldd_file))
    logger.info("streamfile: {:s}".format(options.stream_file))
    logger.info("Columns per tile: {:d}".format(options.x_tile))
    logger.info("Rows per tile: {:d}".format(options.y_tile))
    logger.info("Columns overlap: {:d}".format(options.x_overlap))
    logger.info("Rows overlap: {:d}".format(options.y_overlap))
    metadata_global = {}
    # add metadata from the section [metadata_global]
    meta_keys = config.options("metadata_global")
    for key in meta_keys:
        metadata_global[key] = config.get("metadata_global", key)
    # add a number of metadata variables that are mandatory
    metadata_global["config_file"] = os.path.abspath(options.inifile)
    metadata_var = {}
    metadata_var["units"] = "m"
    metadata_var["standard_name"] = "water_surface_height_above_reference_datum"
    metadata_var["long_name"] = "flooding"
    metadata_var[
        "comment"
    ] = "water_surface_reference_datum_altitude is given in file {:s}".format(
        options.dem_file
    )
    if not os.path.exists(options.dem_file):
        logger.error("path to dem file {:s} cannot be found".format(options.dem_file))
        sys.exit(1)
    if not os.path.exists(options.ldd_file):
        logger.error("path to ldd file {:s} cannot be found".format(options.ldd_file))
        sys.exit(1)

    # Read extent from a GDAL compatible file
    try:
        extent = inun_lib.get_gdal_extent(options.dem_file)
    except:
        msg = "Input file {:s} not a gdal compatible file".format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    try:
        x, y = inun_lib.get_gdal_axes(options.dem_file, logging=logger)
        srs = inun_lib.get_gdal_projection(options.dem_file, logging=logger)
    except:
        msg = "Input file {:s} not a gdal compatible file".format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    # read history from flood file
    if options.file_format == 0:
        a = nc.Dataset(options.flood_map, "r")
        metadata_global[
            "history"
        ] = "Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}".format(
            os.path.abspath(options.flood_map), a.history
        )
        a.close()
    else:
        metadata_global[
            "history"
        ] = "Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}".format(
            os.path.abspath(options.flood_map), "PCRaster file, no history"
        )

    # first write subcatch maps and hand maps
    ############### TODO ######
    # setup a HAND file for each strahler order

    max_s = inun_lib.define_max_strahler(options.stream_file, logging=logger)
    stream_max = np.minimum(max_s, options.max_strahler)
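    # a HAND raster is generated for every Strahler order from the catchment
    # threshold up to the highest order present in the stream file (capped by
    # --max_strahler); the flood-volume spreading below then runs order by order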

    for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
        dem_name = os.path.split(options.dem_file)[1].split(".")[0]
        if os.path.isfile(
            "{:s}_{:02d}.tif".format(options.hand_file_prefix, hand_strahler)
        ):
            hand_file = "{:s}_{:02d}.tif".format(
                options.hand_file_prefix, hand_strahler
            )
        else:
            logger.info(
                "No HAND files with HAND prefix were found, checking {:s}_hand_strahler_{:02d}.tif".format(
                    dem_name, hand_strahler
                )
            )
            hand_file = os.path.join(
                options.dest_path,
                "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
            )
        if not (os.path.isfile(hand_file)):
            # hand file does not exist yet! Generate it, otherwise skip!
            logger.info(
                "HAND file {:s} not found, start setting up...please wait...".format(
                    hand_file
                )
            )
            hand_file_tmp = os.path.join(
                options.dest_path,
                "{:s}_hand_strahler_{:02d}.tif.tmp".format(dem_name, hand_strahler),
            )
            ds_hand, band_hand = inun_lib.prepare_gdal(
                hand_file_tmp, x, y, logging=logger, srs=srs
            )
            # band_hand = ds_hand.GetRasterBand(1)

            # Open terrain data for reading
            ds_dem, rasterband_dem = inun_lib.get_gdal_rasterband(options.dem_file)
            ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
            ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(
                options.stream_file
            )
            n = 0
            for x_loop in range(0, len(x), options.x_tile):
                x_start = np.maximum(x_loop, 0)
                x_end = np.minimum(x_loop + options.x_tile, len(x))
                # determine actual overlap for cutting
                for y_loop in range(0, len(y), options.y_tile):
                    x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
                    x_overlap_max = (
                        np.minimum(x_end + options.x_overlap, len(x)) - x_end
                    )
                    n += 1
                    # print('tile {:001d}:'.format(n))
                    y_start = np.maximum(y_loop, 0)
                    y_end = np.minimum(y_loop + options.y_tile, len(y))
                    y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
                    y_overlap_max = (
                        np.minimum(y_end + options.y_overlap, len(y)) - y_end
                    )
                    # cut out DEM
                    logger.debug(
                        "Computing HAND for xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}".format(
                            x_start, x_end, y_start, y_end
                        )
                    )
                    terrain = rasterband_dem.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )

                    drainage = rasterband_ldd.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )
                    stream = rasterband_stream.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )
                    # write to temporary file
                    terrain_temp_file = os.path.join(
                        options.dest_path, "terrain_temp.map"
                    )
                    drainage_temp_file = os.path.join(
                        options.dest_path, "drainage_temp.map"
                    )
                    stream_temp_file = os.path.join(
                        options.dest_path, "stream_temp.map"
                    )
                    if rasterband_dem.GetNoDataValue() is not None:
                        inun_lib.gdal_writemap(
                            terrain_temp_file,
                            "PCRaster",
                            np.arange(0, terrain.shape[1]),
                            np.arange(0, terrain.shape[0]),
                            terrain,
                            rasterband_dem.GetNoDataValue(),
                            gdal_type=gdal.GDT_Float32,
                            logging=logger,
                        )
                    else:
                        # in case no nodata value is found
                        logger.warning(
                            "No nodata value found in {:s}; assuming -9999".format(
                                options.dem_file
                            )
                        )
                        inun_lib.gdal_writemap(
                            terrain_temp_file,
                            "PCRaster",
                            np.arange(0, terrain.shape[1]),
                            np.arange(0, terrain.shape[0]),
                            terrain,
                            -9999.,
                            gdal_type=gdal.GDT_Float32,
                            logging=logger,
                        )

                    inun_lib.gdal_writemap(
                        drainage_temp_file,
                        "PCRaster",
                        np.arange(0, terrain.shape[1]),
                        np.arange(0, terrain.shape[0]),
                        drainage,
                        rasterband_ldd.GetNoDataValue(),
                        gdal_type=gdal.GDT_Int32,
                        logging=logger,
                    )
                    inun_lib.gdal_writemap(
                        stream_temp_file,
                        "PCRaster",
                        np.arange(0, terrain.shape[1]),
                        np.arange(0, terrain.shape[0]),
                        stream,
                        rasterband_ldd.GetNoDataValue(),
                        gdal_type=gdal.GDT_Int32,
                        logging=logger,
                    )
                    # read as pcr objects
                    pcr.setclone(terrain_temp_file)
                    terrain_pcr = pcr.readmap(terrain_temp_file)
                    drainage_pcr = pcr.lddrepair(
                        pcr.ldd(pcr.readmap(drainage_temp_file))
                    )  # convert to ldd type map
                    stream_pcr = pcr.scalar(
                        pcr.readmap(stream_temp_file)
                    )  # convert to scalar type map

                    # check if the highest stream order of the tile is below hand_strahler;
                    # if so, DEM values are taken instead of HAND values.
                    max_stream_tile = inun_lib.define_max_strahler(
                        stream_temp_file, logging=logger
                    )
                    if max_stream_tile < hand_strahler:
                        hand_pcr = terrain_pcr
                        logger.info(
                            "For this tile, DEM values are used instead of HAND because there is no stream order larger than {:02d}".format(
                                hand_strahler
                            )
                        )
                    else:
                        # compute streams
                        stream_ge, subcatch = inun_lib.subcatch_stream(
                            drainage_pcr, hand_strahler, stream=stream_pcr
                        )  # generate streams
                        # compute basins
                        stream_ge_dummy, subcatch = inun_lib.subcatch_stream(
                            drainage_pcr, options.catchment_strahler, stream=stream_pcr
                        )  # generate streams
                        basin = pcr.boolean(subcatch)
                        hand_pcr, dist_pcr = inun_lib.derive_HAND(
                            terrain_pcr,
                            drainage_pcr,
                            3000,
                            rivers=pcr.boolean(stream_ge),
                            basin=basin,
                            neg_HAND=options.neg_HAND,
                        )
                    # convert to numpy
                    hand = pcr.pcr2numpy(hand_pcr, -9999.)
                    # cut relevant part
                    if y_overlap_max == 0:
                        y_overlap_max = -hand.shape[0]
                    if x_overlap_max == 0:
                        x_overlap_max = -hand.shape[1]
                    hand_cut = hand[
                        0 + y_overlap_min : -y_overlap_max,
                        0 + x_overlap_min : -x_overlap_max,
                    ]

                    band_hand.WriteArray(hand_cut, x_start, y_start)
                    os.unlink(terrain_temp_file)
                    os.unlink(drainage_temp_file)
                    os.unlink(stream_temp_file)
                    band_hand.FlushCache()
            ds_dem = None
            ds_ldd = None
            ds_stream = None
            band_hand.SetNoDataValue(-9999.)
            ds_hand = None
            logger.info("Finalizing {:s}".format(hand_file))
            # rename temporary file to final hand file
            os.rename(hand_file_tmp, hand_file)
        else:
            logger.info("HAND file {:s} already exists...skipping...".format(hand_file))

    #####################################################################################
    #  HAND file has now been prepared, moving to flood mapping part                    #
    #####################################################################################
    # set the clone
    pcr.setclone(options.ldd_wflow)
    # read wflow ldd as pcraster object
    ldd_pcr = pcr.readmap(options.ldd_wflow)
    xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(
        options.riv_width_file, "GTiff", logging=logger
    )

    # determine cell length in meters using ldd_pcr as clone (if latlon=True, degree values are converted to meters)
    x_res, y_res, reallength_wflow = pcrut.detRealCellLength(
        pcr.scalar(ldd_pcr), not (bool(options.latlon))
    )
    cell_surface_wflow = pcr.pcr2numpy(x_res * y_res, 0)

    if options.flood_volume_type == 0:
        # load the staticmaps needed to estimate volumes across all
        # xax, yax, riv_length, fill_value = inun_lib.gdal_readmap(options.riv_length_file, 'GTiff', logging=logger)
        # riv_length = np.ma.masked_where(riv_length==fill_value, riv_length)
        xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(
            options.riv_width_file, "GTiff", logging=logger
        )
        riv_width[riv_width == fill_value] = 0

        # read river length factor file (multiplier)
        xax, yax, riv_length_fact, fill_value = inun_lib.gdal_readmap(
            options.riv_length_fact_file, "GTiff", logging=logger
        )
        riv_length_fact = np.ma.masked_where(
            riv_length_fact == fill_value, riv_length_fact
        )
        drain_length = wflow_lib.detdrainlength(ldd_pcr, x_res, y_res)

        # compute river length in each cell
        riv_length = pcr.pcr2numpy(drain_length, 0) * riv_length_fact
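        # detdrainlength returns the ldd-aligned flow-path length through each
        # cell; multiplying by the (dimensionless) factor map yields the actual
        # river length per cell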
        # riv_length_pcr = pcr.numpy2pcr(pcr.Scalar, riv_length, 0)

    flood_folder = os.path.join(options.dest_path, case_name)
    flood_vol_map = os.path.join(
        flood_folder,
        "{:s}_vol.tif".format(os.path.split(options.flood_map)[1].split(".")[0]),
    )
    if not (os.path.isdir(flood_folder)):
        os.makedirs(flood_folder)
    if options.out_format == 0:
        inun_file_tmp = os.path.join(flood_folder, "{:s}.tif.tmp".format(case_name))
        inun_file = os.path.join(flood_folder, "{:s}.tif".format(case_name))
    else:
        inun_file_tmp = os.path.join(flood_folder, "{:s}.nc.tmp".format(case_name))
        inun_file = os.path.join(flood_folder, "{:s}.nc".format(case_name))

    hand_temp_file = os.path.join(flood_folder, "hand_temp.map")
    drainage_temp_file = os.path.join(flood_folder, "drainage_temp.map")
    stream_temp_file = os.path.join(flood_folder, "stream_temp.map")
    flood_vol_temp_file = os.path.join(flood_folder, "flood_warp_temp.tif")
    # load the data with river levels and compute the volumes
    if options.file_format == 0:
        # assume we need the maximum value in a NetCDF time series grid
        logger.info("Reading flood from {:s} NetCDF file".format(options.flood_map))
        a = nc.Dataset(options.flood_map, "r")
        if options.latlon == 0:
            xax = a.variables["x"][:]
            yax = a.variables["y"][:]
        else:
            try:
                xax = a.variables["lon"][:]
                yax = a.variables["lat"][:]
            except:
                xax = a.variables["x"][:]
                yax = a.variables["y"][:]
        if options.time == "":
            time_list = nc.num2date(
                a.variables["time"][:],
                units=a.variables["time"].units,
                calendar=a.variables["time"].calendar,
            )
            time = [time_list[len(time_list) // 2]]
        else:
            time = [dt.datetime.strptime(options.time, "%Y%m%d%H%M%S")]

        flood_series = a.variables[options.flood_variable][:]
        flood_data = flood_series.max(axis=0)
        if np.ma.is_masked(flood_data):
            flood = flood_data.data
            flood[flood_data.mask] = 0
        else:
            flood = flood_data
        if yax[-1] > yax[0]:
            yax = np.flipud(yax)
            flood = np.flipud(flood)
        a.close()
    elif options.file_format == 1:
        logger.info("Reading flood from {:s} PCRaster file".format(options.flood_map))
        xax, yax, flood, flood_fill_value = inun_lib.gdal_readmap(
            options.flood_map, "PCRaster", logging=logger
        )
        flood = np.ma.masked_equal(flood, flood_fill_value)
        if options.time == "":
            options.time = "20000101000000"
        time = [dt.datetime.strptime(options.time, "%Y%m%d%H%M%S")]

        flood[flood == flood_fill_value] = 0.
    # load the bankfull depths
    if options.bankfull_map == "":
        bankfull = np.zeros(flood.shape)
    else:
        if options.file_format == 0:
            logger.info(
                "Reading bankfull from {:s} NetCDF file".format(options.bankfull_map)
            )
            a = nc.Dataset(options.bankfull_map, "r")
            xax = a.variables["x"][:]
            yax = a.variables["y"][:]
            #            xax = a.variables['lon'][:]
            #            yax = a.variables['lat'][:]

            bankfull_series = a.variables[options.flood_variable][:]
            bankfull_data = bankfull_series.max(axis=0)
            if np.ma.is_masked(bankfull_data):
                bankfull = bankfull_data.data
                bankfull[bankfull_data.mask] = 0
            else:
                bankfull = bankfull_data
            if yax[-1] > yax[0]:
                yax = np.flipud(yax)
                bankfull = np.flipud(bankfull)
            a.close()
        elif options.file_format == 1:
            logger.info(
                "Reading bankfull from {:s} PCRaster file".format(options.bankfull_map)
            )
            xax, yax, bankfull, bankfull_fill_value = inun_lib.gdal_readmap(
                options.bankfull_map, "PCRaster", logging=logger
            )
            bankfull = np.ma.masked_equal(bankfull, bankfull_fill_value)
    #     flood = bankfull*2
    # res_x = 2000
    # res_y = 2000
    # subtract the bankfull water level to get flood levels (above bankfull)
    flood_vol = np.maximum(flood - bankfull, 0)
    if options.flood_volume_type == 0:
        flood_vol_m = (
            riv_length * riv_width * flood_vol / cell_surface_wflow
        )  # flood volume expressed as a water depth (m) over the full cell
        flood_vol_m_pcr = pcr.numpy2pcr(pcr.Scalar, flood_vol_m, 0)
    else:
        flood_vol_m = flood_vol / cell_surface_wflow
    flood_vol_m_data = flood_vol_m.data
    flood_vol_m_data[flood_vol_m.mask] = -999.
    logger.info("Saving water layer map to {:s}".format(flood_vol_map))
    # write to a tiff file
    inun_lib.gdal_writemap(
        flood_vol_map,
        "GTiff",
        xax,
        yax,
        np.maximum(flood_vol_m_data, 0),
        -999.,
        logging=logger,
    )
    # this is placed later in the hand loop
    # ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
    ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
    ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)

    logger.info("Preparing flood map in {:s} ...please wait...".format(inun_file))
    if options.out_format == 0:
        ds_inun, band_inun = inun_lib.prepare_gdal(
            inun_file_tmp, x, y, logging=logger, srs=srs
        )
        # band_inun = ds_inun.GetRasterBand(1)
    else:
        ds_inun, band_inun = inun_lib.prepare_nc(
            inun_file_tmp,
            time,
            x,
            np.flipud(y),
            metadata=metadata_global,
            metadata_var=metadata_var,
            logging=logger,
        )
    # loop over all the tiles
    n = 0
    for x_loop in range(0, len(x), options.x_tile):
        x_start = np.maximum(x_loop, 0)
        x_end = np.minimum(x_loop + options.x_tile, len(x))
        # determine actual overlap for cutting
        for y_loop in range(0, len(y), options.y_tile):
            x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
            x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
            n += 1
            # print('tile {:001d}:'.format(n))
            y_start = np.maximum(y_loop, 0)
            y_end = np.minimum(y_loop + options.y_tile, len(y))
            y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
            y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
            x_tile_ax = x[x_start - x_overlap_min : x_end + x_overlap_max]
            y_tile_ax = y[y_start - y_overlap_min : y_end + y_overlap_max]
            # cut out DEM
            logger.debug(
                "handling xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}".format(
                    x_start, x_end, y_start, y_end
                )
            )

            drainage = rasterband_ldd.ReadAsArray(
                x_start - x_overlap_min,
                y_start - y_overlap_min,
                (x_end + x_overlap_max) - (x_start - x_overlap_min),
                (y_end + y_overlap_max) - (y_start - y_overlap_min),
            )
            stream = rasterband_stream.ReadAsArray(
                x_start - x_overlap_min,
                y_start - y_overlap_min,
                (x_end + x_overlap_max) - (x_start - x_overlap_min),
                (y_end + y_overlap_max) - (y_start - y_overlap_min),
            )

            # stream_max = np.minimum(stream.max(), options.max_strahler)

            inun_lib.gdal_writemap(
                drainage_temp_file,
                "PCRaster",
                x_tile_ax,
                y_tile_ax,
                drainage,
                rasterband_ldd.GetNoDataValue(),
                gdal_type=gdal.GDT_Int32,
                logging=logger,
            )
            inun_lib.gdal_writemap(
                stream_temp_file,
                "PCRaster",
                x_tile_ax,
                y_tile_ax,
                stream,
                rasterband_stream.GetNoDataValue(),
                gdal_type=gdal.GDT_Int32,
                logging=logger,
            )

            # read as pcr objects
            pcr.setclone(stream_temp_file)
            drainage_pcr = pcr.lddrepair(
                pcr.ldd(pcr.readmap(drainage_temp_file))
            )  # convert to ldd type map
            stream_pcr = pcr.scalar(
                pcr.readmap(stream_temp_file)
            )  # convert to scalar type map

            # warp of flood volume to inundation resolution
            inun_lib.gdal_warp(
                flood_vol_map,
                stream_temp_file,
                flood_vol_temp_file,
                gdal_interp=gdalconst.GRA_NearestNeighbour,
            )  # ,
            x_tile_ax, y_tile_ax, flood_meter, fill_value = inun_lib.gdal_readmap(
                flood_vol_temp_file, "GTiff", logging=logger
            )
            # make sure that the unittrue option is on !! (in case unitcell was used in another function)
            x_res_tile, y_res_tile, reallength = pcrut.detRealCellLength(
                pcr.scalar(stream_pcr), not (bool(options.latlon))
            )
            cell_surface_tile = pcr.pcr2numpy(x_res_tile * y_res_tile, 0)

            # convert meter depth to volume [m3]
            flood_vol = pcr.numpy2pcr(
                pcr.Scalar, flood_meter * cell_surface_tile, fill_value
            )

            # initialize the inundation map (zero everywhere) before looping over the strahler orders
            inundation_pcr = pcr.scalar(stream_pcr) * 0
            for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
                # hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
                if os.path.isfile(
                    os.path.join(
                        options.dest_path,
                        "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
                    )
                ):
                    hand_file = os.path.join(
                        options.dest_path,
                        "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
                    )
                else:
                    hand_file = "{:s}_{:02d}.tif".format(
                        options.hand_file_prefix, hand_strahler
                    )
                ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
                hand = rasterband_hand.ReadAsArray(
                    x_start - x_overlap_min,
                    y_start - y_overlap_min,
                    (x_end + x_overlap_max) - (x_start - x_overlap_min),
                    (y_end + y_overlap_max) - (y_start - y_overlap_min),
                )
                print(
                    "len x-ax: {:d} len y-ax {:d} x-shape {:d} y-shape {:d}".format(
                        len(x_tile_ax), len(y_tile_ax), hand.shape[1], hand.shape[0]
                    )
                )

                inun_lib.gdal_writemap(
                    hand_temp_file,
                    "PCRaster",
                    x_tile_ax,
                    y_tile_ax,
                    hand,
                    rasterband_hand.GetNoDataValue(),
                    gdal_type=gdal.GDT_Float32,
                    logging=logger,
                )

                hand_pcr = pcr.readmap(hand_temp_file)

                stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(
                    drainage_pcr, options.catchment_strahler, stream=stream_pcr
                )
                # stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr)
                stream_ge, subcatch = inun_lib.subcatch_stream(
                    drainage_pcr,
                    options.catchment_strahler,
                    stream=stream_pcr,
                    basin=pcr.boolean(pcr.cover(subcatch_hand, 0)),
                    assign_existing=True,
                    min_strahler=hand_strahler,
                    max_strahler=hand_strahler,
                )  # generate subcatchments, only within basin for HAND
                flood_vol_strahler = pcr.ifthenelse(
                    pcr.boolean(pcr.cover(subcatch, 0)), flood_vol, 0
                )  # mask the flood volume map with the created subcatch map for strahler order = hand_strahler

                inundation_pcr_step = inun_lib.volume_spread(
                    drainage_pcr,
                    hand_pcr,
                    pcr.subcatchment(
                        drainage_pcr, subcatch
                    ),  # to make sure backwater effects can occur from higher order rivers to lower order rivers
                    flood_vol_strahler,
                    volume_thres=0.,
                    iterations=options.iterations,
                    cell_surface=pcr.numpy2pcr(pcr.Scalar, cell_surface_tile, -9999),
                    logging=logger,
                    order=hand_strahler,
                    neg_HAND=options.neg_HAND,
                )  # 1166400000.
                # use maximum value of inundation_pcr_step and new inundation for higher strahler order
                inundation_pcr = pcr.max(inundation_pcr, inundation_pcr_step)
            inundation = pcr.pcr2numpy(inundation_pcr, -9999.)
            # cut relevant part
            if y_overlap_max == 0:
                y_overlap_max = -inundation.shape[0]
            if x_overlap_max == 0:
                x_overlap_max = -inundation.shape[1]
            inundation_cut = inundation[
                0 + y_overlap_min : -y_overlap_max, 0 + x_overlap_min : -x_overlap_max
            ]
            # inundation_cut
            if options.out_format == 0:
                band_inun.WriteArray(inundation_cut, x_start, y_start)
                band_inun.FlushCache()
            else:
                # with netCDF, data is stored upside-down.
                inun_lib.write_tile_nc(band_inun, inundation_cut, x_start, y_start)
            # clean up
            os.unlink(flood_vol_temp_file)
            os.unlink(drainage_temp_file)
            os.unlink(hand_temp_file)
            os.unlink(
                stream_temp_file
            )  # also remove temp stream file from output folder

            # if n == 35:
            #     band_inun.SetNoDataValue(-9999.)
            #     ds_inun = None
            #     sys.exit(0)
    # os.unlink(flood_vol_map)

    logger.info("Finalizing {:s}".format(inun_file))
    # add the metadata to the file and band
    # band_inun.SetNoDataValue(-9999.)
    # ds_inun.SetMetadata(metadata_global)
    # band_inun.SetMetadata(metadata_var)
    if options.out_format == 0:
        ds_inun = None
        ds_hand = None
    else:
        ds_inun.close()

    ds_ldd = None
    # rename temporary file to final hand file
    if os.path.isfile(inun_file):
        # remove an old result if available
        os.unlink(inun_file)
    os.rename(inun_file_tmp, inun_file)

    logger.info("Done! Thank you for using hand_contour_inun.py")
    logger, ch = inun_lib.closeLogger(logger, ch)
    del logger, ch
    sys.exit(0)
Exemplo n.º 24
0
    def __init__(self, iniItems, landmask):
        object.__init__(self)
        
        # cloneMap, temporary directory for the resample process, temporary directory for the modflow process, absolute path for input directory, landmask
        self.cloneMap        = iniItems.cloneMap
        self.tmpDir          = iniItems.tmpDir
        self.tmp_modflow_dir = iniItems.tmp_modflow_dir
        self.inputDir        = iniItems.globalOptions['inputDir']
        self.landmask        = landmask
        
        # configuration from the ini file
        self.iniItems = iniItems
                
        # topography properties: read several variables from the netcdf file
        for var in ['dem_minimum','dem_maximum','dem_average','dem_standard_deviation',\
                    'slopeLength','orographyBeta','tanslope',\
                    'dzRel0000','dzRel0001','dzRel0005',\
                    'dzRel0010','dzRel0020','dzRel0030','dzRel0040','dzRel0050',\
                    'dzRel0060','dzRel0070','dzRel0080','dzRel0090','dzRel0100']:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['topographyNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        # channel properties: read several variables from the netcdf file
        for var in ['lddMap','cellAreaMap','gradient','bankfull_width',
                    'bankfull_depth','dem_floodplain','dem_riverbed']:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['channelNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
        
        # minimum channel width
        minimum_channel_width = 0.5                                               # TODO: Define this one in the configuration file
        self.bankfull_width = pcr.max(minimum_channel_width, self.bankfull_width)
        
        #~ # cell fraction of channel water reaching the floodplain              # NOT USED YET
        #~ self.flood_plain_fraction = self.return_innundation_fraction(pcr.max(0.0, self.dem_floodplain - self.dem_minimum))
        
        # coefficient of Manning
        self.manningsN = vos.readPCRmapClone(self.iniItems.modflowParameterOptions['manningsN'],\
                                             self.cloneMap,self.tmpDir,self.inputDir)
        
        # minimum channel gradient
        minGradient   = 0.00005                                                   # TODO: Define this one in the configuration file
        self.gradient = pcr.max(minGradient, pcr.cover(self.gradient, minGradient))

        # correcting lddMap
        self.lddMap = pcr.ifthen(pcr.scalar(self.lddMap) > 0.0, self.lddMap)
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
        
        # channelLength = approximation of channel length (unit: m)  # This is approximated by cell diagonal. 
        cellSizeInArcMin      = np.round(pcr.clone().cellSize()*60.)               # FIXME: This one will not work if you use the resolution: 0.5, 1.5, 2.5 arc-min
        verticalSizeInMeter   = cellSizeInArcMin*1852.                            
        horizontalSizeInMeter = self.cellAreaMap/verticalSizeInMeter
        self.channelLength    = ((horizontalSizeInMeter)**(2)+\
                                 (verticalSizeInMeter)**(2))**(0.5)
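        # diagonal approximation: the meridional size v follows from the cell size
        # in arc-minutes (1 arc-minute of latitude ~ 1852 m, so e.g. 5 arc-min ->
        # 9260 m), the zonal size is h = cellArea / v, and the channel length is
        # the cell diagonal sqrt(h**2 + v**2)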
        
        # option for lakes and reservoir
        self.onlyNaturalWaterBodies = False
        if self.iniItems.modflowParameterOptions['onlyNaturalWaterBodies'] == "True": self.onlyNaturalWaterBodies = True

        # groundwater linear recession coefficient (day-1); the linear reservoir concept is still used
        # to represent fast-response flow, particularly from karstic aquifers in mountainous regions
        self.recessionCoeff = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                                 'recessionCoeff', self.cloneMap)
        self.recessionCoeff = pcr.cover(self.recessionCoeff,0.00)       
        self.recessionCoeff = pcr.min(1.0000,self.recessionCoeff)       
        #
        if 'minRecessionCoeff' in iniItems.modflowParameterOptions.keys():
            minRecessionCoeff = float(iniItems.modflowParameterOptions['minRecessionCoeff'])
        else:
            minRecessionCoeff = 1.0e-4                                       # This is the minimum value used in Van Beek et al. (2011). 
        self.recessionCoeff = pcr.max(minRecessionCoeff,self.recessionCoeff)      
        
        # aquifer saturated conductivity (m/day)
        self.kSatAquifer = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                             'kSatAquifer', self.cloneMap)
        self.kSatAquifer = pcr.cover(self.kSatAquifer,pcr.mapmaximum(self.kSatAquifer))       
        self.kSatAquifer = pcr.max(0.001,self.kSatAquifer)
        # TODO: Define the minimum value as part of the configuration file
        
        # aquifer specific yield (dimensionless)
        self.specificYield = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                               'specificYield', self.cloneMap)
        self.specificYield = pcr.cover(self.specificYield,pcr.mapmaximum(self.specificYield))       
        self.specificYield = pcr.max(0.010,self.specificYield)         # TODO: TO BE CHECKED: The resample process of specificYield     
        self.specificYield = pcr.min(1.000,self.specificYield)       
        # TODO: Define the minimum value as part of the configuration file

        # estimate of thickness (unit: m) of accessible groundwater
        totalGroundwaterThickness = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['estimateOfTotalGroundwaterThicknessNC'],\
                                    'thickness', self.cloneMap)
        # extrapolation 
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.0))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.5))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness, 0.0)
        #
        # set minimum thickness
        minimumThickness = pcr.scalar(float(\
                           self.iniItems.modflowParameterOptions['minimumTotalGroundwaterThickness']))
        totalGroundwaterThickness = pcr.max(minimumThickness, totalGroundwaterThickness)
        #
        # set maximum thickness: 250 m.   # TODO: Define this one as part of the ini file
        maximumThickness = 250.
        self.totalGroundwaterThickness = pcr.min(maximumThickness, totalGroundwaterThickness)
        # TODO: Define the maximum value as part of the configuration file

        # surface water bed thickness  (unit: m)
        bed_thickness  = 0.1              # TODO: Define this as part of the configuration file
        # surface water bed resistance (unit: day)
        bed_resistance = bed_thickness / (self.kSatAquifer) 
        minimum_bed_resistance = 1.0      # TODO: Define this as part of the configuration file
        self.bed_resistance = pcr.max(minimum_bed_resistance, bed_resistance)
        
        # option to ignore capillary rise
        self.ignoreCapRise = True
        if self.iniItems.modflowParameterOptions['ignoreCapRise'] == "False": self.ignoreCapRise = False
        
        # a variable to indicate if the modflow has been called or not
        self.modflow_has_been_called = False
        
        # list of the convergence criteria for HCLOSE (unit: m)
        # - Deltares' default value is 0.001 m                         # check this value with Jarno
        self.criteria_HCLOSE = [0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]  
        self.criteria_HCLOSE = sorted(self.criteria_HCLOSE)
        
        # list of the convergence criteria for RCLOSE (unit: m3)
        # - Deltares' default value for their 25 and 250 m resolution models is 10 m3  # check this value with Jarno
        cell_area_assumption = verticalSizeInMeter * float(pcr.cellvalue(pcr.mapmaximum(horizontalSizeInMeter),1)[0])
        self.criteria_RCLOSE = [10., 10.* cell_area_assumption/(250.*250.), 10.* cell_area_assumption/(25.*25.)]
        self.criteria_RCLOSE = sorted(self.criteria_RCLOSE)
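        # the 10 m3 criterion is rescaled by the ratio of this model's cell area
        # to the 250x250 m and 25x25 m cells that the default was derived for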

        # initiate the index for HCLOSE and RCLOSE
        self.iteration_HCLOSE = 0
        self.iteration_RCLOSE = 0
        
        # initiate old style reporting                                  # TODO: remove this!
        self.initiate_old_style_groundwater_reporting(iniItems)
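The parameter maps in this snippet are conditioned with a recurring cover/max/min idiom. A minimal sketch of that idiom as a reusable helper, assuming pcraster is importable and a clone map has already been set (the name `clamp_and_fill` is hypothetical, not part of the original class):

import pcraster as pcr

def clamp_and_fill(param, minimum, maximum, fill=0.0):
    # fill missing values first, then force the map into [minimum, maximum]
    param = pcr.cover(param, fill)
    param = pcr.max(minimum, param)
    param = pcr.min(maximum, param)
    return param

# e.g. the recession coefficient above: clamp_and_fill(recessionCoeff, 1.0e-4, 1.0)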
Exemplo n.º 25
0
    def __init__(self, iniItems, landmask, Dir, cloneMap, tmpDir):
        object.__init__(self)

        # clone map file names, temporary directory and global/absolute path of input directory
        self.cloneMap = cloneMap  # iniItems.cloneMap
        self.tmpDir = tmpDir  # iniItems.tmpDir
        self.inputDir = Dir  # iniItems.globalOptions['inputDir']
        self.landmask = landmask

        # local drainage direction:
        self.lddMap = vos.readPCRmapClone(
            iniItems.get("routingOptions", "lddMap"),
            self.cloneMap,
            self.tmpDir,
            self.inputDir,
            True,
        )
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
        self.lddMap = pcr.lddrepair(self.lddMap)

        # option to activate water balance check
        self.debugWaterBalance = True
        if (
            configget(iniItems, "routingOptions", "debugWaterBalance", "True")
            == "False"
        ):
            self.debugWaterBalance = False

        # option to perform a run with only natural lakes (without reservoirs)
        self.onlyNaturalWaterBodies = False
        if (
            "onlyNaturalWaterBodies" in iniItems._sections["routingOptions"]
            and configget(iniItems, "routingOptions", "onlyNaturalWaterBodies", "False")
            == "True"
        ):
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.onlyNaturalWaterBodies = True
            self.dateForNaturalCondition = (
                "1900-01-01"
            )  # The run for a natural condition should access only this date.

        # names of files containing water bodies parameters
        if configget(iniItems, "routingOptions", "waterBodyInputNC", "None") == str(
            None
        ):
            self.useNetCDF = False
            self.fracWaterInp = iniItems.get("routingOptions", "fracWaterInp")
            self.waterBodyIdsInp = iniItems.get("routingOptions", "waterBodyIds")
            self.waterBodyTypInp = iniItems.get("routingOptions", "waterBodyTyp")
            self.resMaxCapInp = iniItems.get("routingOptions", "resMaxCapInp")
            self.resSfAreaInp = iniItems.get("routingOptions", "resSfAreaInp")
        else:
            self.useNetCDF = True
            self.ncFileInp = vos.getFullPath(
                iniItems.get("routingOptions", "waterBodyInputNC"), self.inputDir
            )

        # minimum width (m) used in the weir formula  # TODO: define minWeirWidth based on the GLWD, GRanD database and/or bankfull discharge formula
        self.minWeirWidth = 10.0

        # lower and upper limits at which reservoir release is terminated and
        #                        at which reservoir release is equal to long-term average outflow
        self.minResvrFrac = 0.10
        self.maxResvrFrac = 0.75
Exemplo n.º 26
0
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
      'Float32', rivers_tif, rivers_map))
call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
      'Float32', outlets_tif, outlets_map))

# burn the layers in DEM
outletsburn = pcr.scalar(pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
connectionsburn = pcr.scalar(
    pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
ldddem = pcr.cover(dem_resample_map, pcr.ifthen(riverburn > 0, pcr.scalar(0)))
ldddem = ldddem - outletsburn - connectionsburn - riverburn
ldddem = pcr.cover(ldddem, pcr.scalar(0))
pcr.report(ldddem, workdir + "dem_burn.map")
''' create ldd for multi-catchments '''
ldd = pcr.ldd(empty)
# reproject catchment shape-file
ds = ogr.Open(catchshp)
file_att = os.path.splitext(os.path.basename(catchshp))[0]
lyr = ds.GetLayerByName(file_att)
spatialref = lyr.GetSpatialRef()
#    if not spatialref == None:
#        srs = osr.SpatialReference()
#        srs.ImportFromWkt(spatialref.ExportToWkt())
#        srs.AutoIdentifyEPSG()
#        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
#        spatialref == None
#    else:
catchshp_EPSG = clone_EPSG
print('No projection defined for ' + file_att + '.shp')
print('Assumed to be the same as model projection (' + clone_EPSG + ')')
Exemplo n.º 27
0
def main():

    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # tmp folder
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)

    # set the clone map
    print("set the clone map")
    pcr.setclone(global_clone_map_file)

    # read ldd map
    print("define the ldd")
    # ~ ldd_map = pcr.readmap(global_ldd_inp_file)
    ldd_map     = pcr.lddrepair(pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(v                = global_ldd_inp_file, \
                                                                          cloneMapFileName = global_clone_map_file, \
                                                                          tmpDir           = tmp_folder, \
                                                                          absolutePath     = None, \
                                                                          isLddMap         = True, \
                                                                          cover            = None, \
                                                                          isNomMap         = False))))
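    # the chain above: readPCRmapClone resamples the source onto the clone grid,
    # pcr.ldd casts the values to the ldd type, and lddrepair makes the drainage
    # network sound (e.g. every path ends in a proper pit)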

    # define the landmask
    if landmask_map_file == None:
        print("define the landmask based on the ldd input")
        # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
        landmask = pcr.defined(ldd_map)
        landmask = pcr.ifthen(landmask, landmask)
    else:
        print("define the landmask based on the input landmask_map_file")
        landmask = pcr.readmap(landmask_map_file)
        ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
        ldd_map = pcr.lddrepair(pcr.lddrepair(pcr.ldd(ldd_map)))
        landmask = pcr.defined(ldd_map)
    landmask = pcr.ifthen(landmask, landmask)

    # save ldd files used
    # - global ldd
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd)
    os.system(cmd)
    # - ldd map that is used
    pcr.report(ldd_map, "lddmap_used.map")

    # make catchment map
    print("make catchment map")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
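    # pcr.pit marks the outlet cells of the ldd; pcr.catchment then labels every
    # cell with the pit it ultimately drains to, giving one id per catchment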

    # read global subdomain file
    print("read global subdomain file")
    global_subdomain_map = vos.readPCRmapClone(
        v=global_subdomain_file,
        cloneMapFileName=global_clone_map_file,
        tmpDir=tmp_folder,
        absolutePath=None,
        isLddMap=False,
        cover=None,
        isNomMap=True)

    # set initial subdomain
    print("assign subdomains to all catchments")
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)

    pcr.aguila(subdomains_initial)

    pcr.report(subdomains_initial, "global_subdomains_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    print("Checking all subdomains, avoid too large subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))
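    # (the always-false condition yields an all-missing-value nominal map that is
    # filled step by step with pcr.cover below)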

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))

        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1,
                         1)[0] > 0:
            process_this_clone = True

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        # - initial check value
        check_ok = True

        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)

            # ~ print(str(area_in_degree2))

            # check whether the size of bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2:
                check_ok = False
            if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False

        # ~ # ignore checking
        # ~ check_ok = True

        if check_ok and process_this_clone:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if not check_ok and process_this_clone:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
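            # note: windowmajority() takes, for every cell, the most frequent clump id within
            # a 10 (map unit, here arc degree) window; areamajority() then re-labels each
            # original clump with its majority id, so clumps lying close together merge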
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum values
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])

                if check_if_empty == 0.0:

                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                else:

                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")

    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    print("Making the clone and landmask maps for all subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])

    # clone and mask folders
    clone_folder = out_folder + "/clone/"
    if os.path.exists(clone_folder): shutil.rmtree(clone_folder)
    os.makedirs(clone_folder)
    mask_folder = out_folder + "/mask/"
    if os.path.exists(mask_folder): shutil.rmtree(mask_folder)
    os.makedirs(mask_folder)

    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the subdomain %s" % (str(nr))
        print(msg)

        # set the global clone
        pcr.setclone(global_clone_map_file)

        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))

        mask_selected_nominal = pcr.ifthen(subdomains_final == nr,
                                           pcr.nominal(nr))
        mask_file = "mask/mask_%s.map" % (str(nr))
        pcr.report(mask_selected_nominal, mask_file)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))

        # cellsize in arcdegree
        cellsize = cellsize_in_arcmin / 60.

        # number of rows and cols
        num_rows = int(round((ymax - ymin) / cellsize))
        num_cols = int(round((xmax - xmin) / cellsize))

        # make the clone map using mapattr
        clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % (
            str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize),
            clonemap_mask_file)
        print(cmd)
        os.system(cmd)
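        # e.g. (hypothetical values) a subdomain spanning 50 x 30 degrees at 5 arcmin cells gives:
        # mapattr -s -R 360 -C 600 -B -P yb2t -x -25.0 -y 40.0 -l 0.0833333 clone/clonemap_mask_3.map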

        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(v = mask_file, \
                                         cloneMapFileName = clonemap_mask_file,
                                         tmpDir = tmp_folder, \
                                         absolutePath = None, isLddMap = False, cover = None, isNomMap = True)
        local_mask_boolean = pcr.defined(local_mask)
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)

    print("")
    print("")
    print("")

    print(num_of_masks)
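
Note: the snippets in this collection call a boundingBox() helper that is not shown here. A minimal sketch of what it has to do, assuming PCRaster cell-centre coordinates (the name and details are assumptions, not the original implementation):

import pcraster as pcr

def boundingBox(pcr_boolean_map):
    # x/y coordinates of cell centres, defined only where the mask is True
    x = pcr.xcoordinate(pcr_boolean_map)
    y = pcr.ycoordinate(pcr_boolean_map)
    # the min/max over the defined cells give the extent of the mask
    xmin = pcr.cellvalue(pcr.mapminimum(x), 1)[0]
    xmax = pcr.cellvalue(pcr.mapmaximum(x), 1)[0]
    ymin = pcr.cellvalue(pcr.mapminimum(y), 1)[0]
    ymax = pcr.cellvalue(pcr.mapmaximum(y), 1)[0]
    return xmin, ymin, xmax, ymax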
Exemplo n.º 28
0
def loadmap(name,
            pcr=False,
            lddflag=False,
            timestampflag='exact',
            averageyearflag=False):
    """ Load a static map either value or pcraster map or netcdf (single or stack)
    
    Load a static map either value or pcraster map or netcdf (single or stack)
    If a netCDF stack is loaded, map is read according to timestepInit date (i.e. model time step). If timestepInit is a
    step number, step number is converted to date (referred to CalendarDayStart in settings.xml). Then date is used to
    read time step from netCDF file.
    if timestampflag = 'closest' and loadmap is reading a NetCDF stack, the timestep with the closest timestamp will be
    loaded if the exact one is not available.
    
    :param name: name of key in Settings.xml input file containing path and name of the map file (as string)
    :param pcr: flag for output maps in pcraster format 
    :param lddflag: flag for local drain direction map (CM??)
    :param timestampflag: look for exact time stamp in netcdf file ('exact') or for the closest (left) time stamp available ('closest')
    :param averageyearflag: if True, use "average year" netcdf file over the entire model simulation period
    :return: map or mapC
    :except: pcr: maps must have the same size of clone.map
             netCDF: time step timestepInit must be included into the stack 
    """
    # name of the key in the settings.xml file containing path and name of the map file
    settings = LisSettings.instance()
    binding = settings.binding
    flags = settings.flags
    value = binding[name]
    # path and name of the map file
    filename = value
    load = False
    pcrmap = False
    # try reading in PCRaster map format
    try:
        # try reading constant value
        mapC = float(value)
        flagmap = False
        load = True
        if pcr: map = mapC
    except ValueError:
        try:
            # try reading pcraster map exploiting the iterAccess class
            map = iterReadPCRasterMap(value)
            flagmap = True
            load = True
            pcrmap = True
        except Exception:
            load = False

    if load and pcrmap:
        #map is loaded and it is in pcraster format
        try:
            # test if map is the same size as the clone map; if not, this raises an error
            test = pcraster.scalar(map) + pcraster.scalar(map)
        except Exception:
            raise LisfloodError(
                "{} might be of a different size than clone size".format(
                    value))
    # if failed before try reading from netCDF map format
    if not load:
        # read a netCDF file (single map, not a stack)
        filename = os.path.splitext(value)[0] + '.nc'
        # get the map extent of the netCDF map and compute the cut indices
        cut0, cut1, cut2, cut3 = mapattrNetCDF(filename)
        # load netcdf map but only the rectangle needed
        nf1 = iterOpenNetcdf(filename, "", 'r')
        # get the last variable name (it must be the variable to be read by Lisflood)
        value = listitems(nf1.variables)[-1][0]
        if not settings.timestep_init:
            # if timestep_init is missing, read netcdf as single static map
            mapnp = nf1.variables[value][cut2:cut3, cut0:cut1]
        else:
            if 'time' in nf1.variables:
                # read a netcdf  (stack) - state files
                # get information from netCDF stack
                t_steps = nf1.variables[
                    'time'][:]  # get values for timesteps ([  0.,  24.,  48.,  72.,  96.])
                t_unit = nf1.variables[
                    'time'].units  # get unit (u'hours since 2015-01-01 06:00:00')
                t_cal = get_calendar_type(nf1)
                # get year from time unit in case average year is used
                if averageyearflag:
                    # get date of the first step in netCDF file containing average year values
                    first_date = num2date(t_steps[0], t_unit, t_cal)
                    # get year of the first step in netCDF file containing average year values
                    t_ref_year = first_date.year

                # select timestep to use for reading from netCDF stack based on timestep_init (state file time step)
                timestepI = calendar(settings.timestep_init,
                                     binding['calendar_type'])
                if isinstance(timestepI, datetime.datetime):
                    #reading dates in XML settings file
                    # get step id number in netCDF stack for timestepInit date
                    if averageyearflag:
                        # if using an average year, ignore the year in timestepI and replace it with the year of the first netCDF time step
                        try:
                            timestepI = timestepI.replace(year=t_ref_year)
                        except ValueError:
                            # if the date is 29/2 and the average year is not a leap year, switch 29/2 to 28/2
                            timestepI = timestepI.replace(day=28)
                            timestepI = timestepI.replace(year=t_ref_year)
                    timestepI = date2num(timestepI,
                                         nf1.variables['time'].units)
                else:
                    # reading step numbers in XML file
                    # timestepI = int(timestepI) -1
                    begin = calendar(binding['CalendarDayStart'])
                    DtSec = float(binding['DtSec'])
                    DtDay = DtSec / 86400.
                    # Time step, expressed as fraction of day (same as self.var.DtSec and self.var.DtDay)
                    # get date for step number timestepI (referred to CalendarDayStart)
                    timestepIDate = begin + datetime.timedelta(
                        days=(timestepI - 1) * DtDay)
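                    # e.g. with CalendarDayStart = 01/01/2015 and DtSec = 86400 (DtDay = 1.0),
                    # step number timestepI = 10 gives timestepIDate = 01/01/2015 + 9 days = 10/01/2015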
                    # get step id number in netCDF stack for step timestepInit
                    # timestepInit refers to CalendarDayStart
                    # timestepI now refers to first date in netCDF stack
                    if averageyearflag:
                        # using an average year, ignore the year in timestepIDate and replace it with the netCDF time unit year
                        try:
                            timestepIDate = timestepIDate.replace(
                                year=t_ref_year)
                        except ValueError:
                            # if the simulation year is leap and the average year is not, switch 29/2 to 28/2
                            timestepIDate = timestepIDate.replace(day=28)
                            timestepIDate = timestepIDate.replace(
                                year=t_ref_year)
                    timestepI = date2num(timestepIDate,
                                         units=t_unit,
                                         calendar=t_cal)

                if timestepI not in nf1.variables['time'][:]:
                    if timestampflag == 'exact':
                        # look for the exact time stamp when loading data
                        msg = "time step " + str(int(
                            timestepI) + 1) + " is not stored in " + filename
                        raise LisfloodError(msg)
                    elif timestampflag == 'closest':
                        #get the closest value
                        timestepInew = takeClosest(t_steps, timestepI)
                        #set timestepI to the closest available time step in netCDF file
                        timestepI = timestepInew

                itime = np.where(nf1.variables['time'][:] == timestepI)[0][0]
                mapnp = nf1.variables[value][itime, cut2:cut3, cut0:cut1]
            else:
                # read a netcdf (single one)
                mapnp = nf1.variables[value][cut2:cut3, cut0:cut1]

        # masking
        try:
            maskinfo = MaskInfo.instance()
            mapnp.mask = maskinfo.info.mask
        except (KeyError, AttributeError):
            pass
        nf1.close()

        # if a map should be pcraster
        if pcr:
            # check if integer map (like outlets, lakes etc.)
            checkint = str(mapnp.dtype)
            if checkint == "int16" or checkint == "int32":
                mapnp[mapnp.mask] = -9999
                map = numpy2pcr(Nominal, mapnp, -9999)
            elif checkint == "int8":
                mapnp[mapnp < 0] = -9999
                map = numpy2pcr(Nominal, mapnp, -9999)
            else:
                mapnp[np.isnan(mapnp)] = -9999
                map = numpy2pcr(Scalar, mapnp, -9999)
            # if the map is a ldd
            if lddflag:
                map = pcraster.ldd(pcraster.nominal(map))
        else:
            mapC = compressArray(mapnp, pcr=False, name=filename)
        flagmap = True

    # pcraster map but it has to be an array
    if pcrmap and not pcr:
        mapC = compressArray(map, name=filename)

    if flags['checkfiles']:
        print(name, filename)
        if not flagmap:
            checkmap(name, filename, mapC, flagmap, 0)
        elif pcr:
            checkmap(name, filename, map, flagmap, 0)
        else:
            print(name, mapC.size)
            if mapC.size > 0:
                map = decompress(mapC)
                checkmap(name, filename, map, flagmap, 0)
    if pcr:
        if flags['nancheck'] and name != 'Ldd':
            nanCheckMap(map, filename, name)
        return map
    elif isinstance(mapC, np.ndarray):
        return mapC.astype(float)
    else:
        if flags['nancheck'] and name != 'Ldd':
            nanCheckMap(mapC, filename, name)
        return mapC
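
A hedged usage sketch of loadmap (the settings keys 'Ldd' and 'AvgDis' are illustrative assumptions, not taken from the source):

# returns a PCRaster ldd object read from the path bound to the 'Ldd' key in the settings file
ldd = loadmap('Ldd', pcr=True, lddflag=True)
# returns a compressed numpy array; with timestampflag='closest' the nearest available
# netCDF time stamp is used when the exact one is missing
avgdis = loadmap('AvgDis', timestampflag='closest')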
Exemplo n.º 29
0
    def __init__(self, iniItems, landmask):
        object.__init__(self)

        # cloneMap, temporary directory, absolute path for input directory, landmask
        self.cloneMap = iniItems.cloneMap
        self.tmpDir = iniItems.tmpDir
        self.inputDir = iniItems.globalOptions['inputDir']
        self.landmask = landmask

        # configuration from the ini file
        self.iniItems = iniItems

        # topography properties: read several variables from the netcdf file
        for var in ['dem_minimum','dem_maximum','dem_average','dem_standard_deviation',\
                    'slopeLength','orographyBeta','tanslope',\
                    'dzRel0000','dzRel0001','dzRel0005',\
                    'dzRel0010','dzRel0020','dzRel0030','dzRel0040','dzRel0050',\
                    'dzRel0060','dzRel0070','dzRel0080','dzRel0090','dzRel0100']:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['topographyNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        # channel properties: read several variables from the netcdf file
        for var in [
                'lddMap', 'cellAreaMap', 'gradient', 'bankfull_width',
                'bankfull_depth', 'dem_floodplain', 'dem_riverbed'
        ]:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['channelNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        # minimum channel width
        minimum_channel_width = 0.1
        self.bankfull_width = pcr.max(minimum_channel_width,
                                      self.bankfull_width)

        #~ # cell fraction if channel water reaches the flood plain # NOT USED
        #~ self.flood_plain_fraction = self.return_innundation_fraction(pcr.max(0.0, self.dem_floodplain - self.dem_minimum))

        # coefficient of Manning
        self.manningsN = vos.readPCRmapClone(self.iniItems.modflowParameterOptions['manningsN'],\
                                             self.cloneMap,self.tmpDir,self.inputDir)

        # minimum channel gradient
        minGradient = 0.00005
        self.gradient = pcr.max(minGradient,
                                pcr.cover(self.gradient, minGradient))

        # correcting lddMap
        self.lddMap = pcr.ifthen(pcr.scalar(self.lddMap) > 0.0, self.lddMap)
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))

        # channelLength: approximation of channel length (unit: m), taken as the cell diagonal
        cellSizeInArcMin = np.round(pcr.clone().cellSize() * 60.)
        verticalSizeInMeter = cellSizeInArcMin * 1852.
        self.channelLength = ((self.cellAreaMap / verticalSizeInMeter)**2 + \
                              verticalSizeInMeter**2)**0.5
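        # e.g. for a 5 arcmin cell: verticalSizeInMeter = 5 * 1852 = 9260 m; near the equator
        # cellAreaMap is roughly 9260 m x 9260 m, so channelLength ~ sqrt(2) * 9260 m ~ 13.1 km,
        # i.e. the cell diagonal (illustrative values only)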

        # option for lakes and reservoir
        self.onlyNaturalWaterBodies = False
        if self.iniItems.modflowParameterOptions[
                'onlyNaturalWaterBodies'] == "True":
            self.onlyNaturalWaterBodies = True

        # groundwater linear recession coefficient (day-1); the linear reservoir concept is still used to
        # represent fast-response flow, particularly from karstic aquifers in mountainous regions
        self.recessionCoeff = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                                 'recessionCoeff', self.cloneMap)
        self.recessionCoeff = pcr.cover(self.recessionCoeff, 0.00)
        self.recessionCoeff = pcr.min(1.0000, self.recessionCoeff)
        #
        if 'minRecessionCoeff' in iniItems.modflowParameterOptions:
            minRecessionCoeff = float(
                iniItems.modflowParameterOptions['minRecessionCoeff'])
        else:
            minRecessionCoeff = 1.0e-4  # This is the minimum value used in Van Beek et al. (2011).
        self.recessionCoeff = pcr.max(minRecessionCoeff, self.recessionCoeff)

        # aquifer saturated conductivity (m/day)
        self.kSatAquifer = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                             'kSatAquifer', self.cloneMap)
        self.kSatAquifer = pcr.cover(self.kSatAquifer,
                                     pcr.mapmaximum(self.kSatAquifer))
        self.kSatAquifer = pcr.max(0.010, self.kSatAquifer)

        self.kSatAquifer *= 0.001
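        # note: the factor 0.001 rescales kSatAquifer by three orders of magnitude; the
        # rationale (e.g. a unit or calibration adjustment) is not documented in this snippet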

        # aquifer specific yield (dimensionless)
        self.specificYield = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                               'specificYield', self.cloneMap)
        self.specificYield = pcr.cover(self.specificYield,
                                       pcr.mapmaximum(self.specificYield))
        self.specificYield = pcr.max(
            0.010, self.specificYield
        )  # TODO: TO BE CHECKED: The resample process of specificYield
        self.specificYield = pcr.min(1.000, self.specificYield)

        # estimate of thickness (unit: m) of accessible groundwater
        totalGroundwaterThickness = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['estimateOfTotalGroundwaterThicknessNC'],\
                                    'thickness', self.cloneMap)
        # extrapolation
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.0))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.5))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness, 0.0)
        #
        # set minimum thickness
        minimumThickness = pcr.scalar(float(\
                           self.iniItems.modflowParameterOptions['minimumTotalGroundwaterThickness']))
        totalGroundwaterThickness = pcr.max(minimumThickness,
                                            totalGroundwaterThickness)
        #
        # set maximum thickness: 250 m.
        maximumThickness = 250.
        self.totalGroundwaterThickness = pcr.min(maximumThickness,
                                                 totalGroundwaterThickness)

        # river bed resistance (unit: day)
        self.bed_resistance = 1.0

        # option to ignore capillary rise
        self.ignoreCapRise = True
        if self.iniItems.modflowParameterOptions['ignoreCapRise'] == "False":
            self.ignoreCapRise = False

        # initiate old style reporting                                  # TODO: remove this!
        self.initiate_old_style_groundwater_reporting(iniItems)
Exemplo n.º 30
0
    def __init__(self, modelTime, input_file, output_file, variable_name,
                 variable_unit):
        DynamicModel.__init__(self)

        self.modelTime = modelTime

        # netcdf input file - based on PCR-GLOBWB output
        self.input_file = "/projects/0/dfguu/users/edwin/pcr-globwb-aqueduct/historical/1951-2005/gfdl-esm2m/temperature_annuaAvg_output_%s-12-31_to_%s-12-31.nc"
        self.input_file = input_file

        # output file - in netcdf format
        self.output_file = output_folder + "/mekong/basin_temperature_annuaAvg_output.nc"
        self.output_file = output_file
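        # note: the hard-coded paths above are leftover example defaults; both are
        # immediately overridden by the input_file and output_file arguments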

        # output variable name and unit
        self.variable_name = variable_name
        self.variable_unit = variable_unit

        # preparing temporary directory
        self.temporary_directory = output_folder + "/tmp/"
        os.makedirs(self.temporary_directory)

        # clone and landmask maps
        logger.info("Set the clone and landmask maps.")
        self.clonemap_file_name = "/projects/0/dfguu/users/edwinhs/data/mekong_etc_clone/version_2018-10-22/final/clone_mekong.map"
        pcr.setclone(self.clonemap_file_name)
        landmask_file_name = "/projects/0/dfguu/users/edwinhs/data/mekong_etc_clone/version_2018-10-22/final/mask_mekong.map"
        self.landmask = vos.readPCRmapClone(landmask_file_name, \
                                            self.clonemap_file_name, \
                                            self.temporary_directory, \
                                            None, False, None, True)

        # pcraster input files
        # - river network map and sub-catchment map
        ldd_file_name = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/lddsound_05min.map"
        # - cell area (unit: m2)
        cell_area_file_name = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/cellsize05min.correct.map"

        # loading pcraster input maps
        self.ldd_network   = vos.readPCRmapClone(ldd_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None, \
                                                 True)
        self.ldd_network = pcr.lddrepair(pcr.ldd(self.ldd_network))
        self.ldd_network = pcr.lddrepair(self.ldd_network)
        self.cell_area     = vos.readPCRmapClone(cell_area_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None)

        # set/limit all input maps to the defined landmask
        self.ldd_network = pcr.ifthen(self.landmask, self.ldd_network)
        self.cell_area = pcr.ifthen(self.landmask, self.cell_area)

        # calculate basin/catchment area
        self.basin_area = pcr.catchmenttotal(self.cell_area, self.ldd_network)
        self.basin_area = pcr.ifthen(self.landmask, self.basin_area)

        # preparing an object for reporting netcdf files:
        self.netcdf_report = netcdf_writer.PCR2netCDF(self.clonemap_file_name)

        # preparing netcdf output files:
        self.netcdf_report.createNetCDF(self.output_file, \
                                        self.variable_name, \
                                        self.variable_unit)
Exemplo n.º 31
0
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file = global_landmask_30min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_30min_only.map")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file = global_landmask_05min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_05min_only.map")
    # - based on the 06min input
    landmask_06min = define_landmask(input_file = global_landmask_06min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_06min_only.map")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_30sec_only.map")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_03sec_only.map")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_extended_30min.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_extended_30min.map")
    # ~ pcr.aguila(ldd_map)

    # catchment map and size
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ pcr.aguila(catchment_size)

    # identify small islands
    print("identify small islands")
    # - map of islands smaller than 15000 cells (at half-arc-degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 15000., island_map)
    # ~ # - use catchments (instead of islands)
    # ~ island_map  = catchment_map
    # ~ island_size = catchment_size
    # ~ island_map  = pcr.ifthen(island_size < 10000., island_map)
    # - sort from the largest island
    # -- take one cell per island as a representative
    island_map_rep_size = pcr.ifthen(
        pcr.areaorder(island_size, island_map) == 1.0, island_size)
    # -- sort from the largest island
    island_map_rep_ids = pcr.areaorder(
        island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0)))
    # -- map of smaller islands, sorted from the largest one
    island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map)
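    # note: areaorder() ranks cells within each area, so rank == 1.0 picks exactly one
    # representative cell per island; ranking the negated representative sizes within a
    # single common area then yields ids 1, 2, 3, ... from the largest island downwards,
    # and areamajority() spreads each new id back over its whole island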

    # identify the biggest island for every group of small islands within a certain window (window length in arc degrees)
    print("the biggest island for every group of small islands")
    large_island_map = pcr.ifthen(
        pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map),
                                                    15.), island_map)
    # ~ pcr.aguila(large_island_map)

    # identify big catchments
    print("identify large catchments")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)

    # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map)
    # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map)
    # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map)
    # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map)

    # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin)
    large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map)

    # - assign codes that differ from the island codes
    large_catchment_map = pcr.nominal(
        pcr.scalar(large_catchment_map) +
        10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1])

    # merge biggest islands and big catchments
    print("merge large catchments and islands")
    large_catchment_and_island_map = pcr.cover(large_catchment_map,
                                               large_island_map)
    # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map)
    large_catchment_and_island_map_size = pcr.areatotal(
        pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map)

    # - sort from the largest one
    # -- take one cell per island as a representative
    large_catchment_and_island_map_rep_size = pcr.ifthen(
        pcr.areaorder(large_catchment_and_island_map_size,
                      large_catchment_and_island_map) == 1.0,
        large_catchment_and_island_map_size)
    # -- sort from the largest
    large_catchment_and_island_map_rep_ids = pcr.areaorder(
        large_catchment_and_island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size),
                   pcr.nominal(1.0)))
    # -- map of largest catchments and islands, sorted from the largest one
    large_catchment_and_island_map = pcr.areamajority(
        pcr.nominal(large_catchment_and_island_map_rep_ids),
        large_catchment_and_island_map)
    # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map")

    # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains")
    # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ # - initial subdomains
    # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map"))
    # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    # ~ pcr.aguila(subdomains_initial)

    # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments
    print("spatial interpolation/extrapolation to get initial subdomains")
    field = large_catchment_and_island_map
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_initial = field
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
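    # note: uniqueid() gives every defined (large catchment/island) cell its own id,
    # spreadzone() grows those ids outward over the undefined cells (nearest-seed zones),
    # and areamajority() fills each zone with its seed's subdomain code, so every small
    # catchment inherits the code of the nearest large one; the last areamajority() then
    # makes the code uniform within each hydrological catchment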
    pcr.aguila(subdomains_initial)

    pcr.report(subdomains_initial, "global_subdomains_30min_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    print("Checking all subdomains, avoid too large subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        # ~ print(str(area_in_degree2))

        # check whether the size of bounding box is ok
        # - initial check value
        check_ok = True

        reference_area_in_degree2 = 2500.
        if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False
        if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False

        if check_ok:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if not check_ok:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum values
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])

                if check_if_empty == 0.0:

                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                else:

                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")

    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_30min_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))

    print("")
    print("")
    print("")

    print("Number of subdomains: " + str(num_of_masks))

    print("")
    print("")
    print("")

    # spatial extrapolation in order to cover the entire map
    print("spatial interpolation/extrapolation to cover the entire map")
    field = subdomains_final
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_final_filled = field
    pcr.aguila(subdomains_final_filled)

    pcr.report(subdomains_final_filled,
               "global_subdomains_30min_final_filled.map")
Exemplo n.º 32
0
def main():
    ### Read input arguments #####
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q', '--quiet',
                      dest='verbose', default=True, action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i', '--ini', dest='inifile',
                      default='hand_contour_inun.ini', nargs=1,
                      help='ini configuration file')
    parser.add_option('-f', '--flood_map',
                      nargs=1, dest='flood_map',
                      help='Flood map file (NetCDF point time series file')
    parser.add_option('-v', '--flood_variable',
                      nargs=1, dest='flood_variable',
                      default='water_level',
                      help='variable name of flood water level')
    parser.add_option('-b', '--bankfull_map',
                      dest='bankfull_map', default='',
                      help='Map containing bank full level (is subtracted from flood map, in NetCDF)')
    parser.add_option('-c', '--catchment',
                      dest='catchment_strahler', default=7, type='int',
                      help='Strahler order threshold >= are selected as catchment boundaries')
    parser.add_option('-t', '--time',
                      dest='time', default='',
                      help='time in YYYYMMDDHHMMSS, overrides time in NetCDF input if set')
    # parser.add_option('-s', '--hand_strahler',
    #                   dest='hand_strahler', default=7, type='int',
    #                   help='Strahler order threshold >= selected as riverine')
    parser.add_option('-m', '--max_strahler',
                      dest = 'max_strahler', default=1000, type='int',
                      help='Maximum Strahler order to loop over')
    parser.add_option('-d', '--destination',
                      dest='dest_path', default='inun',
                      help='Destination path')
    parser.add_option('-H', '--hand_file_prefix',
                      dest='hand_file_prefix', default='',
                      help='optional HAND file prefix of already generated HAND files')
    parser.add_option('-n', '--neg_HAND',
                      dest='neg_HAND', default=0, type='int',
                      help='if set to 1, allow for negative HAND values in HAND maps')
    (options, args) = parser.parse_args()

    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    options.dest_path = os.path.abspath(options.dest_path)

    if not os.path.isdir(options.dest_path):
        os.makedirs(options.dest_path)

    # set up the logger
    flood_name = os.path.split(options.flood_map)[1].split('.')[0]
    # case_name = 'inun_{:s}_hand_{:02d}_catch_{:02d}'.format(flood_name, options.hand_strahler, options.catchment_strahler)
    case_name = 'inun_{:s}_catch_{:02d}'.format(flood_name, options.catchment_strahler)
    logfilename = os.path.join(options.dest_path, 'hand_contour_inun.log')
    logger, ch = inun_lib.setlogger(logfilename, 'HAND_INUN', options.verbose)
    logger.info('$Id: $')
    logger.info('Flood map: {:s}'.format(options.flood_map))
    logger.info('Bank full map: {:s}'.format(options.bankfull_map))
    logger.info('Destination path: {:s}'.format(options.dest_path))
    # read out ini file
    ### READ CONFIG FILE
    # open config-file
    config = inun_lib.open_conf(options.inifile)
    
    # read settings
    options.dem_file = inun_lib.configget(config, 'HighResMaps',
                                  'dem_file',
                                  True)
    options.ldd_file = inun_lib.configget(config, 'HighResMaps',
                                'ldd_file',
                                 True)
    options.stream_file = inun_lib.configget(config, 'HighResMaps',
                                'stream_file',
                                 True)
    options.riv_length_fact_file = inun_lib.configget(config, 'wflowResMaps',
                                'riv_length_fact_file',
                                 True)
    options.ldd_wflow = inun_lib.configget(config, 'wflowResMaps',
                                'ldd_wflow',
                                True)
    options.riv_width_file = inun_lib.configget(config, 'wflowResMaps',
                                'riv_width_file',
                                 True)
    options.file_format = inun_lib.configget(config, 'file_settings',
                                'file_format', 0, datatype='int')
    options.out_format = inun_lib.configget(config, 'file_settings',
                                'out_format', 0, datatype='int')
    options.latlon = inun_lib.configget(config, 'file_settings',
                                 'latlon', 0, datatype='int')
    options.x_tile = inun_lib.configget(config, 'tiling',
                                  'x_tile', 10000, datatype='int')
    options.y_tile = inun_lib.configget(config, 'tiling',
                                  'y_tile', 10000, datatype='int')
    options.x_overlap = inun_lib.configget(config, 'tiling',
                                  'x_overlap', 1000, datatype='int')
    options.y_overlap = inun_lib.configget(config, 'tiling',
                                  'y_overlap', 1000, datatype='int')
    options.iterations = inun_lib.configget(config, 'inundation',
                                  'iterations', 20, datatype='int')
    options.initial_level = inun_lib.configget(config, 'inundation',
                                  'initial_level', 32., datatype='float')
    options.flood_volume_type = inun_lib.configget(config, 'inundation',
                                  'flood_volume_type', 0, datatype='int')

    # options.area_multiplier = inun_lib.configget(config, 'inundation',
    #                               'area_multiplier', 1., datatype='float')
    logger.info('DEM file: {:s}'.format(options.dem_file))
    logger.info('LDD file: {:s}'.format(options.ldd_file))
    logger.info('Columns per tile: {:d}'.format(options.x_tile))
    logger.info('Rows per tile: {:d}'.format(options.y_tile))
    logger.info('Columns overlap: {:d}'.format(options.x_overlap))
    logger.info('Rows overlap: {:d}'.format(options.y_overlap))
    metadata_global = {}
    # add metadata from the section [metadata]
    meta_keys = config.options('metadata_global')
    for key in meta_keys:
        metadata_global[key] = config.get('metadata_global', key)
    # add a number of metadata variables that are mandatory
    metadata_global['config_file'] = os.path.abspath(options.inifile)
    metadata_var = {}
    metadata_var['units'] = 'm'
    metadata_var['standard_name'] = 'water_surface_height_above_reference_datum'
    metadata_var['long_name'] = 'flooding'
    metadata_var['comment'] = 'water_surface_reference_datum_altitude is given in file {:s}'.format(options.dem_file)
    if not os.path.exists(options.dem_file):
        logger.error('path to dem file {:s} cannot be found'.format(options.dem_file))
        sys.exit(1)
    if not os.path.exists(options.ldd_file):
        logger.error('path to ldd file {:s} cannot be found'.format(options.ldd_file))
        sys.exit(1)

    # Read extent from a GDAL compatible file
    try:
        extent = inun_lib.get_gdal_extent(options.dem_file)
    except Exception:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    try:
        x, y = inun_lib.get_gdal_axes(options.dem_file, logging=logger)
        srs = inun_lib.get_gdal_projection(options.dem_file, logging=logger)
    except Exception:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    # read history from flood file
    if options.file_format == 0:
        a = nc.Dataset(options.flood_map, 'r')
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), a.history)
        a.close()
    else:
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), 'PCRaster file, no history')

    # first write subcatch maps and hand maps
    ############### TODO ######
    # setup a HAND file for each strahler order

    max_s = inun_lib.define_max_strahler(options.stream_file, logging=logger)
    stream_max = np.minimum(max_s, options.max_strahler)

    for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
        dem_name = os.path.split(options.dem_file)[1].split('.')[0]
        if os.path.isfile('{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)):
            hand_file = '{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)
        else:
            logger.info('No HAND files with HAND prefix were found, checking {:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
            hand_file = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
        if not os.path.isfile(hand_file):
            # hand file does not exist yet! Generate it, otherwise skip!
            logger.info('HAND file {:s} not found, start setting up...please wait...'.format(hand_file))
            hand_file_tmp = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif.tmp'.format(dem_name, hand_strahler))
            ds_hand, band_hand = inun_lib.prepare_gdal(hand_file_tmp, x, y, logging=logger, srs=srs)
            # band_hand = ds_hand.GetRasterBand(1)

            # Open terrain data for reading
            ds_dem, rasterband_dem = inun_lib.get_gdal_rasterband(options.dem_file)
            ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
            ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)
            n = 0
            for x_loop in range(0, len(x), options.x_tile):
                x_start = np.maximum(x_loop, 0)
                x_end = np.minimum(x_loop + options.x_tile, len(x))
                # determine actual overlap for cutting
                for y_loop in range(0, len(y), options.y_tile):
                    x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
                    x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
                    n += 1
                    # print('tile {:001d}:'.format(n))
                    y_start = np.maximum(y_loop, 0)
                    y_end = np.minimum(y_loop + options.y_tile, len(y))
                    y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
                    y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
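                    # note: the *_overlap_min/max values measure the margin actually available
                    # on each side (smaller than the requested overlap at the map edges); the
                    # padded tile is processed below and the same margins are used to cut the
                    # core tile back out before writing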
                    # cut out DEM
                    logger.debug('Computing HAND for xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end,y_start, y_end))
                    terrain = rasterband_dem.ReadAsArray(x_start - x_overlap_min,
                                                         y_start - y_overlap_min,
                                                         (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                         (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                         )

                    drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                         y_start - y_overlap_min,
                                                         (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                         (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                         )
                    stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                           y_start - y_overlap_min,
                                                           (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                           (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                           )
                    # write to temporary file
                    terrain_temp_file = os.path.join(options.dest_path, 'terrain_temp.map')
                    drainage_temp_file = os.path.join(options.dest_path, 'drainage_temp.map')
                    stream_temp_file = os.path.join(options.dest_path, 'stream_temp.map')
                    if rasterband_dem.GetNoDataValue() is not None:
                        inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                          np.arange(0, terrain.shape[1]),
                                          np.arange(0, terrain.shape[0]),
                                          terrain, rasterband_dem.GetNoDataValue(),
                                          gdal_type=gdal.GDT_Float32,
                                          logging=logger)
                    else:
                        # in case no nodata value is found
                        logger.warning('No nodata value found in {:s}. assuming -9999'.format(options.dem_file))
                        inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                          np.arange(0, terrain.shape[1]),
                                          np.arange(0, terrain.shape[0]),
                                          terrain, -9999.,
                                          gdal_type=gdal.GDT_Float32,
                                          logging=logger)

                    inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      drainage, rasterband_ldd.GetNoDataValue(),
                                      gdal_type=gdal.GDT_Int32,
                                      logging=logger)
                    inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      stream, rasterband_ldd.GetNoDataValue(),
                                      gdal_type=gdal.GDT_Int32,
                                      logging=logger)
                    # read as pcr objects
                    pcr.setclone(terrain_temp_file)
                    terrain_pcr = pcr.readmap(terrain_temp_file)
                    drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
                    stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to scalar type map

                    # check if the highest stream order of the tile is below hand_strahler;
                    # if so, DEM values are used instead of HAND values.
                    max_stream_tile = inun_lib.define_max_strahler(stream_temp_file, logging=logger)
                    if max_stream_tile < hand_strahler:
                        hand_pcr = terrain_pcr
                        logger.info('For this tile, DEM values are used instead of HAND because there is no stream order larger than {:02d}'.format(hand_strahler))
                    else:
                        # compute streams
                        stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr) # generate streams
                        # compute basins
                        stream_ge_dummy, subcatch = inun_lib.subcatch_stream(drainage_pcr, options.catchment_strahler, stream=stream_pcr) # generate subcatchments at the catchment strahler order
                        basin = pcr.boolean(subcatch)
                        hand_pcr, dist_pcr = inun_lib.derive_HAND(terrain_pcr, drainage_pcr, 3000,
                                                                  rivers=pcr.boolean(stream_ge), basin=basin, neg_HAND=options.neg_HAND)
                    # convert to numpy
                    hand = pcr.pcr2numpy(hand_pcr, -9999.)
                    # cut relevant part
                    if y_overlap_max == 0:
                        y_overlap_max = -hand.shape[0]
                    if x_overlap_max == 0:
                        x_overlap_max = -hand.shape[1]
                    hand_cut = hand[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]

                    band_hand.WriteArray(hand_cut, x_start, y_start)
                    os.unlink(terrain_temp_file)
                    os.unlink(drainage_temp_file)
                    os.unlink(stream_temp_file)
                    band_hand.FlushCache()
            ds_dem = None
            ds_ldd = None
            ds_stream = None
            band_hand.SetNoDataValue(-9999.)
            ds_hand = None
            logger.info('Finalizing {:s}'.format(hand_file))
            # rename temporary file to final hand file
            os.rename(hand_file_tmp, hand_file)
        else:
            logger.info('HAND file {:s} already exists...skipping...'.format(hand_file))
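    # Aside: a minimal self-contained illustration (not part of the original
    # script) of the overlap-cropping trick used in the "cut relevant part"
    # steps: when an overlap is zero, arr[min:-0] would give an empty slice,
    # so the zero is replaced by -shape, making the slice run to the end of
    # the axis.
    _demo_tile = np.arange(20).reshape(4, 5)      # pretend tile, 1-row overlap at the top
    _demo_y_min, _demo_y_max = 1, 0               # no overlap on the lower edge
    if _demo_y_max == 0:
        _demo_y_max = -_demo_tile.shape[0]        # now -_demo_y_max == _demo_tile.shape[0]
    assert _demo_tile[_demo_y_min:-_demo_y_max, :].shape == (3, 5)  # only the top overlap row removed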

    #####################################################################################
    #  HAND file has now been prepared, moving to flood mapping part                    #
    #####################################################################################
    # set the clone
    pcr.setclone(options.ldd_wflow)
    # read wflow ldd as pcraster object
    ldd_pcr = pcr.readmap(options.ldd_wflow)
    xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(options.riv_width_file, 'GTiff', logging=logger)

    # determine cell length in meters using ldd_pcr as clone (if latlon=True, degree lengths are converted to meters)
    x_res, y_res, reallength_wflow = pcrut.detRealCellLength(pcr.scalar(ldd_pcr), not(bool(options.latlon)))
    cell_surface_wflow = pcr.pcr2numpy(x_res * y_res, 0)

    if options.flood_volume_type == 0:
        # load the staticmaps needed to estimate volumes across all
        # xax, yax, riv_length, fill_value = inun_lib.gdal_readmap(options.riv_length_file, 'GTiff', logging=logger)
        # riv_length = np.ma.masked_where(riv_length==fill_value, riv_length)
        xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(options.riv_width_file, 'GTiff', logging=logger)
        riv_width[riv_width == fill_value] = 0

        # read river length factor file (multiplier)
        xax, yax, riv_length_fact, fill_value = inun_lib.gdal_readmap(options.riv_length_fact_file, 'GTiff', logging=logger)
        riv_length_fact = np.ma.masked_where(riv_length_fact==fill_value, riv_length_fact)
        drain_length = wflow_lib.detdrainlength(ldd_pcr, x_res, y_res)

        # compute river length in each cell
        riv_length = pcr.pcr2numpy(drain_length, 0) * riv_length_fact
        # riv_length_pcr = pcr.numpy2pcr(pcr.Scalar, riv_length, 0)

    flood_folder = os.path.join(options.dest_path, case_name)
    flood_vol_map = os.path.join(flood_folder, '{:s}_vol.tif'.format(os.path.split(options.flood_map)[1].split('.')[0]))
    if not(os.path.isdir(flood_folder)):
        os.makedirs(flood_folder)
    if options.out_format == 0:
        inun_file_tmp = os.path.join(flood_folder, '{:s}.tif.tmp'.format(case_name))
        inun_file = os.path.join(flood_folder, '{:s}.tif'.format(case_name))
    else:
        inun_file_tmp = os.path.join(flood_folder, '{:s}.nc.tmp'.format(case_name))
        inun_file = os.path.join(flood_folder, '{:s}.nc'.format(case_name))

    hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
    drainage_temp_file = os.path.join(flood_folder, 'drainage_temp.map')
    stream_temp_file = os.path.join(flood_folder, 'stream_temp.map')
    flood_vol_temp_file = os.path.join(flood_folder, 'flood_warp_temp.tif')
    # load the data with river levels and compute the volumes
    if options.file_format == 0:
        # assume we need the maximum value in a NetCDF time series grid
        logger.info('Reading flood from {:s} NetCDF file'.format(options.flood_map))
        a = nc.Dataset(options.flood_map, 'r')
        if options.latlon == 0:
            xax = a.variables['x'][:]
            yax = a.variables['y'][:]
        else:
            xax = a.variables['lon'][:]
            yax = a.variables['lat'][:]
        if options.time == '':
            time_list = nc.num2date(a.variables['time'][:], units = a.variables['time'].units, calendar=a.variables['time'].calendar)
            time = [time_list[len(time_list) // 2]]
        else:
            time = [dt.datetime.strptime(options.time, '%Y%m%d%H%M%S')]

        flood_series = a.variables[options.flood_variable][:]
        flood_data = flood_series.max(axis=0)
        if np.ma.is_masked(flood_data):
            flood = flood_data.data
            flood[flood_data.mask] = 0
        else:
            flood = flood_data
        if yax[-1] > yax[0]:
            yax = np.flipud(yax)
            flood = np.flipud(flood)
        a.close()
    elif options.file_format == 1:
        logger.info('Reading flood from {:s} PCRaster file'.format(options.flood_map))
        xax, yax, flood, flood_fill_value = inun_lib.gdal_readmap(options.flood_map, 'PCRaster', logging=logger)
        flood = np.ma.masked_equal(flood, flood_fill_value)
        if options.time == '':
            options.time = '20000101000000'
        time = [dt.datetime.strptime(options.time, '%Y%m%d%H%M%S')]

        flood = flood.filled(0.)  # replace masked fill values with zero
    # load the bankfull depths
    if options.bankfull_map == '':
        bankfull = np.zeros(flood.shape)
    else:
        if options.file_format == 0:
            logger.info('Reading bankfull from {:s} NetCDF file'.format(options.bankfull_map))
            a = nc.Dataset(options.bankfull_map, 'r')
            xax = a.variables['x'][:]
            yax = a.variables['y'][:]

            bankfull_series = a.variables[options.flood_variable][:]
            bankfull_data = bankfull_series.max(axis=0)
            if np.ma.is_masked(bankfull_data):
                bankfull = bankfull_data.data
                bankfull[bankfull_data.mask] = 0
            else:
                bankfull = bankfull_data
            if yax[-1] > yax[0]:
                yax = np.flipud(yax)
                bankfull = np.flipud(bankfull)
            a.close()
        elif options.file_format == 1:
            logger.info('Reading bankfull from {:s} PCRaster file'.format(options.bankfull_map))
            xax, yax, bankfull, bankfull_fill_value = inun_lib.gdal_readmap(options.bankfull_map, 'PCRaster', logging=logger)
            bankfull = np.ma.masked_equal(bankfull, bankfull_fill_value)
#     flood = bankfull*2
    # res_x = 2000
    # res_y = 2000
    # subtract the bankfull water level to get flood levels (above bankfull)
    flood_vol = np.maximum(flood-bankfull, 0)
    if options.flood_volume_type == 0:
        flood_vol_m = riv_length*riv_width*flood_vol/cell_surface_wflow  # volume expressed as a water depth (m) over the full cell
        flood_vol_m_pcr = pcr.numpy2pcr(pcr.Scalar, flood_vol_m, 0)
    else:
        flood_vol_m = flood_vol/cell_surface_wflow
    flood_vol_m_data = flood_vol_m.data
    flood_vol_m_data[flood_vol_m.mask] = -999.
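    # Worked example (illustrative numbers, not from the original script): a
    # 2000 m long, 50 m wide reach carrying 1 m of water above bankfull in a
    # 1e6 m2 cell gives 2000 * 50 * 1 / 1e6 = 0.1 m when the flood volume is
    # expressed as a water depth over the full cell surface, which is what
    # flood_vol_m holds for flood_volume_type == 0.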
    logger.info('Saving water layer map to {:s}'.format(flood_vol_map))
    # write to a tiff file
    inun_lib.gdal_writemap(flood_vol_map, 'GTiff', xax, yax, np.maximum(flood_vol_m_data, 0), -999., logging=logger)
    # this is placed later in the hand loop
    # ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
    ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
    ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)

    logger.info('Preparing flood map in {:s} ...please wait...'.format(inun_file))
    if options.out_format == 0:
        ds_inun, band_inun = inun_lib.prepare_gdal(inun_file_tmp, x, y, logging=logger, srs=srs)
        # band_inun = ds_inun.GetRasterBand(1)
    else:
        ds_inun, band_inun = inun_lib.prepare_nc(inun_file_tmp, time, x, np.flipud(y), metadata=metadata_global,
                                                 metadata_var=metadata_var, logging=logger)
    # loop over all the tiles
    n = 0
    for x_loop in range(0, len(x), options.x_tile):
        x_start = np.maximum(x_loop, 0)
        x_end = np.minimum(x_loop + options.x_tile, len(x))
        # determine actual overlap for cutting
        for y_loop in range(0, len(y), options.y_tile):
            x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
            x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
            n += 1
            # print('tile {:001d}:'.format(n))
            y_start = np.maximum(y_loop, 0)
            y_end = np.minimum(y_loop + options.y_tile, len(y))
            y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
            y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
            x_tile_ax = x[x_start - x_overlap_min:x_end + x_overlap_max]
            y_tile_ax = y[y_start - y_overlap_min:y_end + y_overlap_max]
            # cut out DEM
            logger.debug('handling xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end, y_start, y_end))


            drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                 y_start - y_overlap_min,
                                                 (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                 (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                 )
            stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                   y_start - y_overlap_min,
                                                   (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                   (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                   )

            stream_max = np.minimum(stream.max(), options.max_strahler)


            inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              drainage, rasterband_ldd.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)
            inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              stream, rasterband_stream.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)


            # read as pcr objects
            pcr.setclone(stream_temp_file)
            drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
            stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to ldd type map

            # warp of flood volume to inundation resolution
            inun_lib.gdal_warp(flood_vol_map, stream_temp_file, flood_vol_temp_file, gdal_interp=gdalconst.GRA_NearestNeighbour)
            x_tile_ax, y_tile_ax, flood_meter, fill_value = inun_lib.gdal_readmap(flood_vol_temp_file, 'GTiff', logging=logger)
            # make sure that the global option 'unittrue' is on (in case 'unitcell' was set by another function)
            x_res_tile, y_res_tile, reallength = pcrut.detRealCellLength(pcr.scalar(stream_pcr), not(bool(options.latlon)))
            cell_surface_tile = pcr.pcr2numpy(x_res_tile * y_res_tile, 0)

            # convert meter depth to volume [m3]
            flood_vol = pcr.numpy2pcr(pcr.Scalar, flood_meter*cell_surface_tile, fill_value)

            # first prepare a basin map, belonging to the lowest order we are looking at
            inundation_pcr = pcr.scalar(stream_pcr) * 0
            for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
                # hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
                if os.path.isfile(os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))):
                    hand_file = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
                else:
                    hand_file = '{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)
                ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
                hand = rasterband_hand.ReadAsArray(x_start - x_overlap_min,
                                             y_start - y_overlap_min,
                                             (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                             (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                             )
                logger.debug('len x-ax: {:d} len y-ax {:d} x-shape {:d} y-shape {:d}'.format(len(x_tile_ax), len(y_tile_ax), hand.shape[1], hand.shape[0]))

                inun_lib.gdal_writemap(hand_temp_file, 'PCRaster',
                          x_tile_ax,
                          y_tile_ax,
                          hand, rasterband_hand.GetNoDataValue(),
                          gdal_type=gdal.GDT_Float32,
                          logging=logger)

                hand_pcr = pcr.readmap(hand_temp_file)

                stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, options.catchment_strahler, stream=stream_pcr)
                # stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr)
                stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr,
                                                               options.catchment_strahler,
                                                               stream=stream_pcr,
                                                               basin=pcr.boolean(pcr.cover(subcatch_hand, 0)),
                                                               assign_existing=True,
                                                               min_strahler=hand_strahler,
                                                               max_strahler=hand_strahler) # generate subcatchments, only within basin for HAND
                flood_vol_strahler = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)), flood_vol, 0) # mask the flood volume map with the created subcatch map for strahler order = hand_strahler

                inundation_pcr_step = inun_lib.volume_spread(drainage_pcr, hand_pcr,
                                                             pcr.subcatchment(drainage_pcr, subcatch), # to make sure backwater effects can occur from higher order rivers to lower order rivers
                                                             flood_vol_strahler,
                                                             volume_thres=0.,
                                                             iterations=options.iterations,
                                                             cell_surface=pcr.numpy2pcr(pcr.Scalar, cell_surface_tile, -9999),
                                                             logging=logger,
                                                             order=hand_strahler,
                                                             neg_HAND=options.neg_HAND)
                # use maximum value of inundation_pcr_step and new inundation for higher strahler order
                inundation_pcr = pcr.max(inundation_pcr, inundation_pcr_step)
            inundation = pcr.pcr2numpy(inundation_pcr, -9999.)
            # cut relevant part
            if y_overlap_max == 0:
                y_overlap_max = -inundation.shape[0]
            if x_overlap_max == 0:
                x_overlap_max = -inundation.shape[1]
            inundation_cut = inundation[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]
            # inundation_cut
            if options.out_format == 0:
                band_inun.WriteArray(inundation_cut, x_start, y_start)
                band_inun.FlushCache()
            else:
                # with netCDF, data is stored upside down
                inun_lib.write_tile_nc(band_inun, inundation_cut, x_start, y_start)
            # clean up
            os.unlink(flood_vol_temp_file)
            os.unlink(drainage_temp_file)
            os.unlink(hand_temp_file)
            os.unlink(stream_temp_file)     #also remove temp stream file from output folder

            # if n == 35:
            #     band_inun.SetNoDataValue(-9999.)
            #     ds_inun = None
            #     sys.exit(0)
    # os.unlink(flood_vol_map)

    logger.info('Finalizing {:s}'.format(inun_file))
    # add the metadata to the file and band
    # band_inun.SetNoDataValue(-9999.)
    # ds_inun.SetMetadata(metadata_global)
    # band_inun.SetMetadata(metadata_var)
    if options.out_format == 0:
        ds_inun = None
        ds_hand = None
    else:
        ds_inun.close()

    ds_ldd = None
    # rename temporary file to final inundation file
    if os.path.isfile(inun_file):
        # remove an old result if available
        os.unlink(inun_file)
    os.rename(inun_file_tmp, inun_file)

    logger.info('Done! Thank you for using hand_contour_inun.py')
    logger, ch = inun_lib.closeLogger(logger, ch)
    del logger, ch
    sys.exit(0)
msg = "Set the landmask to : " + str(landmask_map_file)
logger.info(msg)
landmask = pcr.readmap(landmask_map_file)

# resampling low resolution ldd map
msg = "Resample the low resolution ldd map."
logger.info(msg)
ldd_map_low_resolution_file_name = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/lddsound_05min.map"
ldd_map_low_resolution = vos.readPCRmapClone(ldd_map_low_resolution_file_name, \
                                             clone_map_file, \
                                             tmp_folder, \
                                             None, True, None, False)
ldd_map_low_resolution = pcr.ifthen(
    landmask,
    ldd_map_low_resolution)  # NOTE THAT YOU MAY NOT HAVE TO MASK-OUT THE LDD.
ldd_map_low_resolution = pcr.lddrepair(pcr.ldd(ldd_map_low_resolution))
ldd_map_low_resolution = pcr.lddrepair(ldd_map_low_resolution)
pcr.report(ldd_map_low_resolution, "resampled_low_resolution_ldd.map")
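# A hedged sanity check (not in the original script): the pits of a repaired
# ldd should coincide with river mouths and endorheic sinks, so reporting
# them is a quick way to verify that resampling did not fragment the network:
pcr.report(pcr.pit(ldd_map_low_resolution), "resampled_low_resolution_ldd_pits.map")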

# permanent water bodies files (at 5 arc-minutes resolution)
reservoir_capacity_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/reservoircapacity_2010.map"
fracwat_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/fracwat_2010.map"
water_body_id_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/reservoirs/waterBodiesFinal_version15Sept2013/maps/waterbodyid_2010.map"

# cell_area_file
cell_area_file = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/cellsize05min.correct.map"

# read all extreme value maps (low resolution maps), resample them, and save them to the output folder
msg = "Resampling extreme value maps."
logger.info(msg)
file_names = [
Exemplo n.º 34
0
    def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):

        line = re.sub("\n", "", line)
        line = re.sub("\t", " ", line)
        result = None

        # read until first comment
        content = ""
        content, sep, comment = line.partition("#")
        if len(content) > 1:
            collectionVariableName, sep, tail = content.partition(" ")
            if collectionVariableName == self._varName:
                tail = tail.strip()
                key, sep, variableValue = tail.rpartition(" ")

                if len(key.split()) != nrColumns:
                    tmp = re.sub(r"\(|\)|,", "", str(key))
                    msg = "Error reading %s line %d, order of columns given (%s columns) does not match expected order of %s columns" % (
                        self._fileName, lineNumber, len(key.split()) + 2,
                        int(nrColumns) + 2)
                    raise ValueError(msg)

                variableValue = re.sub('\"', "", variableValue)

                tmp = None
                try:
                    tmp = int(variableValue)
                    if self._dataType == pcraster.Boolean:
                        tmp = pcraster.boolean(tmp)
                    elif self._dataType == pcraster.Nominal:
                        tmp = pcraster.nominal(tmp)
                    elif self._dataType == pcraster.Ordinal:
                        tmp = pcraster.ordinal(tmp)
                    elif self._dataType == pcraster.Ldd:
                        tmp = pcraster.ldd(tmp)
                    else:
                        msg = "Conversion to %s failed" % (self._dataType)
                        raise Exception(msg)
                except ValueError as e:
                    try:
                        tmp = float(variableValue)
                        if self._dataType == pcraster.Scalar:
                            tmp = pcraster.scalar(tmp)
                        elif self._dataType == pcraster.Directional:
                            tmp = pcraster.directional(tmp)
                        else:
                            msg = "Conversion to %s failed" % (self._dataType)
                            raise Exception(msg)

                    except ValueError as e:
                        variableValue = re.sub("\\\\", "/", variableValue)
                        variableValue = variableValue.strip()
                        path = os.path.normpath(variableValue)
                        try:
                            tmp = pcraster.readmap(path)
                        except RuntimeError as e:
                            msg = "Error reading %s line %d, %s" % (
                                self._fileName, lineNumber, e)
                            raise ValueError(msg)

                # test if key is an external name
                transformedKeys = []
                counter = 0

                for k in key.split():
                    k = k.strip()
                    if externalNames[counter].get(k):
                        transformedKeys.append(externalNames[counter].get(k))
                    else:
                        transformedKeys.append(k)
                    counter += 1

                key = tuple(transformedKeys)

                if key not in keyDict:
                    tmp = re.sub(r"\(|\)|,", "", str(key))
                    msg = "Error reading %s line %d, %s unknown collection index" % (
                        self._fileName, lineNumber, tmp)
                    raise ValueError(msg)

                if keyDict[key] is not None:
                    tmp = re.sub(r"\(|\)|,", "", str(key))
                    msg = "Error reading %s line %d, %s %s already initialised" % (
                        self._fileName, lineNumber, self._varName, tmp)
                    raise ValueError(msg)

                keyDict[key] = tmp
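The dispatch above tries an integer literal first, then a float, then a map on disk. A minimal standalone sketch of the same fallback order (the helper name to_pcraster_value is hypothetical, not part of the original class):

import os
import pcraster

def to_pcraster_value(text, data_type):
    # mirrors the fallback order used in _parseLine: int -> float -> map file
    int_casts = {pcraster.Boolean: pcraster.boolean,
                 pcraster.Nominal: pcraster.nominal,
                 pcraster.Ordinal: pcraster.ordinal,
                 pcraster.Ldd: pcraster.ldd}
    float_casts = {pcraster.Scalar: pcraster.scalar,
                   pcraster.Directional: pcraster.directional}
    try:
        return int_casts[data_type](int(text))
    except (ValueError, KeyError):
        pass
    try:
        return float_casts[data_type](float(text))
    except (ValueError, KeyError):
        pass
    # not a literal number: interpret the token as a path to a map on disk
    return pcraster.readmap(os.path.normpath(text.replace("\\", "/").strip()))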
Exemplo n.º 35
0
if lddin:
    pcr.setglobaloption("lddin")
ldd_map = workdir + 'ldd.map'
streamorder_map = workdir + 'streamorder.map'
river_map = workdir + 'river.map'
catchments_map = workdir + 'catchments.map'
catchments_tif = workdir + 'catchments.tif'
#catchments_shp = resultdir + 'catchments.shp'

generateldd = True

if skipldd:
    print('Option -S is set')
    print('ldd will be read from ' + ldd_map)
    if os.path.exists(ldd_map):
        ldd = pcr.ldd(pcr.readmap(ldd_map))
        generateldd = False
    else:
        print('file ' + ldd_map + ' does not exist')
        print('new ldd will be generated')

if generateldd:
    print('Generating ldd...')
    if burndem:
        linescover = pcr.ifthen(lines==1,pcr.scalar(0))
        pointscover = pcr.ifthen(pcr.scalar(points)==1,pcr.scalar(0))
        #pcr.report(linescover,'lines.map')
        #pcr.report(pointscover,'points.map')
        dem = pcr.cover(dem,linescover,pointscover)
        #pcr.report(dem,'dem1.map')
        dem = dem + burn
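The excerpt stops right after the DEM burning; presumably the next step derives the ldd from the burned DEM. A hedged continuation sketch (assumed, not part of the original excerpt) using the same PCRaster calls that appear elsewhere in these examples:

    # continuation sketch: derive and report the ldd and stream order
    ldd = pcr.lddcreate(dem, 1e35, 1e35, 1e35, 1e35)  # thresholds disabled: no pit-removal limits
    pcr.report(ldd, ldd_map)
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    pcr.report(streamorder, streamorder_map)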
Exemplo n.º 36
0
    def __init__(self,
                 iniItems,
                 landmask,
                 onlyNaturalWaterBodies=False,
                 lddMap=None):
        object.__init__(self)

        # clone map file names, temporary directory and global/absolute path of input directory
        self.cloneMap = iniItems.cloneMap
        self.tmpDir = iniItems.tmpDir
        self.inputDir = iniItems.globalOptions['inputDir']
        self.landmask = landmask

        self.iniItems = iniItems

        # local drainage direction:
        if lddMap is None:
            self.lddMap = vos.readPCRmapClone(
                iniItems.routingOptions['lddMap'], self.cloneMap, self.tmpDir,
                self.inputDir, True)
            self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
            self.lddMap = pcr.lddrepair(self.lddMap)
        else:
            self.lddMap = lddMap

        # the following is needed for a modflowOfflineCoupling run
        if 'modflowOfflineCoupling' in iniItems.globalOptions and \
           iniItems.globalOptions['modflowOfflineCoupling'] == "True" and \
           'routingOptions' not in iniItems.allSections:
            logger.info(
                "The 'routingOptions' are not defined in the configuration ini file. We will adopt them from the 'modflowParameterOptions'."
            )
            iniItems.routingOptions = iniItems.modflowParameterOptions

        # option to activate water balance check
        self.debugWaterBalance = True
        if 'debugWaterBalance' in iniItems.routingOptions and \
           iniItems.routingOptions['debugWaterBalance'] == "False":
            self.debugWaterBalance = False

        # option to perform a run with only natural lakes (without reservoirs)
        self.onlyNaturalWaterBodies = onlyNaturalWaterBodies
        if "onlyNaturalWaterBodies" in list(iniItems.routingOptions.keys(
        )) and iniItems.routingOptions['onlyNaturalWaterBodies'] == "True":
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.onlyNaturalWaterBodies = True
            self.dateForNaturalCondition = "1900-01-01"  # The run for a natural condition should access only this date.

        # names of files containing water bodies parameters
        if iniItems.routingOptions['waterBodyInputNC'] == str(None):
            self.useNetCDF = False
            self.fracWaterInp = iniItems.routingOptions['fracWaterInp']
            self.waterBodyIdsInp = iniItems.routingOptions['waterBodyIds']
            self.waterBodyTypInp = iniItems.routingOptions['waterBodyTyp']
            self.resMaxCapInp = iniItems.routingOptions['resMaxCapInp']
            self.resSfAreaInp = iniItems.routingOptions['resSfAreaInp']
        else:
            self.useNetCDF = True
            self.ncFileInp       = vos.getFullPath(\
                                   iniItems.routingOptions['waterBodyInputNC'],\
                                   self.inputDir)

        # minimum width (m) used in the weir formula  # TODO: define minWeirWidth based on the GLWD, GRanD database and/or bankfull discharge formula
        self.minWeirWidth = 10.

        # lower and upper limits at which reservoir release is terminated and
        #                        at which reservoir release is equal to long-term average outflow
        # - default values
        self.minResvrFrac = 0.10
        self.maxResvrFrac = 0.75
        # - from the ini file
        if "minResvrFrac" in list(iniItems.routingOptions.keys()):
            minResvrFrac = iniItems.routingOptions['minResvrFrac']
            self.minResvrFrac = vos.readPCRmapClone(minResvrFrac,
                                                    self.cloneMap, self.tmpDir,
                                                    self.inputDir)
        if "maxResvrFrac" in list(iniItems.routingOptions.keys()):
            maxResvrFrac = iniItems.routingOptions['maxResvrFrac']
            self.maxResvrFrac = vos.readPCRmapClone(maxResvrFrac,
                                                    self.cloneMap, self.tmpDir,
                                                    self.inputDir)
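Because minResvrFrac and maxResvrFrac are read through vos.readPCRmapClone, the ini file can supply them either as a constant or as a map; an illustrative (not from the original configuration) pair of entries:

    minResvrFrac = 0.10                 # becomes a uniform scalar field
    maxResvrFrac = max_resvr_frac.map   # read (and, if needed, resampled) as a map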
Exemplo n.º 37
0
    def __init__(self, modelTime, output_folder, totat_runoff_input_file):
        DynamicModel.__init__(self)

        self.modelTime = modelTime
        
        # netcdf input files - based on PCR-GLOBWB output
        # - total runoff (m/month)
        self.totat_runoff_input_file = "/scratch-shared/edwin/05min_runs_for_gmd_paper_30_oct_2017/05min_runs_4LCs_accutraveltime_cru-forcing_1958-2015/non-natural_starting_from_1958/merged_1958_to_2015/totalRunoff_monthTot_output_1958-01-31_to_2015-12-31.nc"
        self.totat_runoff_input_file = totat_runoff_input_file
        #
        #~ # - discharge (m3/s) - NOT USED anymore
        #~ self.discharge_input_file    = "/scratch-shared/edwin/05min_runs_for_gmd_paper_30_oct_2017/05min_runs_4LCs_accutraveltime_cru-forcing_1958-2015/non-natural_starting_from_1958/merged_1958_to_2015/discharge_monthAvg_output_1958-01-31_to_2015-12-31.nc"

        # output files - in netcdf format
        self.total_flow_output_file    = output_folder + "/total_flow.nc"
        self.internal_flow_output_file = output_folder + "/internal_flow.nc" 
        # - all will have unit m3/s

        # preparing temporary directory
        self.temporary_directory = output_folder + "/tmp/"
        os.makedirs(self.temporary_directory)
        
        # clone map
        logger.info("Set the clone map")
        self.clonemap_file_name = "/home/edwinvua/github/edwinkost/estimate_discharge_from_local_runoff/making_subcatchment_map/version_20180202/clone_version_20180202.map"
        pcr.setclone(self.clonemap_file_name)
        
        # pcraster input files
        landmask_file_name      = None
        # - river network map and sub-catchment map
        ldd_file_name           = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/lddsound_05min.map"
        sub_catchment_file_name = "/home/edwinvua/github/edwinkost/estimate_discharge_from_local_runoff/making_subcatchment_map/version_20180202/subcatchments_of_station_pcraster_ids.nom.bigger_than_zero.map"
        # - cell area (unit: m2)
        cell_area_file_name     = "/projects/0/dfguu/data/hydroworld/PCRGLOBWB20/input5min/routing/cellsize05min.correct.map"
        
        # loading pcraster input maps
        self.sub_catchment = vos.readPCRmapClone(sub_catchment_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None, False, None, \
                                                 True)
        self.ldd_network   = vos.readPCRmapClone(ldd_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None, \
                                                 True)
        self.ldd_network   = pcr.lddrepair(pcr.ldd(self.ldd_network))
        self.ldd_network   = pcr.lddrepair(self.ldd_network)
        self.cell_area     = vos.readPCRmapClone(cell_area_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None)

        # define the landmask
        self.landmask = pcr.defined(self.ldd_network)
        if landmask_file_name is not None:
            self.landmask  = vos.readPCRmapClone(landmask_file_name, \
                                                 self.clonemap_file_name, \
                                                 self.temporary_directory, \
                                                 None)
        self.landmask = pcr.ifthen(pcr.defined(self.ldd_network), self.landmask)
        self.landmask = pcr.ifthen(self.landmask, self.landmask)
        
        # set/limit all input maps to the defined landmask
        self.sub_catchment = pcr.ifthen(self.landmask, self.sub_catchment)
        self.ldd_network   = pcr.ifthen(self.landmask, self.ldd_network)
        self.cell_area     = pcr.ifthen(self.landmask, self.cell_area)
        
        # preparing an object for reporting netcdf files:
        self.netcdf_report = netcdf_writer.PCR2netCDF(self.clonemap_file_name)
        
        # preparing netcdf output files:
        # - for total inflow
        self.netcdf_report.createNetCDF(self.total_flow_output_file,\
                                        "total_flow",\
                                        "m3/s")
        self.netcdf_report.createNetCDF(self.internal_flow_output_file,\
                                        "internal_flow",\
                                        "m3/s")
Exemplo n.º 38
0
def main():
    clone_map = "mask\mask.map"
    clone_shp = "mask\mask.shp"
    clone_prj = "mask\mask.prj"
    workdir = "work\\"
    resultdir = "staticmaps\\"
    ''' read commandline arguments '''
    argv = sys.argv
    clone_EPSG = False

    try:
        opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA')
    except getopt.error:
        print('error: invalid command line arguments')
        Usage()
        sys.exit(1)

    inifile = None
    rivshp = None
    catchshp = None
    dem_in = None
    landuse = None
    soiltype = None
    clean = False
    gaugeshp = None
    alltouching = False

    for o, a in opts:
        if o == '-i': inifile = a
        if o == '-p': clone_EPSG = 'EPSG:' + a
        if o == '-r': rivshp = a
        if o == '-c': catchshp = a
        if o == '-d': dem_in = a
        if o == '-l': landuse = a
        if o == '-s': soiltype = a
        if o == '-C': clean = True
        if o == '-g': gaugeshp = a
        if o == '-A': alltouching = True

    if inifile is None or rivshp is None or catchshp is None or dem_in is None:
        print('the following files are compulsory:')
        print(' - ini-file')
        print(' - DEM (raster)')
        print(' - river (shape)')
        print(' - catchment (shape)')
        Usage()
        sys.exit(1)

    if landuse is None:
        print('no raster with landuse classifications is specified. one class will be applied for the entire domain')

    if soiltype is None:
        print('no raster with soil classifications is specified. one class will be applied for the entire domain')
    ''' read mask '''
    if not os.path.exists(clone_map):
        print('Mask not found. Make sure the file mask\mask.map exists')
        print('This file is usually created with the CreateGrid script')
        sys.exit(1)
    else:
        pcr.setclone(clone_map)
        ds = gdal.Open(clone_map, GA_ReadOnly)
        clone_trans = ds.GetGeoTransform()
        cellsize = clone_trans[1]
        clone_rows = ds.RasterYSize
        clone_columns = ds.RasterXSize
        extent_mask = [
            clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize,
            clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3]
        ]
        xmin, ymin, xmax, ymax = map(str, extent_mask)
        ds = None
        ones = pcr.scalar(pcr.readmap(clone_map))
        zeros = ones * 0
        empty = pcr.ifthen(ones == 0, pcr.scalar(0))
    ''' read projection from mask.shp '''
    # TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
    if not os.path.exists(clone_prj):
        print('please add prj-file to mask.shp')
        sys.exit(1)
    if os.path.exists(clone_shp):
        ds = ogr.Open(clone_shp)
        file_att = os.path.splitext(os.path.basename(clone_shp))[0]
        lyr = ds.GetLayerByName(file_att)
        spatialref = lyr.GetSpatialRef()
        if spatialref is not None:
            srs_clone = osr.SpatialReference()
            srs_clone.ImportFromWkt(spatialref.ExportToWkt())
            srs_clone.AutoIdentifyEPSG()
            unit_clone = False
            unit_clone = srs_clone.GetAttrValue('UNIT').lower()
            #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1)
            # TODO: fix hard EPSG code below
            clone_EPSG = 'EPSG:' + '4167'
            print('EPSG-code is read from mask.shp: ' + clone_EPSG)
            spatialref = None
    if not clone_EPSG:
        print('EPSG-code cannot be read from mask.shp')
        print('please add prj-file to mask.shp or specify on command line')
        print('e.g. -p EPSG:4326 (for WGS84 lat lon projection)')

    ds = None
    clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)])
    ''' open config-file '''
    config = wt.OpenConf(inifile)
    ''' read settings '''
    snapgaugestoriver = bool(
        int(wt.configget(config, "settings", "snapgaugestoriver", "1")))
    burnalltouching = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "1")))
    burninorder = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "0")))
    verticetollerance = float(
        wt.configget(config, "settings", "vertice_tollerance", "0.0001"))
    ''' read parameters '''
    burn_outlets = int(
        wt.configget(config, "parameters", "burn_outlets", 10000))
    burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200))
    burn_connections = int(
        wt.configget(config, "parameters", "burn_connections", 100))
    burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100))
    minorder = int(wt.configget(config, "parameters", "riverorder_min", 3))
    exec "percentile=tr.array(" + wt.configget(config, "parameters",
                                               "statisticmaps", [0, 100]) + ")"
    if not unit_clone:
        print('failed to read unit (meter or degree) from mask projection')
        unit_clone = str(wt.configget(config, "settings", "unit", 'meter'))
        print('unit read from settings: ' + unit_clone)
    if unit_clone == 'degree':
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_degree", 0.0005))
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_metre", 50))

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    ''' read staticmap locations '''
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")
    ''' read mask location (optional) '''
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)
    ''' create directories '''
    if os.path.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)
    os.makedirs(resultdir)
    ''' Preparation steps '''
    zero_map = workdir + "zero.map"
    zero_tif = workdir + "zero.tif"
    pcr.report(zeros, zero_map)
    # TODO: replace gdal_translate call
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, zero_tif))
    pcr.setglobaloption("lddin")
    ''' resample DEM '''
    dem_resample = workdir + "dem_resampled.tif"
    ds = gdal.Open(dem_in, GA_ReadOnly)
    band = ds.GetRasterBand(1)
    nodata = band.GetNoDataValue()
    proj = ds.GetGeoTransform()
    cellsize_dem = proj[1]
    ''' read DEM projection '''
    spatialref = None
    spatialref = ds.GetProjection()
    if spatialref:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(spatialref)
        srs.AutoIdentifyEPSG()
        dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
        print('EPSG-code is read from ' + os.path.basename(dem_in) + ': ' + dem_EPSG)
        spatialref = None
        dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)])
        srs_DEM = osr.SpatialReference()
        srs_DEM.ImportFromEPSG(dem_EPSG_int)
        clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM)
    else:
        dem_EPSG = clone_EPSG
        print('No projection defined for ' + os.path.basename(dem_in))
        print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    ds = None
    print('Resampling DEM...')
    if nodata is None:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin,
              xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata',
              str(-9999), '-r', 'cubic', dem_in, dem_resample))
    else:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-srcnodata', str(nodata), '-dstnodata',
              str(nodata), '-r', 'cubic', dem_in, dem_resample))
    ''' create dem.map and statistic maps '''
    dem_resample_map = resultdir + dem_map
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', dem_resample, dem_resample_map))
    print('Computing DEM statistics ....')
    stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans,
                           srs_clone, resultdir, percentile)
    ''' burn DEM '''
    ds = ogr.Open(rivshp)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    rivshp_EPSG = clone_EPSG
    print('No projection defined for ' + file_att + '.shp')
    print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    # strip rivers to nodes
    xminc = str(float(xmin) + 0.5 * cellsize)
    yminc = str(float(ymin) + 0.5 * cellsize)
    xmaxc = str(float(xmax) - 0.5 * cellsize)
    ymaxc = str(float(ymax) - 0.5 * cellsize)
    if rivshp_EPSG == clone_EPSG:
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivshp))
    else:
        rivprojshp = workdir + 'rivshape_proj.shp'
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, rivprojshp, rivshp))
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivprojshp))

    rivshp = rivclipshp

    #### BURNING BELOW ####

    # TODO: check if extraction can be done within memory and retun a burn layer
    shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int,
                            cellsize * verticetollerance, workdir)

    outlets = shapes[1]
    connections = shapes[2]
    outlets_att = os.path.splitext(os.path.basename(outlets))[0]
    connections_att = os.path.splitext(os.path.basename(connections))[0]
    dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0]
    connections_tif = workdir + connections_att + ".tif"
    outlets_tif = workdir + outlets_att + ".tif"
    # TODO: make the burning in memory
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, connections_tif))
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets,
          outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections,
          connections_tif))

    # convert rivers to order
    rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
    rivers_tif = workdir + rivshp_att + ".tif"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, rivers_tif))
    if burninorder:  # make river shape with an order attribute
        OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                                  cellsize * verticetollerance, workdir)
        wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
    else:
        call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp,
              rivers_tif))

    # convert 2 maps
    connections_map = workdir + connections_att + ".map"
    rivers_map = workdir + rivshp_att + ".map"
    outlets_map = workdir + outlets_att + ".map"
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', connections_tif, connections_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', rivers_tif, rivers_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', outlets_tif, outlets_map))

    # burn the layers in DEM
    outletsburn = pcr.scalar(
        pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
    connectionsburn = pcr.scalar(
        pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
    riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
    ldddem = pcr.cover(dem_resample_map,
                       pcr.ifthen(riverburn > 0, pcr.scalar(0)))
    ldddem = ldddem - outletsburn - connectionsburn - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    pcr.report(ldddem, workdir + "dem_burn.map")
    ''' create ldd for multi-catchments '''
    ldd = pcr.ldd(empty)
    # reproject catchment shape-file
    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    catchshp_EPSG = clone_EPSG
    print('No projection defined for ' + file_att + '.shp')
    print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    if catchshp_EPSG != clone_EPSG:
        catchprojshp = workdir + 'catchshape_proj.shp'
        call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
              catchprojshp, catchshp))
        catchshp = catchprojshp
    ds.Destroy()

    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)
    Driver = ogr.GetDriverByName("ESRI Shapefile")  # shapefile driver (defined at module level in the original script)
    TEMP_out = Driver.CreateDataSource(workdir + "temp.shp")
    if srs is not None:
        TEMP_LYR = TEMP_out.CreateLayer("temp",
                                        srs,
                                        geom_type=ogr.wkbMultiPolygon)
    else:
        TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
    TEMP_LYR.CreateField(fieldDef)

    for i in range(lyr.GetFeatureCount()):
        orgfeature = lyr.GetFeature(i)
        geometry = orgfeature.geometry()
        feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
        feature.SetGeometry(geometry)
        feature.SetField("ID", str(i + 1))
        TEMP_LYR.CreateFeature(feature)
    TEMP_out.Destroy()
    ds.Destroy()

    # rasterize catchment map
    catchments_tif = workdir + "catchments.tif"
    catchments_map = workdir + "catchments.map"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map,
          catchments_tif))
    if alltouching:
        call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
              workdir + 'temp.shp', catchments_tif))
    else:
        call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp',
              catchments_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
          catchments_tif, catchments_map))
    catchments = pcr.readmap(catchments_map)
    riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
    rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
    #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0))
    catchments = pcr.cover(
        pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
        pcr.ifthen(
            riverburn > 0,
            pcr.ordinal(
                pcr.spreadzone(pcr.nominal(catchments),
                               pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
    rivercatch_map = workdir + "catchments_river.map"
    catchclip_map = workdir + "catchments_clip.map"
    pcr.report(rivercatch, rivercatch_map)
    pcr.report(catchments, catchclip_map)

    ds = ogr.Open(workdir + "temp.shp")
    lyr = ds.GetLayerByName("temp")

    print('calculating ldd')
    for i in range(lyr.GetFeatureCount()):
        feature = lyr.GetFeature(i)
        catch = int(feature.GetField("ID"))
        print "calculating ldd for catchment: " + str(i + 1) + "/" + str(
            lyr.GetFeatureCount()) + "...."
        ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                              catchments)) * 0 + 1 * ldddem
        ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                                   float("1E35"), float("1E35"))
        ldd = pcr.cover(ldd, ldd_select)
    pcr.report(ldd, resultdir + ldd_map)
    ds.Destroy()
    ''' report stream order, river and dem '''
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
    dem_resample_map = pcr.cover(dem_resample_map,
                                 pcr.scalar(river) * 0 + mindem)
    pcr.report(dem_resample_map, resultdir + dem_map)
    pcr.report(streamorder, resultdir + streamorder_map)
    pcr.report(river, resultdir + river_map)
    ''' deal with your catchments '''
    if gaugeshp is None:
        print('No gauges defined, using outlets instead')
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, resultdir + gauges_map)


#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
#
#    # reproject gauge shape if necesarry
#    if not gaugeshp_EPSG == clone_EPSG:
#        gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#        call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp))
#        gaugeshp = gaugeprojshp
#
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    gaugestif = workdir + file_att + '.tif'
#    gaugesmap = workdir + file_att + '.map'
#    call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif))
#    call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif))
#    call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap))
#    gaugelocs = pcr.readmap(gaugesmap)
#    snapgaugestoriver = True
#
#    if snapgaugestoriver:
#        print "Snapping gauges to river"
#        gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#        gauges= wt.snaptomap(pcr.ordinal(gauges),river)
#
#    gaugesmap = pcr.ifthen(gauges > 0, gauges)
    ''' report riverlengthfrac '''
    riv_hr = workdir + 'river_highres.tif'
    wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
    print('Computing river length...')
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns,
                                 clone_trans, srs_clone, resultdir, 'frac')
    ''' report outlet map '''
    pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
               resultdir + outlet_map)
    ''' report  map '''
    catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
    pcr.report(catchment, resultdir + catchment_map)
    ''' report subcatchment map '''
    subcatchment = pcr.subcatchment(ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)
    ''' report landuse map '''
    if landuse is None:
        pcr.report(pcr.nominal(ones), resultdir + landuse_map)
    else:
        landuse_resample = workdir + 'landuse.tif'
        landuse_map = resultdir + landuse_map
        transform = wt.GetRasterTranform(landuse, srs_clone)
        if not transform[0]:
            call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
                  clone_EPSG, '-te', str(xmin), str(ymin), str(xmax), str(ymax),
                  '-tr', str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        else:
            call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs',
                  clone_EPSG, '-te', str(xmin), str(ymin), str(xmax), str(ymax),
                  '-tr', str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              landuse_resample, landuse_map))
        landuse_work = pcr.readmap(landuse_map)
        pcr.report(pcr.nominal(landuse_work), landuse_map)
    ''' report soil map '''
    if soiltype is None:
        pcr.report(pcr.nominal(ones), resultdir + soil_map)
    else:
        soiltype_resample = workdir + 'soiltype.tif'
        soil_map = resultdir + soil_map
        #transform = wt.GetRasterTranform(soiltype,srs_clone)
        #        if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
              clone_EPSG, '-te', str(xmin), str(ymin), str(xmax), str(ymax),
              '-tr', str(cellsize), str(-cellsize), '-r', 'mode', soiltype,
              soiltype_resample))
        #        else:
        #        call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              soiltype_resample, soil_map))
        soiltype_work = pcr.readmap(soil_map)
        pcr.report(pcr.nominal(soiltype_work), soil_map)

    if clean:
        wt.DeleteList(glob.glob(os.path.join(os.getcwd(), resultdir, '*.xml')))
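
A minimal helper capturing the resample-to-PCRaster pattern used above for the
land-use and soil grids (a sketch; the helper name and the bbox argument are
illustrative, not from the source):

import os
from subprocess import call
import pcraster as pcr

def categorical_to_pcraster(src_tif, dst_map, tmpdir, clone_EPSG, bbox, cellsize):
    # mode-resample a categorical raster onto the model grid with gdalwarp,
    # convert it to PCRaster format, then re-report it as a nominal map
    xmin, ymin, xmax, ymax = bbox
    tmp_tif = os.path.join(tmpdir, 'resampled.tif')
    call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG,
          '-te', str(xmin), str(ymin), str(xmax), str(ymax),
          '-tr', str(cellsize), str(-cellsize), '-r', 'mode', src_tif, tmp_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32', tmp_tif, dst_map))
    pcr.report(pcr.nominal(pcr.readmap(dst_map)), dst_map)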
Exemplo n.º 39
0
  def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):

    line = re.sub("\n","",line)
    line = re.sub("\t"," ",line)
    result = None

    # split off a trailing comment: everything before the first '#' is content
    content, sep, comment = line.partition("#")
    if len(content) > 1:
      collectionVariableName, sep, tail = content.partition(" ")
      if collectionVariableName == self._varName:
        tail = tail.strip()
        key, sep, variableValue = tail.rpartition(" ")

        if len(key.split()) != nrColumns:
          msg = "Error reading %s line %d, order of columns given (%d columns) does not match expected order of %d columns" % (self._fileName, lineNumber, len(key.split()) + 2, int(nrColumns) + 2)
          raise ValueError(msg)

        variableValue = re.sub('\"', "", variableValue)

        tmp = None
        try:
          tmp = int(variableValue)
          if self._dataType == pcraster.Boolean:
            tmp = pcraster.boolean(tmp)
          elif self._dataType == pcraster.Nominal:
            tmp = pcraster.nominal(tmp)
          elif self._dataType == pcraster.Ordinal:
            tmp = pcraster.ordinal(tmp)
          elif self._dataType == pcraster.Ldd:
            tmp = pcraster.ldd(tmp)
          else:
            msg = "Conversion to %s failed" % (self._dataType)
            raise Exception(msg)
        except ValueError as e:
          try:
            tmp = float(variableValue)
            if self._dataType == pcraster.Scalar:
              tmp = pcraster.scalar(tmp)
            elif self._dataType == pcraster.Directional:
              tmp = pcraster.directional(tmp)
            else:
              msg = "Conversion to %s failed" % (self._dataType)
              raise Exception(msg)

          except ValueError as e:
            variableValue = re.sub("\\\\","/",variableValue)
            variableValue = variableValue.strip()
            path = os.path.normpath(variableValue)
            try:
              tmp = pcraster.readmap(path)
            except RuntimeError as e:
              msg = "Error reading %s line %d, %s" %(self._fileName, lineNumber, e)
              raise ValueError(msg)

        # test if key is an external name
        transformedKeys = []
        counter = 0

        for k in key.split():
          k = k.strip()
          if externalNames[counter].get(k):
            transformedKeys.append(externalNames[counter].get(k))
          else:
            transformedKeys.append(k)
          counter += 1

        key = tuple(transformedKeys)

        if key not in keyDict:
          tmp = re.sub(r"\(|\)|,", "", str(key))
          msg = "Error reading %s line %d, %s unknown collection index" % (self._fileName, lineNumber, tmp)
          raise ValueError(msg)

        if keyDict[key] is not None:
          tmp = re.sub(r"\(|\)|,", "", str(key))
          msg = "Error reading %s line %d, %s %s already initialised" % (self._fileName, lineNumber, self._varName, tmp)
          raise ValueError(msg)

        keyDict[key] = tmp
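
A minimal usage sketch for the parser above (the instance name, variable name,
keys and values are hypothetical): each record line holds the collection
variable name, the key columns, then the value; '#' starts a comment.

# given a collection file line such as:
#     Ksat forest clay 12.5   # two key columns, scalar value
# and a parser instance configured with _varName = "Ksat" and
# _dataType = pcraster.Scalar:
externalNames = [{}, {}]               # no external-name remapping per column
keyDict = {("forest", "clay"): None}   # collection indices known in advance
parser._parseLine("Ksat forest clay 12.5", 1, 2, externalNames, keyDict)
# keyDict[("forest", "clay")] now holds pcraster.scalar(12.5)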
Exemplo n.º 40
0
def main():

    try:
        opts, args = getopt.getopt(sys.argv[1:], "fhC:N:I:s:M:", ['version'])
    except getopt.error as msg:
        usage(msg)

    factor = 1
    Verbose = 1
    inmaps = True
    force = False
    caseName = "thecase"
    caseNameNew = "thecase_resamp"
    maxcpu = 4

    for o, a in opts:
        if o == "-C":
            caseName = a
        if o == "-N":
            caseNameNew = a
        if o == "-s":
            subcatch = int(a)
        if o == "-I":
            inmaps = False
        if o == "-h":
            usage()
        if o == "-f":
            force = True
        if o == "-M":
            maxcpu = int(a)
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    dirs = [
        "/intbl/",
        "/staticmaps/",
        "/intss/",
        "/instate/",
        "/outstate/",
        "/inmaps/",
        "/inmaps/clim/",
        "/intbl/clim/",
    ]
    ext_to_copy = ["*.tss", "*.tbl", "*.col", "*.xml"]
    if os.path.isdir(caseNameNew) and not force:
        print("Refusing to write into an existing directory: " + caseNameNew)
        sys.exit()

    # walk the case tree; note this replaces the hard-coded dirs list above
    dirs = []
    for (path, thedirs, files) in os.walk(caseName):
        print(path)
        dirs.append(path)

    if not os.path.isdir(caseNameNew):
        for ddir in dirs:
            os.makedirs(ddir.replace(caseName, caseNameNew))
        for inifile in glob.glob(caseName + "/*.ini"):
            shutil.copy(inifile, inifile.replace(caseName, caseNameNew))

    # read subcatchment map
    x, y, subcatchmap, FillVal = readMap(
        os.path.join(caseName, "staticmaps", "wflow_subcatch.map"), "PCRaster")
    for ddir in dirs:
        print(ddir)
        allcmd = []
        for mfile in glob.glob(ddir + "/*.map"):
            if not os.path.exists(mfile.replace(caseName, caseNameNew)):
                x, y, data, FillVal = readMap(mfile, "PCRaster")
                try:
                    good = 1
                    xn, yn, datan = cutMapById(data, subcatchmap, subcatch, x,
                                               y, FillVal)
                except Exception as e:
                    good = 0
                    print("Skipping: " + mfile + " exception: " + str(e))

                if good and xn.size == 0:
                    good = 0
                    print("Skipping: " + mfile + " size does not match...")

                if good:
                    ofile = mfile.replace(caseName, caseNameNew)
                    if data.dtype == np.int32 or data.dtype == np.uint8:
                        writeMap(ofile, "PCRaster", xn, yn,
                                 datan.astype(np.int32), FillVal)
                    else:
                        writeMap(ofile, "PCRaster", xn, yn, datan, FillVal)

                    # Assume ldd and repair
                    if (data.dtype == np.uint8 and 'wflow_ldd.map' in mfile):
                        myldd = pcr.ldd(pcr.readmap(ofile))
                        myldd = pcr.lddrepair(myldd)
                        pcr.report(myldd, ofile)

        for mfile in glob.glob(ddir + "/*.[0-9][0-9][0-9]"):
            if not os.path.exists(mfile.replace(caseName, caseNameNew)):
                x, y, data, FillVal = readMap(mfile, "PCRaster")
                try:
                    good = 1
                    xn, yn, datan = cutMapById(data, subcatchmap, subcatch, x,
                                               y, FillVal)
                except Exception as e:
                    good = 0
                    print("Skipping: " + mfile + " exception: " + str(e))

                if good and xn.size == 0:
                    good = 0
                    print("Skipping: " + mfile + " size does not match...")

                if good:
                    ofile = mfile.replace(caseName, caseNameNew)
                    if data.dtype == np.int32 or data.dtype == np.uint8:
                        writeMap(ofile, "PCRaster", xn, yn,
                                 datan.astype(np.int32), FillVal)
                    else:
                        writeMap(ofile, "PCRaster", xn, yn, datan, FillVal)

        for ext in ext_to_copy:
            for mfile in glob.glob(os.path.join(ddir, ext)):
                shutil.copy(mfile, mfile.replace(caseName, caseNameNew))

        # Copy ini files
        for mfile in glob.glob(os.path.join(caseName, "*.ini")):
            shutil.copy(mfile, mfile.replace(caseName, caseNameNew))
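
A hypothetical invocation of this resampling script (the script file name is
an assumption, not from the source):

#   python wflow_resample_case.py -C thecase -N thecase_resamp -s 1 -f
# clips every map in case "thecase" to subcatchment 1 of
# staticmaps/wflow_subcatch.map, copies tables and ini files along, and
# writes the result to "thecase_resamp"; -f overwrites an existing output.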

Exemplo n.º 41
0
# set the pcraster clone, ldd, landmask, and cell area map 
msg = "Setting the clone, ldd, landmask, and cell area maps" + ":"
logger.info(msg)
# - clone 
clone_map_file = input_files['clone_map_05min']
pcr.setclone(clone_map_file)
# - ldd
ldd = vos.readPCRmapClone(input_files['ldd_map_05min'],
                          clone_map_file,
                          output_files['tmp_folder'],
                          None,
                          True)
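#   (positional arguments above: absolutePath=None, isLddMap=True)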
ldd = pcr.lddrepair(pcr.ldd(ldd))
ldd = pcr.lddrepair(ldd)
# - landmask
landmask = pcr.ifthen(pcr.defined(ldd), pcr.boolean(1.0))
# - cell area
cell_area = vos.readPCRmapClone(input_files['cell_area_05min'],
                                clone_map_file,
                                output_files['tmp_folder'])


# read the hydrological year 
msg = "Reading the hydrological year types" + ":"
logger.info(msg)
hydro_year_type = pcr.nominal(\
                  vos.readPCRmapClone(input_files['hydro_year_05min'],
                                      input_files['clone_map_05min'],
Exemplo n.º 42
0
event_file_name   = "channel_storage_" + chosen_date + ".map"
pcr.report(extreme_value_map, event_file_name)
#~ # - check it using aguila
#~ cmd = 'aguila ' + event_file_name
#~ os.system(cmd)

# resampling low resolution ldd map
msg = "Resample the low resolution ldd map."
logger.info(msg)
ldd_map_low_resolution = vos.readPCRmapClone(ldd_map_low_resolution_file_name, \
                                             clone_map_file, \
                                             tmp_folder, \
                                             None, True, None, False)
#~ ldd_map_low_resolution = pcr.ifthen(landmask, ldd_map_low_resolution)    # NOTE THAT YOU SHOULD NOT MASK-OUT THE LDD.
ldd_map_low_resolution = pcr.lddrepair(pcr.ldd(ldd_map_low_resolution))
ldd_map_low_resolution = pcr.lddrepair(ldd_map_low_resolution)
pcr.report(ldd_map_low_resolution, "resampled_low_resolution_ldd.map")

# resampling river length and width files (actually, we don't need these):
msg = "Resample the low resolution river length and width maps."
logger.info(msg)
# - river length
river_length_file_name = "/projects/0/dfguu/users/edwin/data/data_for_glofris_downscaling/input_data/maps_05min/celldiagonal05min.map"
river_length_low_resolution = vos.readPCRmapClone(river_length_file_name, \
                                                  clone_map_file, \
                                                  tmp_folder, \
                                                  None, False, None, False)
river_length_low_resolution = pcr.ifthen(landmask, river_length_low_resolution)
river_length_low_resolution = pcr.cover(river_length_low_resolution, 0.0)
pcr.report(river_length_low_resolution, "resampled_low_resolution_channel_length.map")
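
The river width map would presumably be resampled the same way; a parallel
sketch under that assumption ('river_width_file_name' is a hypothetical
placeholder, not a path from the source):

# - river width (sketch)
river_width_low_resolution = vos.readPCRmapClone(river_width_file_name, \
                                                 clone_map_file, \
                                                 tmp_folder, \
                                                 None, False, None, False)
river_width_low_resolution = pcr.ifthen(landmask, river_width_low_resolution)
river_width_low_resolution = pcr.cover(river_width_low_resolution, 0.0)
pcr.report(river_width_low_resolution, "resampled_low_resolution_channel_width.map")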