Example #1
def waterBalanceCheck(fluxesIn,fluxesOut,preStorages,endStorages,processName,PrintOnlyErrors,dateStr,threshold=1e-5,landmask=None):
    """ Returns the water balance for a list of input, output, and storage map files  """
    # modified by Edwin (22 Apr 2013)

    inMap   = pcr.spatial(pcr.scalar(0.0))
    outMap  = pcr.spatial(pcr.scalar(0.0))
    dsMap   = pcr.spatial(pcr.scalar(0.0))
    
    for fluxIn in fluxesIn:
        inMap   += fluxIn
    for fluxOut in fluxesOut:
        outMap  += fluxOut
    for preStorage in preStorages:
        dsMap   += preStorage
    for endStorage in endStorages:
        dsMap   -= endStorage

    a,b,c = getMinMaxMean(inMap + dsMap- outMap)
    if abs(a) > threshold or abs(b) > threshold:
        if PrintOnlyErrors: 
            
            msg  = "\n"
            msg += "\n"
            msg  = "\n"
            msg += "\n"
            msg += "##############################################################################################################################################\n"
            msg += "WARNING !!!!!!!! Water Balance Error %s Min %f Max %f Mean %f" %(processName,a,b,c)
            msg += "\n"
            msg += "##############################################################################################################################################\n"
            msg += "\n"
            msg += "\n"
            msg += "\n"
            
            logger.error(msg)
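
# A minimal usage sketch for waterBalanceCheck (not part of the original
# example); it assumes a clone map is set, that the module-level getMinMaxMean()
# and logger used above are available, and that the map names are placeholders:
import pcraster as pcr

pcr.setclone("clone.map")
precipitation = pcr.readmap("precipitation.map")    # flux in (m/day)
evaporation = pcr.readmap("evaporation.map")        # flux out (m/day)
runoff = pcr.readmap("runoff.map")                  # flux out (m/day)
storage_begin = pcr.readmap("storage_begin.map")    # storage at the start of the step (m)
storage_end = pcr.readmap("storage_end.map")        # storage at the end of the step (m)

waterBalanceCheck(fluxesIn=[precipitation],
                  fluxesOut=[evaporation, runoff],
                  preStorages=[storage_begin],
                  endStorages=[storage_end],
                  processName="landSurface",
                  PrintOnlyErrors=True,
                  dateStr="2000-01-01")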
Example #2
 def initial(self):
   pythonVal = pcraster.scalar(random.random())
   self.report(pythonVal, "pyVal")
   pcrVal = pcraster.mapnormal()
   self.report(pcrVal, "pcrVal")
   numpyVal = pcraster.scalar(numpy.random.random())
   self.report(numpyVal, "npVal")
Example #3
	def returnFloodedFraction(self,volume):
		#-returns the flooded fraction given the flood volume and the associated water height
		# using a logistic smoother near intersections (K&K, 2007)
		#-find the match on the basis of the shortest distance to the available intersections or steps
		deltaXMin= self.floodVolume[self.nrEntries-1]
		y_i= pcr.scalar(1.)
		k= [pcr.scalar(0.)]*2
		mInt= pcr.scalar(0.)
		for iCnt in range(self.nrEntries-1,0,-1):
			#-find x_i for current volume and update match if applicable
			# also update slope and intercept
			deltaX= volume-self.floodVolume[iCnt]
			mask= pcr.abs(deltaX) < pcr.abs(deltaXMin)
			deltaXMin= pcr.ifthenelse(mask,deltaX,deltaXMin)
			y_i= pcr.ifthenelse(mask,self.areaFractions[iCnt],y_i)
			k[0]= pcr.ifthenelse(mask,self.kSlope[iCnt-1],k[0])
			k[1]= pcr.ifthenelse(mask,self.kSlope[iCnt],k[1])
			mInt= pcr.ifthenelse(mask,self.mInterval[iCnt],mInt)
		#-all values returned, process data: calculate scaled deltaX and smoothed function
		# on the basis of the integrated logistic functions PHI(x) and 1-PHI(x)
		deltaX= deltaXMin
		deltaXScaled= pcr.ifthenelse(deltaX < 0.,pcr.scalar(-1.),1.)*\
			pcr.min(criterionKK,pcr.abs(deltaX/pcr.max(1.,mInt)))
		logInt= self.integralLogisticFunction(deltaXScaled)
		#-compute fractional flooded area and flooded depth
		floodedFraction= pcr.ifthenelse(volume > 0.,\
			pcr.ifthenelse(pcr.abs(deltaXScaled) < criterionKK,\
			y_i-k[0]*mInt*logInt[0]+k[1]*mInt*logInt[1],\
			y_i+pcr.ifthenelse(deltaX < 0.,k[0],k[1])*deltaX),0.)
		floodedFraction= pcr.max(0.,pcr.min(1.,floodedFraction))
		floodDepth= pcr.ifthenelse(floodedFraction > 0.,volume/(floodedFraction*self.cellArea),0.)
		return floodedFraction, floodDepth
Example #4
def subcatch_order_a(ldd, oorder):
    """
    Determines subcatchments using the catchment order

    This version uses the last cell BELOW order to derive the
    catchments. In general you want the _b version

    Input:
        - ldd
        - order - order to use

    Output:
        - map with catchment for the given streamorder
    """
    outl = find_outlet(ldd)
    large = pcr.subcatchment(ldd, pcr.boolean(outl))
    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    dif = pcr.upstream(
        ldd,
        pcr.cover(
            pcr.ifthen(
                large,
                pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts))),
            ),
            0,
        ),
    )
    dif = pcr.cover(pcr.scalar(outl), dif)  # Add catchment outlet
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(dif)))
    sc = pcr.subcatchment(ldd, dif)

    return sc, dif, stt
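
# A minimal usage sketch for subcatch_order_a (not part of the original example);
# it assumes "ldd.map" exists on the current clone and that the find_outlet()
# helper shown later in this listing is available in the same module:
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
ldd = pcr.ldd(pcr.readmap("ldd.map"))      # hypothetical drainage network
sc, dif, stt = subcatch_order_a(ldd, oorder=4)
pcr.report(sc, "subcatch_order4.map")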
Example #5
 def weirFormula(self, waterHeight, weirWidth):  # output: m3/s
     sillElev = pcr.scalar(0.0)
     weirCoef = pcr.scalar(1.0)
     weirFormula = (
         1.7 * weirCoef * pcr.max(0, waterHeight - sillElev) ** 1.5
     ) * weirWidth  # m3/s
     return weirFormula
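
# A small numeric check of the weir relation used above (illustrative, not part
# of the original example): with weirCoef = 1.0 and sillElev = 0.0 the formula
# reduces to Q = 1.7 * waterHeight**1.5 * weirWidth.
import pcraster as pcr

pcr.setclone(1, 1, 1.0, 0.0, 0.0)                    # hypothetical 1x1 clone
waterHeight = pcr.spatial(pcr.scalar(0.5))           # m above the sill
weirWidth = pcr.spatial(pcr.scalar(10.0))            # m
discharge = (1.7 * 1.0 * pcr.max(0, waterHeight - 0.0) ** 1.5) * weirWidth
print(pcr.cellvalue(discharge, 1)[0])                # about 6.0 m3/s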
Example #6
def snaptomap(points, mmap):
    """
    Snap the points in _points_ to nearest non missing
    values in _mmap_. Can be used to move gauge locations
    to the nearest rivers.

    Input:
        - points - map with points to move
        - mmap - map with points to move to

    Return:
        - map with shifted points
    """
    points = pcr.cover(points, 0)
    # Create unique id map of mmap cells
    unq = pcr.nominal(pcr.cover(pcr.uniqueid(pcr.defined(mmap)), pcr.scalar(0.0)))
    # Now fill holes in the mmap map with values indicating the closest mmap cell.
    dist_cellid = pcr.scalar(pcr.spreadzone(unq, 0, 1))
    # Get map with, at the point locations, the id of the closest mmap cell
    dist_cellid = pcr.ifthenelse(points > 0, dist_cellid, 0)
    # Spread this out
    dist_fill = pcr.spreadzone(pcr.nominal(dist_cellid), 0, 1)
    # Find the new (moved) locations
    npt = pcr.uniqueid(pcr.boolean(pcr.ifthen(dist_fill == unq, unq)))
    # Now recreate the original value in the points maps
    ptcover = pcr.spreadzone(pcr.cover(points, 0), 0, 1)
    # Now get the org point value in the pt map
    nptorg = pcr.ifthen(npt > 0, ptcover)

    return nptorg
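
# A minimal usage sketch for snaptomap (not part of the original example);
# "gauges.map" (ordinal gauge ids, missing elsewhere) and "river.map" (defined
# on river cells only) are hypothetical inputs:
import pcraster as pcr

pcr.setclone("clone.map")
gauges = pcr.ordinal(pcr.readmap("gauges.map"))
river = pcr.readmap("river.map")
snapped = snaptomap(gauges, river)
pcr.report(snapped, "gauges_snapped.map")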
Example #7
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    print(v)
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath != None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) >  0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            co = 'rm '+str(tmpDir)+'*.*'
            cOut,err = subprocess.Popen(co, stdout=subprocess.PIPE,stderr=open('/dev/null'),shell=True).communicate()
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
Example #8
def getRowColPoint(in_map, xcor, ycor):
    """
    returns the row and col in a map at the point given.
    Works but is rather slow.

    Input:
        - in_map - map to determine coordinates from
        - xcor - x coordinate
        - ycor - y coordinate

    Output:
        - row, column
    """
    x = pcr.pcr2numpy(pcr.xcoordinate(pcr.boolean(pcr.scalar(in_map) + 1.0)), np.nan)
    y = pcr.pcr2numpy(pcr.ycoordinate(pcr.boolean(pcr.scalar(in_map) + 1.0)), np.nan)
    XX = pcr.pcr2numpy(pcr.celllength(), 0.0)
    tolerance = 0.5  # takes a single point

    diffx = x - xcor
    diffy = y - ycor
    col_ = np.absolute(diffx) <= (XX[0, 0] * tolerance)  # cellsize
    row_ = np.absolute(diffy) <= (XX[0, 0] * tolerance)  # cellsize
    point = col_ * row_

    return point.argmax(0).max(), point.argmax(1).max()
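
# A minimal usage sketch for getRowColPoint (not part of the original example);
# the map name and coordinates are placeholders:
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
dem = pcr.readmap("dem.map")               # hypothetical map on that clone
row, col = getRowColPoint(dem, xcor=52500.0, ycor=414500.0)
print(row, col)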
Example #9
def glacierHBV(GlacierFrac, 
                GlacierStore, 
                Snow, 
                Temperature, 
                TT, 
                Cfmax, 
                G_SIfrac,
                timestepsecs,
                basetimestep):
    """
    Run Glacier module and add the snowpack on-top of it.
    First, a fraction of the snowpack is converted into ice using the HBV-light
    model (fraction between 0.001-0.005 per day).
    Glacier melting is modelled using a Temperature degree factor and only
    occurs if the snow cover < 10 mm.


    :ivar GlacierFrac: Fraction of wflow cell covered by glaciers
    :ivar GlacierStore: Volume of the glacier in the cell in mm w.e.
    :ivar Snow: Snow pack on top of Glacier
    :ivar Temperature: Air temperature
    :ivar TT: Temperature threshold for ice melting
    :ivar Cfmax: Ice degree-day factor in mm/(°C/day)
    :ivar G_SIfrac: Fraction of the snow part turned into ice each timestep
    :ivar timestepsecs: Model timestep in seconds
    :ivar basetimestep: Model base timestep (86 400 seconds)

    :returns: Snow,Snow2Glacier,GlacierStore,GlacierMelt,
    """
    
    #Fraction of the snow transformed into ice (HBV-light model)
    Snow2Glacier = G_SIfrac * Snow

    Snow2Glacier = pcr.ifthenelse(
        GlacierFrac > 0.0, Snow2Glacier, pcr.scalar(0.0)
    )
    # Max conversion to 8mm/day
    Snow2Glacier = (
        pcr.min(Snow2Glacier, 8.0) * timestepsecs / basetimestep
    )

    Snow = Snow - (Snow2Glacier * GlacierFrac)
    GlacierStore = GlacierStore + Snow2Glacier

    PotMelt = pcr.ifthenelse(
        Temperature > TT, Cfmax * (Temperature - TT), pcr.scalar(0.0)
    )  # Potential snow melt, based on temperature

    GlacierMelt = pcr.ifthenelse(
        Snow < 10.0, pcr.min(PotMelt, GlacierStore), pcr.cover(0.0)
    )  # actual Glacier melt
    GlacierStore = GlacierStore - GlacierMelt  # update the glacier store

    return Snow, Snow2Glacier, GlacierStore, GlacierMelt
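
# A minimal single-time-step sketch of glacierHBV with uniform illustrative
# values (not part of the original example), mainly to show the call signature:
import pcraster as pcr

pcr.setclone(1, 1, 1.0, 0.0, 0.0)                      # hypothetical 1x1 clone
Snow, Snow2Glacier, GlacierStore, GlacierMelt = glacierHBV(
    GlacierFrac=pcr.spatial(pcr.scalar(0.6)),          # 60 % of the cell is glacier
    GlacierStore=pcr.spatial(pcr.scalar(5000.0)),      # mm w.e.
    Snow=pcr.spatial(pcr.scalar(5.0)),                 # mm, below the 10 mm melt threshold
    Temperature=pcr.spatial(pcr.scalar(2.0)),          # degC
    TT=pcr.spatial(pcr.scalar(0.0)),                   # degC
    Cfmax=pcr.spatial(pcr.scalar(3.5)),                # mm/(degC day)
    G_SIfrac=pcr.spatial(pcr.scalar(0.002)),
    timestepsecs=86400,
    basetimestep=86400,
)
print(pcr.cellvalue(GlacierMelt, 1)[0])                # expected around 7 mm of melt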
Example #10
def rainfall_interception_gash(
    Cmax, EoverR, CanopyGapFraction, Precipitation, CanopyStorage, maxevap=9999
):
    """
    Interception according to the Gash model (For daily timesteps). 
    """
    # TODO:  add other rainfall interception method (lui)
    # TODO: Include subdaily Gash model
    # TODO: add LAI variation in year
    # Hack for stemflow

    pt = 0.1 * CanopyGapFraction

    P_sat = pcr.max(
        pcr.scalar(0.0),
        pcr.cover(
            (-Cmax / EoverR) * pcr.ln(1.0 - (EoverR / (1.0 - CanopyGapFraction - pt))),
            pcr.scalar(0.0),
        ),
    )

    # large storms P > P_sat
    largestorms = Precipitation > P_sat

    Iwet = pcr.ifthenelse(
        largestorms,
        ((1 - CanopyGapFraction - pt) * P_sat) - Cmax,
        Precipitation * (1 - CanopyGapFraction - pt),
    )
    Isat = pcr.ifthenelse(largestorms, (EoverR) * (Precipitation - P_sat), 0.0)
    Idry = pcr.ifthenelse(largestorms, Cmax, 0.0)
    Itrunc = 0

    StemFlow = pt * Precipitation

    ThroughFall = Precipitation - Iwet - Idry - Isat - Itrunc - StemFlow
    Interception = Iwet + Idry + Isat + Itrunc

    # Now correct for areas without any interception (e.g. open water, where Cmax is zero)
    CmaxZero = Cmax <= 0.0
    ThroughFall = pcr.ifthenelse(CmaxZero, Precipitation, ThroughFall)
    Interception = pcr.ifthenelse(CmaxZero, pcr.scalar(0.0), Interception)
    StemFlow = pcr.ifthenelse(CmaxZero, pcr.scalar(0.0), StemFlow)

    # Now correct for the maximum potential evaporation
    OverEstimate = pcr.ifthenelse(
        Interception > maxevap, Interception - maxevap, pcr.scalar(0.0)
    )
    Interception = pcr.min(Interception, maxevap)
    # Add the surplus to the throughfall
    ThroughFall = ThroughFall + OverEstimate

    return ThroughFall, Interception, StemFlow, CanopyStorage
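
# A minimal usage sketch for the daily Gash routine above, with uniform
# illustrative values (not part of the original example):
import pcraster as pcr

pcr.setclone(1, 1, 1.0, 0.0, 0.0)                      # hypothetical 1x1 clone
ThroughFall, Interception, StemFlow, CanopyStorage = rainfall_interception_gash(
    Cmax=pcr.spatial(pcr.scalar(1.5)),                 # canopy storage capacity (mm)
    EoverR=pcr.spatial(pcr.scalar(0.12)),              # wet-canopy evaporation / rainfall ratio
    CanopyGapFraction=pcr.spatial(pcr.scalar(0.3)),
    Precipitation=pcr.spatial(pcr.scalar(20.0)),       # mm for this day
    CanopyStorage=pcr.spatial(pcr.scalar(0.0)),
)
print(pcr.cellvalue(Interception, 1)[0])               # interception loss (mm)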
Example #11
    def getLakeOutflow(
        self, avgChannelDischarge, length_of_time_step=vos.secondsPerDay()
    ):

        # waterHeight (m): temporary variable, a function of storage:
        minWaterHeight = (
            0.001
        )  # (m) Rens used 0.001 m as the limit # this is to make sure there is always lake outflow,
        # but it will be still limited by available self.waterBodyStorage
        waterHeight = pcr.cover(
            pcr.max(
                minWaterHeight,
                (self.waterBodyStorage - pcr.cover(self.waterBodyCap, 0.0))
                / self.waterBodyArea,
            ),
            0.0,
        )

        # weirWidth (m) :
        # - estimated from avgOutflow (m3/s) using the bankfull discharge formula
        #
        avgOutflow = self.avgOutflow
        avgOutflow = pcr.ifthenelse(
            avgOutflow > 0.0,
            avgOutflow,
            pcr.max(avgChannelDischarge, self.avgInflow, 0.001),
        )  # This is needed when new lakes/reservoirs introduced (its avgOutflow is still zero).
        avgOutflow = pcr.areamaximum(avgOutflow, self.waterBodyIds)
        #
        bankfullWidth = pcr.cover(pcr.scalar(4.8) * ((avgOutflow) ** (0.5)), 0.0)
        weirWidthUsed = bankfullWidth
        weirWidthUsed = pcr.max(
            weirWidthUsed, self.minWeirWidth
        )  # TODO: minWeirWidth based on the GRanD database
        weirWidthUsed = pcr.cover(
            pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.0, weirWidthUsed), 0.0
        )

        # avgInflow <= lakeOutflow (weirFormula) <= waterBodyStorage
        lakeOutflowInM3PerSec = pcr.max(
            self.weirFormula(waterHeight, weirWidthUsed), self.avgInflow
        )  # unit: m3/s

        # estimate the volume of water released by lakes
        lakeOutflow = lakeOutflowInM3PerSec * length_of_time_step  # unit: m3
        lakeOutflow = pcr.min(self.waterBodyStorage, lakeOutflow)
        #
        lakeOutflow = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.0, lakeOutflow)
        lakeOutflow = pcr.ifthen(pcr.scalar(self.waterBodyTyp) == 1, lakeOutflow)

        # TODO: Consider endorheic lake/basin. No outflow for endorheic lake/basin!

        return lakeOutflow
Example #12
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None, up_area=None, neg_HAND=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X
    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
                        identified as river should have a value > 0, other
                        pixels a value of zero.
        basin=None -- set a boolean pcraster map where areas with True are estimated using the nearest drain in ldd distance
                        and areas with False by means of the nearest friction distance. Friction distance estimated using the
                        upstream area as weight (i.e. drains with a bigger upstream area have a lower friction)
                        the spreadzone operator is used in this case.
        up_area=None -- provide the upstream area (if not assigned a guesstimate is prepared, assuming the LDD covers a
                        full catchment area)
        neg_HAND=None -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
                        stream (for example when there are natural embankments)
    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths
            according to D8 directions
    """
    if rivers is None:
        # prepare stream from a strahler threshold
        stream = pcr.ifthenelse(pcr.accuflux(ldd, 1) >= accuThreshold,
                                pcr.boolean(1), pcr.boolean(0))
    else:
        # convert stream network to boolean
        stream = pcr.boolean(pcr.cover(rivers, 0))
    # determine height in river (in DEM*100 unit as ordinal)
    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem*100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # use basin to allocate areas outside basin to the nearest stream. Nearest is weighted by upstream area
        if up_area is None:
            up_area = pcr.accuflux(ldd, 1)
        up_area = pcr.ifthen(stream, up_area)  # mask areas outside streams
        friction = 1./pcr.scalar(pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0))
        # if basin, use nearest river within subcatchment, if outside basin, use weighted-nearest river
        up_elevation = pcr.ifthenelse(basin, pcr.scalar(pcr.subcatchment(ldd, height_river)), pcr.scalar(pcr.spreadzone(height_river, 0, friction)))
        # replace areas outside of basin by a spread zone calculation.
    # make negative HANDS also possible
    if neg_HAND == 1:
        hand = (pcr.scalar(pcr.ordinal(dem*100))-up_elevation)/100  # convert back to float in DEM units
    else:
        hand = pcr.max(pcr.scalar(pcr.ordinal(dem*100))-up_elevation, 0)/100  # convert back to float in DEM units
    dist = pcr.ldddist(ldd, stream, 1)  # compute horizontal distance estimate
    return hand, dist
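
# A minimal usage sketch for derive_HAND (not part of the original example);
# the file names and the threshold of 1000 upstream cells are illustrative:
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
dem = pcr.readmap("dem.map")               # hypothetical scalar elevation map
ldd = pcr.ldd(pcr.readmap("ldd.map"))      # hypothetical drainage network
hand, dist = derive_HAND(dem, ldd, accuThreshold=1000)
pcr.report(hand, "hand.map")
pcr.report(dist, "dist_to_stream.map")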
Example #13
File: test.py Project: gaoshuai/pcraster
  def testIfThenElse(self):
    pcraster.setclone("and_Expr1.map")
    exceptionThrown = False
    try:
      result = pcraster.ifthenelse(1.0 == 2.0, 3.0, 4.0)
    except RuntimeError as exception:
      message = str(exception)
      self.assertTrue(message.find("conversion function to pick a data type") != -1)
      exceptionThrown = True
    self.assertTrue(exceptionThrown)

    result = pcraster.ifthenelse(pcraster.boolean(1.0 == 2.0), \
         pcraster.scalar(3.0), pcraster.scalar(4.0))
    self.assertEqual(pcraster.cellvalue(result, 1)[0], 4.0)
Example #14
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., area_multiplier=1., iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch/surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # bizarre high inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
Example #15
	def initial(self):
		#####################
		# * initial section #
		#####################
		#-constants
		# betaQ [-]: constant of kinematic wave momentum equation
		self.betaQ= 0.6
		#-channel LDD
		self.channelLDD= pcr.ifthenelse(self.waterBodies.distribution != 0,\
			pcr.ldd(5),self.LDD)
		#-channel area and storage
		self.channelArea= self.channelWidth*self.channelLength
		self.channelStorageCapacity= pcr.ifthenelse(self.waterBodies.distribution == 0,\
			self.channelArea*self.channelDepth,pcr.scalar(0.))
		#-basin outlets
		self.basinOutlet= pcr.pit(self.LDD) != 0
		#-read initial conditions
		self.Q= clippedRead.get(self.QIniMap)
		self.actualStorage= clippedRead.get(self.actualStorageIniMap)
		self.actualStorage= pcr.ifthenelse(self.waterBodies.distribution != 0,\
			pcr.ifthenelse(self.waterBodies.location != 0,\
				pcr.areatotal(self.actualStorage,self.waterBodies.distribution),0),\
					self.actualStorage)   
		self.waterBodies.actualStorage= self.waterBodies.retrieveMapValue(self.actualStorage)
		#-update targets of average and bankful discharge
		self.waterBodies.averageQ= self.waterBodies.retrieveMapValue(self.averageQ)
		self.waterBodies.bankfulQ= self.waterBodies.retrieveMapValue(self.bankfulQ)
		#-return the parameters for the kinematic wave,
		# including alpha, wetted area, flood fraction, flood volume and depth
		# and the corresponding land area
		floodedFraction,floodedDepth,\
			self.wettedArea,self.alphaQ= self.kinAlphaComposite(self.actualStorage,self.floodplainMask)
		self.wettedArea= self.waterBodies.returnMapValue(self.wettedArea,\
			self.waterBodies.channelWidth+2.*self.waterBodies.updateWaterHeight())
		self.waterFraction= pcr.ifthenelse(self.waterBodies.distribution == 0,\
			pcr.max(self.waterFractionMask,floodedFraction),self.waterFractionMask)
		self.landFraction= pcr.max(0.,1.-self.waterFraction)
		#-update on velocity and check on Q - NOTE: does not work in case of reservoirs!
		self.flowVelocity= pcr.ifthenelse(self.wettedArea > 0,self.Q/self.wettedArea,0.)
		pcr.report(self.flowVelocity,pcrm.generateNameT(flowVelocityFileName,0).replace('.000','.ini'))
		#-setting initial values for specific runoff and surface water extraction
		self.landSurfaceQ= pcr.scalar(0.)
		self.potWaterSurfaceQ= pcr.scalar(0.)
		self.surfaceWaterExtraction= pcr.scalar(0.)
		#-budget check: setting initial values for cumulative discharge and 
		# net cumulative input, including initial storage [m3]   
		self.totalDischarge= pcr.scalar(0.)
		self.cumulativeDeltaStorage= pcr.catchmenttotal(self.actualStorage,self.LDD)
Example #16
def subcatch_stream(ldd, stream, threshold):
    """
    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        stream -- pcraster object direction, streamorder
        threshold -- integer, strahler threshold, subcatchments ge threshold are
                 derived
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    # stream = pcr.streamorder(ldd)
    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(pcr.downstream(ldd, stream_ge) != stream_ge, pcr.boolean(1),
                                         pcr.ifthenelse(pcr.nominal(ldd) == 5, pcr.boolean(1), pcr.ifthenelse(pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge), pcr.boolean(1),
                                                                                           pcr.boolean(0))))

    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))
    return stream_ge, subcatch
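
# A minimal usage sketch for subcatch_stream (not part of the original example),
# delineating subcatchments for streams of Strahler order >= 6:
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
ldd = pcr.ldd(pcr.readmap("ldd.map"))      # hypothetical drainage network
stream_order = pcr.streamorder(ldd)
stream_ge, subcatch = subcatch_stream(ldd, stream_order, threshold=6)
pcr.report(subcatch, "subcatchments.map")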
Example #17
def find_outlet(ldd):
    """
    Tries to find the outlet of the largest catchment in the Ldd

    Input:
        - Ldd

    Output:
        - outlet map (single point in the map)
    """
    largest = pcr.mapmaximum(pcr.catchmenttotal(pcr.spatial(pcr.scalar(1.0)), ldd))
    outlet = pcr.ifthen(
        pcr.catchmenttotal(1.0, ldd) == largest, pcr.spatial(pcr.scalar(1.0))
    )

    return outlet
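
# A minimal usage sketch for find_outlet (not part of the original example):
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
ldd = pcr.ldd(pcr.readmap("ldd.map"))      # hypothetical drainage network
outlet = find_outlet(ldd)                  # single cell marking the largest catchment's outlet
pcr.report(outlet, "outlet.map")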
Example #18
def stackAverage(path,Root,StackStart,StackEnd):
	#calculates the average from a stack of maps, missing maps are skipped
	#Initialization
	MV= pcr.scalar(-999)
	NCount= pcr.scalar(0)
	SumStack= pcr.scalar(0)
	for StackNumber in range(StackStart,StackEnd):
		try:
			InMap= pcr.readmap(generateNameT(os.path.join(path,Root),StackNumber))
		except:
			InMap= MV;
		InMap= pcr.cover(InMap,MV)
		SumStack= SumStack+InMap
		NCount= NCount+pcr.ifthenelse(InMap != MV,pcr.scalar(1),pcr.scalar(0))
	AvgStack= pcr.ifthenelse(NCount>0,SumStack/NCount,MV)
	return AvgStack
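
# A minimal usage sketch for stackAverage (not part of the original example);
# it assumes a clone map is set, that the generateNameT helper used above is
# available, and that time steps 1..10 of a map stack named "q" were written
# to the folder "output":
import pcraster as pcr

avg = stackAverage(path="output", Root="q", StackStart=1, StackEnd=11)
pcr.report(avg, "q_average.map")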
Example #19
    def set_latlon_based_on_cloneMapFileName(self, cloneMapFileName):

        # cloneMap
        cloneMap = pcr.boolean(pcr.readmap(cloneMapFileName))
        cloneMap = pcr.boolean(pcr.scalar(1.0))

        # properties of the clone maps
        # - numbers of rows and colums
        rows = pcr.clone().nrRows()
        cols = pcr.clone().nrCols()
        # - cell size in arc minutes rounded to one value behind the decimal
        cellSizeInArcMin = round(pcr.clone().cellSize() * 60.0, 1)
        # - cell sizes in arc degrees for the longitude and latitude direction
        deltaLon = cellSizeInArcMin / 60.0
        deltaLat = deltaLon
        # - coordinates of the upper left corner - rounded to two values behind the decimal in order to avoid rounding errors during (future) resampling process
        x_min = round(pcr.clone().west(), 2)
        y_max = round(pcr.clone().north(), 2)
        # - coordinates of the lower right corner - rounded to two values behind the decimal in order to avoid rounding errors during (future) resampling process
        x_max = round(x_min + cols * deltaLon, 2)
        y_min = round(y_max - rows * deltaLat, 2)

        # cell centres coordinates
        longitudes = np.arange(x_min + deltaLon / 2.0, x_max, deltaLon)
        latitudes = np.arange(y_max - deltaLat / 2.0, y_min, -deltaLat)

        # ~ # cell centres coordinates
        # ~ longitudes = np.linspace(x_min + deltaLon/2., x_max - deltaLon/2., cols)
        # ~ latitudes  = np.linspace(y_max - deltaLat/2., y_min + deltaLat/2., rows)

        # ~ # cell centres coordinates (latitudes and longitudes, directly from the clone maps)
        # ~ longitudes = np.unique(pcr.pcr2numpy(pcr.xcoordinate(cloneMap), vos.MV))
        # ~ latitudes  = np.unique(pcr.pcr2numpy(pcr.ycoordinate(cloneMap), vos.MV))[::-1]

        return longitudes, latitudes, cellSizeInArcMin
Example #20
def detdrainlength(ldd, xl, yl):
    """
    Determines the drainage length (DCL) for a non-square grid

    Input:
        - ldd - drainage network
        - xl - length of cells in x direction
        - yl - length of cells in y direction

    Output:
        - DCL
    """
    # take into account non-square cells
    # if ldd is 8 or 2 use Ylength
    # if ldd is 4 or 6 use Xlength
    draindir = pcr.scalar(ldd)
    slantlength = pcr.sqrt(xl ** 2 + yl ** 2)
    drainlength = pcr.ifthenelse(
        draindir == 2,
        yl,
        pcr.ifthenelse(
            draindir == 8,
            yl,
            pcr.ifthenelse(
                draindir == 4, xl, pcr.ifthenelse(draindir == 6, xl, slantlength)
            ),
        ),
    )

    return drainlength
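
# A minimal usage sketch for detdrainlength (not part of the original example),
# using the clone's cell length for both directions (square cells):
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
ldd = pcr.ldd(pcr.readmap("ldd.map"))      # hypothetical drainage network
xl = pcr.celllength()
yl = pcr.celllength()
dcl = detdrainlength(ldd, xl, yl)
pcr.report(dcl, "dcl.map")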
Example #21
def readPCRmap(v):
	# v : fileName or floating values
    if not re.match(r"[0-9.-]*$", v):
        PCRmap = pcr.readmap(v)
    else:
        PCRmap = pcr.scalar(float(v))
    return PCRmap    
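
# A minimal usage sketch for readPCRmap (not part of the original example): the
# argument is either a map file name or a number passed as a string.
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
demMap = readPCRmap("dem.map")             # hypothetical map file -> read from disk
constant = readPCRmap("0.05")              # numeric string -> pcr.scalar(0.05)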
Example #22
File: test.py Project: gaoshuai/pcraster
  def testNonSpatialConversions(self):
    nonSpatialValue = pcraster.mapmaximum(pcraster.readmap("map2asc_PCRmap.map"))
    # Ordinal.
    nonSpatial = pcraster.ordinal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124.0)

    # Nominal.
    nonSpatial = pcraster.nominal(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 124)
    self.assertEqual(float(nonSpatial), 124)

    # Boolean.
    nonSpatial = pcraster.boolean(nonSpatialValue)
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 1)
    self.assertEqual(float(nonSpatial), 1.0)

    # Scalar.
    nonSpatial = pcraster.scalar(pcraster.mapmaximum("abs_Expr.map"))
    self.assertEqual(bool(nonSpatial), True)
    self.assertEqual(int(nonSpatial), 14)
    self.assertEqual(float(nonSpatial), 14.0)
Example #23
def detdrainwidth(ldd, xl, yl):
    """
    Determines the width of drainage over the DEM for a non-square grid

    Input:
        - ldd - drainage network
        - xl - length of cells in x direction
        - yl - length of cells in y direction

    Output:
        - drainage width
    """
    # take into account non-square cells
    # if ldd is 8 or 2 use Xlength
    # if ldd is 4 or 6 use Ylength
    draindir = pcr.scalar(ldd)
    slantwidth = (xl + yl) * 0.5
    drainwidth = pcr.ifthenelse(
        draindir == 2,
        xl,
        pcr.ifthenelse(
            draindir == 8,
            xl,
            pcr.ifthenelse(
                draindir == 4, yl, pcr.ifthenelse(draindir == 6, yl, slantwidth)
            ),
        ),
    )
    return drainwidth
Example #24
  def test_001(self):
      """ nonspatial and pcr2numpy """
      nrRows, nrCols, cellSize = 5, 8, 1.0
      west, north = 0.0, 0.0
      pcraster.setclone(nrRows, nrCols, cellSize, west, north)

      value = 1.23456
      nonspatial = pcraster.scalar(value)
      array = pcraster.pcr2numpy(nonspatial, numpy.nan)

      for row in range(0, nrRows):
          for col in range(0, nrCols):
              self.assertAlmostEqual(array[row][col], value)

      value = 3
      nonspatial = pcraster.nominal(value)
      array = pcraster.pcr2numpy(nonspatial, numpy.nan)

      for row in range(0, nrRows):
          for col in range(0, nrCols):
              self.assertAlmostEqual(array[row][col], value)

      value = True
      nonspatial = pcraster.boolean(value)
      array = pcraster.pcr2numpy(nonspatial, numpy.nan)

      for row in range(0, nrRows):
          for col in range(0, nrCols):
              self.assertAlmostEqual(array[row][col], value)
Example #25
File: pcrut.py Project: openstreams/wflow
def readmapSave(pathtomap, default):
    """
    Adaptation of readmap that returns a default map if the map cannot be found
    """
    if os.path.isfile(pathtomap):
        return pcr.readmap(pathtomap)
    else:
        return pcr.scalar(default)
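
# A minimal usage sketch for readmapSave (not part of the original example),
# falling back to a uniform default when an optional input map is missing:
import pcraster as pcr

pcr.setclone("clone.map")                  # hypothetical clone map
lai = readmapSave("lai.map", 0.0)          # hypothetical optional input map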
Example #26
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X
    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
                        identified as river should have a value > 0, other
                        pixels a value of zero.
        basin=None -- set a boolean pcraster map where areas with True are estimated using the nearest drain in ldd distance
                        and areas with False by means of the nearest friction distance. Friction distance estimated using the
                        upstream area as weight (i.e. drains with a bigger upstream area have a lower friction)
                        the spreadzone operator is used in this case.
    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths
            according to D8 directions
    """
    if rivers is None:
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
    else:
        stream = pcr.boolean(pcr.cover(rivers, 0))

    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        drainage_surf = pcr.ifthen(rivers, pcr.accuflux(ldd, 1))
        weight = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(drainage_surf), 0), 0, 0)
        )
        up_elevation = pcr.ifthenelse(
            basin,
            pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, weight)),
        )
        # replace areas outside of basin by a spread zone calculation.
    hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation, 0) / 100
    dist = pcr.ldddist(ldd, stream, 1)

    return hand, dist
Example #27
def getMinMaxMean(mapFile,ignoreEmptyMap=False):
    mn = pcr.cellvalue(pcr.mapminimum(mapFile),1)[0]
    mx = pcr.cellvalue(pcr.mapmaximum(mapFile),1)[0]
    nrValues = pcr.cellvalue(pcr.maptotal(pcr.scalar(pcr.defined(mapFile))), 1 ) [0] #/ getNumNonMissingValues(mapFile)
    if nrValues == 0.0 and ignoreEmptyMap: 
        return 0.0,0.0,0.0
    else:
        return mn,mx,(getMapTotal(mapFile) / nrValues)
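
# A minimal usage sketch for getMinMaxMean (not part of the original example);
# it assumes a clone map is set, that the module-level getMapTotal() helper used
# above is available, and that "dem.map" is a hypothetical scalar map:
import pcraster as pcr

pcr.setclone("clone.map")
mn, mx, mean = getMinMaxMean(pcr.readmap("dem.map"))
print(mn, mx, mean)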
Example #28
  def _parseLine(self, line, lineNumber, nrColumns, externalNames, keyDict):

    line = re.sub("\n","",line)
    line = re.sub("\t"," ",line)
    result = None

    # read until first comment
    content = ""
    content,sep,comment = line.partition("#")
    if len(content) > 1:
      collectionVariableName, sep, tail = content.partition(" ")
      if collectionVariableName == self._varName:
        tail = tail.strip()
        key, sep, variableValue = tail.rpartition(" ")

        if len(key.split()) != nrColumns:
          tmp = re.sub("\(|\)|,","",str(key))
          msg = "Error reading %s line %d, order of columns given (%s columns) does not match expected order of %s columns" %(self._fileName, lineNumber, len(key.split()) + 2, int(nrColumns) + 2)
          raise ValueError(msg)

        variableValue = re.sub('\"', "", variableValue)

        tmp = None
        try:
          tmp = int(variableValue)
          if self._dataType == pcraster.Boolean:
            tmp = pcraster.boolean(tmp)
          elif self._dataType == pcraster.Nominal:
            tmp = pcraster.nominal(tmp)
          elif self._dataType == pcraster.Ordinal:
            tmp = pcraster.ordinal(tmp)
          elif self._dataType == pcraster.Ldd:
            tmp = pcraster.ldd(tmp)
          else:
            msg = "Conversion to %s failed" % (self._dataType)
            raise Exception(msg)
        except ValueError as e:
          try:
            tmp = float(variableValue)
            if self._dataType == pcraster.Scalar:
              tmp = pcraster.scalar(tmp)
            elif self._dataType == pcraster.Directional:
              tmp = pcraster.directional(tmp)
            else:
              msg = "Conversion to %s failed" % (self._dataType)
              raise Exception(msg)

          except ValueError as e:
            variableValue = re.sub("\\\\","/",variableValue)
            variableValue = variableValue.strip()
            path = os.path.normpath(variableValue)
            try:
              tmp = pcraster.readmap(path)
            except RuntimeError as e:
              msg = "Error reading %s line %d, %s" %(self._fileName, lineNumber, e)
              raise ValueError(msg)
Example #29
    def dynamic(self):
        
        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # open input data 
        referencePotET = vos.netcdf2PCRobjClone(\
                             self.input_files['referencePotET']['file_name'], \
                             self.input_files['referencePotET']['variable_name'], \
                             str(self.modelTime.fulldate), \
                             useDoy = None, \
                             cloneMapFileName = self.cloneMapFileName)
        cropKC = {}
        for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
            cropKC[lc_type] = vos.netcdf2PCRobjClone(\
                                  self.input_files['cropKC'][lc_type], \
                                  self.input_files['cropKC']['variable_name'], \
                                  str(self.modelTime.fulldate), 
                                  useDoy = None,
                                  cloneMapFileName = self.cloneMapFileName)
               
        # calculate
        potential_evaporation = {}
        for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
            potential_evaporation[lc_type] = referencePotET * cropKC[lc_type]
        
        # reporting for daily values
        timeStamp = datetime.datetime(self.modelTime.year,\
                                      self.modelTime.month,\
                                      self.modelTime.day,0)
        for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
            file_name = self.output['folder'] + "/daily_potential_evaporation_" + self.variable_unit + "_" + lc_type + ".nc"
            self.netcdf_report.data2NetCDF(file_name,\
                                           self.variable_name,\
                                           pcr.pcr2numpy(potential_evaporation[lc_type], vos.MV),\
                                           timeStamp)

        # reporting for monthly values
        # - reset at the beginning of the month:
        if self.modelTime.isFirstDayOfMonth:
            for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
                self.monthly_accumulator[lc_type] = pcr.scalar(0.0)
        # - accumulate until the last day of the month:
        for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
            self.monthly_accumulator[lc_type] = self.monthly_accumulator[lc_type] + potential_evaporation[lc_type]
        if self.modelTime.endMonth:
            for lc_type in ["forest", "grassland", "irrPaddy", "irrNonPaddy"]:
                file_name = self.output['folder'] + "/monthly_potential_evaporation_" + self.variable_unit + "_" + lc_type + ".nc"
                
                print(file_name)
                
                self.netcdf_report.data2NetCDF(file_name,\
                                               self.variable_name,\
                                               pcr.pcr2numpy(self.monthly_accumulator[lc_type]/calendar.monthrange(self.modelTime.year, self.modelTime.month)[1], vos.MV),\
                                               timeStamp)
Example #30
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False,inputEPSG="EPSG:4326",outputEPSG="EPSG:4326",method="near"):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: '+str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath != None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone == True:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            # if no re-projection needed:
            if inputEPSG == outputEPSG or outputEPSG == None: 
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            else:
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap,inputEPSG,outputEPSG,method)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
            if isLddMap == True: PCRmap = pcr.ldd(PCRmap)
            if isNomMap == True: PCRmap = pcr.ifthen(pcr.scalar(PCRmap) >  0., PCRmap)
            if isNomMap == True: PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
            os.makedirs(tmpDir)
    else:
        PCRmap = pcr.scalar(float(v))
    if cover != None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
Example #31
    def readSoilMapOfFAO(self, iniItems, optionDict=None):

        # a dictionary/section of options that will be used
        if optionDict == None:
            optionDict = iniItems._sections[
                "landSurfaceOptions"
            ]  # iniItems.landSurfaceOptions

        # soil variable names given either in the ini or netCDF file:
        soilParameters = [
            "airEntryValue1",
            "airEntryValue2",
            "poreSizeBeta1",
            "poreSizeBeta2",
            "resVolWC1",
            "resVolWC2",
            "satVolWC1",
            "satVolWC2",
            "KSat1",
            "KSat2",
            "percolationImp",
        ]
        if optionDict["soilPropertiesNC"] == str(None):
            for var in soilParameters:
                input = optionDict[str(var)]
                vars(self)[var] = vos.readPCRmapClone(
                    input, self.cloneMap, self.tmpDir, self.inputDir
                )
                vars(self)[var] = pcr.scalar(vars(self)[var])

                if var == "percolationImp":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 0.75)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        else:
            soilPropertiesNC = vos.getFullPath(
                optionDict["soilPropertiesNC"], self.inputDir
            )
            for var in soilParameters:
                vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(
                    soilPropertiesNC, var, cloneMapFileName=self.cloneMap
                )

                if var == "percolationImp":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 0.75)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00)
                )

                vars(self)[var] = pcr.cover(vars(self)[var], 0.01)

        # make sure that resVolWC1 <= satVolWC1
        self.resVolWC1 = pcr.min(self.resVolWC1, self.satVolWC1)
        self.resVolWC2 = pcr.min(self.resVolWC2, self.satVolWC2)

        if self.numberOfLayers == 2:
            self.satVolMoistContUpp = (
                self.satVolWC1
            )  # saturated volumetric moisture content (m3.m-3)
            self.satVolMoistContLow = self.satVolWC2
            self.resVolMoistContUpp = (
                self.resVolWC1
            )  # residual volumetric moisture content (m3.m-3)
            self.resVolMoistContLow = self.resVolWC2
            self.airEntryValueUpp = (
                self.airEntryValue1
            )  # air entry value (m) according to soil water retention curve of Clapp & Hornberger (1978)
            self.airEntryValueLow = self.airEntryValue2
            self.poreSizeBetaUpp = (
                self.poreSizeBeta1
            )  # pore size distribution parameter according to Clapp & Hornberger (1978)
            self.poreSizeBetaLow = self.poreSizeBeta2
            self.kSatUpp = self.KSat1  # saturated hydraulic conductivity (m.day-1)
            self.kSatLow = self.KSat2

        if self.numberOfLayers == 3:
            self.satVolMoistContUpp000005 = self.satVolWC1
            self.satVolMoistContUpp005030 = self.satVolWC1
            self.satVolMoistContLow030150 = self.satVolWC2
            self.resVolMoistContUpp000005 = self.resVolWC1
            self.resVolMoistContUpp005030 = self.resVolWC1
            self.resVolMoistContLow030150 = self.resVolWC2
            self.airEntryValueUpp000005 = self.airEntryValue1
            self.airEntryValueUpp005030 = self.airEntryValue1
            self.airEntryValueLow030150 = self.airEntryValue2
            self.poreSizeBetaUpp000005 = self.poreSizeBeta1
            self.poreSizeBetaUpp005030 = self.poreSizeBeta1
            self.poreSizeBetaLow030150 = self.poreSizeBeta2
            self.kSatUpp000005 = self.KSat1
            self.kSatUpp005030 = self.KSat1
            self.kSatLow030150 = self.KSat2

        self.percolationImp = pcr.cover(
            self.percolationImp, 0.0
        )  # fractional area where percolation to groundwater store is impeded (dimensionless)

        # soil thickness and storage variable names
        # as given either in the ini or netCDF file:
        soilStorages = [
            "firstStorDepth",
            "secondStorDepth",
            "soilWaterStorageCap1",
            "soilWaterStorageCap2",
        ]
        if optionDict["soilPropertiesNC"] == str(None):
            for var in soilStorages:
                input = optionDict[str(var)]
                temp = str(var) + "Inp"
                vars(self)[temp] = vos.readPCRmapClone(
                    input, self.cloneMap, self.tmpDir, self.inputDir
                )

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 0.75)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)

        else:
            soilPropertiesNC = vos.getFullPath(
                optionDict["soilPropertiesNC"], self.inputDir
            )
            for var in soilStorages:
                temp = str(var) + "Inp"
                vars(self)[temp] = vos.netcdf2PCRobjCloneWithoutTime(
                    soilPropertiesNC, var, cloneMapFileName=self.cloneMap
                )
                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 0.75)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp], pcr.windowaverage(vars(self)[temp], 1.05)
                )
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)

        # layer thickness
        if self.numberOfLayers == 2:
            self.thickUpp = (0.30 / 0.30) * self.firstStorDepthInp
            self.thickLow = (1.20 / 1.20) * self.secondStorDepthInp
        if self.numberOfLayers == 3:
            self.thickUpp000005 = (0.05 / 0.30) * self.firstStorDepthInp
            self.thickUpp005030 = (0.25 / 0.30) * self.firstStorDepthInp
            self.thickLow030150 = (1.20 / 1.20) * self.secondStorDepthInp

        # soil storage
        if self.numberOfLayers == 2:
            # ~ self.storCapUpp = (0.30/0.30)*self.soilWaterStorageCap1Inp
            # ~ self.storCapLow = (1.20/1.20)*self.soilWaterStorageCap2Inp                     # 22 Feb 2014: We can calculate this based on thickness and porosity.
            self.storCapUpp = self.thickUpp * (
                self.satVolMoistContUpp - self.resVolMoistContUpp
            )
            self.storCapLow = self.thickLow * (
                self.satVolMoistContLow - self.resVolMoistContLow
            )
            self.rootZoneWaterStorageCap = (
                self.storCapUpp + self.storCapLow
            )  # This is called as WMAX in the original pcrcalc script.
        if self.numberOfLayers == 3:
            self.storCapUpp000005 = self.thickUpp000005 * (
                self.satVolMoistContUpp000005 - self.resVolMoistContUpp000005
            )
            self.storCapUpp005030 = self.thickUpp005030 * (
                self.satVolMoistContUpp005030 - self.resVolMoistContUpp005030
            )
            self.storCapLow030150 = self.thickLow030150 * (
                self.satVolMoistContLow030150 - self.resVolMoistContLow030150
            )
            self.rootZoneWaterStorageCap = (
                self.storCapUpp000005 + self.storCapUpp005030 + self.storCapLow030150
            )
Example #32
def waterBalance(  fluxesIn,  fluxesOut,  deltaStorages,  processName,   PrintOnlyErrors,  dateStr,threshold=1e-5):
    """ Returns the water balance for a list of input, output, and storage map files and """

    inMap = pcr.spatial(pcr.scalar(0.0))
    dsMap = pcr.spatial(pcr.scalar(0.0))
    outMap = pcr.spatial(pcr.scalar(0.0))
    inflow = 0
    outflow = 0
    deltaS = 0
    for fluxIn in fluxesIn:
        inflow += getMapTotal(fluxIn)
        inMap += fluxIn
    for fluxOut in fluxesOut:
        outflow += getMapTotal(fluxOut)
        outMap += fluxOut
    for deltaStorage in deltaStorages:
        deltaS += getMapTotal(deltaStorage)
        dsMap += deltaStorage

    #if PrintOnlyErrors:
    a,b,c = getMinMaxMean(inMap + dsMap- outMap)
    # if abs(a) > 1e-5 or abs(b) > 1e-5:
    # if abs(a) > 1e-4 or abs(b) > 1e-4:
    if abs(a) > threshold or abs(b) > threshold:
        print "WBError %s Min %f Max %f Mean %f" %(processName,a,b,c)
    #    if abs(inflow + deltaS - outflow) > 1e-5:
    #        print "Water balance Error for %s on %s: in = %f\tout=%f\tdeltaS=%f\tBalance=%f" \
    #        %(processName,dateStr,inflow,outflow,deltaS,inflow + deltaS - outflow)
    #else:
    #   print "Water balance for %s: on %s in = %f\tout=%f\tdeltaS=%f\tBalance=%f" \
    #        %(processName,dateStr,inflow,outflow,deltaS,inflow + deltaS - outflow)

    wb = inMap + dsMap - outMap
    maxWBError = pcr.cellvalue(pcr.mapmaximum(pcr.abs(wb)), 1, 1)[0]

    #if maxWBError > 0.001 / 1000:
        #row = 0
        #col = 0
        #cellID = 1
        #troubleCell = 0

        #print "Water balance for %s on %s: %f mm !!! " %(processName,dateStr,maxWBError * 1000)
        #pcr.report(wb,"%s-WaterBalanceError-%s" %(processName,dateStr))

        #npWBMError = pcr2numpy(wb, -9999)
        #(nr, nc) = np.shape(npWBMError)
        #for r in range(0, nr):
            #for c in range(0, nc):

                ## print r,c

                #if npWBMError[r, c] != -9999.0:
                    #val = npWBMError[r, c]
                    #if math.fabs(val) > 0.0001 / 1000:

                        ## print npWBMError[r,c]

                        #row = r
                        #col = c
                        #troubleCell = cellID
                #cellID += 1
        #print 'Water balance for %s on %s: %f mm row %i col %i cellID %i!!! ' % (
            #processName,
            #dateStr,
            #maxWBError * 1000,
            #row,
            #col,
            #troubleCell,
            #)

    return inMap + dsMap - outMap
Example #33
def subcatch_stream(ldd,
                    threshold,
                    stream=None,
                    min_strahler=-999,
                    max_strahler=999,
                    assign_edge=False,
                    assign_existing=False,
                    up_area=None,
                    basin=None):
    """
    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        threshold -- integer, strahler threshold, subcatchments ge threshold
            are derived
        stream=None -- pcraster object ordinal, stream order map (made with pcr.streamorder), if provided, stream order
            map is not generated on the fly but used from this map. Useful when a subdomain within a catchment is
            provided, which would cause edge effects in the stream order map
        min_strahler=-999 -- integer, minimum strahler threshold of river catchments
            to return
        max_strahler=999 -- integer, maximum strahler threshold of river catchments
            to return
        assign_edge=False -- if set to True, unassigned connected areas at
            the edges of the domain are assigned a unique id as well. If set
            to False, edges are not assigned
        assign_existing=False == if set to True, unassigned edges are assigned
            to existing basins with an upstream weighting. If set to False,
            edges are assigned to unique IDs, or not assigned
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    if stream is None:
        stream = pcr.streamorder(ldd)

    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(
        pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge, pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5, pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) >
                pcr.scalar(stream_ge), pcr.boolean(1), pcr.boolean(0))))
    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))
    # mask out areas outside basin
    if basin is not None:
        subcatch = pcr.ifthen(basin, subcatch)

    if assign_edge:
        # fill unclassified areas (in pcraster equal to zero) with a unique id, above the maximum id assigned so far
        unique_edge = pcr.clump(pcr.ifthen(subcatch == 0, pcr.ordinal(0)))
        subcatch = pcr.ifthenelse(
            subcatch == 0,
            pcr.nominal(
                pcr.mapmaximum(pcr.scalar(subcatch)) +
                pcr.scalar(unique_edge)), pcr.nominal(subcatch))
    elif assign_existing:
        # unaccounted areas are added to largest nearest draining basin
        if up_area is None:
            up_area = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)),
                                 pcr.accuflux(ldd, 1))
        riverid = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), subcatch)

        friction = 1. / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0,
                           0))  # *(pcr.scalar(ldd)*0+1)
        delta = pcr.ifthen(
            pcr.scalar(ldd) >= 0,
            pcr.ifthen(
                pcr.cover(subcatch, 0) == 0,
                pcr.spreadzone(pcr.cover(riverid, 0), 0, friction)))
        subcatch = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)),
                                  subcatch, delta)

    # finally, only keep basins with minimum and maximum river order flowing through them
    strahler_subcatch = pcr.areamaximum(stream, subcatch)
    subcatch = pcr.ifthen(
        pcr.ordinal(strahler_subcatch) >= min_strahler,
        pcr.ifthen(pcr.ordinal(strahler_subcatch) <= max_strahler, subcatch))

    return stream_ge, pcr.ordinal(subcatch)
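# A minimal usage sketch (the file names are hypothetical): derive the subcatchments
# draining streams of Strahler order >= 4 from an existing drain direction map and
# write both results to disk.
import pcraster as pcr

ldd = pcr.readmap("wflow_ldd.map")                         # assumed LDD input
stream_ge4, subcatch_ge4 = subcatch_stream(ldd, 4, assign_edge=True)
pcr.report(stream_ge4, "stream_ge4.map")
pcr.report(subcatch_ge4, "subcatch_ge4.map")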
Example #34
0
    def report(self):

        self.post_processing()

        # time stamp for reporting
        timeStamp = datetime.datetime(self._modelTime.year,\
                                      self._modelTime.month,\
                                      self._modelTime.day,\
                                      0)

        # writing daily output to netcdf files
        if self.outDailyTotNC[0] != "None":
            for var in self.outDailyTotNC:

                short_name = varDicts.netcdf_short_name[var]
                self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                            str(var)+\
                                            "_dailyTot_output.nc",\
                                            short_name,\
                  pcr2numpy(self.__getattribute__(var),vos.MV),\
                                            timeStamp)

        # writing sub-season output to netcdf files
        # - cumulative
        if self.outSeasoTotNC[0] != "None":
            for var in self.outSeasoTotNC:

                # introduce variables at the beginning of the simulation or
                #     reset variables at the beginning of each sub-season (day 1 or 16)
                if self._modelTime.timeStepPCR == 1 or \
                   self._modelTime.day == 1 or \
                   self._modelTime.day == 16:
                    vars(self)[var + 'SeasoTot'] = pcr.scalar(0.0)

                # accumulating
                vars(self)[var + 'SeasoTot'] += vars(self)[var]

                # reporting at the end of the month:
                if self._modelTime.endSubSeason == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                            str(var)+\
                                               "_seasoTot_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'SeasoTot'),\
                       vos.MV),timeStamp)
        #
        # - average
        if self.outSeasoAvgNC[0] != "None":
            for var in self.outSeasoAvgNC:

                # only if an accumulator variable has not been defined:
                if var not in self.outSeasoTotNC:

                    # introduce accumulator at the beginning of the simulation or
                    #     reset accumulator at the beginning of each sub-season (day 1 or 16)
                    if self._modelTime.timeStepPCR == 1 or \
                       self._modelTime.day == 1 or \
                       self._modelTime.day == 16:
                        vars(self)[var + 'SeasoTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'SeasoTot'] += vars(self)[var]

                # calculating average & reporting at the end of the month:
                if self._modelTime.endSubSeason == True:

                    vars(self)[var+'SeasoAvg'] = vars(self)[var+'SeasoTot']/\
                                                 self._modelTime.seasonLength

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_seasoAvg_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'SeasoAvg'),\
                       vos.MV),timeStamp)
        #
        # - last day of the sub-season
        if self.outSeasoEndNC[0] != "None":
            for var in self.outSeasoEndNC:

                # reporting at the end of the month:
                if self._modelTime.endSubSeason == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_seasoEnd_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var),\
                       vos.MV),timeStamp)

        # writing monthly output to netcdf files
        # - cumulative
        if self.outMonthTotNC[0] != "None":
            for var in self.outMonthTotNC:

                # introduce variables at the beginning of simulation or
                #     reset variables at the beginning of the month
                if self._modelTime.timeStepPCR == 1 or \
                   self._modelTime.day == 1:
                    vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)

                # accumulating
                vars(self)[var + 'MonthTot'] += vars(self)[var]

                # reporting at the end of the month:
                if self._modelTime.endMonth == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                            str(var)+\
                                               "_monthTot_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                       vos.MV),timeStamp)
        #
        # - average
        if self.outMonthAvgNC[0] != "None":
            for var in self.outMonthAvgNC:

                # only if an accumulator variable has not been defined:
                if var not in self.outMonthTotNC:

                    # introduce accumulator at the beginning of simulation or
                    #     reset accumulator at the beginning of the month
                    if self._modelTime.timeStepPCR == 1 or \
                       self._modelTime.day == 1:
                        vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'MonthTot'] += vars(self)[var]

                # calculating average & reporting at the end of the month:
                if self._modelTime.endMonth == True:

                    vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                 self._modelTime.day

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_monthAvg_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                       vos.MV),timeStamp)
        #
        # - last day of the month
        if self.outMonthEndNC[0] != "None":
            for var in self.outMonthEndNC:

                # reporting at the end of the month:
                if self._modelTime.endMonth == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_monthEnd_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var),\
                       vos.MV),timeStamp)

        # writing yearly output to netcdf files
        # - cumulative
        if self.outAnnuaTotNC[0] != "None":
            for var in self.outAnnuaTotNC:

                # introduce variables at the beginning of the simulation or
                #     reset variables at the beginning of the year
                if self._modelTime.timeStepPCR == 1 or \
                   self._modelTime.doy == 1:
                    vars(self)[var + 'AnnuaTot'] = pcr.scalar(0.0)

                # accumulating
                vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                # reporting at the end of the year:
                if self._modelTime.endYear == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_annuaTot_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                       vos.MV),timeStamp)

        # - average
        if self.outAnnuaAvgNC[0] != "None":
            for var in self.outAnnuaAvgNC:

                # only if an accumulator variable has not been defined:
                if var not in self.outAnnuaTotNC:

                    # introduce accumulator at the beginning of simulation or
                    #     reset accumulator at the beginning of the year
                    if self._modelTime.timeStepPCR == 1 or \
                       self._modelTime.doy == 1:
                        vars(self)[var + 'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                # calculating average & reporting at the end of the year:
                if self._modelTime.endYear == True:

                    vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                 self._modelTime.doy

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_annuaAvg_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                       vos.MV),timeStamp)
        #
        # - last day of the year
        if self.outAnnuaEndNC[0] != "None":
            for var in self.outAnnuaEndNC:

                # reporting at the end of the year:
                if self._modelTime.endYear == True:

                    short_name = varDicts.netcdf_short_name[var]
                    self.netcdfObj.data2NetCDF(self.outNCDir+"/"+ \
                                               str(var)+\
                                               "_annuaEnd_output.nc",\
                                               short_name,\
                      pcr2numpy(self.__getattribute__(var),\
                       vos.MV),timeStamp)

        logger.info("reporting for time %s", self._modelTime.currTime)
Example #35
0
    def update(self,landSurface,routing,currTimeStep):

        logger.info("Updating groundwater")
        
        if self.debugWaterBalance:
            preStorGroundwater       = self.storGroundwater
            preStorGroundwaterFossil = self.storGroundwaterFossil
                
        # get riverbed infiltration from the previous time step (from routing)
        self.surfaceWaterInf  = routing.riverbedExchange/\
                                routing.cellArea               # unit: m
        self.storGroundwater += self.surfaceWaterInf

        # get net recharge (percolation-capRise) and update storage:
        self.storGroundwater  = pcr.max(0.,\
                                self.storGroundwater + landSurface.gwRecharge)         
                        
        # non fossil groundwater abstraction
        self.nonFossilGroundwaterAbs = landSurface.nonFossilGroundwaterAbs
        self.storGroundwater         = pcr.max(0.,\
                                       self.storGroundwater - self.nonFossilGroundwaterAbs) 
        
        # baseflow
        self.baseflow         = pcr.max(0.,\
                                pcr.min(self.storGroundwater,\
                                        self.recessionCoeff* \
                                        self.storGroundwater))
        self.storGroundwater  = pcr.max(0.,\
                                self.storGroundwater - self.baseflow)
        # PS: baseflow must be calculated at the end (to ensure the availability of storGroundwater to support nonFossilGroundwaterAbs)
        
        # fossil groundwater abstraction:
        self.fossilGroundwaterAbstr = landSurface.fossilGroundwaterAbstr
        self.storGroundwaterFossil -= self.fossilGroundwaterAbstr

        # fossil groundwater cannot be negative if limitFossilGroundwaterAbstraction is used
        if self.limitFossilGroundwaterAbstraction:
            self.storGroundwaterFossil = pcr.max(0.0, self.storGroundwaterFossil)

        # groundwater allocation (Note: This is done in the landSurface module)
        self.allocNonFossilGroundwater = landSurface.allocNonFossilGroundwater
        self.fossilGroundwaterAlloc    = landSurface.fossilGroundwaterAlloc
        
        # Note: "unmetDemand" is a misleading legacy name kept from older versions.
        #       It actually holds the amount of demand that is satisfied/allocated from fossil groundwater.
        self.unmetDemand = self.fossilGroundwaterAlloc

        # calculate the average total groundwater abstraction (m/day) from the last 365 days:
        totalAbstraction    = self.fossilGroundwaterAbstr + self.nonFossilGroundwaterAbs
        deltaAbstraction    = totalAbstraction - self.avgAbstraction  
        self.avgAbstraction = self.avgAbstraction +\
                                 deltaAbstraction/\
                              pcr.min(365., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgAbstraction = pcr.max(0.0, self.avgAbstraction)                                    

        # calculate the average non fossil groundwater allocation (m/day) 
        # - from the last 365 days:
        deltaAllocation     = self.allocNonFossilGroundwater  - self.avgNonFossilAllocation  
        self.avgNonFossilAllocation  = self.avgNonFossilAllocation +\
                                 deltaAllocation/\
                              pcr.min(365., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgNonFossilAllocation = pcr.max(0.0, self.avgNonFossilAllocation)
        # - from the last 7 days:
        deltaAllocationShort    = self.allocNonFossilGroundwater - self.avgNonFossilAllocationShort  
        self.avgNonFossilAllocationShort = self.avgNonFossilAllocationShort +\
                                     deltaAllocationShort/\
                                  pcr.min(7., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgNonFossilAllocationShort = pcr.max(0.0, self.avgNonFossilAllocationShort)                                    

        # calculate the average total (fossil + non fossil) groundwater allocation (m/day) 
        totalGroundwaterAllocation = self.allocNonFossilGroundwater + self.fossilGroundwaterAlloc
        # - from the last 365 days:
        deltaAllocation            = totalGroundwaterAllocation - self.avgAllocation 
        self.avgAllocation         = self.avgAllocation +\
                                        deltaAllocation/\
                                        pcr.min(365., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgAllocation         = pcr.max(0.0, self.avgAllocation)
        # - from the last 7 days:
        deltaAllocationShort       = totalGroundwaterAllocation - self.avgAllocationShort  
        self.avgAllocationShort    = self.avgAllocationShort +\
                                        deltaAllocationShort/\
                                        pcr.min(7., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgAllocationShort    = pcr.max(0.0, self.avgAllocationShort)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([self.surfaceWaterInf,\
                                   landSurface.gwRecharge],\
                                  [self.baseflow,\
                                   self.nonFossilGroundwaterAbs],\
                                  [  preStorGroundwater],\
                                  [self.storGroundwater],\
                                       'storGroundwater',\
                                   True,\
                                   currTimeStep.fulldate,threshold=1e-4)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([pcr.scalar(0.0)],\
                                  [self.fossilGroundwaterAbstr],\
                                  [  preStorGroundwaterFossil],\
                                  [self.storGroundwaterFossil],\
                                       'storGroundwaterFossil',\
                                   True,\
                                   currTimeStep.fulldate,threshold=1e-3)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([landSurface.desalinationAllocation,\
                                   self.unmetDemand, \
                                   self.allocNonFossilGroundwater, \
                                   landSurface.allocSurfaceWaterAbstract],\
                                  [landSurface.totalPotentialGrossDemand],\
                                  [pcr.scalar(0.)],\
                                  [pcr.scalar(0.)],\
                                  'demand allocation (desalination, surface water, groundwater & unmetDemand). Errors here may be due to rounding.',\
                                   True,\
                                   currTimeStep.fulldate,threshold=1e-3)

        # old-style reporting                             
        self.old_style_groundwater_reporting(currTimeStep)              # TODO: remove this one
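# The abstraction and allocation averages above are all maintained with the same
# incremental running-mean update. A minimal scalar sketch of that rule (365-day
# window, clipped at zero as in the code; the numbers are illustrative only):
def update_running_average(avg, new_value, timesteps, window=365.0):
    n = min(window, max(1.0, timesteps))          # effective averaging length
    return max(0.0, avg + (new_value - avg) / n)

# e.g. update_running_average(1.0, 2.0, 10) returns 1.1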
Example #36
0
def generate_hydro_datasets(path, output_dir, step):
    print(path)

    file_name = os.path.splitext(os.path.basename(path))[0]
    map_path = output_dir + '/' + file_name + '.map'
    path_prefix = map_path[:-14]

    if step == 'ldd':
        cmd = u'gdal_translate -a_nodata -9999 -of PCRaster -ot Float32 ' + path + ' ' + map_path
        print(cmd)
        subprocess.call(cmd, shell=True)

    # slope = pcr.slope(dem)
    # pcr.report(slope, path_prefix + '_slope.map')

    # pcr.setglobaloption("lddin")

    if step == 'ldd':
        dem = pcr.readmap(map_path)

        print("Computing LDD ...")
        # enable pit filling
        ldd = pcr.lddcreate(dem, 9999999, 9999999, 9999999, 9999999)
        pcr.report(ldd, path_prefix + '_ldd.map')

        return
    elif step == 'ldddem':
        dem = pcr.readmap(map_path)

        print("Computing LDD DEM ...")
        dem_pitfilled = pcr.lddcreatedem(dem, 9999999, 9999999, 9999999,
                                         9999999)
        dem_diff = dem_pitfilled - dem
        pcr.report(dem_diff, path_prefix + '_dem_pits_diff.map')

        return

    # print("Computing LDD without pit filling ...")
    # ldd_pits = pcr.lddcreate(dem, 0, 0, 0, 0)
    # pcr.report(ldd_pits, path_prefix + '_ldd_with_pits.map')

    # print("Computing pits ...")
    # pits = pcr.pit(ldd_pits)

    # pcr.report(pits, path_prefix + '_pits.map')

    if step == 'fa':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Computing flow accumulation ...")
        fa = pcr.accuflux(ldd, 1)
        pcr.report(fa, path_prefix + '_fa.map')

        return

    if step == 'catchments':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Delineating catchments ...")
        catchments = pcr.catchment(ldd, pcr.pit(ldd))
        pcr.report(catchments, path_prefix + '_catchments.map')

        return

    if step == 'stream_order':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Computing stream order ...")
        stream_order = pcr.streamorder(ldd)
        pcr.report(stream_order, path_prefix + '_streamorder.map')

        return

    if step == 'stream':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        accuThreshold = 100
        print("Computing stream ...")
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1),
            pcr.boolean(0))
        pcr.report(stream, path_prefix + '_stream.map')
        return

    if step == 'height_river':
        print("Computing heigh_river ...")

        stream = pcr.readmap(path_prefix + '_stream.map')
        dem = pcr.readmap(map_path)
        height_river = pcr.ifthenelse(stream, pcr.ordinal(dem), 0)
        pcr.report(height_river, path_prefix + '_height_river.map')
        return

    if step == 'up_elevation':
        print("Computing up_elevation ...")

        height_river = pcr.readmap(path_prefix + '_height_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
        pcr.report(up_elevation, path_prefix + '_up_elevation.map')
        return

    if step == 'hand':
        print("Computing HAND ...")
        dem = pcr.readmap(map_path)
        up_elevation = pcr.readmap(path_prefix + '_up_elevation.map')
        hand = pcr.max(dem - up_elevation, 0)
        pcr.report(hand, path_prefix + '_hand.map')
        return

    if step == 'dand':
        print("Computing DAND ...")
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        dist = pcr.ldddist(ldd, stream, 1)
        pcr.report(dist, path_prefix + '_dist.map')
        return

    if step == 'fa_river':
        print("Computing FA river ...")
        fa = pcr.readmap(path_prefix + '_fa.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        fa_river = pcr.ifthenelse(stream, pcr.ordinal(fa), 0)
        pcr.report(fa_river, path_prefix + '_fa_river.map')
        return

    if step == 'faand':
        print("Computing FAAND ...")
        fa_river = pcr.readmap(path_prefix + '_fa_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_fa = pcr.scalar(pcr.subcatchment(ldd, fa_river))
        pcr.report(up_fa, path_prefix + '_faand.map')
        return
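# A minimal driver sketch (the DEM path is hypothetical; the ordering follows from the
# intermediate maps each step reads): 'ldd' must run before 'fa' and 'stream', and
# 'stream' and 'up_elevation' must exist before 'hand' can be computed.
dem_path = "data/example_dem.tif"
for step in ["ldd", "fa", "stream", "height_river", "up_elevation", "hand"]:
    generate_hydro_datasets(dem_path, "output", step)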
Example #37
0
    def createInstancesInitial(self):
        import generalfunctions

        if readDistributionOfParametersFromDisk:
            path = '/home/derek/tmp/'
            maximumInterceptionCapacityPerLAI = pcr.scalar(
                path +
                pcrfw.generateNameS('RPic', self.currentSampleNumber()) +
                '.map')
            ksat = pcr.scalar(
                path +
                pcrfw.generateNameS('RPks', self.currentSampleNumber()) +
                '.map')
            regolithThicknessHomogeneous = pcr.scalar(
                path +
                pcrfw.generateNameS('RPrt', self.currentSampleNumber()) +
                '.map')
            saturatedConductivityMetrePerDay = pcr.scalar(
                path +
                pcrfw.generateNameS('RPsc', self.currentSampleNumber()) +
                '.map')
            multiplierMaxStomatalConductance = pcr.scalar(
                path +
                pcrfw.generateNameS('RPmm', self.currentSampleNumber()) +
                '.map')
        else:
            maximumInterceptionCapacityPerLAI = generalfunctions.areauniformBounds(
                0.0001, 0.0005, pcr.nominal(1),
                pcr.scalar(cfg.maximumInterceptionCapacityValue),
                createRealizations)
            ksat = generalfunctions.areauniformBounds(
                0.025, 0.05, pcr.nominal(1), pcr.scalar(cfg.ksatValue),
                createRealizations)
            regolithThicknessHomogeneous = generalfunctions.areauniformBounds(
                1.0, 3.5, cfg.areas,
                pcr.scalar(cfg.regolithThicknessHomogeneousValue),
                createRealizations)
            saturatedConductivityMetrePerDay = generalfunctions.mapuniformBounds(
                25.0, 40.0,
                pcr.scalar(cfg.saturatedConductivityMetrePerDayValue),
                createRealizations)
            multiplierMaxStomatalConductance = generalfunctions.mapuniformBounds(
                0.8, 1.1,
                pcr.scalar(cfg.multiplierMaxStomatalConductanceValue),
                createRealizations)

        if swapCatchments:
            regolithThicknessHomogeneous = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, regolithThicknessHomogeneous, True)

        self.d_randomparameters = randomparameters.RandomParameters(
            timeStepsToReportRqs, setOfVariablesToReport,
            maximumInterceptionCapacityPerLAI, ksat,
            regolithThicknessHomogeneous, saturatedConductivityMetrePerDay,
            multiplierMaxStomatalConductance)

        # class for exchange variables in initial and dynamic
        # introduced to make filtering possible
        self.d_exchangevariables = exchangevariables.ExchangeVariables(
            timeStepsToReportSome,
            setOfVariablesToReport,
        )

        ################
        # interception #
        ################

        self.ldd = cfg.lddMap

        initialInterceptionStore = pcr.scalar(0.000001)
        leafAreaIndex = pcr.scalar(cfg.leafAreaIndexValue)

        if swapCatchments:
            leafAreaIndex = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, leafAreaIndex, True)
        gapFraction = pcr.exp(
            -0.5 * leafAreaIndex)  # equation 40 in Brolsma et al 2010a
        maximumInterceptionStore = maximumInterceptionCapacityPerLAI * leafAreaIndex

        self.d_interceptionuptomaxstore = interceptionuptomaxstore.InterceptionUpToMaxStore(
            self.ldd, initialInterceptionStore, maximumInterceptionStore,
            gapFraction, self.timeStepDurationHours, timeStepsToReportSome,
            setOfVariablesToReport)

        #################
        # surface store #
        #################

        initialSurfaceStore = pcr.scalar(0.0)
        maxSurfaceStore = pcr.scalar(cfg.maxSurfaceStoreValue)
        self.d_surfaceStore = surfacestore.SurfaceStore(
            initialSurfaceStore, maxSurfaceStore, self.timeStepDurationHours,
            timeStepsToReportSome, setOfVariablesToReport)

        ################
        # infiltration #
        ################

        # N initialMoistureContentFraction taken from 1st July

        # DK
        # we do not use rts and Gs as input to calculate initial moisture fraction to avoid
        # problems when the initial regolith thickness is calibrated (it might be thinner than
        # initialMoistureThick -> problems!)
        # instead, we use initial moisture content fraction as input, read from disk, it is just calculated
        # by pcrcalc 'mergeInitialMoistureContentFraction=Gs000008.761/rts00008.761'
        # note that I also changed the name for the initial soil moisture as a fraction
        initialSoilMoistureFractionFromDisk = pcr.scalar(
            cfg.initialSoilMoistureFractionFromDiskValue)
        if swapCatchments:
            initialSoilMoistureFractionFromDisk = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, initialSoilMoistureFractionFromDisk, True)

        # initial soil moisture as a fraction should not be above soil porosity as a fraction, just a check
        soilPorosityFraction = pcr.scalar(cfg.soilPorosityFractionValue)
        if swapCatchments:
            soilPorosityFraction = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, soilPorosityFraction, True)
        initialSoilMoistureFraction = pcr.min(
            soilPorosityFraction, initialSoilMoistureFractionFromDisk)
        hf = pcr.scalar(-0.0000001)
        self.d_infiltrationgreenandampt = infiltrationgreenandampt.InfiltrationGreenAndAmpt(
            soilPorosityFraction, initialSoilMoistureFraction, ksat, hf,
            self.timeStepDurationHours, timeStepsToReportSome,
            setOfVariablesToReport)

        ####################
        # subsurface water #
        ####################

        demOfBedrockTopography = self.dem

        stream = pcr.boolean(cfg.streamValue)
        theSlope = pcr.slope(self.dem)
        regolithThickness = pcr.ifthenelse(stream, 0.01,
                                           regolithThicknessHomogeneous)

        self.multiplierWiltingPoint = pcr.scalar(1.0)
        limitingPointFraction = pcr.scalar(cfg.limitingPointFractionValue)

        if swapCatchments:
            limitingPointFraction = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, limitingPointFraction, True)
        mergeWiltingPointFractionFS = pcr.scalar(
            cfg.mergeWiltingPointFractionFSValue)
        if swapCatchments:
            mergeWiltingPointFractionFS = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, mergeWiltingPointFractionFS, True)
        wiltingPointFractionNotChecked = mergeWiltingPointFractionFS * self.multiplierWiltingPoint
        wiltingPointFraction = pcr.min(wiltingPointFractionNotChecked,
                                       limitingPointFraction)

        fieldCapacityFraction = pcr.scalar(cfg.fieldCapacityFractionValue)
        if swapCatchments:
            fieldCapacityFraction = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, fieldCapacityFraction, True)

        self.d_subsurfaceWaterOneLayer = subsurfacewateronelayer.SubsurfaceWaterOneLayer(
            self.ldd, demOfBedrockTopography, regolithThickness,
            initialSoilMoistureFraction, soilPorosityFraction,
            wiltingPointFraction, fieldCapacityFraction, limitingPointFraction,
            saturatedConductivityMetrePerDay, self.timeStepDurationHours,
            timeStepsToReportSome, setOfVariablesToReport)

        ##########
        # runoff #
        ##########

        self.d_runoffAccuthreshold = runoffaccuthreshold.RunoffAccuthreshold(
            self.ldd, self.timeStepDurationHours, timeStepsToReportRqs,
            setOfVariablesToReport)

        ######################
        # evapotranspiration #
        ######################

        albedo = pcr.scalar(cfg.albedoValue)
        if swapCatchments:
            albedo = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, albedo, True)

        maxStomatalConductance = pcr.scalar(
            cfg.maxStomatalConductanceValue) * multiplierMaxStomatalConductance
        if swapCatchments:
            maxStomatalConductance = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, maxStomatalConductance, True)

        vegetationHeight = pcr.scalar(cfg.vegetationHeightValue)
        if swapCatchments:
            vegetationHeight = generalfunctions.swapValuesOfTwoRegions(
                cfg.areas, vegetationHeight, True)
        self.d_evapotranspirationPenman = evapotranspirationpenman.EvapotranspirationPenman(
            self.timeStepDurationHours, albedo, maxStomatalConductance,
            vegetationHeight, leafAreaIndex, timeStepsToReportSome,
            setOfVariablesToReport)
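# A minimal scalar sketch of the canopy parameters derived above (gap fraction from
# equation 40 in Brolsma et al. 2010a); the LAI and per-LAI capacity values are
# illustrative only:
import math

leaf_area_index = 3.0
gap_fraction = math.exp(-0.5 * leaf_area_index)         # ~0.22
max_interception_store = 0.0002 * leaf_area_index       # 0.0006 m at 0.0002 m per unit LAI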
Example #38
0
    def dynamic(self):
        import generalfunctions

        # time
        self.d_dateTimePCRasterPython.update()
        timeDatetimeFormat = self.d_dateTimePCRasterPython.getTimeDatetimeFormat(
        )

        # precipitation
        # for calibration
        rainfallFluxDeterm = pcr.timeinputscalar(
            cfg.rainfallFluxDetermTimeSeries,
            pcr.nominal(cfg.rainfallFluxDetermTimeSeriesAreas))
        # for the experiments
        rainfallFlux = rainfallFluxDeterm  #generalfunctions.mapNormalRelativeError(rainfallFluxDeterm,0.25)
        self.d_exchangevariables.cumulativePrecipitation = \
                self.d_exchangevariables.cumulativePrecipitation + rainfallFlux * self.timeStepDuration

        # interception store
        actualAdditionFluxToInterceptionStore = self.d_interceptionuptomaxstore.addWater(
            rainfallFlux)
        throughfallFlux = rainfallFlux - actualAdditionFluxToInterceptionStore

        # surface store
        totalToSurfaceFlux = throughfallFlux + self.d_exchangevariables.upwardSeepageFlux
        potentialToSurfaceStoreFlux = self.d_surfaceStore.potentialToFlux()

        # potential infiltration
        potentialHortonianInfiltrationFlux = self.d_infiltrationgreenandampt.potentialInfiltrationFluxFunction(
        )
        maximumSaturatedOverlandFlowInfiltrationFlux = self.d_subsurfaceWaterOneLayer.getMaximumAdditionFlux(
        )
        potentialInfiltrationFlux = pcr.min(
            potentialHortonianInfiltrationFlux,
            maximumSaturatedOverlandFlowInfiltrationFlux)

        # abstraction from surface water
        potentialAbstractionFromSurfaceWaterFlux = potentialToSurfaceStoreFlux + potentialInfiltrationFlux
        actualAbstractionFromSurfaceWaterFlux, runoffCubicMetresPerHour = self.d_runoffAccuthreshold.update(
            totalToSurfaceFlux, potentialAbstractionFromSurfaceWaterFlux)
        potentialOutSurfaceStoreFlux = self.d_surfaceStore.potentialOutFlux()

        # infiltration
        availableForInfiltrationFlux = potentialOutSurfaceStoreFlux + actualAbstractionFromSurfaceWaterFlux
        availableForInfiltrationNotExceedingMaximumSaturatedOverlandFlowFlux = pcr.min(
            availableForInfiltrationFlux,
            maximumSaturatedOverlandFlowInfiltrationFlux)
        actualInfiltrationFlux = self.d_infiltrationgreenandampt.update(
            availableForInfiltrationNotExceedingMaximumSaturatedOverlandFlowFlux
        )

        # surface store
        surfaceStoreChange = actualAbstractionFromSurfaceWaterFlux - actualInfiltrationFlux
        self.d_surfaceStore.update(surfaceStoreChange)
        actualAdditionFlux = self.d_subsurfaceWaterOneLayer.addWater(
            actualInfiltrationFlux)

        if cfg.with_shading:
            # solar radiation (POTRAD, shading effect and inclination)
            fractionReceived, fractionReceivedFlatSurface, shaded = \
                                                  self.d_shading.update(timeDatetimeFormat)

            # we assume all cells receive the same solar radiation as measured by the device
            # except for shading, if shading, there is nothing received
            fractionReceived = pcr.ifthenelse(shaded, pcr.scalar(0.0),
                                              pcr.scalar(1.0))
        else:
            fractionReceived = pcr.scalar(cfg.fractionReceivedValue)
            fractionReceivedFlatSurface = pcr.scalar(
                cfg.fractionReceivedFlatSurfaceValue)

        fWaterPotential = self.d_subsurfaceWaterOneLayer.getFWaterPotential()

        # potential evapotranspiration
        airTemperatureDeterm = pcr.timeinputscalar(
            cfg.airTemperatureDetermString, self.clone)
        airTemperature = airTemperatureDeterm  #airTemperatureDeterm+mapnormal()

        relativeHumidityDeterm = pcr.timeinputscalar(
            cfg.relativeHumidityDetermString, self.clone)
        relativeHumidity = relativeHumidityDeterm  #pcr.max(pcr.min(relativeHumidityDeterm+mapnormal()*0.1,pcr.scalar(1.0)),pcr.scalar(0))

        incomingShortwaveRadiationFlatSurface = pcr.timeinputscalar(
            cfg.incomingShortwaveRadiationFlatSurfaceString, self.clone)
        # incomingShortwaveRadiationFlatSurface = pcr.max(pcr.scalar(0),
        #                              generalfunctions.mapNormalRelativeError(incomingShortwaveRadiationFlatSurfaceDeterm,0.25))

        incomingShortwaveRadiationAtSurface = incomingShortwaveRadiationFlatSurface * fractionReceived

        windVelocityDeterm = pcr.timeinputscalar(cfg.windVelocityDetermString,
                                                 self.clone)
        windVelocity = windVelocityDeterm  #generalfunctions.mapNormalRelativeError(windVelocityDeterm,0.25)

        elevationAboveSeaLevelOfMeteoStation = cfg.elevationAboveSeaLevelOfMeteoStationValue

        potentialEvapotranspirationFlux, \
               potentialEvapotranspirationAmount, \
               potentialEvapotranspirationFromCanopyFlux, \
               potentialEvapotranspirationFromCanopyAmount = \
                                self.d_evapotranspirationPenman.potentialEvapotranspiration(
                                airTemperature,
                                relativeHumidity,
                                incomingShortwaveRadiationAtSurface,
                                incomingShortwaveRadiationFlatSurface,
                                fractionReceivedFlatSurface,
                                windVelocity,
                                elevationAboveSeaLevelOfMeteoStation,
                                fWaterPotential,
                                rainfallFlux < 0.000000000001)

        potentialEvapotranspirationFluxNoNegativeValues = pcr.max(
            0.0, potentialEvapotranspirationFlux)
        potentialEvapotranspirationFluxFromCanopyNoNegativeValues = pcr.max(
            0.0, potentialEvapotranspirationFromCanopyFlux)

        # evapotranspirate first from interception store
        actualAbstractionFluxFromInterceptionStore = self.d_interceptionuptomaxstore.abstractWater(
            potentialEvapotranspirationFluxFromCanopyNoNegativeValues)

        # fraction of soil evapotranspiration depends on evapo from canopy
        evapFromSoilMultiplierMV = (potentialEvapotranspirationFluxFromCanopyNoNegativeValues -
                                actualAbstractionFluxFromInterceptionStore) / \
                                potentialEvapotranspirationFluxFromCanopyNoNegativeValues
        self.d_exchangevariables.evapFromSoilMultiplier = \
                               pcr.ifthenelse(potentialEvapotranspirationFluxNoNegativeValues < 0.0000000000001,
                               pcr.scalar(1), evapFromSoilMultiplierMV)

        # evapotranspirate from subsurface store
        # potentialEvapotranspirationFluxFromSubsurface= \
        #                       pcr.max(0.0,potentialEvapotranspirationFluxNoNegativeValues-actualAbstractionFluxFromInterceptionStore)
        potentialEvapotranspirationFluxFromSubsurface = self.d_exchangevariables.evapFromSoilMultiplier * \
                                                      potentialEvapotranspirationFluxNoNegativeValues
        actualAbstractionFluxFromSubsurface = self.d_subsurfaceWaterOneLayer.abstractWater(
            potentialEvapotranspirationFluxFromSubsurface)

        # upward seepage from subsurfacestore
        self.d_exchangevariables.upwardSeepageFlux = self.d_subsurfaceWaterOneLayer.lateralFlow(
        )

        # reports
        self.reportComponentsDynamic()
        self.reportRandomParametersDynamic()
        self.printComponentsDynamic()
        if doReportComponentsDynamicAsNumpy:
            self.reportComponentsDynamicAsNumpy()
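# A minimal scalar sketch of the soil evaporation multiplier computed above: the share
# of potential canopy evaporation that the interception store could not supply scales
# the potential evapotranspiration applied to the subsurface store. The explicit zero
# guard is a simplification of the missing-value handling in the PCRaster version.
def evap_from_soil_multiplier(pot_canopy_et, abstracted_from_interception, eps=1e-13):
    if pot_canopy_et < eps:
        return 1.0
    return (pot_canopy_et - abstracted_from_interception) / pot_canopy_et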
Example #39
0
def simplereservoir(
    storage,
    inflow,
    ResArea,
    maxstorage,
    target_perc_full,
    maximum_Q,
    demand,
    minimum_full_perc,
    ReserVoirLocs,
    precip,
    pet,
    ReservoirSimpleAreas,
    timestepsecs=86400,
):
    """

    :param storage: initial storage m^3
    :param inflow: inflow m^3/s
    :param maxstorage: maximum storage (above which water is spilled) m^3
    :param target_perc_full: target fraction full (of max storage) -
    :param maximum_Q: maximum Q to release m^3/s if below spillway
    :param demand: water demand (all combined) m^3/s
    :param minimum_full_perc: target minimum full fraction (of max storage) -
    :param ReserVoirLocs: map with reservoir locations
    :param timestepsecs: timestep of the model in seconds (default = 86400)
    :return: storage (m^3), outflow (m^3/s), PercentageFull (0-1), Release (m^3/sec)
    """

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(precip, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )
    pet_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(pet, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )
    
    _outflow = 0
    _demandRelease = 0
    
    nr_loop = np.max([int(timestepsecs / 21600), 1])
    for n in range(0, nr_loop):
        
        fl_nr_loop = float(nr_loop)
        
        storage = (
            storage
            + (inflow * timestepsecs / fl_nr_loop)
            + (prec_av / fl_nr_loop / 1000.0) * ResArea
            - (pet_av / fl_nr_loop / 1000.0) * ResArea
        )

        percfull = storage / maxstorage
        # first determine minimum (environmental) flow using a simple sigmoid curve to scale for target level
        fac = sCurve(percfull, a=minimum_full_perc, c=30.0)
        demandRelease = pcr.min(fac * demand * timestepsecs / fl_nr_loop, storage)
        storage = storage - demandRelease

        wantrel = pcr.max(0.0, storage - (maxstorage * target_perc_full))
        # Assume extra maximum Q if spilling
        overflowQ = pcr.max((storage - maxstorage), 0.0)
        torelease = pcr.min(wantrel, overflowQ + maximum_Q * timestepsecs / fl_nr_loop - demandRelease)
        storage = storage - torelease
        outflow = torelease + demandRelease
        percfull = storage / maxstorage
        
        _outflow = _outflow + outflow
        _demandRelease = _demandRelease + demandRelease

    return storage, _outflow / timestepsecs, percfull, prec_av, pet_av, _demandRelease / timestepsecs
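# A minimal scalar sketch of the demand release inside the sub-timestep loop above.
# The sCurve helper is not shown in this snippet, so a plain logistic curve is used
# here as an assumed stand-in; the numbers only illustrate the shape of the behaviour.
import math

def logistic(x, a=0.0, c=1.0):
    # assumed stand-in for the sCurve helper referenced above
    return 1.0 / (1.0 + math.exp(-c * (x - a)))

storage, maxstorage = 4.0e6, 1.0e7                  # m3 (illustrative)
minimum_full_perc, demand, dt = 0.3, 0.5, 86400.0   # fraction, m3/s, s

fac = logistic(storage / maxstorage, a=minimum_full_perc, c=30.0)  # ~0.95: above the minimum level
demand_release = min(fac * demand * dt, storage)                   # m3 released towards demand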
Example #40
0
    def getReservoirOutflow(self,\
        avgChannelDischarge,length_of_time_step,downstreamDemand):

        # avgOutflow (m3/s)
        avgOutflow = self.avgOutflow
        # The following is needed when new lakes/reservoirs introduced (its avgOutflow is still zero).
        #~ # - alternative 1
        #~ avgOutflow = pcr.ifthenelse(\
        #~ avgOutflow > 0.,\
        #~ avgOutflow,
        #~ pcr.max(avgChannelDischarge, self.avgInflow, 0.001))
        # - alternative 2
        avgOutflow = pcr.ifthenelse(\
                     avgOutflow > 0.,\
                     avgOutflow,
                     pcr.max(avgChannelDischarge, self.avgInflow))
        avgOutflow = pcr.ifthenelse(\
                     avgOutflow > 0.,\
                     avgOutflow, pcr.downstream(self.lddMap, avgOutflow))
        avgOutflow = pcr.areamaximum(avgOutflow, self.waterBodyIds)

        # calculate resvOutflow (m3) (based on reservoir storage and avgDischarge):
        # - using reductionFactor in such a way that:
        #   - if relativeCapacity < minResvrFrac : release is terminated
        #   - if relativeCapacity > maxResvrFrac : longterm average
        reductionFactor = \
         pcr.cover(\
         pcr.min(1.,
         pcr.max(0., \
          self.waterBodyStorage - self.minResvrFrac*self.waterBodyCap)/\
             ((self.maxResvrFrac - self.minResvrFrac)*self.waterBodyCap)),0.0)
        #
        resvOutflow = reductionFactor * avgOutflow * length_of_time_step  # unit: m3

        # maximum release <= average inflow (especially during dry condition)
        resvOutflow = pcr.max(0,
                              pcr.min(resvOutflow, self.avgInflow *
                                      length_of_time_step))  # unit: m3

        # downstream demand (m3/s)
        # reduce demand if storage < lower limit
        reductionFactor = vos.getValDivZero(
            downstreamDemand, self.minResvrFrac * self.waterBodyCap,
            vos.smallNumber)
        reductionFactor = pcr.cover(reductionFactor, 0.0)
        downstreamDemand = pcr.min(downstreamDemand,
                                   downstreamDemand * reductionFactor)
        # resvOutflow > downstreamDemand
        resvOutflow = pcr.max(resvOutflow, downstreamDemand *
                              length_of_time_step)  # unit: m3

        # floodOutflow: additional release if storage > upper limit
        ratioQBankfull = 2.3
        estmStorage = pcr.max(0., self.waterBodyStorage - resvOutflow)
        floodOutflow = \
           pcr.max(0.0, estmStorage - self.waterBodyCap) +\
           pcr.cover(\
           pcr.max(0.0, estmStorage - self.maxResvrFrac*\
                                      self.waterBodyCap)/\
              ((1.-self.maxResvrFrac)*self.waterBodyCap),0.0)*\
           pcr.max(0.0,ratioQBankfull*avgOutflow* vos.secondsPerDay()-\
                                      resvOutflow)
        floodOutflow = pcr.max(0.0,
                       pcr.min(floodOutflow,\
                       estmStorage - self.maxResvrFrac*\
                                     self.waterBodyCap*0.75)) # maximum limit of floodOutflow: bring the reservoir storages only to 3/4 of upper limit capacities

        # update resvOutflow after floodOutflow
        resvOutflow  = pcr.cover(resvOutflow , 0.0) +\
                       pcr.cover(floodOutflow, 0.0)

        # maximum release if storage > upper limit : bring the reservoir storages only to 3/4 of upper limit capacities
        resvOutflow  = pcr.ifthenelse(self.waterBodyStorage >
                       self.maxResvrFrac*self.waterBodyCap,\
                       pcr.min(resvOutflow,\
                       pcr.max(0,self.waterBodyStorage - \
                       self.maxResvrFrac*self.waterBodyCap*0.75)),
                       resvOutflow)

        # if storage > upper limit : resvOutflow > avgInflow
        resvOutflow  = pcr.ifthenelse(self.waterBodyStorage >
                       self.maxResvrFrac*self.waterBodyCap,\
                       pcr.max(0.0, resvOutflow, self.avgInflow),
                       resvOutflow)

        # resvOutflow < waterBodyStorage
        resvOutflow = pcr.min(self.waterBodyStorage, resvOutflow)

        resvOutflow = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0., resvOutflow)
        resvOutflow = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) == 2, resvOutflow)
        return (resvOutflow)  # unit: m3
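# A minimal scalar sketch of the release reduction factor described in the comment
# block above: it ramps linearly from 0 at minResvrFrac to 1 at maxResvrFrac of the
# reservoir capacity (the fractions and volumes below are illustrative only).
def release_reduction_factor(storage, capacity, min_frac=0.3, max_frac=0.75):
    ramp = max(0.0, storage - min_frac * capacity) / ((max_frac - min_frac) * capacity)
    return min(1.0, ramp)

# e.g. release_reduction_factor(0.525e9, 1e9) returns 0.5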
Example #41
0
    def report(self, storesAtBeginning, storesAtEnd):
        # report the state; which states are written and when is controlled by the configuration

        #set total to 0 on first day of the year
        if self._modelTime.doy == 1 or self._modelTime.isFirstTimestep():

            # set all accumulated variables to zero
            self.precipitationAcc = pcr.ifthen(self.landmask, pcr.scalar(0.0))

            for var in self.landSurface.fluxVars:
                vars(self)[var + 'Acc'] = pcr.ifthen(self.landmask,
                                                     pcr.scalar(0.0))

            self.nonFossilGroundwaterAbsAcc = pcr.ifthen(
                self.landmask, pcr.scalar(0.0))
            self.allocNonFossilGroundwaterAcc = pcr.ifthen(
                self.landmask, pcr.scalar(0.0))
            self.baseflowAcc = pcr.ifthen(self.landmask, pcr.scalar(0.0))

            self.surfaceWaterInfAcc = pcr.ifthen(self.landmask,
                                                 pcr.scalar(0.0))

            self.runoffAcc = pcr.ifthen(self.landmask, pcr.scalar(0.0))
            self.unmetDemandAcc = pcr.ifthen(self.landmask, pcr.scalar(0.0))

            self.waterBalanceAcc = pcr.ifthen(self.landmask, pcr.scalar(0.0))
            self.absWaterBalanceAcc = pcr.ifthen(self.landmask,
                                                 pcr.scalar(0.0))

            # also save the storage at the first day of the year (or the first time step)
            self.storageAtFirstDay = pcr.ifthen(self.landmask,
                                                storesAtBeginning)

        # accumulating until the last day of the year:
        self.precipitationAcc += self.meteo.precipitation
        for var in self.landSurface.fluxVars:
            vars(self)[var + 'Acc'] += vars(self.landSurface)[var]

        self.nonFossilGroundwaterAbsAcc += self.groundwater.nonFossilGroundwaterAbs
        self.allocNonFossilGroundwaterAcc += self.groundwater.allocNonFossilGroundwater
        self.baseflowAcc += self.groundwater.baseflow

        self.surfaceWaterInfAcc += self.groundwater.surfaceWaterInf

        self.runoffAcc += self.routing.runoff
        self.unmetDemandAcc += self.groundwater.unmetDemand

        self.waterBalance = \
          (storesAtBeginning - storesAtEnd +\
           self.meteo.precipitation + self.landSurface.irrGrossDemand + self.groundwater.surfaceWaterInf -\
           self.landSurface.actualET - self.routing.runoff - self.groundwater.nonFossilGroundwaterAbs)

        self.waterBalanceAcc = self.waterBalanceAcc + self.waterBalance
        self.absWaterBalanceAcc = self.absWaterBalanceAcc + pcr.abs(
            self.waterBalance)

        if self._modelTime.isLastDayOfYear():
            self.dumpState(self._configuration.endStateDir)

            msg = 'The following waterBalance checks assume fracWat = 0 for all cells (not including surface water bodies).'
            logging.getLogger("model").info(
                msg)  # TODO: Improve these water balance checks.

            totalCellArea = vos.getMapTotal(
                pcr.ifthen(self.landmask, self.routing.cellArea))
            msg = 'Total area = %e km2'\
                    % (totalCellArea/1e6)
            logging.getLogger("model").info(msg)

            deltaStorageOneYear = vos.getMapVolume( \
                                     pcr.ifthen(self.landmask,storesAtEnd) - \
                                     pcr.ifthen(self.landmask,self.storageAtFirstDay),
                                     self.routing.cellArea)
            msg = 'Delta total storage days 1 to %i in %i = %e km3 = %e mm'\
                % (    int(self._modelTime.doy),\
                       int(self._modelTime.year),\
                       deltaStorageOneYear/1e9,\
                       deltaStorageOneYear*1000/totalCellArea)
            logging.getLogger("model").info(msg)

            # reporting the endStates at the end of the Year:
            variableList = [
                'precipitation', 'nonFossilGroundwaterAbs',
                'allocNonFossilGroundwater', 'baseflow', 'surfaceWaterInf',
                'runoff', 'unmetDemand'
            ]
            variableList += self.landSurface.fluxVars
            variableList += ['waterBalance', 'absWaterBalance']

            for var in variableList:
                volume = vos.getMapVolume(\
                            self.__getattribute__(var + 'Acc'),\
                            self.routing.cellArea)
                msg = 'Accumulated %s days 1 to %i in %i = %e km3 = %e mm'\
                    % (var,int(self._modelTime.doy),\
                           int(self._modelTime.year),volume/1e9,volume*1000/totalCellArea)
                logging.getLogger("model").info(msg)
Example #42
0
    def initial(self):
        """ initial part of the water abstraction module
        """

        # self.testmap=windowaverage(self.var.Elevation,5)
        # self.report(self.testmap,"test.map")

        # ************************************************************
        # ***** WATER USE
        # ************************************************************
        settings = LisSettings.instance()
        option = settings.options
        binding = settings.binding
        maskinfo = MaskInfo.instance()
        if option['wateruse']:
            self.var.WUsePercRemain = loadmap('WUsePercRemain')
            self.var.NoWaterUseSteps = int(loadmap('maxNoWateruse'))
            self.var.GroundwaterBodies = loadmap('GroundwaterBodies')
            self.var.FractionGroundwaterUsed = np.minimum(
                np.maximum(loadmap('FractionGroundwaterUsed'),
                           maskinfo.in_zero()), 1.0)
            self.var.FractionNonConventionalWaterUsed = loadmap(
                'FractionNonConventionalWaterUsed')
            self.var.FractionLakeReservoirWaterUsed = loadmap(
                'FractionLakeReservoirWaterUsed')
            self.var.EFlowThreshold = loadmap('EFlowThreshold')
            # EFlowThreshold is map with m3/s discharge, e.g. the 10th percentile discharge of the baseline run

            self.var.WUseRegionC = loadmap('WUseRegion').astype(int)
            self.var.IrrigationMult = loadmap('IrrigationMult')

            # ************************************************************
            # ***** water use constant maps ******************************
            # ************************************************************

            self.var.IndustryConsumptiveUseFraction = loadmap(
                'IndustryConsumptiveUseFraction')
            # fraction (0-1)
            self.var.WaterReUseFraction = loadmap('WaterReUseFraction')
            # fraction of water re-used (0-1)
            self.var.EnergyConsumptiveUseFraction = loadmap(
                'EnergyConsumptiveUseFraction')
            # fraction (0-1), value depends on cooling technology of power plants
            self.var.LivestockConsumptiveUseFraction = loadmap(
                'LivestockConsumptiveUseFraction')
            # fraction (0-1)
            self.var.LeakageFraction = np.minimum(
                np.maximum(
                    loadmap('LeakageFraction') *
                    (1 - loadmap('LeakageReductionFraction')),
                    maskinfo.in_zero()), 1.0)
            self.var.DomesticLeakageConstant = np.minimum(
                np.maximum(1 / (1 - self.var.LeakageFraction),
                           maskinfo.in_zero()), 1.0)
            # Domestic Water Abstraction becomes larger in case of leakage
            # LeakageFraction is LeakageFraction (0-1) multiplied by reduction scenario (10% reduction is 0.1 in map)
            # 0.65 leakage and 0.1 reduction leads to 0.585 effective leakage, resulting in 2.41 times more water abstraction
            self.var.DomesticWaterSavingConstant = np.minimum(
                np.maximum(1 - loadmap('WaterSavingFraction'),
                           maskinfo.in_zero()), 1.0)
            # Domestic water saving if in place, changes this value from 1 to a value between 0 and 1, and will reduce demand and abstraction
            # so value = 0.9 if WaterSavingFraction equals 0.1 (10%)
            self.var.DomesticConsumptiveUseFraction = loadmap(
                'DomesticConsumptiveUseFraction')
            # fraction (0-1), typically rather low ~ 0.10
            self.var.LeakageWaterLossFraction = loadmap('LeakageWaterLoss')
            # fraction (0-1), 0=no leakage

            # Initialize water demand. Read from a static map either value or pcraster map or netcdf (single or stack).
            # If reading from NetCDF stack, get time step corresponding to model step.
            # Added management for sub-daily modelling time steps
            # Added possibility to use one single average year to be repeated during the simulation
            if option['useWaterDemandAveYear']:
                # CM: using one water demand average year throughout the model simulation
                self.var.DomesticDemandMM = loadmap(
                    'DomesticDemandMaps',
                    timestampflag='closest',
                    averageyearflag=True) * self.var.DtDay
                self.var.IndustrialDemandMM = loadmap(
                    'IndustrialDemandMaps',
                    timestampflag='closest',
                    averageyearflag=True) * self.var.DtDay
                self.var.LivestockDemandMM = loadmap(
                    'LivestockDemandMaps',
                    timestampflag='closest',
                    averageyearflag=True) * self.var.DtDay
                self.var.EnergyDemandMM = loadmap(
                    'EnergyDemandMaps',
                    timestampflag='closest',
                    averageyearflag=True) * self.var.DtDay
            else:
                # CM: using information on water demand from NetCDF files
                self.var.DomesticDemandMM = loadmap(
                    'DomesticDemandMaps',
                    timestampflag='closest') * self.var.DtDay
                self.var.IndustrialDemandMM = loadmap(
                    'IndustrialDemandMaps',
                    timestampflag='closest') * self.var.DtDay
                self.var.LivestockDemandMM = loadmap(
                    'LivestockDemandMaps',
                    timestampflag='closest') * self.var.DtDay
                self.var.EnergyDemandMM = loadmap(
                    'EnergyDemandMaps',
                    timestampflag='closest') * self.var.DtDay

            # Check consistency with the reference calendar that is read from the precipitation forcing file (global_modules.zusatz.optionBinding)
            if option['TransientWaterDemandChange'] and option[
                    'readNetcdfStack']:
                for k in ('DomesticDemandMaps', 'IndustrialDemandMaps',
                          'LivestockDemandMaps', 'EnergyDemandMaps'):
                    with Dataset(binding[k] + '.nc') as nc:
                        cal_type = get_calendar_type(nc)
                        if cal_type != binding['calendar_type']:
                            warnings.warn(
                                calendar_inconsistency_warning(
                                    binding[k], cal_type,
                                    binding['calendar_type']))

            if option['groundwaterSmooth']:
                self.var.GroundwaterBodiesPcr = decompress(
                    self.var.GroundwaterBodies)
                self.var.groundwaterCatch = boolean(
                    decompress((self.var.GroundwaterBodies *
                                self.var.Catchments).astype(int)))
                # nominal(scalar(GroundwaterBodies)*scalar(self.var.Catchments));
                # smoothing for groundwater to correct error by using windowtotal, based on groundwater bodies and catchments
                self.var.LZSmoothRange = loadmap('LZSmoothRange')

            if option['wateruseRegion']:
                WUseRegion = nominal(loadmap('WUseRegion', pcr=True))
                pitWuse1 = ifthen(self.var.AtLastPoint != 0, boolean(1))
                pitWuse1b = ifthen(defined(pitWuse1), WUseRegion)
                # use every existing pit in the Ldd and number them by the water regions
                # coastal water regions can have more than one pit per water region

                pitWuseMax = areamaximum(self.var.UpArea, WUseRegion)
                pitWuse2 = ifthen(pitWuseMax == self.var.UpArea, WUseRegion)
                # search outlets in the inland water regions by using the maximum upstream area as criterion

                pitWuse3 = downstream(self.var.LddStructuresKinematic,
                                      WUseRegion)
                pitWuse3b = ifthen(pitWuse3 != WUseRegion, WUseRegion)
                # search point where ldd leaves a water region

                pitWuse = cover(pitWuse1b, pitWuse2, pitWuse3b, nominal(0))
                # join all sources of pits

                LddWaterRegion = lddrepair(
                    ifthenelse(pitWuse == 0, self.var.LddStructuresKinematic,
                               5))
                # create an Ldd with pits at every water region outlet
                # this results in an interrupted ldd, so water cannot be transferred to the next water region
                lddC = compressArray(LddWaterRegion)
                inAr = decompress(
                    np.arange(maskinfo.info.mapC[0], dtype="int32"))
                # giving a number to each non-missing pixel as an id
                self.var.downWRegion = (compressArray(
                    downstream(LddWaterRegion, inAr))).astype(np.int32)
                # each upstream pixel gets the id of the downstream pixel
                self.var.downWRegion[lddC == 5] = maskinfo.info.mapC[0]
                # all pits get a high number

                # ************************************************************
                # ***** OUTFLOW AND INFLOW POINTS FOR WATER REGIONS **********
                # ************************************************************

                self.var.WaterRegionOutflowPoints = ifthen(
                    pitWuse != 0, boolean(1))
                # outflowpoints to calculate upstream inflow for balances and Water Exploitation Index
                # both inland outflowpoints to downstream subbasin, and coastal outlets

                WaterRegionInflow1 = boolean(
                    upstream(
                        self.var.LddStructuresKinematic,
                        cover(scalar(self.var.WaterRegionOutflowPoints), 0)))
                self.var.WaterRegionInflowPoints = ifthen(
                    WaterRegionInflow1, boolean(1))
                # inflowpoints to calculate upstream inflow for balances and Water Exploitation Index
            else:
                self.var.downWRegion = self.var.downstruct.copy()
                self.var.downWRegion = self.var.downWRegion.astype(np.int32)

            # ************************************************************
            # ***** Initialising cumulative output variables *************
            # ************************************************************

            # These are all needed to compute the cumulative mass balance error
            self.var.wateruseCum = maskinfo.in_zero()
            # water use cumulated amount
            self.var.WUseAddM3Dt = maskinfo.in_zero()
            self.var.WUseAddM3 = maskinfo.in_zero()

            self.var.IrriLossCUM = maskinfo.in_zero()
            # Cumulative irrigation loss [mm]
            # Cumulative abstraction from surface water [mm]

            self.var.TotalAbstractionFromSurfaceWaterM3 = maskinfo.in_zero()
            self.var.TotalAbstractionFromGroundwaterM3 = maskinfo.in_zero()
            self.var.TotalIrrigationAbstractionM3 = maskinfo.in_zero()
            self.var.TotalPaddyRiceIrrigationAbstractionM3 = maskinfo.in_zero()
            self.var.TotalLivestockAbstractionM3 = maskinfo.in_zero()

            self.var.IrrigationType = loadmap('IrrigationType')
            self.var.IrrigationEfficiency = loadmap('IrrigationEfficiency')
            self.var.ConveyanceEfficiency = loadmap('ConveyanceEfficiency')

            self.var.GroundwaterRegionPixels = np.take(
                np.bincount(self.var.WUseRegionC,
                            weights=self.var.GroundwaterBodies),
                self.var.WUseRegionC)
            self.var.AllRegionPixels = np.take(
                np.bincount(self.var.WUseRegionC,
                            weights=self.var.GroundwaterBodies * 0.0 + 1.0),
                self.var.WUseRegionC)
            self.var.RatioGroundWaterUse = self.var.AllRegionPixels / (
                self.var.GroundwaterRegionPixels + 0.01)
            self.var.FractionGroundwaterUsed = np.minimum(
                self.var.FractionGroundwaterUsed *
                self.var.RatioGroundWaterUse,
                1 - self.var.FractionNonConventionalWaterUsed)
            # FractionGroundwaterUsed is a percentage given at national scale
            # since the water needs to come from the GroundwaterBodies pixels,
            # the fraction needs correction for the non-Groundwaterbodies; this is done here
            self.var.EFlowIndicator = maskinfo.in_zero()
            self.var.ReservoirAbstractionM3 = maskinfo.in_zero()
            self.var.PotentialSurfaceWaterAvailabilityForIrrigationM3 = maskinfo.in_zero()
            self.var.LakeAbstractionM3 = maskinfo.in_zero()
            self.var.FractionAbstractedFromChannels = maskinfo.in_zero()
            self.var.AreatotalIrrigationUseM3 = maskinfo.in_zero()
            self.var.totalAddM3 = maskinfo.in_zero()
            self.var.TotalDemandM3 = maskinfo.in_zero()
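
The per-region bookkeeping above (GroundwaterRegionPixels, AllRegionPixels, RatioGroundWaterUse) amounts to counting pixels per water-use region with np.bincount and broadcasting the counts back to pixel space with np.take. A minimal, self-contained sketch with made-up arrays (not part of the model code):

import numpy as np

WUseRegionC = np.array([0, 0, 1, 1, 1, 2])               # water-use region id per pixel
GroundwaterBodies = np.array([1., 0., 1., 1., 0., 0.])   # 1 = pixel belongs to a groundwater body

# per-region counts, broadcast back to pixel space
gw_pixels = np.take(np.bincount(WUseRegionC, weights=GroundwaterBodies), WUseRegionC)
all_pixels = np.take(np.bincount(WUseRegionC, weights=np.ones_like(GroundwaterBodies)), WUseRegionC)
ratio = all_pixels / (gw_pixels + 0.01)                  # the 0.01 avoids division by zero
print(ratio)
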
Example #43
0
            if os.path.basename(pcraster_file).startswith(return_period):
                selected_pcraster_file = pcraster_file
                map_for_this_return_period = pcr.readmap(
                    selected_pcraster_file)
        else:
            if return_period in pcraster_file:
                selected_pcraster_file = pcraster_file
                map_for_this_return_period = pcr.readmap(
                    selected_pcraster_file)

    print(selected_pcraster_file)
    map_for_this_return_period = pcr.cover(map_for_this_return_period, 0.0)

    if i_return_period > 0:
        check_map = pcr.ifthenelse(map_for_this_return_period >= previous_map,
                                   pcr.scalar(0.0), pcr.scalar(-1.0))

        minimum_value, maximum_value, average_value = vos.getMinMaxMean(
            check_map)

        msg = ""
        msg += "\n"
        msg += "\n"
        msg += "Checkting that the values in the file %s are equal to or bigger than the file %s : Min %f Max %f Mean %f" % (
            selected_pcraster_file, previous_file, minimum_value,
            maximum_value, average_value)
        msg += "\n"
        msg += "\n"
        print(msg)

    previous_map = map_for_this_return_period
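
The loop above flags cells where the map for a higher return period drops below the map for the previous one. A numpy-only sketch of the same check, using hypothetical 2x2 grids in place of PCRaster maps:

import numpy as np

previous_map = np.array([[0.5, 1.0],
                         [1.5, 2.0]])
map_for_this_return_period = np.array([[0.6, 1.2],
                                       [1.4, 2.5]])      # 1.4 < 1.5 breaks monotonicity

check_map = np.where(map_for_this_return_period >= previous_map, 0.0, -1.0)
print(check_map.min(), check_map.max(), check_map.mean())  # a minimum of -1.0 flags the violation
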
Example #44
0
    def setGapFraction(self, gapFraction):
        self.gapFraction = pcr.scalar(gapFraction)
Example #45
0
    def __init__(self, iniItems,landmask,spinUp):
        object.__init__(self)
        
        self.cloneMap = iniItems.cloneMap
        self.tmpDir = iniItems.tmpDir
        self.inputDir = iniItems.globalOptions['inputDir']
        self.landmask = landmask

        # option to activate water balance check
        self.debugWaterBalance = True
        if iniItems.routingOptions['debugWaterBalance'] == "False":
            self.debugWaterBalance = False

        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign the recession coefficient parameter(s)
            self.recessionCoeff = vos.readPCRmapClone(\
               iniItems.groundwaterOptions['recessionCoeff'],
               self.cloneMap,self.tmpDir,self.inputDir)
        else:       
            groundwaterPropertiesNC = vos.getFullPath(\
                                      iniItems.groundwaterOptions[\
                                         'groundwaterPropertiesNC'],
                                          self.inputDir)
            self.recessionCoeff = vos.netcdf2PCRobjCloneWithoutTime(\
                                  groundwaterPropertiesNC,'recessionCoeff',\
                                  cloneMapFileName = self.cloneMap)

        # groundwater recession coefficient (day-1)
        self.recessionCoeff = pcr.cover(self.recessionCoeff,0.00)       
        self.recessionCoeff = pcr.min(1.0000,self.recessionCoeff)       
        #
        if 'minRecessionCoeff' in iniItems.groundwaterOptions.keys():
            minRecessionCoeff = float(iniItems.groundwaterOptions['minRecessionCoeff'])
        else:
            minRecessionCoeff = 1.0e-4                                       # This is the minimum value used in Van Beek et al. (2011). 
        self.recessionCoeff = pcr.max(minRecessionCoeff,self.recessionCoeff)      
        
        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign aquifer specific yield
            self.specificYield  = vos.readPCRmapClone(\
               iniItems.groundwaterOptions['specificYield'],
               self.cloneMap,self.tmpDir,self.inputDir)
        else:       
            self.specificYield = vos.netcdf2PCRobjCloneWithoutTime(\
                                 groundwaterPropertiesNC,'specificYield',\
                                 cloneMapFileName = self.cloneMap)

        self.specificYield  = pcr.cover(self.specificYield,0.0)       
        self.specificYield  = pcr.max(0.010,self.specificYield)         # TODO: TO BE CHECKED: The resample process of specificYield     
        self.specificYield  = pcr.min(1.000,self.specificYield)       

        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign aquifer saturated conductivity
            self.kSatAquifer = vos.readPCRmapClone(\
               iniItems.groundwaterOptions['kSatAquifer'],
               self.cloneMap,self.tmpDir,self.inputDir)
        else:       
            self.kSatAquifer = vos.netcdf2PCRobjCloneWithoutTime(\
                               groundwaterPropertiesNC,'kSatAquifer',\
                               cloneMapFileName = self.cloneMap)

        self.kSatAquifer = pcr.cover(self.kSatAquifer,0.0)       
        self.kSatAquifer = pcr.max(0.010,self.kSatAquifer)       

        # limitAbstraction options
        self.limitAbstraction = False
        if iniItems.landSurfaceOptions['limitAbstraction'] == "True": self.limitAbstraction = True
        

        # option for limiting regional groundwater abstractions
        if iniItems.groundwaterOptions['pumpingCapacityNC'] != "None":

            logger.info('Limit for annual regional groundwater abstraction is used.')
            self.limitRegionalAnnualGroundwaterAbstraction = True
            self.pumpingCapacityNC = vos.getFullPath(\
                                     iniItems.groundwaterOptions['pumpingCapacityNC'],self.inputDir,False)
        else:
            logger.warning('NO LIMIT for regional groundwater (annual) pumping. It may result in too high groundwater abstraction.')
            self.limitRegionalAnnualGroundwaterAbstraction = False
        
        # option for limiting fossil groundwater abstractions:
        self.limitFossilGroundwaterAbstraction = False
        #
        # estimate of fossil groundwater capacity:
        if iniItems.groundwaterOptions['limitFossilGroundWaterAbstraction'] == "True": 

            logger.info('Fossil groundwater abstractions are allowed with LIMIT.')
            self.limitFossilGroundwaterAbstraction = True

            # estimate of thickness (unit: m) of accessible groundwater: shallow and deep
            totalGroundwaterThickness = vos.readPCRmapClone(\
                                        iniItems.groundwaterOptions['estimateOfTotalGroundwaterThickness'],
                                        self.cloneMap,self.tmpDir,self.inputDir)
            # extrapolation 
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                        pcr.windowaverage(totalGroundwaterThickness, 1.0))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                        pcr.windowaverage(totalGroundwaterThickness, 1.5))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                        pcr.windowaverage(totalGroundwaterThickness, 2.5))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                        pcr.windowaverage(totalGroundwaterThickness, 5.0))
            #
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness, 0.0)
            #
            # set minimum thickness
            minimumThickness = pcr.scalar(float(\
                               iniItems.groundwaterOptions['minimumTotalGroundwaterThickness']))
            totalGroundwaterThickness = pcr.max(minimumThickness, totalGroundwaterThickness)
            #            
            # estimate of capacity (unit: m) of renewable groundwater (shallow)
            storGroundwaterCap =  pcr.cover(
                                  vos.readPCRmapClone(\
                                  iniItems.groundwaterOptions['estimateOfRenewableGroundwaterCapacity'],
                                  self.cloneMap,self.tmpDir,self.inputDir), 0.0)
            #
            # fossil groundwater capacity (unit: m)
            self.fossilWaterCap = pcr.ifthen(self.landmask,\
                                  pcr.max(0.0,\
                                  totalGroundwaterThickness*self.specificYield - storGroundwaterCap))

        # zones at which groundwater allocations are determined
        self.usingAllocSegments = False
        if iniItems.landSurfaceOptions['allocationSegmentsForGroundSurfaceWater']  != "None": self.usingAllocSegments = True
        
        # incorporating groundwater distribution network:
        if self.usingAllocSegments:

            self.allocSegments = vos.readPCRmapClone(\
             iniItems.landSurfaceOptions['allocationSegmentsForGroundSurfaceWater'],
             self.cloneMap,self.tmpDir,self.inputDir,isLddMap=False,cover=None,isNomMap=True)
            self.allocSegments = pcr.ifthen(self.landmask, self.allocSegments)

            cellArea = vos.readPCRmapClone(\
              iniItems.routingOptions['cellAreaMap'],
              self.cloneMap,self.tmpDir,self.inputDir)
            cellArea = pcr.ifthen(self.landmask, cellArea)              # TODO: integrate this one with the one coming from the routing module

            self.segmentArea = pcr.areatotal(pcr.cover(cellArea, 0.0), self.allocSegments)
            self.segmentArea = pcr.ifthen(self.landmask, self.segmentArea)
        
        # get initial conditions
        self.getICs(iniItems,spinUp)

        # initiate old style reporting                                  # TODO: remove this!
        self.initiate_old_style_groundwater_reporting(iniItems)
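
The fossil groundwater capacity computed earlier in this example is the water storable in the accessible column minus the renewable (shallow) part. A scalar sketch with purely illustrative numbers:

totalGroundwaterThickness = 200.0   # m, accessible thickness after the minimum-thickness clamp
specificYield = 0.05                # -, aquifer specific yield
storGroundwaterCap = 2.0            # m, estimate of renewable groundwater capacity

fossilWaterCap = max(0.0, totalGroundwaterThickness * specificYield - storGroundwaterCap)
print(fossilWaterCap)               # 8.0 m of fossil groundwater capacity
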
Example #46
0
    def setMaximumStore(self, maximumStore):
        self.maximumStore = pcr.scalar(maximumStore)
        self.store = pcr.min(self.store, self.maximumStore)
Example #47
0
    def old_style_groundwater_reporting(self,currTimeStep):

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,\
                                          currTimeStep.month,\
                                          currTimeStep.day,\
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_dailyTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.day == 1:
                        vars(self)[var+'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var+'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True: 
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC: 

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.day == 1:
                            vars(self)[var+'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var+'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                     currTimeStep.day  
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True: 
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.doy == 1:
                        vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var+'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True: 
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC: 
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.doy == 1:
                            vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var+'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                     currTimeStep.doy  
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True: 
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.annuaIdx-1)
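
The reporting method above repeats one pattern per output frequency: reset an accumulator on the first step of the period, add the daily value, and divide by the day count at the end of the period. A plain-Python sketch of that pattern with made-up numbers:

daily_values = [2.0, 4.0, 6.0]        # hypothetical daily output for a 3-day "month"
days_in_month = len(daily_values)

month_tot = 0.0
for day, value in enumerate(daily_values, start=1):
    if day == 1:
        month_tot = 0.0               # reset at the beginning of the month
    month_tot += value                # accumulate
    if day == days_in_month:          # end of the month
        month_avg = month_tot / day
        print(month_tot, month_avg)   # 12.0 4.0
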
Example #48
0
    def __init__(self, iniItems, landmask):
        object.__init__(self)
        
        # cloneMap, temporary directory for the resample process, temporary directory for the modflow process, absolute path for input directory, landmask
        self.cloneMap        = iniItems.cloneMap
        self.tmpDir          = iniItems.tmpDir
        self.tmp_modflow_dir = iniItems.tmp_modflow_dir
        self.inputDir        = iniItems.globalOptions['inputDir']
        self.landmask        = landmask
        
        # configuration from the ini file
        self.iniItems = iniItems
                
        # topography properties: read several variables from the netcdf file
        for var in ['dem_minimum','dem_maximum','dem_average','dem_standard_deviation',\
                    'slopeLength','orographyBeta','tanslope',\
                    'dzRel0000','dzRel0001','dzRel0005',\
                    'dzRel0010','dzRel0020','dzRel0030','dzRel0040','dzRel0050',\
                    'dzRel0060','dzRel0070','dzRel0080','dzRel0090','dzRel0100']:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['topographyNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        # channel properties: read several variables from the netcdf file
        for var in ['lddMap','cellAreaMap','gradient','bankfull_width',
                    'bankfull_depth','dem_floodplain','dem_riverbed']:
            vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['channelNC'], \
                                                                var, self.cloneMap)
            vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
        
        # minimum channel width
        minimum_channel_width = 0.5                                               # TODO: Define this one in the configuration file
        self.bankfull_width = pcr.max(minimum_channel_width, self.bankfull_width)
        
        #~ # cell fraction if channel water reaches the flood plain                # NOT USED YET
        #~ self.flood_plain_fraction = self.return_innundation_fraction(pcr.max(0.0, self.dem_floodplain - self.dem_minimum))
        
        # coefficient of Manning
        self.manningsN = vos.readPCRmapClone(self.iniItems.modflowParameterOptions['manningsN'],\
                                             self.cloneMap,self.tmpDir,self.inputDir)
        
        # minimum channel gradient
        minGradient   = 0.00005                                                   # TODO: Define this one in the configuration file
        self.gradient = pcr.max(minGradient, pcr.cover(self.gradient, minGradient))

        # correcting lddMap
        self.lddMap = pcr.ifthen(pcr.scalar(self.lddMap) > 0.0, self.lddMap)
        self.lddMap = pcr.lddrepair(pcr.ldd(self.lddMap))
        
        # channelLength = approximation of channel length (unit: m)  # This is approximated by cell diagonal. 
        cellSizeInArcMin      = np.round(pcr.clone().cellSize()*60.)               # FIXME: This one will not work if you use the resolution: 0.5, 1.5, 2.5 arc-min
        verticalSizeInMeter   = cellSizeInArcMin*1852.                            
        horizontalSizeInMeter = self.cellAreaMap/verticalSizeInMeter
        self.channelLength    = ((horizontalSizeInMeter)**(2)+\
                                 (verticalSizeInMeter)**(2))**(0.5)
        
        # option for lakes and reservoir
        self.onlyNaturalWaterBodies = False
        if self.iniItems.modflowParameterOptions['onlyNaturalWaterBodies'] == "True": self.onlyNaturalWaterBodies = True

        # groundwater linear recession coefficient (day-1) ; the linear reservoir concept is still being used to represent fast response flow  
        #                                                                                                                  particularly from karstic aquifer in mountainous regions                    
        self.recessionCoeff = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                                 'recessionCoeff', self.cloneMap)
        self.recessionCoeff = pcr.cover(self.recessionCoeff,0.00)       
        self.recessionCoeff = pcr.min(1.0000,self.recessionCoeff)       
        #
        if 'minRecessionCoeff' in iniItems.modflowParameterOptions.keys():
            minRecessionCoeff = float(iniItems.modflowParameterOptions['minRecessionCoeff'])
        else:
            minRecessionCoeff = 1.0e-4                                       # This is the minimum value used in Van Beek et al. (2011). 
        self.recessionCoeff = pcr.max(minRecessionCoeff,self.recessionCoeff)      
        
        # aquifer saturated conductivity (m/day)
        self.kSatAquifer = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                             'kSatAquifer', self.cloneMap)
        self.kSatAquifer = pcr.cover(self.kSatAquifer,pcr.mapmaximum(self.kSatAquifer))       
        self.kSatAquifer = pcr.max(0.001,self.kSatAquifer)
        # TODO: Define the minimum value as part of the configuration file
        
        # aquifer specific yield (dimensionless)
        self.specificYield = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['groundwaterPropertiesNC'],\
                                                               'specificYield', self.cloneMap)
        self.specificYield = pcr.cover(self.specificYield,pcr.mapmaximum(self.specificYield))       
        self.specificYield = pcr.max(0.010,self.specificYield)         # TODO: TO BE CHECKED: The resample process of specificYield     
        self.specificYield = pcr.min(1.000,self.specificYield)       
        # TODO: Define the minimum value as part of the configuration file

        # estimate of thickness (unit: m) of accessible groundwater
        totalGroundwaterThickness = vos.netcdf2PCRobjCloneWithoutTime(self.iniItems.modflowParameterOptions['estimateOfTotalGroundwaterThicknessNC'],\
                                    'thickness', self.cloneMap)
        # extrapolation 
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.0))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,\
                                    pcr.windowaverage(totalGroundwaterThickness, 1.5))
        totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness, 0.0)
        #
        # set minimum thickness
        minimumThickness = pcr.scalar(float(\
                           self.iniItems.modflowParameterOptions['minimumTotalGroundwaterThickness']))
        totalGroundwaterThickness = pcr.max(minimumThickness, totalGroundwaterThickness)
        #
        # set maximum thickness: 250 m.   # TODO: Define this one as part of the ini file
        maximumThickness = 250.
        self.totalGroundwaterThickness = pcr.min(maximumThickness, totalGroundwaterThickness)
        # TODO: Define the maximum value as part of the configuration file

        # surface water bed thickness  (unit: m)
        bed_thickness  = 0.1              # TODO: Define this as part of the configuration file
        # surface water bed resistance (unit: day)
        bed_resistance = bed_thickness / (self.kSatAquifer) 
        minimum_bed_resistance = 1.0      # TODO: Define this as part of the configuration file
        self.bed_resistance = pcr.max(minimum_bed_resistance, bed_resistance)
        
        # option to ignore capillary rise
        self.ignoreCapRise = True
        if self.iniItems.modflowParameterOptions['ignoreCapRise'] == "False": self.ignoreCapRise = False
        
        # a variable to indicate if the modflow has been called or not
        self.modflow_has_been_called = False
        
        # list of the convergence criteria for HCLOSE (unit: m)
        # - Deltares' default value is 0.001 m                          # check this value with Jarno
        self.criteria_HCLOSE = [0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]  
        self.criteria_HCLOSE = sorted(self.criteria_HCLOSE)
        
        # list of the convergence criteria for RCLOSE (unit: m3)
        # - Deltares' default value for their 25 and 250 m resolution models is 10 m3  # check this value with Jarno
        cell_area_assumption = verticalSizeInMeter * float(pcr.cellvalue(pcr.mapmaximum(horizontalSizeInMeter),1)[0])
        self.criteria_RCLOSE = [10., 10.* cell_area_assumption/(250.*250.), 10.* cell_area_assumption/(25.*25.)]
        self.criteria_RCLOSE = sorted(self.criteria_RCLOSE)

        # initiate the index for HCLOSE and RCLOSE
        self.iteration_HCLOSE = 0
        self.iteration_RCLOSE = 0
        
        # initiate old style reporting                                  # TODO: remove this!
        self.initiate_old_style_groundwater_reporting(iniItems)
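
The channel length in the example above is approximated by the cell diagonal: the vertical cell size follows from the cell size in arc-minutes (one arc-minute of latitude is roughly 1852 m), and the horizontal size from dividing the cell area by the vertical size. A worked example with an assumed 5 arc-minute cell and an assumed cell area:

cellSizeInArcMin = 5.0
cellArea = 5.0e7                                   # m2, assumed cell area at this latitude

verticalSizeInMeter = cellSizeInArcMin * 1852.     # 9260 m
horizontalSizeInMeter = cellArea / verticalSizeInMeter
channelLength = (horizontalSizeInMeter**2 + verticalSizeInMeter**2)**0.5
print(round(channelLength))                        # about 10700 m
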
Example #49
0
    def read_forcings(self, currTimeStep):

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 hard-coded references to the variable names
        # precipitation, temperature and evapotranspiration have been replaced
        # by the variable names used in the netCDF and passed from the ini file
        #-----------------------------------------------------------------------

        # method for finding time indexes in the precipitation netcdf file:
        # - the default one
        method_for_time_index = None
        # - based on the ini/configuration file (if given)
        if 'time_index_method_for_precipitation_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                           self.iniItems.meteoOptions['time_index_method_for_precipitation_netcdf'] != "None":
            method_for_time_index = self.iniItems.meteoOptions[
                'time_index_method_for_precipitation_netcdf']

        # reading precipitation:
        if self.precipitation_set_per_year:
            #~ print currTimeStep.year
            nc_file_per_year = self.preFileNC % (float(
                currTimeStep.year), float(currTimeStep.year))
            self.precipitation = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.preVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            self.precipitation = vos.netcdf2PCRobjClone(\
                                      self.preFileNC, self.preVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 added to automatically update precipitation
        self.precipitation = self.preConst + self.preFactor * pcr.ifthen(
            self.landmask, self.precipitation)
        #-----------------------------------------------------------------------

        # make sure that precipitation is always positive
        self.precipitation = pcr.max(0., self.precipitation)
        self.precipitation = pcr.cover(self.precipitation, 0.0)

        # ignore very small values of precipitation (less than 0.00001 m/day or less than 0.01 kg.m-2.day-1 )
        if self.usingDailyTimeStepForcingData:
            self.precipitation = pcr.rounddown(
                self.precipitation * 100000.) / 100000.

        # method for finding time index in the temperature netcdf file:
        # - the default one
        method_for_time_index = None
        # - based on the ini/configuration file (if given)
        if 'time_index_method_for_temperature_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                         self.iniItems.meteoOptions['time_index_method_for_temperature_netcdf'] != "None":
            method_for_time_index = self.iniItems.meteoOptions[
                'time_index_method_for_temperature_netcdf']

        # reading temperature
        if self.temperature_set_per_year:
            nc_file_per_year = self.tmpFileNC % (int(
                currTimeStep.year), int(currTimeStep.year))
            self.temperature = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.tmpVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            self.temperature = vos.netcdf2PCRobjClone(\
                                 self.tmpFileNC,self.tmpVarName,\
                                 str(currTimeStep.fulldate),
                                 useDoy = method_for_time_index,
                                 cloneMapFileName=self.cloneMap,\
                                 LatitudeLongitude = True)

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 added to automatically update temperature
        self.temperature = self.tmpConst + self.tmpFactor * pcr.ifthen(
            self.landmask, self.temperature)
        #-----------------------------------------------------------------------

        # Downscaling precipitation and temperature
        if self.downscalePrecipitationOption:
            self.downscalePrecipitation(currTimeStep)
        if self.downscaleTemperatureOption:
            self.downscaleTemperature(currTimeStep)

        # calculate or obtain referencePotET
        if self.refETPotMethod == 'Hamon':
            self.referencePotET = refPotET.HamonPotET(self.temperature,
                                                      currTimeStep.doy,
                                                      self.latitudes)
        if self.refETPotMethod == 'Input':

            # method for finding time indexes in the reference potential ET netcdf file:
            # - the default one
            method_for_time_index = None
            # - based on the ini/configuration file (if given)
            if 'time_index_method_for_ref_pot_et_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                            self.iniItems.meteoOptions['time_index_method_for_ref_pot_et_netcdf'] != "None":
                method_for_time_index = self.iniItems.meteoOptions[
                    'time_index_method_for_ref_pot_et_netcdf']

            if self.refETPotFileNC_set_per_year:
                nc_file_per_year = self.etpFileNC % (int(
                    currTimeStep.year), int(currTimeStep.year))
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.refETPotVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
            else:
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      self.etpFileNC,self.refETPotVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName=self.cloneMap,\
                                      LatitudeLongitude = True)
            #-----------------------------------------------------------------------
            # NOTE: RvB 13/07/2016 added to automatically update reference potential evapotranspiration
            self.referencePotET = self.refETPotConst + self.refETPotFactor * pcr.ifthen(
                self.landmask, self.referencePotET)
            #-----------------------------------------------------------------------

        # Downscaling referenceETPot (based on temperature)
        if self.downscaleReferenceETPotOption: self.downscaleReferenceETPot()

        # smoothing:
        if self.forcingSmoothing == True:
            logger.debug("Forcing data are smoothed.")
            self.precipitation = pcr.windowaverage(self.precipitation,
                                                   self.smoothingWindowsLength)
            self.temperature = pcr.windowaverage(self.temperature,
                                                 self.smoothingWindowsLength)
            self.referencePotET = pcr.windowaverage(
                self.referencePotET, self.smoothingWindowsLength)

        # rounding temperature values to minimize numerical errors (note only to minimize, not remove)
        self.temperature = pcr.roundoff(self.temperature * 1000.) / 1000.

        # ignore snow by setting temperature to 25 deg C
        if self.ignore_snow: self.temperature = pcr.spatial(pcr.scalar(25.))

        # define precipitation, temperature and referencePotET ONLY at landmask area (for reporting):
        self.precipitation = pcr.ifthen(self.landmask, self.precipitation)
        self.temperature = pcr.ifthen(self.landmask, self.temperature)
        self.referencePotET = pcr.ifthen(self.landmask, self.referencePotET)

        # make sure precipitation and referencePotET are always positive:
        self.precipitation = pcr.max(0.0, self.precipitation)
        self.referencePotET = pcr.max(0.0, self.referencePotET)

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,\
                                          currTimeStep.month,\
                                          currTimeStep.day,\
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_dailyTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.day == 1:
                        vars(self)[var+'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC:

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.day == 1:
                            vars(self)[var+'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                     currTimeStep.day
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.doy == 1:
                        vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC:
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.doy == 1:
                            vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                     currTimeStep.doy
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.annuaIdx-1)
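
The forcing pre-processing above applies a linear correction (a constant plus a factor) and then rounds precipitation down so that values below 0.00001 m/day are treated as zero. A scalar sketch with hypothetical correction constants:

import math

preConst, preFactor = 0.0, 1.0      # assumed correction constants from the configuration
precipitation = 0.0000071           # m/day, hypothetical raw value

precipitation = preConst + preFactor * precipitation
precipitation = max(0.0, precipitation)
precipitation = math.floor(precipitation * 100000.) / 100000.
print(precipitation)                # 0.0 -> anything below 0.00001 m/day is dropped
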
Example #50
0
    def modflow_simulation(self,\
                           simulation_type,\
                           initial_head,\
                           currTimeStep = None,\
                           PERLEN = 1.0, 
                           NSTP   = 1, \
                           HCLOSE = 1.0,\
                           RCLOSE = 10.* 400.*400.,\
                           MXITER = 50,\
                           ITERI = 30,\
                           NPCOND = 1,\
                           RELAX = 1.00,\
                           NBPOL = 2,\
                           DAMP = 1,\
                           ITMUNI = 4, LENUNI = 2, TSMULT = 1.0):
        
        # initiate pcraster modflow object if modflow is not called yet:
        if self.modflow_has_been_called == False or self.modflow_converged == False:
            self.initiate_modflow()
            self.modflow_has_been_called = True

        if simulation_type == "transient":
            logger.info("Preparing MODFLOW input for a transient simulation.")
            SSTR = 0
        if simulation_type == "steady-state":
            logger.info("Preparing MODFLOW input for a steady-state simulation.")
            SSTR = 1

        # waterBody class to define the extent of lakes and reservoirs
        #
        if simulation_type == "steady-state":
            self.WaterBodies = waterBodies.WaterBodies(self.iniItems,\
                                                       self.landmask,\
                                                       self.onlyNaturalWaterBodies)
            self.WaterBodies.getParameterFiles(date_given = self.iniItems.globalOptions['startTime'],\
                                               cellArea = self.cellAreaMap, \
                                               ldd = self.lddMap)        
        #
        if simulation_type == "transient":
            if currTimeStep.timeStepPCR == 1:
               self.WaterBodies = waterBodies.WaterBodies(self.iniItems,\
                                                          self.landmask,\
                                                          self.onlyNaturalWaterBodies)
            if currTimeStep.timeStepPCR == 1 or currTimeStep.doy == 1:
               self.WaterBodies.getParameterFiles(date_given = str(currTimeStep.fulldate),\
                                                  cellArea = self.cellAreaMap, \
                                                  ldd = self.lddMap)        

        print "here"

        # using dem_average as the initial groundwater head value 
        self.pcr_modflow.setInitialHead(initial_head, 1)
        
        # set parameter values for the DIS package and PCG solver
        self.pcr_modflow.setDISParameter(ITMUNI, LENUNI, PERLEN, NSTP, TSMULT, SSTR)
        self.pcr_modflow.setPCG(MXITER, ITERI, NPCOND, HCLOSE, RCLOSE, RELAX, NBPOL, DAMP)
        #
        # Some notes about the values  
        #
        # ITMUNI = 4     # indicates the time unit (0: undefined, 1: seconds, 2: minutes, 3: hours, 4: days, 5: years)
        # LENUNI = 2     # indicates the length unit (0: undefined, 1: feet, 2: meters, 3: centimeters)
        # PERLEN = 1.0   # duration of a stress period
        # NSTP   = 1     # number of time steps in a stress period
        # TSMULT = 1.0   # multiplier for the length of successive time steps within a stress period
        # SSTR   = 1     # 0 - transient, 1 - steady state
        #
        # MXITER = 50                 # maximum number of outer iterations           # Deltares use 50
        # ITERI  = 30                 # number of inner iterations                   # Deltares use 30
        # NPCOND = 1                  # 1 - Modified Incomplete Cholesky, 2 - Polynomial matrix conditioning method;
        # HCLOSE = 0.01               # HCLOSE (unit: m) 
        # RCLOSE = 10.* 400.*400.     # RCLOSE (unit: m3)
        # RELAX  = 1.00               # relaxation parameter used with NPCOND = 1
        # NBPOL  = 2                  # indicates whether the estimate of the upper bound on the maximum eigenvalue is 2.0 (but we do not use it, since NPCOND = 1)
        # DAMP   = 1                  # no damping (DAMP introduced in MODFLOW 2000)
        
        # read input files (for the steady-state condition, we use pcraster maps):
        if simulation_type == "steady-state":
            # - discharge (m3/s) from PCR-GLOBWB
            discharge = vos.readPCRmapClone(self.iniItems.modflowSteadyStateInputOptions['avgDischargeInputMap'],\
                                                self.cloneMap, self.tmpDir, self.inputDir)
            # - recharge/capillary rise (unit: m/day) from PCR-GLOBWB 
            gwRecharge = vos.readPCRmapClone(self.iniItems.modflowSteadyStateInputOptions['avgGroundwaterRechargeInputMap'],\
                                                self.cloneMap, self.tmpDir, self.inputDir)
            if self.ignoreCapRise: gwRecharge = pcr.max(0.0, gwRecharge) 
            gwAbstraction = pcr.spatial(pcr.scalar(0.0))

        # read input files (for the transient, input files are given in netcdf files):
        if simulation_type == "transient":
            # - discharge (m3/s) from PCR-GLOBWB
            discharge = vos.netcdf2PCRobjClone(self.iniItems.modflowTransientInputOptions['dischargeInputNC'],
                                               "discharge",str(currTimeStep.fulldate),None,self.cloneMap)
            # - recharge/capillary rise (unit: m/day) from PCR-GLOBWB 
            gwRecharge = vos.netcdf2PCRobjClone(self.iniItems.modflowTransientInputOptions['groundwaterRechargeInputNC'],\
                                               "groundwater_recharge",str(currTimeStep.fulldate),None,self.cloneMap)
            if self.ignoreCapRise: gwRecharge = pcr.max(0.0, gwRecharge) 
            # - groundwater abstraction (unit: m/day) from PCR-GLOBWB 
            gwAbstraction = vos.netcdf2PCRobjClone(self.iniItems.modflowTransientInputOptions['groundwaterAbstractionInputNC'],\
                                               "total_groundwater_abstraction",str(currTimeStep.fulldate),None,self.cloneMap)

        # set recharge, river, well and drain packages
        self.set_river_package(discharge, currTimeStep)
        self.set_recharge_package(gwRecharge)
        self.set_well_package(gwAbstraction)
        self.set_drain_package()
        
        # execute MODFLOW 
        logger.info("Executing MODFLOW.")
        self.pcr_modflow.run()
        
        logger.info("Check if the model whether a run has converged or not")
        self.modflow_converged = self.check_modflow_convergence()
        if self.modflow_converged == False:

            msg = "MODFLOW FAILED TO CONVERGE with HCLOSE = "+str(HCLOSE)+" and RCLOSE = "+str(RCLOSE)
            logger.info(msg)
            
            # iteration index for the RCLOSE
            self.iteration_RCLOSE += 1 
            # reset if the index has reached the length of available criteria
            if self.iteration_RCLOSE > (len(self.criteria_RCLOSE)-1): self.iteration_RCLOSE = 0     

            # iteration index for the HCLOSE
            if self.iteration_RCLOSE == 0: self.iteration_HCLOSE += 1 
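            # Illustration of the retry order (hedged; the actual criteria lists are
            # defined elsewhere in this class and not shown here): with, say,
            # criteria_HCLOSE = [h0, h1] and criteria_RCLOSE = [r0, r1, r2], failed
            # runs are retried as (h0,r0), (h0,r1), (h0,r2), (h1,r0), ... so every
            # RCLOSE value is exhausted before HCLOSE is relaxed by one step.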
            
            # we have to reset modflow as we want to change the PCG setup
            self.modflow_has_been_called = False
            
            # for the steady state simulation, we still save the calculated head as the initial estimate for the next iteration
            if simulation_type == "steady-state": self.groundwaterHead = self.pcr_modflow.getHeads(1)
            # NOTE: We cannot implement this principle for transient simulation 
            
        else:

            msg = "HURRAY!!! MODFLOW CONVERGED with HCLOSE = "+str(HCLOSE)+" and RCLOSE = "+str(RCLOSE)
            logger.info(msg)

            # reset the iteration because modflow has converged
            self.iteration_HCLOSE = 0
            self.iteration_RCLOSE = 0
            
            self.modflow_has_been_called = True
            
            # obtaining the results from modflow simulation
            self.groundwaterHead = None
            self.groundwaterHead = self.pcr_modflow.getHeads(1)  
            
            # calculate groundwater depth only in the landmask region
            self.groundwaterDepth = pcr.ifthen(self.landmask, self.dem_average - self.groundwaterHead)
Example #51
0
def derive_HAND(dem,
                ldd,
                accuThreshold,
                rivers=None,
                basin=None,
                up_area=None,
                neg_HAND=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X
    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
                        identified as river should have a value > 0, other
                        pixels a value of zero.
        basin=None -- optional boolean pcraster map; where True, the reference drain is found along the ldd,
                        where False, it is found by the shortest friction distance. The friction distance is
                        weighted by the upstream area (i.e. drains with a larger upstream area have a lower friction);
                        the spreadzone operator is used in this case.
        up_area=None -- provide the upstream area (if not assigned a guesstimate is prepared, assuming the LDD covers a
                        full catchment area)
        neg_HAND=None -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
                        stream (for example when there are natural embankments)
    Output:
        hand -- pcraster object float32, height normalised to the nearest stream
        dist -- distance to nearest stream measured in cell lengths
            according to D8 directions
    """
    if rivers is None:
        # prepare stream from a strahler threshold
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1),
            pcr.boolean(0))
    else:
        # convert stream network to boolean
        stream = pcr.boolean(pcr.cover(rivers, 0))
    # determine height in river (in DEM*100 unit as ordinal)
    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # use basin to allocate areas outside basin to the nearest stream. Nearest is weighted by upstream area
        if up_area is None:
            up_area = pcr.accuflux(ldd, 1)
        up_area = pcr.ifthen(stream, up_area)  # mask areas outside streams
        friction = 1. / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0))
        # if basin, use nearest river within subcatchment, if outside basin, use weighted-nearest river
        up_elevation = pcr.ifthenelse(
            basin, pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, friction)))
        # replace areas outside of basin by a spread zone calculation.
    # make negative HANDS also possible
    if neg_HAND == 1:
        hand = (pcr.scalar(pcr.ordinal(dem * 100)) -
                up_elevation) / 100  # convert back to float in DEM units
    else:
        hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation,
                       0) / 100  # convert back to float in DEM units
    dist = pcr.ldddist(ldd, stream, 1)  # compute horizontal distance estimate
    return hand, dist
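# Minimal usage sketch (hedged): "dem.map" and "ldd.map" are hypothetical
# placeholder rasters sharing one PCRaster clone; only standard PCRaster calls
# and the derive_HAND function above are used.
if __name__ == "__main__":
    import pcraster as pcr

    pcr.setclone("dem.map")
    dem = pcr.readmap("dem.map")                          # elevation (scalar)
    ldd = pcr.lddrepair(pcr.ldd(pcr.readmap("ldd.map")))  # local drain directions
    # cells with at least 1000 upstream cells are treated as the stream network
    hand, dist = derive_HAND(dem, ldd, accuThreshold=1000, neg_HAND=1)
    pcr.report(hand, "hand.map")
    pcr.report(dist, "dist_to_stream.map")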
Example #52
0
    def set_river_package(self, discharge, currTimeStep):

        logger.info("Set the river package.")
        
        # - surface water river bed/bottom elevation and conductance 
        need_to_define_surface_water_bed = False
        if currTimeStep is None:
            # this is for a steady-state simulation (no currTimeStep defined)
            need_to_define_surface_water_bed = True
        else:    
            # only at the first month of the model simulation or the first month of the year
            if self.firstMonthOfSimulation or currTimeStep.month == 1:
                need_to_define_surface_water_bed = True
                self.firstMonthOfSimulation = False          # This part becomes False as we don't need it anymore. 

        if need_to_define_surface_water_bed:

            logger.info("Estimating the surface water bed elevation and surface water bed conductance.")
        
            #~ # - for lakes and reservoirs, alternative 1: make the bottom elevation deep --- Shall we do this? 
            #~ additional_depth = 500.
            #~ surface_water_bed_elevation = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                                     #~ self.dem_riverbed - additional_depth)
            #
            # - for lakes and reservoirs, estimate bed elevation from dem and bankfull depth
            surface_water_bed_elevation  = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, self.dem_average)
            surface_water_bed_elevation  = pcr.areaaverage(surface_water_bed_elevation, self.WaterBodies.waterBodyIds)
            surface_water_bed_elevation -= pcr.areamaximum(self.bankfull_depth, self.WaterBodies.waterBodyIds) 
            #
            surface_water_bed_elevation  = pcr.cover(surface_water_bed_elevation, self.dem_riverbed)
            #~ surface_water_bed_elevation = self.dem_riverbed # This is an alternative, if we do not want to introduce very deep bottom elevations of lakes and/or reservoirs.   
            #
            # rounding values for surface_water_bed_elevation
            self.surface_water_bed_elevation = pcr.roundup(surface_water_bed_elevation * 1000.)/1000.
            #
            # - river bed conductance (unit: m2/day)
            bed_surface_area = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                                     self.WaterBodies.fracWat * self.cellAreaMap)   # TODO: Incorporate the concept of dynamicFracWat; convergence problems occur when it is used here.
            bed_surface_area = pcr.min(bed_surface_area,\
                               pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                          pcr.areaaverage(self.bankfull_width * self.channelLength, self.WaterBodies.waterBodyIds)))
            bed_surface_area = pcr.cover(bed_surface_area, \
                                         self.bankfull_width * self.channelLength)
            #~ bed_surface_area = self.bankfull_width * self.channelLength
            bed_conductance = (1.0/self.bed_resistance) * bed_surface_area
            bed_conductance = pcr.ifthenelse(bed_conductance < 1e-20, 0.0, \
                                             bed_conductance) 
            self.bed_conductance = pcr.cover(bed_conductance, 0.0)
             

            logger.info("Estimating outlet widths of lakes and/or reservoirs.")
            # - 'channel width' for lakes and reservoirs 
            channel_width = pcr.areamaximum(self.bankfull_width, self.WaterBodies.waterBodyIds)
            self.channel_width = pcr.cover(channel_width, self.bankfull_width)
        

        logger.info("Estimating surface water elevation.")
        
        # - convert discharge value to surface water elevation (m)
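        # For reference: the expression below follows from Manning's equation for
        # a wide rectangular channel, Q = (1/n) * W * h**(5/3) * sqrt(S), solved
        # for the water depth h: h = n**(3/5) * Q**(3/5) * W**(-3/5) * S**(-3/10),
        # with W the channel width, S the gradient and n Manning's roughness.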
        river_water_height = (self.channel_width**(-3./5.)) * (discharge**(3./5.)) * (self.gradient**(-3./10.)) * (self.manningsN**(3./5.))
        surface_water_elevation = self.dem_riverbed + \
                                  river_water_height
        #
        # - calculating water level (unit: m) above the flood plain   # TODO: Improve this concept (using Rens's latest inundation scheme) 
        #----------------------------------------------------------
        water_above_fpl  = pcr.max(0.0, surface_water_elevation - self.dem_floodplain)  # unit: m, water level above the floodplain (not distributed)
        water_above_fpl *= self.bankfull_depth * self.bankfull_width / self.cellAreaMap  # unit: m, water level above the floodplain (distributed within the cell)
        # TODO: Improve this concept using Rens's latest scheme
        #
        # - corrected surface water elevation
        surface_water_elevation = pcr.ifthenelse(surface_water_elevation > self.dem_floodplain, \
                                                                           self.dem_floodplain + water_above_fpl, \
                                                                           surface_water_elevation)
        # - surface water elevation for lakes and reservoirs:
        lake_reservoir_water_elevation = pcr.ifthen(self.WaterBodies.waterBodyOut, surface_water_elevation)
        lake_reservoir_water_elevation = pcr.areamaximum(lake_reservoir_water_elevation, self.WaterBodies.waterBodyIds)
        lake_reservoir_water_elevation = pcr.cover(lake_reservoir_water_elevation, \
                                         pcr.areaaverage(surface_water_elevation, self.WaterBodies.waterBodyIds))
        # - maximum and minimum values for lake_reservoir_water_elevation
        lake_reservoir_water_elevation = pcr.min(self.dem_floodplain, lake_reservoir_water_elevation)
        lake_reservoir_water_elevation = pcr.max(self.surface_water_bed_elevation, lake_reservoir_water_elevation)
        # - smoothing
        lake_reservoir_water_elevation = pcr.areaaverage(surface_water_elevation, self.WaterBodies.waterBodyIds)
        # 
        # - merge lake and reservoir water elevation
        surface_water_elevation = pcr.cover(lake_reservoir_water_elevation, surface_water_elevation)
        #
        # - covering the missing values and rounding
        surface_water_elevation = pcr.cover(surface_water_elevation, self.surface_water_bed_elevation)
        surface_water_elevation = pcr.rounddown(surface_water_elevation * 1000.)/1000.
        #
        # - make sure that HRIV >= RBOT ; no infiltration if HRIV = RBOT (and h < RBOT)  
        self.surface_water_elevation = pcr.max(surface_water_elevation, self.surface_water_bed_elevation)
        #
        # - pass the values to the RIV package 
        self.pcr_modflow.setRiver(self.surface_water_elevation, self.surface_water_bed_elevation, self.bed_conductance, 1)
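        # For reference: MODFLOW's RIV package computes the river-aquifer exchange
        # per cell as Q = C * (HRIV - h) while the groundwater head h is above the
        # river bottom RBOT, and as Q = C * (HRIV - RBOT) once h drops below RBOT,
        # with C the bed conductance (m2/day) set above.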
Example #53
0
def volume_spread(ldd,
                  hand,
                  subcatch,
                  volume,
                  volume_thres=0.,
                  cell_surface=1.,
                  iterations=15,
                  logging=logging,
                  order=0,
                  neg_HAND=None):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        cell_surface=1. -- cell surface area (m2); in case the maps are not in m2, provide a map or multiplier to convert
        iterations=15 -- number of iterations to use
        neg_HAND -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
        stream (for example when there are natural embankments)
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand,
                              subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch) * pcr.areaaverage(
        cell_surface, subcatch)  # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch / surface  # average water depth (m) over the subcatchment
    # ilt(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    if neg_HAND == 1:
        dem_max = pcr.ifthenelse(
            volume_catch > volume_thres, pcr.scalar(32.),
            pcr.scalar(-32.))  # arbitrarily high upper bound (32 m) for the water-level bisection
        dem_min = pcr.scalar(-32.)
    else:
        dem_max = pcr.ifthenelse(
            volume_catch > volume_thres, pcr.scalar(32.),
            pcr.scalar(0.))  # arbitrarily high upper bound (32 m) for the water-level bisection
        dem_min = pcr.scalar(0.)
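    # Bisection on the ponded water level: dem_av is narrowed down for a fixed
    # number of iterations until the subcatchment-average inundation depth,
    # areaaverage(max(dem_av - dem_norm, 0)), matches depth_catch, the depth
    # implied by the flood volume.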
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max) / 2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0),
                                              subcatch)
        error = pcr.cover((depth_catch - average_depth_catch) / depth_catch,
                          depth_catch * 0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
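# Minimal usage sketch (hedged): the map names are hypothetical placeholders;
# hand would typically come from derive_HAND and subcatch from a subcatchment
# delineation, while volume holds the flood volume (m3) per cell.
if __name__ == "__main__":
    import pcraster as pcr

    pcr.setclone("hand.map")
    ldd = pcr.lddrepair(pcr.ldd(pcr.readmap("ldd.map")))
    hand = pcr.readmap("hand.map")                  # height above nearest drain (m)
    subcatch = pcr.readmap("subcatch.map")          # ordinal subcatchment IDs
    flood_volume = pcr.readmap("flood_volume.map")  # m3 of water outside the banks
    cell_surface = pcr.cellarea()                   # cell area (m2, projected grid)
    inundation = volume_spread(ldd, hand, subcatch, flood_volume,
                               cell_surface=cell_surface, iterations=15)
    pcr.report(inundation, "inundation.map")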
Example #54
0
def main():
    ### Read input arguments #####
    parser = OptionParser()
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option(
        "-q",
        "--quiet",
        dest="verbose",
        default=True,
        action="store_false",
        help="do not print status messages to stdout",
    )
    parser.add_option(
        "-i",
        "--ini",
        dest="inifile",
        default="hand_contour_inun.ini",
        nargs=1,
        help="ini configuration file",
    )
    parser.add_option(
        "-f",
        "--flood_map",
        nargs=1,
        dest="flood_map",
        help="Flood map file (NetCDF point time series file",
    )
    parser.add_option(
        "-v",
        "--flood_variable",
        nargs=1,
        dest="flood_variable",
        default="water_level",
        help="variable name of flood water level",
    )
    parser.add_option(
        "-b",
        "--bankfull_map",
        dest="bankfull_map",
        default="",
        help="Map containing bank full level (is subtracted from flood map, in NetCDF)",
    )
    parser.add_option(
        "-c",
        "--catchment",
        dest="catchment_strahler",
        default=7,
        type="int",
        help="Strahler order threshold >= are selected as catchment boundaries",
    )
    parser.add_option(
        "-t",
        "--time",
        dest="time",
        default="",
        help="time in YYYYMMDDHHMMSS, overrides time in NetCDF input if set",
    )
    # parser.add_option('-s', '--hand_strahler',
    #                   dest='hand_strahler', default=7, type='int',
    #                   help='Strahler order threshold >= selected as riverine')
    parser.add_option(
        "-m",
        "--max_strahler",
        dest="max_strahler",
        default=1000,
        type="int",
        help="Maximum Strahler order to loop over",
    )
    parser.add_option(
        "-d", "--destination", dest="dest_path", default="inun", help="Destination path"
    )
    parser.add_option(
        "-H",
        "--hand_file_prefix",
        dest="hand_file_prefix",
        default="",
        help="optional HAND file prefix of already generated HAND files",
    )
    parser.add_option(
        "-n",
        "--neg_HAND",
        dest="neg_HAND",
        default=0,
        type="int",
        help="if set to 1, allow for negative HAND values in HAND maps",
    )
    (options, args) = parser.parse_args()

    if not os.path.exists(options.inifile):
        print "path to ini file cannot be found"
        sys.exit(1)
    options.dest_path = os.path.abspath(options.dest_path)

    if not (os.path.isdir(options.dest_path)):
        os.makedirs(options.dest_path)

    # set up the logger
    flood_name = os.path.split(options.flood_map)[1].split(".")[0]
    # case_name = 'inun_{:s}_hand_{:02d}_catch_{:02d}'.format(flood_name, options.hand_strahler, options.catchment_strahler)
    case_name = "inun_{:s}_catch_{:02d}".format(flood_name, options.catchment_strahler)
    logfilename = os.path.join(options.dest_path, "hand_contour_inun.log")
    logger, ch = inun_lib.setlogger(logfilename, "HAND_INUN", options.verbose)
    logger.info("$Id: $")
    logger.info("Flood map: {:s}".format(options.flood_map))
    logger.info("Bank full map: {:s}".format(options.bankfull_map))
    logger.info("Destination path: {:s}".format(options.dest_path))
    # read out ini file
    ### READ CONFIG FILE
    # open config-file
    config = inun_lib.open_conf(options.inifile)

    # read settings
    options.dem_file = inun_lib.configget(config, "HighResMaps", "dem_file", True)
    options.ldd_file = inun_lib.configget(config, "HighResMaps", "ldd_file", True)
    options.stream_file = inun_lib.configget(config, "HighResMaps", "stream_file", True)
    options.riv_length_fact_file = inun_lib.configget(
        config, "wflowResMaps", "riv_length_fact_file", True
    )
    options.ldd_wflow = inun_lib.configget(config, "wflowResMaps", "ldd_wflow", True)
    options.riv_width_file = inun_lib.configget(
        config, "wflowResMaps", "riv_width_file", True
    )
    options.file_format = inun_lib.configget(
        config, "file_settings", "file_format", 0, datatype="int"
    )
    options.out_format = inun_lib.configget(
        config, "file_settings", "out_format", 0, datatype="int"
    )
    options.latlon = inun_lib.configget(
        config, "file_settings", "latlon", 0, datatype="int"
    )
    options.x_tile = inun_lib.configget(
        config, "tiling", "x_tile", 10000, datatype="int"
    )
    options.y_tile = inun_lib.configget(
        config, "tiling", "y_tile", 10000, datatype="int"
    )
    options.x_overlap = inun_lib.configget(
        config, "tiling", "x_overlap", 1000, datatype="int"
    )
    options.y_overlap = inun_lib.configget(
        config, "tiling", "y_overlap", 1000, datatype="int"
    )
    options.iterations = inun_lib.configget(
        config, "inundation", "iterations", 20, datatype="int"
    )
    options.initial_level = inun_lib.configget(
        config, "inundation", "initial_level", 32., datatype="float"
    )
    options.flood_volume_type = inun_lib.configget(
        config, "inundation", "flood_volume_type", 0, datatype="int"
    )

    # options.area_multiplier = inun_lib.configget(config, 'inundation',
    #                               'area_multiplier', 1., datatype='float')
    logger.info("DEM file: {:s}".format(options.dem_file))
    logger.info("LDD file: {:s}".format(options.ldd_file))
    logger.info("streamfile: {:s}".format(options.stream_file))
    logger.info("Columns per tile: {:d}".format(options.x_tile))
    logger.info("Rows per tile: {:d}".format(options.y_tile))
    logger.info("Columns overlap: {:d}".format(options.x_overlap))
    logger.info("Rows overlap: {:d}".format(options.y_overlap))
    metadata_global = {}
    # add metadata from the section [metadata]
    meta_keys = config.options("metadata_global")
    for key in meta_keys:
        metadata_global[key] = config.get("metadata_global", key)
    # add a number of metadata variables that are mandatory
    metadata_global["config_file"] = os.path.abspath(options.inifile)
    metadata_var = {}
    metadata_var["units"] = "m"
    metadata_var["standard_name"] = "water_surface_height_above_reference_datum"
    metadata_var["long_name"] = "flooding"
    metadata_var[
        "comment"
    ] = "water_surface_reference_datum_altitude is given in file {:s}".format(
        options.dem_file
    )
    if not os.path.exists(options.dem_file):
        logger.error("path to dem file {:s} cannot be found".format(options.dem_file))
        sys.exit(1)
    if not os.path.exists(options.ldd_file):
        logger.error("path to ldd file {:s} cannot be found".format(options.ldd_file))
        sys.exit(1)

    # Read extent from a GDAL compatible file
    try:
        extent = inun_lib.get_gdal_extent(options.dem_file)
    except:
        msg = "Input file {:s} not a gdal compatible file".format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    try:
        x, y = inun_lib.get_gdal_axes(options.dem_file, logging=logger)
        srs = inun_lib.get_gdal_projection(options.dem_file, logging=logger)
    except:
        msg = "Input file {:s} not a gdal compatible file".format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    # read history from flood file
    if options.file_format == 0:
        a = nc.Dataset(options.flood_map, "r")
        metadata_global[
            "history"
        ] = "Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}".format(
            os.path.abspath(options.flood_map), a.history
        )
        a.close()
    else:
        metadata_global[
            "history"
        ] = "Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}".format(
            os.path.abspath(options.flood_map), "PCRaster file, no history"
        )

    # first write subcatch maps and hand maps
    ############### TODO ######
    # setup a HAND file for each strahler order

    max_s = inun_lib.define_max_strahler(options.stream_file, logging=logger)
    stream_max = np.minimum(max_s, options.max_strahler)
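    # One HAND raster is prepared per Strahler order, from the catchment
    # threshold (options.catchment_strahler) up to the highest order found in
    # the stream file (capped by options.max_strahler); existing HAND files are
    # reused, missing ones are generated tile by tile below.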

    for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
        dem_name = os.path.split(options.dem_file)[1].split(".")[0]
        if os.path.isfile(
            "{:s}_{:02d}.tif".format(options.hand_file_prefix, hand_strahler)
        ):
            hand_file = "{:s}_{:02d}.tif".format(
                options.hand_file_prefix, hand_strahler
            )
        else:
            logger.info(
                "No HAND files with HAND prefix were found, checking {:s}_hand_strahler_{:02d}.tif".format(
                    dem_name, hand_strahler
                )
            )
            hand_file = os.path.join(
                options.dest_path,
                "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
            )
        if not (os.path.isfile(hand_file)):
            # hand file does not exist yet! Generate it, otherwise skip!
            logger.info(
                "HAND file {:s} not found, start setting up...please wait...".format(
                    hand_file
                )
            )
            hand_file_tmp = os.path.join(
                options.dest_path,
                "{:s}_hand_strahler_{:02d}.tif.tmp".format(dem_name, hand_strahler),
            )
            ds_hand, band_hand = inun_lib.prepare_gdal(
                hand_file_tmp, x, y, logging=logger, srs=srs
            )
            # band_hand = ds_hand.GetRasterBand(1)

            # Open terrain data for reading
            ds_dem, rasterband_dem = inun_lib.get_gdal_rasterband(options.dem_file)
            ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
            ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(
                options.stream_file
            )
            n = 0
            for x_loop in range(0, len(x), options.x_tile):
                x_start = np.maximum(x_loop, 0)
                x_end = np.minimum(x_loop + options.x_tile, len(x))
                # determine actual overlap for cutting
                for y_loop in range(0, len(y), options.y_tile):
                    x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
                    x_overlap_max = (
                        np.minimum(x_end + options.x_overlap, len(x)) - x_end
                    )
                    n += 1
                    # print('tile {:001d}:'.format(n))
                    y_start = np.maximum(y_loop, 0)
                    y_end = np.minimum(y_loop + options.y_tile, len(y))
                    y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
                    y_overlap_max = (
                        np.minimum(y_end + options.y_overlap, len(y)) - y_end
                    )
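                    # x/y_overlap_min and x/y_overlap_max are the overlap cells
                    # actually available on each side of the tile (clipped at the
                    # raster edges); they are used to read a padded tile and to
                    # trim the padding off again after the HAND computation.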
                    # cut out DEM
                    logger.debug(
                        "Computing HAND for xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}".format(
                            x_start, x_end, y_start, y_end
                        )
                    )
                    terrain = rasterband_dem.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )

                    drainage = rasterband_ldd.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )
                    stream = rasterband_stream.ReadAsArray(
                        x_start - x_overlap_min,
                        y_start - y_overlap_min,
                        (x_end + x_overlap_max) - (x_start - x_overlap_min),
                        (y_end + y_overlap_max) - (y_start - y_overlap_min),
                    )
                    # write to temporary file
                    terrain_temp_file = os.path.join(
                        options.dest_path, "terrain_temp.map"
                    )
                    drainage_temp_file = os.path.join(
                        options.dest_path, "drainage_temp.map"
                    )
                    stream_temp_file = os.path.join(
                        options.dest_path, "stream_temp.map"
                    )
                    if rasterband_dem.GetNoDataValue() is not None:
                        inun_lib.gdal_writemap(
                            terrain_temp_file,
                            "PCRaster",
                            np.arange(0, terrain.shape[1]),
                            np.arange(0, terrain.shape[0]),
                            terrain,
                            rasterband_dem.GetNoDataValue(),
                            gdal_type=gdal.GDT_Float32,
                            logging=logger,
                        )
                    else:
                        # in case no nodata value is found
                        logger.warning(
                            "No nodata value found in {:s}. assuming -9999".format(
                                options.dem_file
                            )
                        )
                        inun_lib.gdal_writemap(
                            terrain_temp_file,
                            "PCRaster",
                            np.arange(0, terrain.shape[1]),
                            np.arange(0, terrain.shape[0]),
                            terrain,
                            -9999.,
                            gdal_type=gdal.GDT_Float32,
                            logging=logger,
                        )

                    inun_lib.gdal_writemap(
                        drainage_temp_file,
                        "PCRaster",
                        np.arange(0, terrain.shape[1]),
                        np.arange(0, terrain.shape[0]),
                        drainage,
                        rasterband_ldd.GetNoDataValue(),
                        gdal_type=gdal.GDT_Int32,
                        logging=logger,
                    )
                    inun_lib.gdal_writemap(
                        stream_temp_file,
                        "PCRaster",
                        np.arange(0, terrain.shape[1]),
                        np.arange(0, terrain.shape[0]),
                        stream,
                        rasterband_ldd.GetNoDataValue(),
                        gdal_type=gdal.GDT_Int32,
                        logging=logger,
                    )
                    # read as pcr objects
                    pcr.setclone(terrain_temp_file)
                    terrain_pcr = pcr.readmap(terrain_temp_file)
                    drainage_pcr = pcr.lddrepair(
                        pcr.ldd(pcr.readmap(drainage_temp_file))
                    )  # convert to ldd type map
                    stream_pcr = pcr.scalar(
                        pcr.readmap(stream_temp_file)
                    )  # convert to ldd type map

                    # check if the highest stream order of the tile is below the hand_strahler
                    # if the highest stream order of the tile is smaller than hand_strahler, than DEM values are taken instead of HAND values.
                    max_stream_tile = inun_lib.define_max_strahler(
                        stream_temp_file, logging=logger
                    )
                    if max_stream_tile < hand_strahler:
                        hand_pcr = terrain_pcr
                        logger.info(
                            "For this tile, DEM values are used instead of HAND because there is no stream order larger than {:02d}".format(
                                hand_strahler
                            )
                        )
                    else:
                        # compute streams
                        stream_ge, subcatch = inun_lib.subcatch_stream(
                            drainage_pcr, hand_strahler, stream=stream_pcr
                        )  # generate streams
                        # compute basins
                        stream_ge_dummy, subcatch = inun_lib.subcatch_stream(
                            drainage_pcr, options.catchment_strahler, stream=stream_pcr
                        )  # generate streams
                        basin = pcr.boolean(subcatch)
                        hand_pcr, dist_pcr = inun_lib.derive_HAND(
                            terrain_pcr,
                            drainage_pcr,
                            3000,
                            rivers=pcr.boolean(stream_ge),
                            basin=basin,
                            neg_HAND=options.neg_HAND,
                        )
                    # convert to numpy
                    hand = pcr.pcr2numpy(hand_pcr, -9999.)
                    # cut relevant part
                    if y_overlap_max == 0:
                        y_overlap_max = -hand.shape[0]
                    if x_overlap_max == 0:
                        x_overlap_max = -hand.shape[1]
                    hand_cut = hand[
                        0 + y_overlap_min : -y_overlap_max,
                        0 + x_overlap_min : -x_overlap_max,
                    ]

                    band_hand.WriteArray(hand_cut, x_start, y_start)
                    os.unlink(terrain_temp_file)
                    os.unlink(drainage_temp_file)
                    os.unlink(stream_temp_file)
                    band_hand.FlushCache()
            ds_dem = None
            ds_ldd = None
            ds_stream = None
            band_hand.SetNoDataValue(-9999.)
            ds_hand = None
            logger.info("Finalizing {:s}".format(hand_file))
            # rename temporary file to final hand file
            os.rename(hand_file_tmp, hand_file)
        else:
            logger.info("HAND file {:s} already exists...skipping...".format(hand_file))

    #####################################################################################
    #  HAND file has now been prepared, moving to flood mapping part                    #
    #####################################################################################
    # set the clone
    pcr.setclone(options.ldd_wflow)
    # read wflow ldd as pcraster object
    ldd_pcr = pcr.readmap(options.ldd_wflow)
    xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(
        options.riv_width_file, "GTiff", logging=logger
    )

    # determine cell length in meters using ldd_pcr as clone (if latlon=True, values are converted to m2)
    x_res, y_res, reallength_wflow = pcrut.detRealCellLength(
        pcr.scalar(ldd_pcr), not (bool(options.latlon))
    )
    cell_surface_wflow = pcr.pcr2numpy(x_res * y_res, 0)

    if options.flood_volume_type == 0:
        # load the staticmaps needed to estimate volumes across all
        # xax, yax, riv_length, fill_value = inun_lib.gdal_readmap(options.riv_length_file, 'GTiff', logging=logger)
        # riv_length = np.ma.masked_where(riv_length==fill_value, riv_length)
        xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(
            options.riv_width_file, "GTiff", logging=logger
        )
        riv_width[riv_width == fill_value] = 0

        # read river length factor file (multiplier)
        xax, yax, riv_length_fact, fill_value = inun_lib.gdal_readmap(
            options.riv_length_fact_file, "GTiff", logging=logger
        )
        riv_length_fact = np.ma.masked_where(
            riv_length_fact == fill_value, riv_length_fact
        )
        drain_length = wflow_lib.detdrainlength(ldd_pcr, x_res, y_res)

        # compute river length in each cell
        riv_length = pcr.pcr2numpy(drain_length, 0) * riv_length_fact
        # riv_length_pcr = pcr.numpy2pcr(pcr.Scalar, riv_length, 0)

    flood_folder = os.path.join(options.dest_path, case_name)
    flood_vol_map = os.path.join(
        flood_folder,
        "{:s}_vol.tif".format(os.path.split(options.flood_map)[1].split(".")[0]),
    )
    if not (os.path.isdir(flood_folder)):
        os.makedirs(flood_folder)
    if options.out_format == 0:
        inun_file_tmp = os.path.join(flood_folder, "{:s}.tif.tmp".format(case_name))
        inun_file = os.path.join(flood_folder, "{:s}.tif".format(case_name))
    else:
        inun_file_tmp = os.path.join(flood_folder, "{:s}.nc.tmp".format(case_name))
        inun_file = os.path.join(flood_folder, "{:s}.nc".format(case_name))

    hand_temp_file = os.path.join(flood_folder, "hand_temp.map")
    drainage_temp_file = os.path.join(flood_folder, "drainage_temp.map")
    stream_temp_file = os.path.join(flood_folder, "stream_temp.map")
    flood_vol_temp_file = os.path.join(flood_folder, "flood_warp_temp.tif")
    # load the data with river levels and compute the volumes
    if options.file_format == 0:
        # assume we need the maximum value in a NetCDF time series grid
        logger.info("Reading flood from {:s} NetCDF file".format(options.flood_map))
        a = nc.Dataset(options.flood_map, "r")
        if options.latlon == 0:
            xax = a.variables["x"][:]
            yax = a.variables["y"][:]
        else:
            try:
                xax = a.variables["lon"][:]
                yax = a.variables["lat"][:]
            except:
                xax = a.variables["x"][:]
                yax = a.variables["y"][:]
        if options.time == "":
            time_list = nc.num2date(
                a.variables["time"][:],
                units=a.variables["time"].units,
                calendar=a.variables["time"].calendar,
            )
            time = [time_list[len(time_list) // 2]]
        else:
            time = [dt.datetime.strptime(options.time, "%Y%m%d%H%M%S")]

        flood_series = a.variables[options.flood_variable][:]
        flood_data = flood_series.max(axis=0)
        if np.ma.is_masked(flood_data):
            flood = flood_data.data
            flood[flood_data.mask] = 0
        if yax[-1] > yax[0]:
            yax = np.flipud(yax)
            flood = np.flipud(flood)
        a.close()
    elif options.file_format == 1:
        logger.info("Reading flood from {:s} PCRaster file".format(options.flood_map))
        xax, yax, flood, flood_fill_value = inun_lib.gdal_readmap(
            options.flood_map, "PCRaster", logging=logger
        )
        flood = np.ma.masked_equal(flood, flood_fill_value)
        if options.time == "":
            options.time = "20000101000000"
        time = [dt.datetime.strptime(options.time, "%Y%m%d%H%M%S")]

        flood[flood == flood_fill_value] = 0.
    # load the bankfull depths
    if options.bankfull_map == "":
        bankfull = np.zeros(flood.shape)
    else:
        if options.file_format == 0:
            logger.info(
                "Reading bankfull from {:s} NetCDF file".format(options.bankfull_map)
            )
            a = nc.Dataset(options.bankfull_map, "r")
            xax = a.variables["x"][:]
            yax = a.variables["y"][:]
            #            xax = a.variables['lon'][:]
            #            yax = a.variables['lat'][:]

            bankfull_series = a.variables[options.flood_variable][:]
            bankfull_data = bankfull_series.max(axis=0)
            if np.ma.is_masked(bankfull_data):
                bankfull = bankfull_data.data
                bankfull[bankfull_data.mask] = 0
            if yax[-1] > yax[0]:
                yax = np.flipud(yax)
                bankfull = np.flipud(bankfull)
            a.close()
        elif options.file_format == 1:
            logger.info(
                "Reading bankfull from {:s} PCRaster file".format(options.bankfull_map)
            )
            xax, yax, bankfull, bankfull_fill_value = inun_lib.gdal_readmap(
                options.bankfull_map, "PCRaster", logging=logger
            )
            bankfull = np.ma.masked_equal(bankfull, bankfull_fill_value)
    #     flood = bankfull*2
    # res_x = 2000
    # res_y = 2000
    # subtract the bankfull water level to get flood levels (above bankfull)
    flood_vol = np.maximum(flood - bankfull, 0)
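    # flood_vol holds the water level above bankfull (m). For flood_volume_type
    # == 0 it is re-expressed below as a cell-average water depth: the level is
    # multiplied by the river plan area (riv_length * riv_width) and divided by
    # the wflow cell area, i.e. the volume in the channel spread over the cell.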
    if options.flood_volume_type == 0:
        flood_vol_m = (
            riv_length * riv_width * flood_vol / cell_surface_wflow
        )  # flood volume expressed as an equivalent water depth (m) over the cell
        flood_vol_m_pcr = pcr.numpy2pcr(pcr.Scalar, flood_vol_m, 0)
    else:
        flood_vol_m = flood_vol / cell_surface_wflow
    flood_vol_m_data = flood_vol_m.data
    flood_vol_m_data[flood_vol_m.mask] = -999.
    logger.info("Saving water layer map to {:s}".format(flood_vol_map))
    # write to a tiff file
    inun_lib.gdal_writemap(
        flood_vol_map,
        "GTiff",
        xax,
        yax,
        np.maximum(flood_vol_m_data, 0),
        -999.,
        logging=logger,
    )
    # this is placed later in the hand loop
    # ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
    ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
    ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)

    logger.info("Preparing flood map in {:s} ...please wait...".format(inun_file))
    if options.out_format == 0:
        ds_inun, band_inun = inun_lib.prepare_gdal(
            inun_file_tmp, x, y, logging=logger, srs=srs
        )
        # band_inun = ds_inun.GetRasterBand(1)
    else:
        ds_inun, band_inun = inun_lib.prepare_nc(
            inun_file_tmp,
            time,
            x,
            np.flipud(y),
            metadata=metadata_global,
            metadata_var=metadata_var,
            logging=logger,
        )
    # loop over all the tiles
    n = 0
    for x_loop in range(0, len(x), options.x_tile):
        x_start = np.maximum(x_loop, 0)
        x_end = np.minimum(x_loop + options.x_tile, len(x))
        # determine actual overlap for cutting
        for y_loop in range(0, len(y), options.y_tile):
            x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
            x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
            n += 1
            # print('tile {:001d}:'.format(n))
            y_start = np.maximum(y_loop, 0)
            y_end = np.minimum(y_loop + options.y_tile, len(y))
            y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
            y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
            x_tile_ax = x[x_start - x_overlap_min : x_end + x_overlap_max]
            y_tile_ax = y[y_start - y_overlap_min : y_end + y_overlap_max]
            # cut out DEM
            logger.debug(
                "handling xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}".format(
                    x_start, x_end, y_start, y_end
                )
            )

            drainage = rasterband_ldd.ReadAsArray(
                x_start - x_overlap_min,
                y_start - y_overlap_min,
                (x_end + x_overlap_max) - (x_start - x_overlap_min),
                (y_end + y_overlap_max) - (y_start - y_overlap_min),
            )
            stream = rasterband_stream.ReadAsArray(
                x_start - x_overlap_min,
                y_start - y_overlap_min,
                (x_end + x_overlap_max) - (x_start - x_overlap_min),
                (y_end + y_overlap_max) - (y_start - y_overlap_min),
            )

            # stream_max = np.minimum(stream.max(), options.max_strahler)

            inun_lib.gdal_writemap(
                drainage_temp_file,
                "PCRaster",
                x_tile_ax,
                y_tile_ax,
                drainage,
                rasterband_ldd.GetNoDataValue(),
                gdal_type=gdal.GDT_Int32,
                logging=logger,
            )
            inun_lib.gdal_writemap(
                stream_temp_file,
                "PCRaster",
                x_tile_ax,
                y_tile_ax,
                stream,
                rasterband_stream.GetNoDataValue(),
                gdal_type=gdal.GDT_Int32,
                logging=logger,
            )

            # read as pcr objects
            pcr.setclone(stream_temp_file)
            drainage_pcr = pcr.lddrepair(
                pcr.ldd(pcr.readmap(drainage_temp_file))
            )  # convert to ldd type map
            stream_pcr = pcr.scalar(
                pcr.readmap(stream_temp_file)
            )  # convert to ldd type map

            # warp of flood volume to inundation resolution
            inun_lib.gdal_warp(
                flood_vol_map,
                stream_temp_file,
                flood_vol_temp_file,
                gdal_interp=gdalconst.GRA_NearestNeighbour,
            )
            x_tile_ax, y_tile_ax, flood_meter, fill_value = inun_lib.gdal_readmap(
                flood_vol_temp_file, "GTiff", logging=logger
            )
            # make sure that the option unittrue is on (in case unitcell was used in another function)
            x_res_tile, y_res_tile, reallength = pcrut.detRealCellLength(
                pcr.scalar(stream_pcr), not (bool(options.latlon))
            )
            cell_surface_tile = pcr.pcr2numpy(x_res_tile * y_res_tile, 0)

            # convert meter depth to volume [m3]
            flood_vol = pcr.numpy2pcr(
                pcr.Scalar, flood_meter * cell_surface_tile, fill_value
            )

            # first prepare a basin map, belonging to the lowest order we are looking at
            inundation_pcr = pcr.scalar(stream_pcr) * 0
            for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
                # hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
                if os.path.isfile(
                    os.path.join(
                        options.dest_path,
                        "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
                    )
                ):
                    hand_file = os.path.join(
                        options.dest_path,
                        "{:s}_hand_strahler_{:02d}.tif".format(dem_name, hand_strahler),
                    )
                else:
                    hand_file = "{:s}_{:02d}.tif".format(
                        options.hand_file_prefix, hand_strahler
                    )
                ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
                hand = rasterband_hand.ReadAsArray(
                    x_start - x_overlap_min,
                    y_start - y_overlap_min,
                    (x_end + x_overlap_max) - (x_start - x_overlap_min),
                    (y_end + y_overlap_max) - (y_start - y_overlap_min),
                )
                print (
                    "len x-ax: {:d} len y-ax {:d} x-shape {:d} y-shape {:d}".format(
                        len(x_tile_ax), len(y_tile_ax), hand.shape[1], hand.shape[0]
                    )
                )

                inun_lib.gdal_writemap(
                    hand_temp_file,
                    "PCRaster",
                    x_tile_ax,
                    y_tile_ax,
                    hand,
                    rasterband_hand.GetNoDataValue(),
                    gdal_type=gdal.GDT_Float32,
                    logging=logger,
                )

                hand_pcr = pcr.readmap(hand_temp_file)

                stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(
                    drainage_pcr, options.catchment_strahler, stream=stream_pcr
                )
                # stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr)
                stream_ge, subcatch = inun_lib.subcatch_stream(
                    drainage_pcr,
                    options.catchment_strahler,
                    stream=stream_pcr,
                    basin=pcr.boolean(pcr.cover(subcatch_hand, 0)),
                    assign_existing=True,
                    min_strahler=hand_strahler,
                    max_strahler=hand_strahler,
                )  # generate subcatchments, only within basin for HAND
                flood_vol_strahler = pcr.ifthenelse(
                    pcr.boolean(pcr.cover(subcatch, 0)), flood_vol, 0
                )  # mask the flood volume map with the created subcatch map for strahler order = hand_strahler

                inundation_pcr_step = inun_lib.volume_spread(
                    drainage_pcr,
                    hand_pcr,
                    pcr.subcatchment(
                        drainage_pcr, subcatch
                    ),  # to make sure backwater effects can occur from higher order rivers to lower order rivers
                    flood_vol_strahler,
                    volume_thres=0.,
                    iterations=options.iterations,
                    cell_surface=pcr.numpy2pcr(pcr.Scalar, cell_surface_tile, -9999),
                    logging=logger,
                    order=hand_strahler,
                    neg_HAND=options.neg_HAND,
                )  # 1166400000.
                # use maximum value of inundation_pcr_step and new inundation for higher strahler order
                inundation_pcr = pcr.max(inundation_pcr, inundation_pcr_step)
            inundation = pcr.pcr2numpy(inundation_pcr, -9999.)
            # cut relevant part
            if y_overlap_max == 0:
                y_overlap_max = -inundation.shape[0]
            if x_overlap_max == 0:
                x_overlap_max = -inundation.shape[1]
            inundation_cut = inundation[
                0 + y_overlap_min : -y_overlap_max, 0 + x_overlap_min : -x_overlap_max
            ]
            # inundation_cut
            if options.out_format == 0:
                band_inun.WriteArray(inundation_cut, x_start, y_start)
                band_inun.FlushCache()
            else:
                # with netCDF, data is upside down.
                inun_lib.write_tile_nc(band_inun, inundation_cut, x_start, y_start)
            # clean up
            os.unlink(flood_vol_temp_file)
            os.unlink(drainage_temp_file)
            os.unlink(hand_temp_file)
            os.unlink(
                stream_temp_file
            )  # also remove temp stream file from output folder

            # if n == 35:
            #     band_inun.SetNoDataValue(-9999.)
            #     ds_inun = None
            #     sys.exit(0)
    # os.unlink(flood_vol_map)

    logger.info("Finalizing {:s}".format(inun_file))
    # add the metadata to the file and band
    # band_inun.SetNoDataValue(-9999.)
    # ds_inun.SetMetadata(metadata_global)
    # band_inun.SetMetadata(metadata_var)
    if options.out_format == 0:
        ds_inun = None
        ds_hand = None
    else:
        ds_inun.close()

    ds_ldd = None
    # rename temporary file to final hand file
    if os.path.isfile(inun_file):
        # remove an old result if available
        os.unlink(inun_file)
    os.rename(inun_file_tmp, inun_file)

    logger.info("Done! Thank you for using hand_contour_inun.py")
    logger, ch = inun_lib.closeLogger(logger, ch)
    del logger, ch
    sys.exit(0)
Example #55
0
def main():

    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder): 
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)    
    os.system("pwd")

    # tmp folder
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)
    

    # set the clone map
    print("set the clone based on the ldd inpu") 
    pcr.setclone(global_ldd_inp_file)
    

    # define the landmask
    print("define the landmask based on the ldd inpup") 
    landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
    landmask = pcr.ifthen(landmask, landmask)
    

    # read ldd
    print("define the ldd") 
    ldd_map = pcr.readmap(global_ldd_inp_file)
    # ~ # - extend ldd (not needed)
    # ~ ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    
    # copy ldd file
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd); os.system(cmd)


    # make catchment map
    print("make catchment map") 
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))


    # read global subdomain file
    print("read global subdomain file") 
    global_subdomain_map = vos.readPCRmapClone(v = global_subdomain_file, cloneMapFileName = global_ldd_inp_file, tmpDir = tmp_folder, absolutePath = None, isLddMap = False, cover = None, isNomMap = True)


    # set initial subdomain
    print("assign subdomains to all catchments") 
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)

    pcr.aguila(subdomains_initial)

    pcr.report(subdomains_initial, "global_subdomains_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))


    
    print("Checking all subdomains, avoid too large subdomains") 

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0
    
    subdomains_final = pcr.ifthen(pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))
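    # start from an all-missing nominal map (the condition "< -7777" is effectively
    # never true here); accepted masks and clumps are pasted into it with pcr.cover below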
    
    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" %(str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr, pcr.boolean(1.0))
        
        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1, 1)[0] > 0: process_this_clone = True
        
        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)
        
        # - initial check value
        check_ok = True

        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)
            
            # ~ print(str(area_in_degree2))
            
            # check whether the size of bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False
            if (xmax - xmin) > 10* (ymax - ymin): check_ok = False
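            # a subdomain is split further (via clumping below) when its bounding
            # box exceeds 1.5 x 2500 degree2 or is more than 10 times wider than
            # it is tall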
        
        if check_ok and process_this_clone:

            msg = "Clump is not needed."
            msg = "\n\n" +str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean, pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal) 
        
        if (not check_ok) and process_this_clone:
			
            msg = "Clump is needed."
            msg = "\n\n" +str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))
            
            # merge clumps that are close together 
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids) 
            # ~ pcr.aguila(clump_ids)
            
            # minimum and maximum clump ids
            min_clump_id = int(pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)),1)[0])
            max_clump_id = int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)),1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):
            
                msg = "Processing the clump %s of %s from the landmask %s" %(str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" +str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(mask_selected_boolean_from_clump, mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(pcr.defined(check_mask_selected_boolean_from_clump))),1)[0])
                
                if check_if_empty == 0.0: 
                
                    msg = "Map is empty !"
                    msg = "\n\n" +str(msg) + "\n\n"
                    print(msg)

                else:
                
                    msg = "Map is NOT empty !"
                    msg = "\n\n" +str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1
                    
                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(mask_selected_boolean_from_clump, pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final, mask_selected_nominal)
                    
    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')
    
    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")

    print("The subdomain map is READY.") 

    pcr.report(subdomains_final, "global_subdomains_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")


    print("Making the clone and landmask maps for all subdomains") 

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])

    # clone and mask folders
    clone_folder = out_folder + "/clone/"
    if os.path.exists(clone_folder): shutil.rmtree(clone_folder)
    os.makedirs(clone_folder)
    mask_folder = out_folder + "/mask/"
    if os.path.exists(mask_folder): shutil.rmtree(mask_folder)
    os.makedirs(mask_folder)


    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the subdomain %s" %(str(nr))
        print(msg)

        # set the global clone
        pcr.setclone(global_ldd_inp_file)
        
        mask_selected_boolean = pcr.ifthen(subdomains_final == nr, pcr.boolean(1.0))
        
        mask_selected_nominal = pcr.ifthen(subdomains_final == nr, pcr.nominal(nr))
        mask_file = "mask/mask_%s.map" %(str(nr))
        pcr.report(mask_selected_nominal, mask_file)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)
        
        print(str(nr) + " ; " + str(area_in_degree2) + " ; " + str((xmax - xmin)) + " ; " + str((ymax - ymin)))

        # cellsize in arcdegree 
        cellsize = cellsize_in_arcmin / 60.
        
        # number of rows and cols: the extent in degrees divided by the cell size, rounded to the nearest integer
        num_rows = int(round((ymax - ymin) / cellsize))
        num_cols = int(round((xmax - xmin) / cellsize))
        
        # make the clone map using mapattr 
        clonemap_mask_file = "clone/clonemap_mask_%s.map" %(str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" %(str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize), clonemap_mask_file)
        print(cmd); os.system(cmd)
        
        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(v = mask_file, \
                                         cloneMapFileName = clonemap_mask_file, 
                                         tmpDir = tmp_folder, \
                                         absolutePath = None, isLddMap = False, cover = None, isNomMap = True)
        local_mask_boolean = pcr.defined(local_mask)
        # keep only the cells with the value True and overwrite the mask file with this boolean version
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)
        


    print("")
    print("")
    print("")

    print(num_of_masks)
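For reference, a minimal, self-contained sketch of the bounding-box-to-clone-map step used above: the row/column counts come from the extent divided by the cell size, and the mapattr command is anchored at the upper-left corner (xmin, ymax). The function name and the example coordinates are hypothetical; only the mapattr call mirrors the script.

def build_mapattr_command(xmin, ymin, xmax, ymax, cellsize_in_arcmin, clonemap_file):
    # cell size in arc-degrees
    cellsize = cellsize_in_arcmin / 60.
    # number of rows and columns covering the bounding box
    num_rows = int(round((ymax - ymin) / cellsize))
    num_cols = int(round((xmax - xmin) / cellsize))
    # mapattr takes the number of rows/cols, the upper-left corner (xmin, ymax) and the cell size
    cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % \
          (str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize), clonemap_file)
    return num_rows, num_cols, cmd

# example: a 10 x 10 arc-degree box at 5 arc-minute resolution gives 120 rows and 120 columns
print(build_mapattr_command(0.0, 0.0, 10.0, 10.0, 5.0, "clone_example.map"))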
Example #56
0
    def additional_post_processing(self):
        # In this method/function, users can add their own post-processing.

        # consumption for and return flow from non irrigation water demand (unit: m/day)
        self.nonIrrWaterConsumption = self._model.routing.nonIrrWaterConsumption
        self.nonIrrReturnFlow = self._model.routing.nonIrrReturnFlow

        # accumulated runoff (m3/s) along the drainage network - not including local changes in water bodies
        if "accuRunoff" in self.variables_for_report:
            self.accuRunoff = pcr.catchmenttotal(
                self.runoff * self._model.routing.cellArea,
                self._model.routing.lddMap) / vos.secondsPerDay()

        # accumulated baseflow (m3/day) along the drainage network
        if "accuBaseflow" in self.variables_for_report:
            self.accuBaseflow = pcr.catchmenttotal(
                self.baseflow * self._model.routing.cellArea,
                self._model.routing.lddMap)

        # local changes in water bodies (i.e. abstraction, return flow, evaporation, bed exchange), excluding runoff
        self.local_water_body_flux = self._model.routing.local_input_to_surface_water / \
            self._model.routing.cellArea - self.runoff

        # total runoff (m) from local land surface runoff and local changes in water bodies
        # actually this is equal to self._model.routing.local_input_to_surface_water / self._model.routing.cellArea
        self.totalRunoff = self.runoff + self.local_water_body_flux

        # accumulated total runoff (m3/s) along the drainage network - including local changes in water bodies
        if "accuTotalRunoff" in self.variables_for_report:
            self.accuTotalRunoff = pcr.catchmenttotal(
                self.totalRunoff * self._model.routing.cellArea,
                self._model.routing.lddMap) / vos.secondsPerDay()

        # fossil groundwater storage
        self.storGroundwaterFossil = self._model.groundwater.storGroundwaterFossil

        # total groundwater storage: (non fossil and fossil)
        self.storGroundwaterTotal = self._model.groundwater.storGroundwater + \
            self._model.groundwater.storGroundwaterFossil

        # total active storage thickness (m) for the entire water column - not including fossil groundwater (unmetDemand)
        # - including: interception, snow, soil and non fossil groundwater
        self.totalActiveStorageThickness = pcr.ifthen(
            self._model.routing.landmask,
            self._model.routing.channelStorage / self._model.routing.cellArea +
            self._model.landSurface.totalSto +
            self._model.groundwater.storGroundwater)

        # total water storage thickness (m) for the entire water column:
        # - including: interception, snow, soil, non fossil groundwater and fossil groundwater (unmetDemand)
        # - this is usually used for GRACE comparison
        self.totalWaterStorageThickness = self.totalActiveStorageThickness + \
            self._model.groundwater.storGroundwaterFossil

        # surfaceWaterStorage (unit: m) - negative values may be reported
        self.surfaceWaterStorage = self._model.routing.channelStorage / \
            self._model.routing.cellArea

        # Menno's post-processing: fractions of water sources allocated to satisfy the water demand in each cell
        self.fracSurfaceWaterAllocation = pcr.ifthen(
            self._model.routing.landmask,
            vos.getValDivZero(
                self._model.landSurface.allocSurfaceWaterAbstract,
                self.totalGrossDemand, vos.smallNumber))
        self.fracSurfaceWaterAllocation = pcr.ifthenelse(
            self.totalGrossDemand < vos.smallNumber, 1.0,
            self.fracSurfaceWaterAllocation)
        #
        self.fracNonFossilGroundwaterAllocation = pcr.ifthen(
            self._model.routing.landmask,
            vos.getValDivZero(
                self._model.groundwater.allocNonFossilGroundwater,
                self.totalGrossDemand, vos.smallNumber))
        self.fracNonFossilGroundwaterAllocation = pcr.ifthenelse(
            self.totalGrossDemand < vos.smallNumber, 0.0,
            self.fracNonFossilGroundwaterAllocation)
        #
        self.fracOtherWaterSourceAllocation = pcr.ifthen(
            self._model.routing.landmask,
            vos.getValDivZero(self._model.groundwater.unmetDemand,
                              self.totalGrossDemand, vos.smallNumber))
        self.totalFracWaterSourceAllocation = self.fracSurfaceWaterAllocation + \
            self.fracNonFossilGroundwaterAllocation + \
            self.fracOtherWaterSourceAllocation

        # Stefanie's post processing: reporting lake and reservoir storage (unit: m3)
        self.waterBodyStorage = pcr.ifthen(
            self._model.routing.landmask,
            pcr.ifthen(
                pcr.scalar(self._model.routing.WaterBodies.waterBodyIds) > 0.,
                self._model.routing.WaterBodies.waterBodyStorage)
        )  # Note: This value is after lake/reservoir outflow.
        #
        # snowMelt (m/day)
        self.snowMelt = self._model.landSurface.snowMelt

        # soil moisture state of (approximately) the top 5 cm of soil
        if self._model.landSurface.numberOfSoilLayers == 3:
            self.storUppSurface = self._model.landSurface.storUpp000005  # unit: m
            self.satDegUppSurface = self._model.landSurface.satDegUpp000005  # unit: percentage

        # reporting water balance from the land surface part (excluding surface water bodies)
        self.land_surface_water_balance = self._model.waterBalance

        # evaporation from irrigation areas (m/day) - values are averaged over the entire cell area
        if self._model.landSurface.includeIrrigation:
            self.evaporation_from_irrigation = self._model.landSurface.landCoverObj['irrPaddy'].actualET * \
                self._model.landSurface.landCoverObj['irrPaddy'].fracVegCover + \
                self._model.landSurface.landCoverObj['irrNonPaddy'].actualET * \
                self._model.landSurface.landCoverObj['irrNonPaddy'].fracVegCover
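The allocation fractions above rely on vos.getValDivZero to avoid dividing by a (near-)zero total demand. Below is a minimal plain-Python sketch of that guard, assuming the usual semantics of returning the ratio only when the denominator exceeds a small threshold; the helper name and default values are illustrative, not the actual vos implementation.

def safe_div(numerator, denominator, small_number=1e-39, default=0.0):
    # return numerator/denominator, or a default value when the denominator is (near) zero
    if abs(denominator) > small_number:
        return numerator / denominator
    return default

# example: no demand in a cell -> the surface-water fraction is later overridden to 1.0,
# while the groundwater and "other sources" fractions stay at 0.0
print(safe_div(0.0, 0.0))        # 0.0
print(safe_div(0.002, 0.004))    # 0.5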
Example #57
0
    def readSoil(self, iniItems):

        # default values of soil parameters that are constant/uniform for the entire domain:
        self.clappAddCoeff = pcr.scalar(3.0)  # dimensionless
        self.matricSuctionFC = pcr.scalar(1.0)  # unit: m
        self.matricSuction50 = pcr.scalar(10. / 3.)  # unit: m
        self.matricSuctionWP = pcr.scalar(156.0)  # unit: m
        self.maxGWCapRise = pcr.scalar(5.0)  # unit: m
        #
        # values defined in the ini/configuration file:
        soilParameterConstants = [
            'clappAddCoeff', 'matricSuctionFC', 'matricSuction50',
            'matricSuctionWP', 'maxGWCapRise'
        ]
        for var in soilParameterConstants:
            if var in iniItems.landSurfaceOptions.keys():
                input = iniItems.landSurfaceOptions[str(var)]
                vars(self)[var] = vos.readPCRmapClone(input,self.cloneMap,\
                                                            self.tmpDir,self.inputDir)

        # read soil parameter based on the FAO soil map:
        self.readSoilMapOfFAO(iniItems)

        # assign Campbell's (1974) beta coefficient, as well as degree
        # of saturation at field capacity and corresponding unsaturated hydraulic conductivity
        #
        if self.numberOfLayers == 2:

            self.campbellBetaUpp = 2.*self.poreSizeBetaUpp + \
                                      self.clappAddCoeff                # Campbell's (1974) coefficient ; Rens's line: BCB = 2*BCH + BCH_ADD
            self.campbellBetaLow = 2.*self.poreSizeBetaLow + \
                                      self.clappAddCoeff

            self.effSatAtFieldCapUpp = \
                     (self.matricSuctionFC / self.airEntryValueUpp)**\
                                        (-1 / self.poreSizeBetaUpp )    # saturation degree at field capacity ; THEFF_FC = (PSI_FC/PSI_A)**(-1/BCH)
            self.effSatAtFieldCapLow = \
                     (self.matricSuctionFC / self.airEntryValueLow)**\
                                        (-1 / self.poreSizeBetaLow )

            self.kUnsatAtFieldCapUpp = pcr.max(0., \
             self.effSatAtFieldCapUpp ** self.poreSizeBetaUpp * self.kSatUpp)
            self.kUnsatAtFieldCapLow = pcr.max(0., \
             self.effSatAtFieldCapLow ** self.poreSizeBetaLow * self.kSatLow)
        #
        if self.numberOfLayers == 3:

            self.campbellBetaUpp000005 = 2.*self.poreSizeBetaUpp000005 + \
                                            self.clappAddCoeff
            self.campbellBetaUpp005030 = 2.*self.poreSizeBetaUpp005030 + \
                                            self.clappAddCoeff
            self.campbellBetaLow030150 = 2.*self.poreSizeBetaLow030150 + \
                                            self.clappAddCoeff

            self.effSatAtFieldCapUpp000005 = \
                     (self.matricSuctionFC / self.airEntryValueUpp000005)**\
                                        (-1 / self.poreSizeBetaUpp000005)
            self.effSatAtFieldCapUpp005030 = \
                     (self.matricSuctionFC / self.airEntryValueUpp005030)**\
                                        (-1 / self.poreSizeBetaUpp005030)
            self.effSatAtFieldCapLow030150 = \
                     (self.matricSuctionFC / self.airEntryValueLow030150)**\
                                        (-1 / self.poreSizeBetaLow030150)

            self.kUnsatAtFieldCapUpp000005 = pcr.max(0., \
             self.effSatAtFieldCapUpp000005 ** self.poreSizeBetaUpp000005 * self.kSatUpp000005)
            self.kUnsatAtFieldCapUpp005030 = pcr.max(0., \
             self.effSatAtFieldCapUpp005030 ** self.poreSizeBetaUpp005030 * self.kSatUpp005030)
            self.kUnsatAtFieldCapLow030150 = pcr.max(0., \
             self.effSatAtFieldCapLow030150 ** self.poreSizeBetaLow030150 * self.kSatLow030150)

        # calculate degree of saturation at which transpiration is halved (50)
        # and at wilting point
        #
        if self.numberOfLayers == 2:
            self.effSatAt50Upp = (self.matricSuction50/self.airEntryValueUpp)**\
                                                    (-1/self.poreSizeBetaUpp)
            self.effSatAt50Low = (self.matricSuction50/self.airEntryValueLow)**\
                                                    (-1/self.poreSizeBetaLow)
            self.effSatAtWiltPointUpp = \
                                 (self.matricSuctionWP/self.airEntryValueUpp)**\
                                                    (-1/self.poreSizeBetaUpp)
            self.effSatAtWiltPointLow = \
                                 (self.matricSuctionWP/self.airEntryValueLow)**\
                                                    (-1/self.poreSizeBetaLow)
        if self.numberOfLayers == 3:
            self.effSatAt50Upp000005 = (self.matricSuction50/self.airEntryValueUpp000005)**\
                                                          (-1/self.poreSizeBetaUpp000005)
            self.effSatAt50Upp005030 = (self.matricSuction50/self.airEntryValueUpp005030)**\
                                                          (-1/self.poreSizeBetaUpp005030)
            self.effSatAt50Low030150 = (self.matricSuction50/self.airEntryValueLow030150)**\
                                                          (-1/self.poreSizeBetaLow030150)
            self.effSatAtWiltPointUpp000005 = \
                                       (self.matricSuctionWP/self.airEntryValueUpp000005)**\
                                                          (-1/self.poreSizeBetaUpp000005)
            self.effSatAtWiltPointUpp005030 = \
                                       (self.matricSuctionWP/self.airEntryValueUpp005030)**\
                                                          (-1/self.poreSizeBetaUpp005030)
            self.effSatAtWiltPointLow030150 = \
                                       (self.matricSuctionWP/self.airEntryValueLow030150)**\
                                                          (-1/self.poreSizeBetaLow030150)

        # calculate interflow parameter (TCL):
        #
        if self.numberOfLayers == 2:
            self.interflowConcTime = (2.* self.kSatLow * self.tanslope) / \
                     (self.slopeLength * (1.- self.effSatAtFieldCapLow) * \
                    (self.satVolMoistContLow - self.resVolMoistContLow))    # TCL = Duration*(2*KS2*TANSLOPE)/(LSLOPE*(1-THEFF2_FC)*(THETASAT2-THETARES2))
        #
        if self.numberOfLayers == 3:
            self.interflowConcTime = (2.* self.kSatLow030150 * self.tanslope) / \
                     (self.slopeLength * (1.-self.effSatAtFieldCapLow030150) * \
             (self.satVolMoistContLow030150 - self.resVolMoistContLow030150))

        self.interflowConcTime = pcr.cover(self.interflowConcTime, 0.0)
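A small numeric sketch of the Clapp & Hornberger relations applied above, for a single cell with hypothetical parameter values (air entry value 0.05 m, pore-size beta 5.0, saturated conductivity 0.5 m/day); it reproduces the effSatAtFieldCap and kUnsatAtFieldCap expressions used in readSoil.

matric_suction_fc = 1.0     # matric suction at field capacity (m), as in the code above
air_entry_value   = 0.05    # air entry value psi_a (m), hypothetical
pore_size_beta    = 5.0     # Clapp & Hornberger pore-size parameter, hypothetical
k_sat             = 0.5     # saturated hydraulic conductivity (m/day), hypothetical

# degree of saturation at field capacity: THEFF_FC = (PSI_FC/PSI_A)**(-1/BCH)
eff_sat_fc = (matric_suction_fc / air_entry_value) ** (-1.0 / pore_size_beta)

# unsaturated conductivity at field capacity, following the same expression as the code above
k_unsat_fc = max(0.0, eff_sat_fc ** pore_size_beta * k_sat)

print(round(eff_sat_fc, 3))   # ~0.549
print(round(k_unsat_fc, 3))   # ~0.025 m/day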
Example #58
0
    def readSoilMapOfFAO(self, iniItems):

        # soil variable names given either in the ini or netCDF file:
        soilParameters = [
            'airEntryValue1', 'airEntryValue2', 'poreSizeBeta1',
            'poreSizeBeta2', 'resVolWC1', 'resVolWC2', 'satVolWC1',
            'satVolWC2', 'KSat1', 'KSat2', 'percolationImp'
        ]
        if iniItems.landSurfaceOptions['soilPropertiesNC'] == str(None):
            for var in soilParameters:
                input = iniItems.landSurfaceOptions[str(var)]
                vars(self)[var] = \
                               vos.readPCRmapClone(input,self.cloneMap,\
                                             self.tmpDir,self.inputDir)
                vars(self)[var] = pcr.scalar(vars(self)[var])
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
        else:
            soilPropertiesNC = vos.getFullPath(\
                               iniItems.landSurfaceOptions[\
                                               'soilPropertiesNC'],
                                                self.inputDir)
            for var in soilParameters:
                vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(\
                                    soilPropertiesNC,var, \
                                    cloneMapFileName = self.cloneMap)
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        if self.numberOfLayers == 2:
            self.satVolMoistContUpp = self.satVolWC1  # saturated volumetric moisture content (m3.m-3)
            self.satVolMoistContLow = self.satVolWC2
            self.resVolMoistContUpp = self.resVolWC1  # residual volumetric moisture content (m3.m-3)
            self.resVolMoistContLow = self.resVolWC2
            self.airEntryValueUpp = self.airEntryValue1  # air entry value (m) according to soil water retention curve of Clapp & Hornberger (1978)
            self.airEntryValueLow = self.airEntryValue2
            self.poreSizeBetaUpp = self.poreSizeBeta1  # pore size distribution parameter according to Clapp & Hornberger (1978)
            self.poreSizeBetaLow = self.poreSizeBeta2
            self.kSatUpp = self.KSat1  # saturated hydraulic conductivity (m.day-1)
            self.kSatLow = self.KSat2

        if self.numberOfLayers == 3:
            self.satVolMoistContUpp000005 = self.satVolWC1
            self.satVolMoistContUpp005030 = self.satVolWC1
            self.satVolMoistContLow030150 = self.satVolWC2
            self.resVolMoistContUpp000005 = self.resVolWC1
            self.resVolMoistContUpp005030 = self.resVolWC1
            self.resVolMoistContLow030150 = self.resVolWC2
            self.airEntryValueUpp000005 = self.airEntryValue1
            self.airEntryValueUpp005030 = self.airEntryValue1
            self.airEntryValueLow030150 = self.airEntryValue2
            self.poreSizeBetaUpp000005 = self.poreSizeBeta1
            self.poreSizeBetaUpp005030 = self.poreSizeBeta1
            self.poreSizeBetaLow030150 = self.poreSizeBeta2
            self.kSatUpp000005 = self.KSat1
            self.kSatUpp005030 = self.KSat1
            self.kSatLow030150 = self.KSat2

        self.percolationImp = self.percolationImp  # fractional area where percolation to groundwater store is impeded (dimensionless)

        # soil thickness and storage variable names
        # as given either in the ini or netCDF file:
        soilStorages = [
            'firstStorDepth', 'secondStorDepth', 'soilWaterStorageCap1',
            'soilWaterStorageCap2'
        ]
        if iniItems.landSurfaceOptions['soilPropertiesNC'] == str(None):
            for var in soilStorages:
                input = iniItems.landSurfaceOptions[str(var)]
                temp = str(var) + 'Inp'
                vars(self)[temp] = vos.readPCRmapClone(input,\
                                            self.cloneMap,
                                            self.tmpDir,self.inputDir)
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)
        else:
            soilPropertiesNC = vos.getFullPath(\
                               iniItems.landSurfaceOptions[\
                                               'soilPropertiesNC'],
                                                self.inputDir)
            for var in soilStorages:
                temp = str(var) + 'Inp'
                vars(self)[temp] = vos.netcdf2PCRobjCloneWithoutTime(\
                                     soilPropertiesNC,var, \
                                     cloneMapFileName = self.cloneMap)
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)

        # layer thickness
        if self.numberOfLayers == 2:
            self.thickUpp = (0.30 / 0.30) * self.firstStorDepthInp
            self.thickLow = (1.20 / 1.20) * self.secondStorDepthInp
        if self.numberOfLayers == 3:
            self.thickUpp000005 = (0.05 / 0.30) * self.firstStorDepthInp
            self.thickUpp005030 = (0.25 / 0.30) * self.firstStorDepthInp
            self.thickLow030150 = (1.20 / 1.20) * self.secondStorDepthInp

        # soil storage
        if self.numberOfLayers == 2:
            #~ self.storCapUpp = (0.30/0.30)*self.soilWaterStorageCap1Inp
            #~ self.storCapLow = (1.20/1.20)*self.soilWaterStorageCap2Inp                     # 22 Feb 2014: We can calculate this based on thickness and porosity.
            self.storCapUpp = self.thickUpp * \
                             (self.satVolMoistContUpp - self.resVolMoistContUpp)
            self.storCapLow = self.thickLow * \
                             (self.satVolMoistContLow - self.resVolMoistContLow)
            self.rootZoneWaterStorageCap = self.storCapUpp + \
                                           self.storCapLow
        if self.numberOfLayers == 3:
            self.storCapUpp000005 = self.thickUpp000005 * \
                             (self.satVolMoistContUpp000005 - self.resVolMoistContUpp000005)
            self.storCapUpp005030 = self.thickUpp005030 * \
                             (self.satVolMoistContUpp005030 - self.resVolMoistContUpp005030)
            self.storCapLow030150 = self.thickLow030150 * \
                             (self.satVolMoistContLow030150 - self.resVolMoistContLow030150)
            self.rootZoneWaterStorageCap = self.storCapUpp000005 + \
                                           self.storCapUpp005030 + \
                                           self.storCapLow030150
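For reference, a short worked example of the layer-thickness and storage-capacity arithmetic above for the two-layer case, using hypothetical soil values.

first_stor_depth  = 0.30   # m, hypothetical
second_stor_depth = 1.20   # m, hypothetical
sat_vol_upp, res_vol_upp = 0.45, 0.05   # m3/m3, hypothetical
sat_vol_low, res_vol_low = 0.40, 0.05   # m3/m3, hypothetical

# two-layer configuration: thicknesses equal the input storage depths
thick_upp = (0.30 / 0.30) * first_stor_depth
thick_low = (1.20 / 1.20) * second_stor_depth

# storage capacity = thickness * (saturated - residual volumetric moisture content)
stor_cap_upp  = thick_upp * (sat_vol_upp - res_vol_upp)   # 0.30 * 0.40 = 0.12 m
stor_cap_low  = thick_low * (sat_vol_low - res_vol_low)   # 1.20 * 0.35 = 0.42 m
root_zone_cap = stor_cap_upp + stor_cap_low               # 0.54 m
print(stor_cap_upp, stor_cap_low, root_zone_cap)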
Example #59
0
    def getParameterFiles(self,currTimeStep,cellArea,ldd,\
                               initial_condition_dictionary = None,\
                               currTimeStepInDateTimeFormat = False):

        # parameters for Water Bodies: fracWat
        #                              waterBodyIds
        #                              waterBodyOut
        #                              waterBodyArea
        #                              waterBodyTyp
        #                              waterBodyCap

        # cell surface area (m2) and ldd
        self.cellArea = cellArea
        ldd = pcr.ifthen(self.landmask, ldd)

        # date used for accessing/extracting water body information
        if currTimeStepInDateTimeFormat:
            date_used = currTimeStep
            year_used = currTimeStep.year
        else:
            date_used = currTimeStep.fulldate
            year_used = currTimeStep.year
        if self.onlyNaturalWaterBodies == True:
            date_used = self.dateForNaturalCondition
            year_used = self.dateForNaturalCondition[0:4]

        # fracWat = fraction of surface water bodies (dimensionless)
        self.fracWat = pcr.scalar(0.0)

        if self.useNetCDF:
            self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \
                           date_used, useDoy = 'yearly',\
                           cloneMapFileName = self.cloneMap)
        else:
            self.fracWat = vos.readPCRmapClone(\
                           self.fracWaterInp+str(year_used)+".map",
                           self.cloneMap,self.tmpDir,self.inputDir)

        self.fracWat = pcr.cover(self.fracWat, 0.0)
        self.fracWat = pcr.max(0.0, self.fracWat)
        self.fracWat = pcr.min(1.0, self.fracWat)

        self.waterBodyIds = pcr.nominal(0)  # waterBody ids
        self.waterBodyOut = pcr.boolean(0)  # waterBody outlets
        self.waterBodyArea = pcr.scalar(0.)  # waterBody surface areas

        # water body ids
        if self.useNetCDF:
            self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyIds = vos.readPCRmapClone(\
                self.waterBodyIdsInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)
        #
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.nominal(self.waterBodyIds))

        # water body outlets (correcting outlet positions)
        wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd)
        self.waterBodyOut = pcr.ifthen(wbCatchment ==\
                            pcr.areamaximum(wbCatchment, \
                            self.waterBodyIds),\
                            self.waterBodyIds) # = outlet ids; this may give more than one outlet per water body if several cells share the largest upstream area
        # - make sure that there is only one outlet for each water body
        self.waterBodyOut = pcr.ifthen(\
                            pcr.areaorder(pcr.scalar(self.waterBodyOut), \
                            self.waterBodyOut) == 1., self.waterBodyOut)
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            self.waterBodyOut)

        # TODO: Please also consider endorheic lakes!

        # correcting water body ids
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.subcatchment(ldd,self.waterBodyOut))

        # boolean map for water body outlets:
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyOut) > 0.,\
                            pcr.boolean(1))

        # reservoir surface area (m2):
        if self.useNetCDF:
            resSfArea = 1000. * 1000. * \
                        vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \
                        date_used, useDoy = 'yearly',\
                        cloneMapFileName = self.cloneMap)
        else:
            resSfArea = 1000. * 1000. * vos.readPCRmapClone(
                   self.resSfAreaInp+str(year_used)+".map",\
                   self.cloneMap,self.tmpDir,self.inputDir)
        resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds)
        resSfArea = pcr.cover(resSfArea, 0.)

        # water body surface area (m2): (lakes and reservoirs)
        self.waterBodyArea = pcr.max(pcr.areatotal(\
                             pcr.cover(\
                             self.fracWat*self.cellArea, 0.0), self.waterBodyIds),
                             pcr.areaaverage(\
                             pcr.cover(resSfArea, 0.0) ,       self.waterBodyIds))
        self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\
                             self.waterBodyArea)

        # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
        self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.,
                                       self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyOut)

        # water body types:
        # - 2 = reservoirs (regulated discharge)
        # - 1 = lakes (weirFormula)
        # - 0 = neither lakes nor reservoirs (e.g. wetlands)
        self.waterBodyTyp = pcr.nominal(0)

        if self.useNetCDF:
            self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyTyp = vos.readPCRmapClone(
                self.waterBodyTypInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)

        # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs
        #
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\
                                             self.waterBodyIds)     # choose only one type: either lake or reservoir
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyTyp)

        # correcting lakes and reservoirs ids and outlets
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut)

        # reservoir maximum capacity (m3):
        self.resMaxCap = pcr.scalar(0.0)
        self.waterBodyCap = pcr.scalar(0.0)

        if self.useNetCDF:
            self.resMaxCap = 1000. * 1000. * \
                             vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \
                             date_used, useDoy = 'yearly',\
                             cloneMapFileName = self.cloneMap)
        else:
            self.resMaxCap = 1000. * 1000. * vos.readPCRmapClone(\
                self.resMaxCapInp+str(year_used)+".map", \
                self.cloneMap,self.tmpDir,self.inputDir)

        self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\
                                    self.resMaxCap)
        self.resMaxCap = pcr.areaaverage(self.resMaxCap,\
                                         self.waterBodyIds)

        # water body capacity (m3): (lakes and reservoirs)
        self.waterBodyCap = pcr.cover(
            self.resMaxCap, 0.0)  # Note: Most of lakes have capacities > 0.
        self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyCap)

        # correcting water body types:                                  # reservoirs with zero capacity are treated as lakes
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)
        self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\
                                           self.waterBodyTyp,\
                 pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\
                                           pcr.nominal(1),\
                                           self.waterBodyTyp))

        # final corrections:
        self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\
                                       self.waterBodyTyp)                     # make sure that all lakes and/or reservoirs have surface areas
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)                     # make sure that only types 1 and 2 will be considered in lake/reservoir functions
        self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                            self.waterBodyIds)                                # make sure that all lakes and/or reservoirs have ids
        self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\
                                                  self.waterBodyOut)          # make sure that all lakes and/or reservoirs have outlets

        # for a natural run (self.onlyNaturalWaterBodies == True)
        # which uses only the year 1900, assume all reservoirs are lakes
        if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition:
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are treated as lakes."
            )
            self.waterBodyTyp = \
             pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                        pcr.nominal(1))

        # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
        test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\
               pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds))
        a, b, c = vos.getMinMaxMean(
            pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
        threshold = 1e-3
        if abs(a) > threshold or abs(b) > threshold:
            logger.warning(
                "Missing information in some lakes and/or reservoirs.")

        # at the beginning of simulation period (timeStepPCR = 1)
        # - we have to define/get the initial conditions
        #
        if initial_condition_dictionary is not None and currTimeStep.timeStepPCR == 1:
            self.getICs(initial_condition_dictionary)

        # For each new reservoir (introduced at the beginning of the year)
        # initiating storage, average inflow and outflow
        # PS: THIS IS NOT NEEDED FOR OFFLINE MODFLOW RUN!
        #
        try:
            self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0)
            self.avgInflow = pcr.cover(self.avgInflow, 0.0)
            self.avgOutflow = pcr.cover(self.avgOutflow, 0.0)
            self.waterBodyStorage = pcr.ifthen(self.landmask,
                                               self.waterBodyStorage)
            self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow)
            self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow)
        except AttributeError:
            # PS: FOR OFFLINE MODFLOW RUN (the storage, inflow and outflow attributes are not defined)!
            pass
        # TODO: Remove try and except

        # cropping only in the landmask region:
        self.fracWat = pcr.ifthen(self.landmask, self.fracWat)
        self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut)
        self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea)
        self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp)
        self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
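The outlet correction above is a reusable PCRaster pattern: pick the cell with the largest upstream area inside each water body, then keep exactly one such cell per body. A stand-alone sketch of that pattern, assuming hypothetical input maps "ldd.map" (drainage directions) and "water_body_ids.map" (nominal ids).

import pcraster as pcr

pcr.setclone("ldd.map")                              # hypothetical clone/ldd map
ldd = pcr.readmap("ldd.map")
water_body_ids = pcr.readmap("water_body_ids.map")   # hypothetical nominal map of lake/reservoir ids

# upstream cell count per cell
wb_catchment = pcr.catchmenttotal(pcr.scalar(1.0), ldd)

# candidate outlets: cells with the largest upstream area within each water body
outlets = pcr.ifthen(wb_catchment == pcr.areamaximum(wb_catchment, water_body_ids), water_body_ids)

# keep only one outlet per water body (ties are broken by areaorder)
outlets = pcr.ifthen(pcr.areaorder(pcr.scalar(outlets), outlets) == 1.0, outlets)

# boolean outlet map
outlets_boolean = pcr.ifthen(pcr.scalar(outlets) > 0., pcr.boolean(1))
pcr.report(outlets_boolean, "water_body_outlets.map")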
Example #60
0
    def checkWaterBalance(self, storesAtBeginning, storesAtEnd):
        # for all modules: snow + interception + soil + groundwater + waterDemand
        # except river/routing

        irrGrossDemand  = pcr.ifthen(self.landmask,\
                                self.landSurface.irrGrossDemand)        # unit: m

        nonIrrGrossDemand = \
                           pcr.ifthen(self.landmask,\
                                self.landSurface.nonIrrGrossDemand)     # unit: m

        precipitation   = pcr.ifthen(self.landmask,\
                                     self.meteo.precipitation)          # unit: m

        surfaceWaterInf =  pcr.ifthen(self.landmask,\
                                      self.groundwater.surfaceWaterInf)

        surfaceWaterAbstraction = \
                           pcr.ifthen(self.landmask,\
                                      self.landSurface.actSurfaceWaterAbstract)

        nonFossilGroundwaterAbs = pcr.ifthen(
            self.landmask, self.groundwater.nonFossilGroundwaterAbs)

        unmetDemand      = pcr.ifthen(self.landmask,\
                                      self.groundwater.unmetDemand)                                   # PS: We assume that unmetDemand is extracted (only) to satisfy local demand.

        runoff = pcr.ifthen(self.landmask, self.routing.runoff)

        actualET         = pcr.ifthen(self.landmask,\
                                      self.landSurface.actualET)

        vos.waterBalanceCheck([precipitation,surfaceWaterInf,irrGrossDemand],\
                              [actualET,runoff,nonFossilGroundwaterAbs],\
                              [storesAtBeginning],\
                              [storesAtEnd],\
                              'all modules (including water demand), but except river/routing',\
                               True,\
                               self._modelTime.fulldate,threshold=1e-3)

        if self.landSurface.usingAllocSegments:

            allocSurfaceWaterAbstract = \
                           pcr.ifthen(self.landmask,\
                                      self.landSurface.allocSurfaceWaterAbstract)

            allocNonFossilGroundwaterAbs = \
                           pcr.ifthen(self.landmask,\
                                      self.groundwater.allocNonFossilGroundwater)

            allocUnmetDemand = unmetDemand  # PS: We assume that unmetDemand is extracted (only) to satisfy local demand.

            segTotalDemand = pcr.areatotal(
                pcr.cover((irrGrossDemand + nonIrrGrossDemand) *
                          self.routing.cellArea, 0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocSurfaceWaterAbstract = pcr.areatotal(
                pcr.cover(allocSurfaceWaterAbstract * self.routing.cellArea,
                          0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocNonFossilGroundwaterAbs = pcr.areatotal(
                pcr.cover(allocNonFossilGroundwaterAbs * self.routing.cellArea,
                          0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocUnmetDemand = pcr.areatotal(
                pcr.cover(allocUnmetDemand * self.routing.cellArea, 0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            vos.waterBalanceCheck([segTotalDemand],\
                                  [segAllocSurfaceWaterAbstract,segAllocNonFossilGroundwaterAbs,segAllocUnmetDemand],\
                                  [pcr.scalar(0.0)],\
                                  [pcr.scalar(0.0)],\
                                  'Water balance error in water allocation (per zone). Note that error here is most likely due to rounding error (32 bit implementation of pcraster)',\
                                   True,\
                                   self._modelTime.fulldate,threshold=5e-3)
        else:

            vos.waterBalanceCheck([irrGrossDemand,nonIrrGrossDemand],\
                                  [surfaceWaterAbstraction,nonFossilGroundwaterAbs,unmetDemand],\
                                  [pcr.scalar(0.0)],\
                                  [pcr.scalar(0.0)],\
                                  'Water balance error in water allocation.',\
                                   True,\
                                   self._modelTime.fulldate,threshold=1e-3)
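The per-zone check above hinges on one aggregation pattern: convert a cell flux (m) to a volume (m3), total it over each allocation segment, and divide by the segment area to get a zone-average depth. Below is a minimal sketch of that pattern with hypothetical maps (cell_flux.map in m, cell_area.map in m2, alloc_segments.map as nominal zone ids); it is an illustration, not the model's own routine.

import pcraster as pcr

pcr.setclone("alloc_segments.map")                    # hypothetical clone map
cell_flux = pcr.readmap("cell_flux.map")              # hypothetical flux per cell (m)
cell_area = pcr.readmap("cell_area.map")              # hypothetical cell area (m2)
alloc_segments = pcr.readmap("alloc_segments.map")    # hypothetical allocation zones (nominal)

# segment area (m2): total cell area within each zone
segment_area = pcr.areatotal(pcr.cover(cell_area, 0.0), alloc_segments)

# zone-average depth (m): total volume per zone divided by the zone area
segment_flux = pcr.areatotal(pcr.cover(cell_flux * cell_area, 0.0), alloc_segments) / segment_area
pcr.report(segment_flux, "segment_flux.map")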