Example #1
def snaptomap(points, mmap):
    """
    Snap the points in _points_ to the nearest non-missing
    values in _mmap_. Can be used to move gauge locations
    to the nearest rivers.

    Input:
        - points - map with points to move
        - mmap - map with points to move to

    Return:
        - map with shifted points
    """
    points = pcr.cover(points, 0)
    # Create unique id map of mmap cells
    unq = pcr.nominal(pcr.cover(pcr.uniqueid(pcr.defined(mmap)), pcr.scalar(0.0)))
    # Now fill holes in the mmap map with values indicating the closest mmap cell.
    dist_cellid = pcr.scalar(pcr.spreadzone(unq, 0, 1))
    # Get map with values at locations in points with the closest mmap cell
    dist_cellid = pcr.ifthenelse(points > 0, dist_cellid, 0)
    # Spread this out
    dist_fill = pcr.spreadzone(pcr.nominal(dist_cellid), 0, 1)
    # Find the new (moved) locations
    npt = pcr.uniqueid(pcr.boolean(pcr.ifthen(dist_fill == unq, unq)))
    # Now recreate the original value in the points maps
    ptcover = pcr.spreadzone(pcr.cover(points, 0), 0, 1)
    # Now get the org point value in the pt map
    nptorg = pcr.ifthen(npt > 0, ptcover)

    return nptorg
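
A minimal usage sketch (not from the source); "clone.map", "gauges.map" and "rivers.map" are hypothetical file names:

import pcraster as pcr

pcr.setclone("clone.map")            # every PCRaster session needs a clone map
gauges = pcr.readmap("gauges.map")   # points to move (gauge ids > 0)
rivers = pcr.readmap("rivers.map")   # cells to snap to (non-missing = river)
snapped = snaptomap(gauges, rivers)
pcr.report(snapped, "gauges_snapped.map")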
Example #2
    def __init__(self, netcdffile, logging):
        """
        Try to set up a class to read netCDF files
        (converted with pcr2netcdf.py)

        netcdffile: file to read the forcing data from
        logging: python logging object
        vars: list of variables to get from file
        """

        if os.path.exists(netcdffile):
            self.dataset = netCDF4.Dataset(netcdffile, mode="r")
        else:
            msg = os.path.abspath(netcdffile) + " not found!"
            logging.error(msg)
            raise ValueError(msg)

        try:
            self.x = self.dataset.variables["x"][:]
        except KeyError:
            self.x = self.dataset.variables["lon"][:]
        # Now check Y values to see if we must flip the data
        try:
            self.y = self.dataset.variables["y"][:]
        except KeyError:
            self.y = self.dataset.variables["lat"][:]

        x = pcr.pcr2numpy(pcr.xcoordinate(pcr.boolean(pcr.cover(1.0))), np.nan)[0, :]
        y = pcr.pcr2numpy(pcr.ycoordinate(pcr.boolean(pcr.cover(1.0))), np.nan)[:, 0]

        (self.latidx,) = np.logical_and(self.x >= x.min(), self.x < x.max()).nonzero()
        (self.lonidx,) = np.logical_and(self.y >= y.min(), self.y < y.max()).nonzero()

        logging.info("Reading static input from netCDF file: " + netcdffile)
Example #3
def subcatch_order_a(ldd, oorder):
    """
    Determines subcatchments using the catchment order

    This version uses the last cell BELOW order to derive the
    catchments. In general you want the _b version

    Input:
        - ldd
        - order - order to use

    Output:
        - map with catchment for the given streamorder
    """
    outl = find_outlet(ldd)
    large = pcr.subcatchment(ldd, pcr.boolean(outl))
    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    dif = pcr.upstream(
        ldd,
        pcr.cover(
            pcr.ifthen(
                large,
                pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts))),
            ),
            0,
        ),
    )
    dif = pcr.cover(pcr.scalar(outl), dif)  # Add catchment outlet
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(dif)))
    sc = pcr.subcatchment(ldd, dif)

    return sc, dif, stt
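
A hedged usage sketch (assumes find_outlet from the same module is available; "wflow_ldd.map" is a hypothetical file):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
sc, dif, stt = subcatch_order_a(ldd, 4)   # subcatchments for stream order 4
pcr.report(sc, "subcatch_order4.map")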
Example #4
    def getLakeOutflow(
        self, avgChannelDischarge, length_of_time_step=vos.secondsPerDay()
    ):

        # waterHeight (m): temporary variable, a function of storage:
        minWaterHeight = 0.001  # (m) Rens used 0.001 m as the limit # this is to make sure there is always lake outflow,
        # but it will be still limited by available self.waterBodyStorage
        waterHeight = pcr.cover(
            pcr.max(
                minWaterHeight,
                (self.waterBodyStorage - pcr.cover(self.waterBodyCap, 0.0))
                / self.waterBodyArea,
            ),
            0.0,
        )

        # weirWidth (m) :
        # - estimated from avgOutflow (m3/s) using the bankfull discharge formula
        #
        avgOutflow = self.avgOutflow
        avgOutflow = pcr.ifthenelse(
            avgOutflow > 0.0,
            avgOutflow,
            pcr.max(avgChannelDischarge, self.avgInflow, 0.001),
        )  # This is needed when new lakes/reservoirs introduced (its avgOutflow is still zero).
        avgOutflow = pcr.areamaximum(avgOutflow, self.waterBodyIds)
        #
        bankfullWidth = pcr.cover(pcr.scalar(4.8) * ((avgOutflow) ** (0.5)), 0.0)
        weirWidthUsed = bankfullWidth
        weirWidthUsed = pcr.max(
            weirWidthUsed, self.minWeirWidth
        )  # TODO: minWeirWidth based on the GRanD database
        weirWidthUsed = pcr.cover(
            pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.0, weirWidthUsed), 0.0
        )

        # avgInflow <= lakeOutflow (weirFormula) <= waterBodyStorage
        lakeOutflowInM3PerSec = pcr.max(
            self.weirFormula(waterHeight, weirWidthUsed), self.avgInflow
        )  # unit: m3/s

        # estimate volume of water released by lakes
        lakeOutflow = lakeOutflowInM3PerSec * length_of_time_step  # unit: m3
        lakeOutflow = pcr.min(self.waterBodyStorage, lakeOutflow)
        #
        lakeOutflow = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.0, lakeOutflow)
        lakeOutflow = pcr.ifthen(pcr.scalar(self.waterBodyTyp) == 1, lakeOutflow)

        # TODO: Consider endorheic lake/basin. No outflow for endorheic lake/basin!

        return lakeOutflow
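
The self.weirFormula used above is not shown in this fragment. A hypothetical sketch of the kind of rectangular-weir relation it might resemble (the 1.7 coefficient and the 3/2 exponent are textbook broad-crested weir values, not taken from the source):

import pcraster as pcr

def weir_formula_sketch(waterHeight, weirWidth, coefficient=1.7):
    # classic broad-crested weir relation: Q = c * b * H**(3/2), in m3/s
    return coefficient * weirWidth * pcr.max(waterHeight, 0.0) ** 1.5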
Example #5
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None, up_area=None, neg_HAND=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X
    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
                        identified as river should have a value > 0, other
                        pixels a value of zero.
        basin=None -- set a boolean pcraster map where areas with True are estimated using the nearest drain in ldd distance
                        and areas with False by means of the nearest friction distance. Friction distance estimated using the
                        upstream area as weight (i.e. drains with a bigger upstream area have a lower friction)
                        the spreadzone operator is used in this case.
        up_area=None -- provide the upstream area (if not assigned a guesstimate is prepared, assuming the LDD covers a
                        full catchment area)
        neg_HAND=None -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
                        stream (for example when there are natural embankments)
    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths
            according to D8 directions
    """
    if rivers is None:
        # prepare stream from a strahler threshold
        stream = pcr.ifthenelse(pcr.accuflux(ldd, 1) >= accuThreshold,
                                pcr.boolean(1), pcr.boolean(0))
    else:
        # convert stream network to boolean
        stream = pcr.boolean(pcr.cover(rivers, 0))
    # determine height in river (in DEM*100 unit as ordinal)
    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem*100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        # use basin to allocate areas outside basin to the nearest stream. Nearest is weighted by upstream area
        if up_area is None:
            up_area = pcr.accuflux(ldd, 1)
        up_area = pcr.ifthen(stream, up_area)  # mask areas outside streams
        friction = 1./pcr.scalar(pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0))
        # if basin, use nearest river within subcatchment, if outside basin, use weighted-nearest river
        up_elevation = pcr.ifthenelse(basin, pcr.scalar(pcr.subcatchment(ldd, height_river)), pcr.scalar(pcr.spreadzone(height_river, 0, friction)))
        # replace areas outside of basin by a spread zone calculation.
    # make negative HANDS also possible
    if neg_HAND == 1:
        hand = (pcr.scalar(pcr.ordinal(dem*100))-up_elevation)/100  # convert back to float in DEM units
    else:
        hand = pcr.max(pcr.scalar(pcr.ordinal(dem*100))-up_elevation, 0)/100  # convert back to float in DEM units
    dist = pcr.ldddist(ldd, stream, 1)  # compute horizontal distance estimate
    return hand, dist
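
A minimal usage sketch (file names are hypothetical):

import pcraster as pcr

pcr.setclone("wflow_dem.map")
dem = pcr.readmap("wflow_dem.map")
ldd = pcr.readmap("wflow_ldd.map")
hand, dist = derive_HAND(dem, ldd, accuThreshold=1000)
pcr.report(hand, "hand.map")
pcr.report(dist, "dist_to_stream.map")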
Example #6
def sum_list_cover(list_of_maps, covermap):
    """
    Sums a list of PCRaster maps, using cover to fill in missing values

    :param list_of_maps: list of maps to sum
    :param covermap: map / value to use for cover

    :return: sum of list of maps (single map)
    """
    sum_ = pcr.cover(0.0)
    for m in list_of_maps:
        sum_ = sum_ + pcr.cover(m, covermap)

    return sum_
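
Usage sketch (hypothetical file names); missing cells in each map contribute the cover value instead of making the sum missing:

import pcraster as pcr

pcr.setclone("clone.map")
maps = [pcr.readmap(f) for f in ("p1.map", "p2.map", "p3.map")]
total = sum_list_cover(maps, 0.0)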
Example #7
def classify(
    inmap, lower=[0, 10, 20, 30], upper=[10, 20, 30, 40], classes=[2, 2, 3, 4]
):
    """
    Classify a scalar map according to the boundaries given in lower, upper, and classes.

    """

    result = pcr.ordinal(pcr.cover(-1))
    for l, u, c in zip(lower, upper, classes):
        result = pcr.cover(
            pcr.ifthen(inmap >= l, pcr.ifthen(inmap < u, pcr.ordinal(c))), result
        )

    return pcr.ifthen(result >= 0, result)
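
Usage sketch with the default class boundaries [0,10), [10,20), [20,30), [30,40) ("dem.map" is a hypothetical input):

import pcraster as pcr

pcr.setclone("dem.map")
dem = pcr.readmap("dem.map")
dem_class = classify(dem)
pcr.report(dem_class, "dem_class.map")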
Example #8
def pt_flow_in_river(ldd, river):
    """
    Returns all points (True) that flow into the main river (boolean map with river set to True)

    :param ldd: drainage network
    :param river: map of the river (True river, False non-river)
    :return dspts: map with inflow points into the river (True)
    :return catch: catchment of each of the inflow points
    """

    dspts = pcr.downstream(ldd, pcr.cover(river, 0))
    dspts = pcr.ifthenelse(pcr.cover(river, 0) == 1, 0, dspts)

    catch = pcr.subcatchment(ldd, pcr.nominal(pcr.uniqueid(dspts)))

    return dspts, catch
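
Usage sketch (hypothetical inputs; river must be a boolean map):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
river = pcr.readmap("wflow_river.map")
inflow_pts, catchments = pt_flow_in_river(ldd, river)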
Example #9
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    print(v)
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath is not None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
                PCRmap = pcr.ldd(PCRmap)
            if isNomMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
                PCRmap = pcr.nominal(PCRmap)
            co = 'rm ' + str(tmpDir) + '*.*'
            cOut, err = subprocess.Popen(co, stdout=subprocess.PIPE, stderr=open('/dev/null', 'w'), shell=True).communicate()
    else:
        PCRmap = pcr.scalar(float(v))
    if cover is not None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
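
Usage sketch (assumes the helper functions isSameClone, getFullPath and gdalwarpPCR from the same module are available; paths are hypothetical):

lu = readPCRmapClone("landuse.map", "clone.map", "tmp/", isNomMap=True)
frac = readPCRmapClone("0.35", "clone.map", "tmp/")  # plain numbers are accepted too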
Example #10
def subcatch_stream(ldd, stream, threshold):
    """
    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        stream -- pcraster object ordinal, stream order map
        threshold -- integer, strahler threshold, subcatchments ge threshold are
                 derived
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    # stream = pcr.streamorder(ldd)
    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge,
        pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5,
            pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge),
                pcr.boolean(1),
                pcr.boolean(0),
            ),
        ),
    )

    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))
    return stream_ge, subcatch
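
Usage sketch (hypothetical input file):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
stream = pcr.streamorder(ldd)        # derive Strahler orders first
stream_ge, subcatch = subcatch_stream(ldd, stream, threshold=4)
pcr.report(subcatch, "subcatch_str4.map")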
Example #11
    def update(
        self,
        newStorageAtLakeAndReservoirs,
        timestepsToAvgDischarge,
        maxTimestepsToAvgDischargeShort,
        maxTimestepsToAvgDischargeLong,
        currTimeStep,
        avgChannelDischarge,
        length_of_time_step=vos.secondsPerDay(),
        downstreamDemand=None,
    ):

        if self.debugWaterBalance:
            preStorage = self.waterBodyStorage  # unit: m

        self.timestepsToAvgDischarge = timestepsToAvgDischarge  # TODO: include this one in "currTimeStep"

        # obtain inflow (and update storage)
        self.moveFromChannelToWaterBody(
            newStorageAtLakeAndReservoirs,
            timestepsToAvgDischarge,
            maxTimestepsToAvgDischargeShort,
            length_of_time_step,
        )

        # calculate outflow (and update storage)
        self.getWaterBodyOutflow(
            maxTimestepsToAvgDischargeLong,
            avgChannelDischarge,
            length_of_time_step,
            downstreamDemand,
        )

        if self.debugWaterBalance:
            vos.waterBalanceCheck(
                [pcr.cover(self.inflow / self.waterBodyArea, 0.0)],
                [pcr.cover(self.waterBodyOutflow / self.waterBodyArea, 0.0)],
                [pcr.cover(preStorage / self.waterBodyArea, 0.0)],
                [pcr.cover(self.waterBodyStorage / self.waterBodyArea, 0.0)],
                "WaterBodyStorage (unit: m)",
                True,
                currTimeStep.fulldate,
                threshold=5e-3,
            )
Example #12
def derive_HAND(dem, ldd, accuThreshold, rivers=None, basin=None):
    """
    Function derives Height-Above-Nearest-Drain.
    See http://www.sciencedirect.com/science/article/pii/S003442570800120X
    Input:
        dem -- pcraster object float32, elevation data
        ldd -- pcraster object direction, local drain directions
        accuThreshold -- upstream amount of cells as threshold for river
            delineation
        rivers=None -- you can provide a rivers layer here. Pixels that are
                        identified as river should have a value > 0, other
                        pixels a value of zero.
        basin=None -- set a boolean pcraster map where areas with True are estimated using the nearest drain in ldd distance
                        and areas with False by means of the nearest friction distance. Friction distance estimated using the
                        upstream area as weight (i.e. drains with a bigger upstream area have a lower friction)
                        the spreadzone operator is used in this case.
    Output:
        hand -- pcraster object float32, height, normalised to nearest stream
        dist -- distance to nearest stream measured in cell lengths
            according to D8 directions
    """
    if rivers is None:
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0)
        )
    else:
        stream = pcr.boolean(pcr.cover(rivers, 0))

    height_river = pcr.ifthenelse(stream, pcr.ordinal(dem * 100), 0)
    if basin is None:
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
    else:
        drainage_surf = pcr.ifthen(rivers, pcr.accuflux(ldd, 1))
        weight = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(drainage_surf), 0), 0, 0)
        )
        up_elevation = pcr.ifthenelse(
            basin,
            pcr.scalar(pcr.subcatchment(ldd, height_river)),
            pcr.scalar(pcr.spreadzone(height_river, 0, weight)),
        )
        # replace areas outside of basin by a spread zone calculation.
    hand = pcr.max(pcr.scalar(pcr.ordinal(dem * 100)) - up_elevation, 0) / 100
    dist = pcr.ldddist(ldd, stream, 1)

    return hand, dist
Example #13
def agriZone_Ep_Sa_cropG(self, k):
    """
    - Potential evaporation is decreased by energy used for interception evaporation
    - Formula for evaporation based on LP
    - Outgoing fluxes are determined based on (value in previous timestep + inflow),
    and if this leads to negative storage, the outgoing fluxes are corrected proportionally --> Eu is
    no longer taken into account for this correction
    - Qa is determined from overflow from Sa
    - Fa is based on storage in Sa
    - Code for ini-file: 4
    """
    JarvisCoefficients.calcEp(self, k)
    self.PotEvaporation = pcr.cover(pcr.ifthenelse(self.EpHour >= 0, self.EpHour, 0), 0)

    self.samax2 = self.samax[k] * self.cropG
    self.Qaadd = pcr.max(self.Sa_t[k] - self.samax2, 0)

    self.Qa = pcr.max(self.Pe - (self.samax2 - self.Sa_t[k]), 0) + self.Qaadd
    self.Sa[k] = self.Sa_t[k] + (self.Pe - self.Qa)
    self.SaN = pcr.min(self.Sa[k] / self.samax2, 1)
    self.SuN = self.Su[k] / self.sumax[k]

    self.Ea1 = pcr.max((self.PotEvaporation - self.Ei), 0) * pcr.min(
        self.Sa[k] / (self.samax2 * self.LP[k]), 1
    )

    self.Fa1 = pcr.ifthenelse(
        self.SaN > 0,
        self.Fmin[k]
        + (self.Fmax[k] - self.Fmin[k]) * e ** (-self.decF[k] * (1 - self.SaN)),
        0,
    )
    self.Sa[k] = self.Sa_t[k] + (self.Pe - self.Qa) - self.Fa1 - self.Ea1

    self.Sa_diff = pcr.ifthenelse(self.Sa[k] < 0, self.Sa[k], 0)
    self.Fa = (
        self.Fa1
        + (self.Fa1 / pcr.ifthenelse(self.Fa1 + self.Ea1 > 0, self.Fa1 + self.Ea1, 1))
        * self.Sa_diff
    )
    self.Ea = (
        self.Ea1
        + (self.Ea1 / pcr.ifthenelse(self.Fa1 + self.Ea1 > 0, self.Fa1 + self.Ea1, 1))
        * self.Sa_diff
    )
    self.Sa[k] = self.Sa_t[k] + (self.Pe - self.Qa) - self.Ea - self.Fa
    self.Sa[k] = pcr.ifthenelse(self.Sa[k] < 0, 0, self.Sa[k])
    self.Sa_diff2 = pcr.ifthen(self.Sa[k] < 0, self.Sa[k])

    self.wbSa_[k] = self.Pe - self.Ea - self.Qa - self.Fa - self.Sa[k] + self.Sa_t[k]

    self.Ea_[k] = self.Ea
    self.Qa_[k] = self.Qa
    self.Fa_[k] = self.Fa
Example #14
def glacierHBV(GlacierFrac, 
                GlacierStore, 
                Snow, 
                Temperature, 
                TT, 
                Cfmax, 
                G_SIfrac,
                timestepsecs,
                basetimestep):
    """
    Run Glacier module and add the snowpack on-top of it.
    First, a fraction of the snowpack is converted into ice using the HBV-light
    model (fraction between 0.001-0.005 per day).
    Glacier melting is modelled using a Temperature degree factor and only
    occurs if the snow cover < 10 mm.


    :ivar GlacierFrac: Fraction of wflow cell covered by glaciers
    :ivar GlacierStore: Volume of the glacier in the cell in mm w.e.
    :ivar Snow: Snow pack on top of the glacier
    :ivar Temperature: Air temperature
    :ivar TT: Temperature threshold for ice melting
    :ivar Cfmax: Ice degree-day factor in mm/(°C·day)
    :ivar G_SIfrac: Fraction of the snow part turned into ice each timestep
    :ivar timestepsecs: Model timestep in seconds
    :ivar basetimestep: Model base timestep (86400 seconds)

    :returns: Snow, Snow2Glacier, GlacierStore, GlacierMelt
    """
    
    #Fraction of the snow transformed into ice (HBV-light model)
    Snow2Glacier = G_SIfrac * Snow

    Snow2Glacier = pcr.ifthenelse(
        GlacierFrac > 0.0, Snow2Glacier, pcr.scalar(0.0)
    )
    # Max conversion to 8mm/day
    Snow2Glacier = (
        pcr.min(Snow2Glacier, 8.0) * timestepsecs / basetimestep
    )

    Snow = Snow - (Snow2Glacier * GlacierFrac)
    GlacierStore = GlacierStore + Snow2Glacier

    PotMelt = pcr.ifthenelse(
        Temperature > TT, Cfmax * (Temperature - TT), pcr.scalar(0.0)
    )  # Potential snow melt, based on temperature

    GlacierMelt = pcr.ifthenelse(
        Snow < 10.0, pcr.min(PotMelt, GlacierStore), pcr.cover(0.0)
    )  # actual Glacier melt
    GlacierStore = GlacierStore - GlacierMelt  # dry snow content

    return Snow, Snow2Glacier, GlacierStore, GlacierMelt
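
Usage sketch with illustrative parameter values (the maps and numbers are hypothetical, not from the source):

import pcraster as pcr

pcr.setclone("clone.map")
snow, snow2glacier, store, melt = glacierHBV(
    GlacierFrac=pcr.readmap("glacierfrac.map"),
    GlacierStore=pcr.readmap("glacierstore.map"),
    Snow=pcr.scalar(5.0),          # mm water equivalent
    Temperature=pcr.scalar(2.0),   # degrees C
    TT=pcr.scalar(0.0),
    Cfmax=pcr.scalar(3.75),
    G_SIfrac=pcr.scalar(0.002),
    timestepsecs=86400,
    basetimestep=86400,
)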
Example #15
def rainfall_interception_gash(
    Cmax, EoverR, CanopyGapFraction, Precipitation, CanopyStorage, maxevap=9999
):
    """
    Interception according to the Gash model (For daily timesteps). 
    """
    # TODO:  add other rainfall interception method (lui)
    # TODO: Include subdaily Gash model
    # TODO: add LAI variation in year
    # Hack for stemflow

    pt = 0.1 * CanopyGapFraction

    P_sat = pcr.max(
        pcr.scalar(0.0),
        pcr.cover(
            (-Cmax / EoverR) * pcr.ln(1.0 - (EoverR / (1.0 - CanopyGapFraction - pt))),
            pcr.scalar(0.0),
        ),
    )

    # large storms P > P_sat
    largestorms = Precipitation > P_sat

    Iwet = pcr.ifthenelse(
        largestorms,
        ((1 - CanopyGapFraction - pt) * P_sat) - Cmax,
        Precipitation * (1 - CanopyGapFraction - pt),
    )
    Isat = pcr.ifthenelse(largestorms, (EoverR) * (Precipitation - P_sat), 0.0)
    Idry = pcr.ifthenelse(largestorms, Cmax, 0.0)
    Itrunc = 0

    StemFlow = pt * Precipitation

    ThroughFall = Precipitation - Iwet - Idry - Isat - Itrunc - StemFlow
    Interception = Iwet + Idry + Isat + Itrunc

    # Now correct for areas without any interception (say open water where Cmax is zero)
    CmaxZero = Cmax <= 0.0
    ThroughFall = pcr.ifthenelse(CmaxZero, Precipitation, ThroughFall)
    Interception = pcr.ifthenelse(CmaxZero, pcr.scalar(0.0), Interception)
    StemFlow = pcr.ifthenelse(CmaxZero, pcr.scalar(0.0), StemFlow)

    # Now correct for maximum potential evap
    OverEstimate = pcr.ifthenelse(
        Interception > maxevap, Interception - maxevap, pcr.scalar(0.0)
    )
    Interception = pcr.min(Interception, maxevap)
    # Add surplus to the throughfall
    ThroughFall = ThroughFall + OverEstimate

    return ThroughFall, Interception, StemFlow, CanopyStorage
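
Usage sketch with illustrative, nonspatial parameter values (numbers are hypothetical):

import pcraster as pcr

pcr.setclone("clone.map")
tf, interception, stemflow, canopy = rainfall_interception_gash(
    Cmax=pcr.scalar(1.5),                         # mm canopy storage capacity
    EoverR=pcr.scalar(0.1),                       # mean evaporation/rainfall ratio
    CanopyGapFraction=pcr.scalar(0.3),
    Precipitation=pcr.spatial(pcr.scalar(20.0)),  # mm/day
    CanopyStorage=pcr.scalar(0.0),
)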
Example #16
    def dynamic(self):
        
        # update model time using the current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())

        # reading gross and netto values:
        if self.modelTime.isLastDayOfMonth():

            gross_value = vos.netcdf2PCRobjClone(ncFile  = self.input_netcdf['gross_file_name'],
                                                 varName = self.input_netcdf['gross_variable_name'],
                                                 dateInput = str(self.modelTime.fulldate))

            netto_value = vos.netcdf2PCRobjClone(ncFile  = self.input_netcdf['netto_file_name'],
                                                 varName = self.input_netcdf['netto_variable_name'],
                                                 dateInput = str(self.modelTime.fulldate))
            
            # cover with zero and convert the unit (divide by the cell area)
            gross_value = pcr.cover(gross_value, 0.0)/self.cell_area
            netto_value = pcr.cover(netto_value, 0.0)/self.cell_area

        # reporting
        if self.modelTime.isLastDayOfMonth():

            # put the output in a dictionary
            output = {}
            output[self.output_netcdf['gross_variable_name']] = pcr.pcr2numpy(gross_value, vos.MV)
            output[self.output_netcdf['netto_variable_name']] = pcr.pcr2numpy(netto_value, vos.MV)
            
            # time stamp 
            timeStamp = datetime.datetime(self.modelTime.year,\
                                          self.modelTime.month,\
                                          self.modelTime.day,0)
            # to netcdf 
            self.output.dataList2NetCDF(self.output_netcdf['file_name'],\
                                       [self.output_netcdf['gross_variable_name'], self.output_netcdf['netto_variable_name']],\
                                        output,\
                                        timeStamp)

        # closing the file at the end of the run
        if self.modelTime.isLastTimeStep(): self.output.close(self.output_netcdf['file_name'])
Example #17
    def getICs(self, initial_condition):

        avgInflow = initial_condition["avgLakeReservoirInflowShort"]
        avgOutflow = initial_condition["avgLakeReservoirOutflowLong"]
        #
        if initial_condition["waterBodyStorage"] is not None:
            # read directly
            waterBodyStorage = initial_condition["waterBodyStorage"]
        else:
            # calculate waterBodyStorage at cells where lakes and/or reservoirs are defined
            #
            storageAtLakeAndReservoirs = pcr.cover(
                pcr.ifthen(
                    pcr.scalar(self.waterBodyIds) > 0.0,
                    initial_condition["channelStorage"],
                ),
                0.0,
            )
            #
            # - move only non negative values and use rounddown values
            storageAtLakeAndReservoirs = pcr.max(
                0.00, pcr.rounddown(storageAtLakeAndReservoirs)
            )
            #
            # lake and reservoir storages = waterBodyStorage (m3) ; values are given for the entire lake / reservoir cells
            waterBodyStorage = pcr.ifthen(
                pcr.scalar(self.waterBodyIds) > 0.0,
                pcr.areatotal(storageAtLakeAndReservoirs, self.waterBodyIds),
            )

        self.avgInflow = pcr.cover(avgInflow, 0.0)  # unit: m3/s
        self.avgOutflow = pcr.cover(avgOutflow, 0.0)  # unit: m3/s
        self.waterBodyStorage = pcr.cover(waterBodyStorage, 0.0)  # unit: m3

        self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow)
        self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow)
        self.waterBodyStorage = pcr.ifthen(self.landmask, self.waterBodyStorage)
Example #18
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., area_multiplier=1., iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch/surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # bizarre high inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
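
Usage sketch chaining this with derive_HAND and subcatch_stream output (file names are hypothetical):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
hand = pcr.readmap("hand.map")            # e.g. from derive_HAND above
subcatch = pcr.readmap("subcatch.map")
volume = pcr.readmap("flood_volume.map")  # m3 flood volume per subcatchment
inundation = volume_spread(ldd, hand, subcatch, volume)
pcr.report(inundation, "inundation.map")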
Example #19
File: pcrut.py  Project: openstreams/wflow
def interpolategauges(inputmap, method):
    """"
    Interpolate time series gauge data onto a grid using different methods
    inputmap: map with points data for a single timestep
    method: string indicating the method
        inv
        pol
        
    input: inputmap, method
    returns: interpolated map
    """

    if method == "inv":
        result = pcr.inversedistance(1, inputmap, 3, 0, 0)
    elif method == "pol":
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)
    else:
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)

    return result
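
Usage sketch (hypothetical input map holding gauge values, with missing values elsewhere):

import pcraster as pcr

pcr.setclone("clone.map")
gauges = pcr.readmap("gauge_values.map")
precip = interpolategauges(gauges, "inv")   # inverse-distance weighting
pcr.report(precip, "precip_interp.map")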
Example #20
def stackAverage(path, Root, StackStart, StackEnd):
    # calculates the average from a stack of maps; missing maps are skipped
    # Initialization
    MV = pcr.scalar(-999)
    NCount = pcr.scalar(0)
    SumStack = pcr.scalar(0)
    for StackNumber in range(StackStart, StackEnd):
        try:
            InMap = pcr.readmap(generateNameT(os.path.join(path, Root), StackNumber))
        except:
            InMap = MV
        InMap = pcr.cover(InMap, MV)
        SumStack = SumStack + InMap
        NCount = NCount + pcr.ifthenelse(InMap != MV, pcr.scalar(1), pcr.scalar(0))
    AvgStack = pcr.ifthenelse(NCount > 0, SumStack / NCount, MV)
    return AvgStack
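
Usage sketch; generateNameT is assumed to build the classic PCRaster dynamic stack names (e.g. "prec0000.001"), and the paths are hypothetical:

import pcraster as pcr

pcr.setclone("clone.map")
avg = stackAverage("maps", "prec", 1, 11)   # average prec0000.001 .. prec0000.010
pcr.report(avg, "prec_avg.map")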
Example #21
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., cell_surface=1., iterations=15, logging=logging, order=0, neg_HAND=None):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
        neg_HAND -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
        stream (for example when there are natural embankments)
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*pcr.areaaverage(cell_surface, subcatch) # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch/surface  # meters water disc averaged over subcatchment
    # pcr.report(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    if neg_HAND == 1:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(-32.))  # bizarre high inundation depth
        dem_min = pcr.scalar(-32.)
    else:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0.))  # bizarre high inundation depth
        dem_min = pcr.scalar(0.)
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
Example #22
def riverlength(ldd, order):
    """
    Determines the length of a river using the ldd.
    Only determined for streams of the given order and higher.

    Input:
        - ldd, order (streamorder)

    Returns:
        - totallength,lengthpercell, streamorder
    """
    strorder = pcr.streamorder(ldd)
    strorder = pcr.ifthen(strorder >= pcr.ordinal(order), strorder)
    dist = pcr.max(
        pcr.celllength(), pcr.ifthen(pcr.boolean(strorder), pcr.downstreamdist(ldd))
    )

    return pcr.catchmenttotal(pcr.cover(dist, 0), ldd), dist, strorder
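
Usage sketch (hypothetical input file):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
totallength, lengthpercell, strorder = riverlength(ldd, order=4)
pcr.report(totallength, "riverlength_total.map")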
Example #23
    def gettimestep(self, timestep, logging, var="P"):
        """
        Gets a map for a single timestep. Reads data in blocks assuming sequential access.

        timestep: framework timestep (1-based)
        logging: python logging object
        var: variable to get from the file
        """

        if var in self.dataset.variables:
            np_step = self.alldat[var][
                timestep - 1,
                self.latidx.min() : self.latidx.max() + 1,
                self.lonidx.min() : self.lonidx.max() + 1,
            ]
            miss = float(self.dataset.variables[var]._FillValue)
            return pcr.numpy2pcr(pcr.Scalar, np_step, miss), True
        else:
            logging.debug("Var (" + var + ") not found returning map with 0.0")
            return pcr.cover(pcr.scalar(0.0)), False
Example #24
File: pcrut.py  Project: openstreams/wflow
def detRealCellLength(ZeroMap, sizeinmetres):
    """
    Determine the cell length. Always returns the length
    in meters.
    """

    if sizeinmetres:
        reallength = pcr.celllength()
        xl = pcr.celllength()
        yl = pcr.celllength()
    else:
        aa = pcr.ycoordinate(pcr.boolean(pcr.cover(ZeroMap + 1, 1)))
        yl, xl = lattometres(aa)

        xl = xl * pcr.celllength()
        yl = yl * pcr.celllength()
        # Average length for surface area calculations.

        reallength = (xl + yl) * 0.5

    return xl, yl, reallength
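
Usage sketch for a lat/lon grid (assumes the lattometres helper from the same module; the clone file is hypothetical):

import pcraster as pcr

pcr.setclone("clone.map")
zeromap = pcr.scalar(0.0)
# sizeinmetres=False: degree-sized cells are converted to metres via lattometres
xl, yl, reallength = detRealCellLength(zeromap, sizeinmetres=False)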
Example #25
    def moveFromChannelToWaterBody(
        self,
        newStorageAtLakeAndReservoirs,
        timestepsToAvgDischarge,
        maxTimestepsToAvgDischargeShort,
        length_of_time_step=vos.secondsPerDay(),
    ):

        # new lake and/or reservoir storages (m3)
        newStorageAtLakeAndReservoirs = pcr.cover(
            pcr.areatotal(newStorageAtLakeAndReservoirs, self.waterBodyIds), 0.0
        )

        # incoming volume (m3)
        self.inflow = newStorageAtLakeAndReservoirs - self.waterBodyStorage

        # inflowInM3PerSec (m3/s)
        inflowInM3PerSec = self.inflow / length_of_time_step

        # updating (short term) average inflow (m3/s) ;
        # - needed to constrain lake outflow:
        #
        temp = pcr.max(
            1.0,
            pcr.min(
                maxTimestepsToAvgDischargeShort,
                self.timestepsToAvgDischarge
                - 1.0
                + length_of_time_step / vos.secondsPerDay(),
            ),
        )
        deltaInflow = inflowInM3PerSec - self.avgInflow
        R = deltaInflow * (length_of_time_step / vos.secondsPerDay()) / temp
        self.avgInflow = self.avgInflow + R
        self.avgInflow = pcr.max(0.0, self.avgInflow)
        #
        # for the reference, see the "weighted incremental algorithm" in http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance

        # updating waterBodyStorage (m3)
        self.waterBodyStorage = newStorageAtLakeAndReservoirs
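
A plain-Python check of the weighted incremental averaging used above (the numbers are illustrative, not from the source):

avg_inflow = 2.0    # current running average (m3/s)
inflow = 5.0        # new sample (m3/s)
n_effective = 4.0   # capped number of timesteps ("temp" above)
avg_inflow += (inflow - avg_inflow) / n_effective
print(avg_inflow)   # 2.75 -- the mean moves toward the new sample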
Example #26
def readPCRmapClone(v,cloneMapFileName,tmpDir,absolutePath=None,isLddMap=False,cover=None,isNomMap=False,inputEPSG="EPSG:4326",outputEPSG="EPSG:4326",method="near"):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: '+str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$",v):
        if absolutePath is not None: v = getFullPath(v,absolutePath)
        # print(v)
        sameClone = isSameClone(v,cloneMapFileName)
        if sameClone:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir+'temp.map'
            # if no re-projection needed:
            if inputEPSG == outputEPSG or outputEPSG is None:
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap)
            else:
                warp = gdalwarpPCR(v,output,cloneMapFileName,tmpDir,isLddMap,isNomMap,inputEPSG,outputEPSG,method)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
                PCRmap = pcr.ldd(PCRmap)
            if isNomMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
                PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
            os.makedirs(tmpDir)
    else:
        PCRmap = pcr.scalar(float(v))
    if cover is not None:
        PCRmap = pcr.cover(PCRmap, cover)
    co = None; cOut = None; err = None; warp = None
    del co; del cOut; del err; del warp
    stdout = None; del stdout
    stderr = None; del stderr
    return PCRmap    
Example #27
def inverse_gumbel(p_zero, loc, scale, return_period):
    """
    This function computes values for a given return period using the zero probability, location and scale
    parameters given.
    """

    p = pcr.scalar(1. - 1./return_period)
    
    # p_residual is the probability density function of the population consisting of any values above zero
    p_residual = pcr.min(pcr.max((p - p_zero) / (1.0 - p_zero), 0.0), 1.0) 

    #~ # - alternative equation found on:  https://repos.deltares.nl/repos/Hydrology/trunk/GLOFRIS/src/rp_bias_corr.py (see the method inv_gumbel)
    #~ p_residual = np.minimum(np.maximum((p-p_zero)/(1-p_zero), 0), np.float64(1-1./1e9))  # I think this is the correct equation
    
    reduced_variate = -pcr.ln(-pcr.ln(p_residual))

    flvol = reduced_variate * scale + loc

    # infinite numbers can occur. reduce these to zero!
    # if any values become negative due to the statistical extrapolation, fix them to zero (may occur if the sample size for fitting was small and a small return period is requested)
    flvol = pcr.max(0.0, pcr.cover(flvol, 0.0))

    return flvol
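
A plain-NumPy check of the same Gumbel arithmetic for a single cell (illustrative values, not from the source):

import numpy as np

p_zero, loc, scale, T = 0.1, 100.0, 35.0, 50.0
p = 1.0 - 1.0 / T
p_residual = np.clip((p - p_zero) / (1.0 - p_zero), 0.0, 1.0)
reduced_variate = -np.log(-np.log(p_residual))
flvol = max(0.0, reduced_variate * scale + loc)
print(flvol)   # roughly 233 for these numbers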
Example #28
    def gettimestep(self, timestep, logging, var="P", tsdatetime=None):
        """
        Gets a map for a single timestep. reads data in blocks assuming sequential access

        timestep: framework timestep (1-based)
        logging: python logging object
        var: variable to get from the file
        """
        ncindex = timestep - 1

        if var in self.dataset.variables:
            if tsdatetime is not None:
                if tsdatetime.replace(tzinfo=None) != self.datetimelist[
                    ncindex
                ].replace(tzinfo=None):
                    logging.warning(
                        "Date/time of state ("
                        + var
                        + " in "
                        + self.fname
                        + ")does not match. Wanted "
                        + str(tsdatetime)
                        + " got "
                        + str(self.datetimelist[ncindex])
                    )

            np_step = self.dataset.variables[var][
                ncindex,
                self.latidx.min() : self.latidx.max() + 1,
                self.lonidx.min() : self.lonidx.max() + 1,
            ]

            miss = float(self.dataset.variables[var]._FillValue)
            return pcr.numpy2pcr(pcr.Scalar, np_step, miss), True
        else:
            # logging.debug("Var (" + var + ") not found returning map with 0.0")
            return pcr.cover(pcr.scalar(0.0)), False
Example #29
def upscale_riverlength(ldd, order, factor):
    """
    Upscales the riverlength using 'factor'
    The resulting maps can be resampled (e.g. using resample.exe) by factor and should
    include the accurate length as determined with the original higher
    resolution maps. This function is **deprecated**;
    use area_riverlength_factor instead, as this version
    is very slow for large maps

    Input:
        - ldd
        - minimum streamorder to include

    Output:
        - distance per factor cells
    """

    strorder = pcr.streamorder(ldd)
    strorder = pcr.ifthen(strorder >= order, strorder)
    dist = pcr.cover(
        pcr.max(
            pcr.celllength(), pcr.ifthen(pcr.boolean(strorder), pcr.downstreamdist(ldd))
        ),
        0,
    )
    totdist = pcr.max(
        pcr.ifthen(
            pcr.boolean(strorder),
            pcr.windowtotal(
                pcr.ifthen(pcr.boolean(strorder), dist), pcr.celllength() * factor
            ),
        ),
        dist,
    )

    return totdist
Example #30
def simplereservoir(
    storage,
    inflow,
    ResArea,
    maxstorage,
    target_perc_full,
    maximum_Q,
    demand,
    minimum_full_perc,
    ReserVoirLocs,
    precip,
    pet,
    ReservoirSimpleAreas,
    timestepsecs=86400,
):
    """

    :param storage: initial storage m^3
    :param inflow: inflow m^3/s
    :param maxstorage: maximum storage (above which water is spilled) m^3
    :param target_perc_full: target fraction full (of max storage) -
    :param maximum_Q: maximum Q to release m^3/s if below spillway
    :param demand: water demand (all combined) m^3/s
    :param minimum_full_perc: target minimum full fraction (of max storage) -
    :param ReserVoirLocs: map with reservoir locations
    :param timestepsecs: timestep of the model in seconds (default = 86400)
    :return: storage (m^3), outflow (m^3/s), percentage full (0-1), average precipitation, average PET, release (m^3/s)
    """

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.cover(
        pcr.ifthen(pcr.boolean(ReserVoirLocs),
                   pcr.areaaverage(precip, ReservoirSimpleAreas)),
        pcr.scalar(0.0),
    )
    pet_av = pcr.cover(
        pcr.ifthen(pcr.boolean(ReserVoirLocs),
                   pcr.areaaverage(pet, ReservoirSimpleAreas)),
        pcr.scalar(0.0),
    )

    _outflow = 0
    _demandRelease = 0

    nr_loop = np.max([int(timestepsecs / 21600), 1])
    for n in range(0, nr_loop):

        fl_nr_loop = float(nr_loop)

        storage = (storage + (inflow * timestepsecs / fl_nr_loop) +
                   (prec_av / fl_nr_loop / 1000.0) * ResArea -
                   (pet_av / fl_nr_loop / 1000.0) * ResArea)

        percfull = storage / maxstorage
        # first determine minimum (environmental) flow using a simple sigmoid curve to scale for target level
        fac = sCurve(percfull, a=minimum_full_perc, c=30.0)
        demandRelease = pcr.min(fac * demand * timestepsecs / fl_nr_loop,
                                storage)
        storage = storage - demandRelease

        wantrel = pcr.max(0.0, storage - (maxstorage * target_perc_full))
        # Assume extra maximum Q if spilling
        overflowQ = pcr.max((storage - maxstorage), 0.0)
        torelease = pcr.min(
            wantrel,
            overflowQ + maximum_Q * timestepsecs / fl_nr_loop - demandRelease)
        storage = storage - torelease
        outflow = torelease + demandRelease
        percfull = storage / maxstorage

        _outflow = _outflow + outflow
        _demandRelease = _demandRelease + demandRelease

    return storage, _outflow / timestepsecs, percfull, prec_av, pet_av, _demandRelease / timestepsecs
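
Usage sketch with illustrative values (assumes the sCurve helper from the same module; maps and numbers are hypothetical):

import pcraster as pcr

pcr.setclone("clone.map")
storage, outflow, percfull, prec_av, pet_av, release = simplereservoir(
    storage=pcr.scalar(5.0e7),        # m3
    inflow=pcr.scalar(100.0),         # m3/s
    ResArea=pcr.scalar(1.0e7),        # m2
    maxstorage=pcr.scalar(1.0e8),     # m3
    target_perc_full=pcr.scalar(0.8),
    maximum_Q=pcr.scalar(200.0),      # m3/s
    demand=pcr.scalar(20.0),          # m3/s
    minimum_full_perc=pcr.scalar(0.2),
    ReserVoirLocs=pcr.readmap("reservoir_locs.map"),
    precip=pcr.scalar(5.0),           # mm per timestep
    pet=pcr.scalar(3.0),              # mm per timestep
    ReservoirSimpleAreas=pcr.readmap("reservoir_areas.map"),
)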
Example #31
def main():
    """
        
    :ivar masterdem: digital elevation model
    :ivar dem: digital elevation model
    :ivar river: optional river map
    """

    # Default values
    strRiver = 8
    masterdem = "dem.map"
    step1dir = "step1"
    step2dir = "step2"
    workdir = "."
    inifile = "wflow_prepare.ini"
    recreate = False
    snapgaugestoriver = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f",['version'])
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    pcr.setglobaloption("unitcell")
    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    masterdem = configget(config, "files", "masterdem", "dem.map")
    pcr.setclone(masterdem)

    strRiver = int(configget(config, "settings", "riverorder", "4"))

    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    # upscalefactor = float(config.get("settings","upscalefactor"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35")
    )
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(configget(config, "settings", "lddoutflowdepth", "1E35"))

    initialscale = int(configget(config, "settings", "initialscale", "1"))
    csize = float(configget(config, "settings", "cellsize", "1"))

    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1"))
    )
    lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout")
    pcr.setglobaloption(lddglobaloption)
    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    # X/Y coordinates of the gauges in the system
    X = np.fromstring(gauges_x, sep=',')
    Y = np.fromstring(gauges_y, sep=',')

    tr.Verbose = 1

    # make the directories to save results in
    if not os.path.isdir(step1dir + "/"):
        os.makedirs(step1dir)
    if not os.path.isdir(step2dir):
        os.makedirs(step2dir)

    if initialscale > 1:
        print("Initial scaling of DEM...")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + masterdem
            + " "
            + step1dir
            + "/dem_scaled.map"
        )
        print("Reading dem...")
        dem = pcr.readmap(step1dir + "/dem_scaled.map")
        ldddem = dem
    else:
        print ("Reading dem...")
        dem = pcr.readmap(masterdem)
        ldddem = dem

    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask...")
    else:
        print("clipping DEM with mask.....")
        mask = pcr.readmap(catchmask)
        ldddem = pcr.ifthen(pcr.boolean(mask), ldddem)
        dem = pcr.ifthen(pcr.boolean(mask), dem)

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
        outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
    else:
        print("river file specified.....")
        try:
            outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
            outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
        except:
            print(
                "Need to specify the river outletpoint (a point at the end of the river within the current map)"
            )
            exit(1)

        outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5)
        pcr.report(outletpointmap, step1dir + "/outletpoint.map")
        # rivshpattr = config.get("files","riverattr")
        pcr.report(dem * 0.0, step1dir + "/nilmap.map")
        thestr = (
            "gdal_translate -of GTiff "
            + step1dir
            + "/nilmap.map "
            + step1dir
            + "/riverburn.tif"
        )
        os.system(thestr)
        rivshpattr = os.path.splitext(os.path.basename(rivshp))[0]
        os.system(
            "gdal_rasterize -burn 1 -l "
            + rivshpattr
            + " "
            + rivshp
            + " "
            + step1dir
            + "/riverburn.tif"
        )
        thestr = (
            "gdal_translate -of PCRaster "
            + step1dir
            + "/riverburn.tif "
            + step1dir
            + "/riverburn.map"
        )
        os.system(thestr)
        riverburn = pcr.readmap(step1dir + "/riverburn.map")
        # Determine regional slope assuming that is the way the river should run
        # pcr.setglobaloption("unitcell")
        # demregional=pcr.windowaverage(dem,100)
        ldddem = pcr.ifthenelse(riverburn >= 1.0, dem - 1000, dem)

    pcr.setglobaloption("unittrue")
    upscalefactor = int(csize / pcr.celllength())

    print("Creating ldd...")
    ldd = tr.lddcreate_save(
        step1dir + "/ldd.map",
        ldddem,
        recreate,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    print("Determining streamorder...")
    stro = pcr.streamorder(ldd)
    pcr.report(stro, step1dir + "/streamorder.map")
    strdir = pcr.ifthen(stro >= strRiver, stro)
    pcr.report(strdir, step1dir + "/streamorderrive.map")
    pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)), step1dir + "/rivers.map")

    pcr.setglobaloption("unittrue")
    # outlet (and other gauges if given)
    # TODO: check is x/y set if not skip this
    print("Outlet...")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)

    if snapgaugestoriver:
        print("Snapping gauges to nearest river cells...")
        pcr.report(outlmap, step1dir + "/orggauges.map")
        outlmap = tr.snaptomap(outlmap, strdir)

    # noutletmap = tr.points_to_map(dem,XX,YY,0.5)
    # pcr.report(noutletmap,'noutlet.map')

    pcr.report(outlmap, step1dir + "/gauges.map")

    # check if there is a pre-define catchment map
    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask, finding outlet")
        # Find catchment (overall)
        outlet = tr.find_outlet(ldd)
        sub = tr.subcatch(ldd, outlet)
        pcr.report(sub, step1dir + "/catchment_overall.map")
    else:
        print("reading and converting catchment mask.....")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + catchmask
            + " "
            + step1dir
            + "/catchment_overall.map"
        )
        sub = pcr.readmap(step1dir + "/catchment_overall.map")

    print("Scatch...")
    sd = tr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap))
    pcr.report(sd, step1dir + "/scatch.map")

    pcr.setglobaloption("unitcell")
    print("Upscalefactor: " + str(upscalefactor))

    if upscalefactor > 1:
        gc.collect()
        print("upscale river length1 (checkerboard map)...")
        ck = tr.checkerboard(dem, upscalefactor)
        pcr.report(ck, step1dir + "/ck.map")
        pcr.report(dem, step1dir + "/demck.map")
        print("upscale river length2...")
        fact = tr.area_riverlength_factor(ldd, ck, upscalefactor)
        pcr.report(fact, step1dir + "/riverlength_fact.map")

        # print("make dem statistics...")
        dem_ = pcr.areaaverage(dem, ck)
        pcr.report(dem_, step1dir + "/demavg.map")

        print("Create DEM statistics...")
        dem_ = pcr.areaminimum(dem, ck)
        pcr.report(dem_, step1dir + "/demmin.map")
        dem_ = pcr.areamaximum(dem, ck)
        pcr.report(dem_, step1dir + "/demmax.map")
        # calculate percentiles
        order = pcr.areaorder(dem, ck)
        n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck)
        #: calculate 25 percentile
        perc = tr.area_percentile(dem, ck, n, order, 25.0)
        pcr.report(perc, step1dir + "/dem25.map")
        perc = tr.area_percentile(dem, ck, n, order, 10.0)
        pcr.report(perc, step1dir + "/dem10.map")
        perc = tr.area_percentile(dem, ck, n, order, 50.0)
        pcr.report(perc, step1dir + "/dem50.map")
        perc = tr.area_percentile(dem, ck, n, order, 33.0)
        pcr.report(perc, step1dir + "/dem33.map")
        perc = tr.area_percentile(dem, ck, n, order, 66.0)
        pcr.report(perc, step1dir + "/dem66.map")
        perc = tr.area_percentile(dem, ck, n, order, 75.0)
        pcr.report(perc, step1dir + "/dem75.map")
        perc = tr.area_percentile(dem, ck, n, order, 90.0)
        pcr.report(perc, step1dir + "/dem90.map")
    else:
        print("No fancy scaling done. Going strait to step2....")
        pcr.report(dem, step1dir + "/demavg.map")
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
        gdalstr = (
            "gdal_translate  -projwin "
            + str(Xul)
            + " "
            + str(Yul)
            + " "
            + str(Xlr)
            + " "
            + str(Ylr)
            + " -of PCRaster  "
        )
        # gdalstr = "gdal_translate  -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster  "
        print(gdalstr)
        pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map")
        # Now use gdal to convert the maps
        os.system(
            gdalstr
            + step1dir
            + "/wflow_riverlength_fact.map"
            + " "
            + step2dir
            + "/wflow_riverlength_fact.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_dem.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmin.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmax.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_gauges.map"
        )
        os.system(
            gdalstr + step1dir + "/rivers.map" + " " + step2dir + "/wflow_river.map"
        )
        os.system(
            gdalstr
            + step1dir
            + "/streamorder.map"
            + " "
            + step2dir
            + "/wflow_streamorder.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_outlet.map"
        )
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_catchment.map"
        )
        os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir + "/wflow_ldd.map")
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_subcatch.map"
        )

        if lu_water:
            os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map")

        if lu_paved:
            os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map")

        try:
            lumap = config.get("files", "landuse")
        except:
            print("no landuse map...creating uniform map")
            # clone=pcr.readmap(step2dir + "/wflow_dem.map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + lumap
                + " "
                + step2dir
                + "/wflow_landuse.map"
            )

        try:
            soilmap = config.get("files", "soil")
        except:
            print("no soil map..., creating uniform map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + soilmap
                + " "
                + step2dir
                + "/wflow_soil.map"
            )
    def adjusting_parameters(self, configuration, system_argument):

        # default pre-multipliers (i.e. no adjustment); the log-scale
        # multipliers are applied below as 10**multiplier, so 0.0 is neutral there
        multiplier_for_minSoilDepthFrac = 1.0  # linear scale
        multiplier_for_kSat = 0.0  # log scale
        multiplier_for_recessionCoeff = 0.0  # log scale
        multiplier_for_storCap = 1.0  # linear scale
        multiplier_for_degreeDayFactor = 1.0  # linear scale
        self.multiplier_for_refPotET = 1.0  # linear scale

        # global pre-multipliers given in the argument:
        if len(system_argument) > 4:

            logger.info(
                "Adjusting some model parameters based on given values in the system argument."
            )

            # pre-multipliers for minSoilDepthFrac, kSat, recessionCoeff, storCap and degreeDayFactor
            multiplier_for_minSoilDepthFrac = float(
                system_argument[4]
            )  # linear scale                                        # Note that this one does NOT work for the changing WMIN or Joyce land cover options.
            multiplier_for_kSat = float(system_argument[5])  # log scale
            multiplier_for_recessionCoeff = float(
                system_argument[6])  # log scale
            multiplier_for_storCap = float(system_argument[7])  # linear scale
            multiplier_for_degreeDayFactor = float(
                system_argument[8])  # linear scale

            # pre-multiplier for the reference potential ET
            self.multiplier_for_refPotET = float(
                system_argument[9])  # linear scale
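
            # For reference, a hypothetical invocation supplying the six
            # pre-multipliers as positions 4-9 of the system argument (the
            # earlier positions are assumed to hold the usual script
            # arguments, e.g. ini file, output folder and start date):
            #   python model.py <ini> <outdir> <startdate> 0.8 0.3 -0.5 1.2 1.0 1.0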

        # it is also possible to define prefactors via the ini/configuration file:
        # - these will overwrite any previously given pre-multipliers
        if 'prefactorOptions' in configuration.allSections:

            logger.info(
                "Adjusting some model parameters based on given values in the ini/configuration file."
            )

            self.multiplier_for_refPotET = float(
                configuration.
                prefactorOptions['linear_multiplier_for_refPotET']
            )  # linear scale  # Note that this one does NOT work for the changing WMIN or Joyce land cover options.
            multiplier_for_degreeDayFactor = float(
                configuration.prefactorOptions[
                    'linear_multiplier_for_degreeDayFactor'])  # linear scale
            multiplier_for_minSoilDepthFrac = float(
                configuration.prefactorOptions[
                    'linear_multiplier_for_minSoilDepthFrac'])  # linear scale
            multiplier_for_kSat = float(
                configuration.prefactorOptions['log_10_multiplier_for_kSat']
            )  # log scale
            multiplier_for_storCap = float(
                configuration.prefactorOptions['linear_multiplier_for_storCap']
            )  # linear scale
            multiplier_for_recessionCoeff = float(
                configuration.prefactorOptions[
                    'log_10_multiplier_for_recessionCoeff'])  # log scale

        # saving global pre-multipliers to the log file:
        msg = "\n"
        msg += "\n"
        msg += "Multiplier values used: " + "\n"
        msg += "For minSoilDepthFrac           : " + str(
            multiplier_for_minSoilDepthFrac) + "\n"
        msg += "For kSat (log-scale)           : " + str(
            multiplier_for_kSat) + "\n"
        msg += "For recessionCoeff (log-scale) : " + str(
            multiplier_for_recessionCoeff) + "\n"
        msg += "For storCap                    : " + str(
            multiplier_for_storCap) + "\n"
        msg += "For degreeDayFactor            : " + str(
            multiplier_for_degreeDayFactor) + "\n"
        msg += "For refPotET                   : " + str(
            self.multiplier_for_refPotET) + "\n"
        logger.info(msg)
        # - also to a txt file
        f = open(
            "multiplier.txt", "w"
        )  # this will be stored in the "map" folder of the 'outputDir' (as we set the current working directory to this "map" folder, see configuration.py)
        f.write(msg)
        f.close()

        # set parameter "recessionCoeff" based on the given pre-multiplier
        # - also saving the adjusted parameter maps to pcraster files
        # - these will be stored in the "map" folder of the 'outputDir' (as we set the current working directory to this "map" folder, see configuration.py)
        # "recessionCoeff"
        # minimum value is zero and using log-scale
        self.model.groundwater.recessionCoeff = pcr.max(
            0.0, (10**(multiplier_for_recessionCoeff)) *
            self.model.groundwater.recessionCoeff)
        self.model.groundwater.recessionCoeff = pcr.min(
            1.0, self.model.groundwater.recessionCoeff)
        # report the map
        pcr.report(self.model.groundwater.recessionCoeff, "recessionCoeff.map")

        # set parameters "kSat", "storCap", "minSoilDepthFrac", and "degreeDayFactor" based on the given pre-multipliers
        for coverType in self.model.landSurface.coverTypes:

            # "degreeDayFactor"
            self.model.landSurface.landCoverObj[coverType].degreeDayFactor  = pcr.max(0.0, multiplier_for_degreeDayFactor  *\
                                                           self.model.landSurface.landCoverObj[coverType].degreeDayFactor)
            # report the map
            pcraster_filename = "degreeDayFactor" + "_" + coverType + ".map"
            pcr.report(
                self.model.landSurface.landCoverObj[coverType].degreeDayFactor,
                pcraster_filename)

            # "kSat" and "storCap" for 2 layer model
            if self.model.landSurface.numberOfSoilLayers == 2:

                # "kSat"
                # minimum value is zero and using-log-scale
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp = \
                       pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatLow = \
                       pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatLow)
                # report the maps
                pcraster_filename = "kSatUpp" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    kSatUpp, pcraster_filename)
                pcraster_filename = "kSatLow" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    kSatLow, pcraster_filename)

                # "storCap"
                # minimum value is zero
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp = pcr.max(0.0, multiplier_for_storCap*\
                                                                                                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapLow = pcr.max(0.0, multiplier_for_storCap*\
                                                                                                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow)
                # report the maps
                pcraster_filename = "storCapUpp" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    storCapUpp, pcraster_filename)
                pcraster_filename = "storCapLow" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    storCapLow, pcraster_filename)

            # "kSat" and "storCap" for 3 layer model
            if self.model.landSurface.numberOfSoilLayers == 3:

                # "kSat"
                # minimum value is zero and using-log-scale
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005 = \
                       pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp000005)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030 = \
                       pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatUpp005030)
                self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150 = \
                       pcr.max(0.0, (10**(multiplier_for_kSat)) * self.model.landSurface.landCoverObj[coverType].parameters.kSatLow030150)
                # report the maps
                pcraster_filename = "kSatUpp000005" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    kSatUpp000005, pcraster_filename)
                pcraster_filename = "kSatUpp005030" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    kSatUpp005030, pcraster_filename)
                pcraster_filename = "kSatLow030150" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    kSatLow030150, pcraster_filename)

                # "storCap"
                # minimum value is zero
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 = pcr.max(0.0, multiplier_for_storCap*\
                                                                                                          self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 = pcr.max(0.0, multiplier_for_storCap*\
                                                                                                          self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030)
                self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150 = pcr.max(0.0, multiplier_for_storCap*\
                                                                                                          self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150)
                # report the maps
                pcraster_filename = "storCapUpp000005" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    storCapUpp000005, pcraster_filename)
                pcraster_filename = "storCapUpp005030" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    storCapUpp005030, pcraster_filename)
                pcraster_filename = "storCapLow030150" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].parameters.
                    storCapLow030150, pcraster_filename)

            # re-calculate rootZoneWaterStorageCap as the consequence of the modification of "storCap"
            # This is WMAX in the oldcalc script.
            if self.model.landSurface.numberOfSoilLayers == 2:
                self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap = self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp +\
                                                                                                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow
            if self.model.landSurface.numberOfSoilLayers == 3:
                self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap = self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp000005 +\
                                                                                                    self.model.landSurface.landCoverObj[coverType].parameters.storCapUpp005030 +\
                                                                                                    self.model.landSurface.landCoverObj[coverType].parameters.storCapLow030150

            # report the map
            pcraster_filename = "rootZoneWaterStorageCap" + "_" + coverType + ".map"
            pcr.report(
                self.model.landSurface.landCoverObj[coverType].parameters.
                rootZoneWaterStorageCap, pcraster_filename)

            # "minSoilDepthFrac"
            if multiplier_for_minSoilDepthFrac != 1.0:

                # minimum value is zero
                self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.max(0.0, multiplier_for_minSoilDepthFrac*\
                                                               self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)
                # for minSoilDepthFrac - values will be limited by maxSoilDepthFrac
                self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac = pcr.min(\
                                                               self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac,\
                                                               self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac)
                # maximum value is 1.0
                self.model.landSurface.landCoverObj[
                    coverType].minSoilDepthFrac = pcr.min(
                        1.0, self.model.landSurface.landCoverObj[coverType].
                        minSoilDepthFrac)
                # report the map
                pcraster_filename = "minSoilDepthFrac" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].
                    minSoilDepthFrac, pcraster_filename)

                # re-calculate arnoBeta (as the consequence of the modification of minSoilDepthFrac)
                self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.max(0.001,\
                     (self.model.landSurface.landCoverObj[coverType].maxSoilDepthFrac-1.)/(1.-self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac)+\
                                               self.model.landSurface.landCoverObj[coverType].parameters.orographyBeta-0.01)
                self.model.landSurface.landCoverObj[coverType].arnoBeta = pcr.cover(pcr.max(0.001,\
                      self.model.landSurface.landCoverObj[coverType].arnoBeta), 0.001)
                # report the map
                pcraster_filename = "arnoBeta" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].arnoBeta,
                    pcraster_filename)

                # re-calculate rootZoneWaterStorageMin (as the consequence of the modification of minSoilDepthFrac)
                # This is WMIN in the oldcalc script.
                # WMIN (unit: m): minimum local soil water capacity within the grid-cell
                self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin = self.model.landSurface.landCoverObj[coverType].minSoilDepthFrac *\
                                                                                         self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap
                # report the map
                pcraster_filename = "rootZoneWaterStorageMin" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].
                    rootZoneWaterStorageMin, pcraster_filename)

                # re-calculate rootZoneWaterStorageRange (as the consequence of the modification of rootZoneWaterStorageRange and minSoilDepthFrac)
                # WMAX - WMIN (unit: m)
                self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageRange = self.model.landSurface.landCoverObj[coverType].parameters.rootZoneWaterStorageCap -\
                                                                                           self.model.landSurface.landCoverObj[coverType].rootZoneWaterStorageMin
                # report the map
                pcraster_filename = "rootZoneWaterStorageRange" + "_" + coverType + ".map"
                pcr.report(
                    self.model.landSurface.landCoverObj[coverType].
                    rootZoneWaterStorageRange, pcraster_filename)
Example #33
0
def naturalLake(
    waterlevel,
    LakeLocs,
    LinkedLakeLocs,
    LakeArea,
    LakeThreshold,
    LakeStorFunc,
    LakeOutflowFunc,
    sh,
    hq,
    lake_b,
    lake_e,
    inflow,
    precip,
    pet,
    LakeAreasMap,
    JDOY,
    timestepsecs=86400,
):
    """
    Run Natural Lake module to compute the new waterlevel and outflow.
    Solves lake water balance with linearisation and iteration procedure,
    for any rating and storage curve.
    For the case where storage curve is S = AH and Q=b(H-Ho)^2, uses the direct
    solution from the Modified Puls Approach (LISFLOOD).


    :ivar waterlevel: water level H in the lake
    :ivar LakeLocs: location of lake's outlet
    :ivar LinkedLakeLocs: ID of linked lakes
    :ivar LakeArea: total lake area
    :ivar LakeThreshold: water level threshold Ho under which outflow is zero
    :ivar LakeStorFunc: type of lake storage curve
                        1: S = AH
                        2: S = f(H) from lake data and interpolation
    :ivar LakeOutflowFunc: type of lake rating curve
                           1: Q = f(H) from lake data and interpolation
                           2: General Q = b(H - Ho)^e
                           3: Case of Puls Approach Q = b(H - Ho)^2
    :ivar sh: data for storage curve
    :ivar hq: data for rating curve
    :ivar lake_b: rating curve coefficient
    :ivar lake_e: rating curve exponent
    :ivar inflow: inflow to the lake (surface runoff + river discharge + seepage)
    :ivar precip: precipitation map
    :ivar pet: PET map
    :ivar LakeAreasMap: lake extent map (for filtering P and PET)
    :ivar JDOY: Julian Day of Year to read storage/rating curve from data
    :ivar timestepsecs: model timestep in seconds

    :returns: waterlevel, outflow, prec_av, pet_av, storage
    """

    mv = -999.0
    LakeZeros = LakeArea * 0.0

    waterlevel_start = waterlevel

    inflow = pcr.ifthen(pcr.boolean(LakeLocs), inflow)

    prec_av = pcr.ifthen(pcr.boolean(LakeLocs),
                         pcr.areaaverage(precip, LakeAreasMap))
    pet_av = pcr.ifthen(pcr.boolean(LakeLocs),
                        pcr.areaaverage(pet, LakeAreasMap))

    ### Modified Puls Approach (Burek et al., 2013, LISFLOOD) ###
    #ResOutflowFunc = 3

    #Calculate lake factor and SI parameter
    LakeFactor = pcr.ifthenelse(LakeOutflowFunc == 3,
                                LakeArea / (timestepsecs * (lake_b)**0.5), mv)

    storage_start = pcr.ifthenelse(
        LakeStorFunc == 1,
        LakeArea * waterlevel_start,
        lookupResFunc(LakeLocs, waterlevel_start, sh, "0-1"),
    )
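
    # Note: Python evaluates both branches of pcr.ifthenelse before the call,
    # so the lookupResFunc branch above is executed even for purely Puls-type
    # lakes; 'sh' must therefore always be a valid storage-curve table.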

    SIFactor = pcr.ifthenelse(
        LakeOutflowFunc == 3,
        ((storage_start +
          (prec_av - pet_av) * LakeArea / 1000.0) / timestepsecs + inflow), mv)
    #Adjust SIFactor for LakeThreshold != 0
    SIFactorAdj = SIFactor - LakeArea * LakeThreshold / timestepsecs
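
    # Derivation sketch for the direct Puls solution used below, assuming the
    # LISFLOOD convention that outflow is averaged over the step, i.e.
    # S2/dt + Q2/2 = SI. With S = A*H and Q = b*(H - Ho)^2, substituting
    # x = sqrt(b)*(H - Ho) gives F*x + x**2/2 = SIFactorAdj, where
    # F = LakeFactor = A/(dt*sqrt(b)); the positive root is
    # x = -F + sqrt(F**2 + 2*SIFactorAdj), and the new outflow is Q2 = x**2.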

    #Calculate the new lake outflow/waterlevel/storage
    outflow = pcr.ifthenelse(
        LakeOutflowFunc == 3,
        pcr.ifthenelse(SIFactorAdj > 0.0,
                       (-LakeFactor +
                        (LakeFactor**2 + 2 * SIFactorAdj)**0.5)**2, 0.0),
        LakeZeros)
    storage = pcr.ifthenelse(LakeOutflowFunc == 3,
                             (SIFactor - outflow) * timestepsecs, LakeZeros)
    waterlevel = pcr.ifthenelse(LakeOutflowFunc == 3, storage / LakeArea,
                                LakeZeros)

    ### Linearisation and iteration for specific storage/rating curves ###
    np_lakeoutflowfunc = pcr.pcr2numpy(LakeOutflowFunc, 0.0)
    if ((bool(np.isin(1, np.unique(np_lakeoutflowfunc))))
            or (bool(np.isin(2, np.unique(np_lakeoutflowfunc))))):

        np_lakelocs = pcr.pcr2numpy(LakeLocs, 0.0)
        np_linkedlakelocs = pcr.pcr2numpy(LinkedLakeLocs, 0.0)
        waterlevel_loop = waterlevel_start

        _outflow = []
        nr_loop = np.max([int(timestepsecs / 21600), 1])  # iterate in sub-steps of at most 6 hours
        for n in range(0, nr_loop):
            np_waterlevel = pcr.pcr2numpy(waterlevel_loop, np.nan)
            np_waterlevel_lower = np_waterlevel.copy()

            for val in np.unique(np_linkedlakelocs):
                if val > 0:
                    np_waterlevel_lower[np_linkedlakelocs ==
                                        val] = np_waterlevel[np.where(
                                            np_lakelocs == val)]

            diff_wl = np_waterlevel - np_waterlevel_lower
            diff_wl[np.isnan(diff_wl)] = mv
            np_waterlevel_lower[np.isnan(np_waterlevel_lower)] = mv

            pcr_diff_wl = pcr.numpy2pcr(pcr.Scalar, diff_wl, mv)
            pcr_wl_lower = pcr.numpy2pcr(pcr.Scalar, np_waterlevel_lower, mv)

            storage_start_loop = pcr.ifthenelse(
                LakeStorFunc == 1,
                LakeArea * waterlevel_loop,
                lookupResFunc(LakeLocs, waterlevel_loop, sh, "0-1"),
            )

            outflow_loop = pcr.ifthenelse(
                LakeOutflowFunc == 1,
                lookupResRegMatr(LakeLocs, waterlevel_loop, hq, JDOY),
                pcr.ifthenelse(
                    pcr_diff_wl >= 0,
                    pcr.max(lake_b * (waterlevel_loop - LakeThreshold)**lake_e,
                            0),
                    pcr.min(
                        -1 * lake_b * (pcr_wl_lower - LakeThreshold)**lake_e,
                        0),
                ),
            )

            np_outflow = pcr.pcr2numpy(outflow_loop, np.nan)
            np_outflow_linked = np_lakelocs * 0.0

            with np.errstate(invalid="ignore"):
                if np_outflow[np_outflow < 0] is not None:
                    np_outflow_linked[np.in1d(
                        np_lakelocs,
                        np_linkedlakelocs[np_outflow < 0]).reshape(
                            np_linkedlakelocs.shape)] = np_outflow[
                                np_outflow < 0]

            outflow_linked = pcr.numpy2pcr(pcr.Scalar, np_outflow_linked, 0.0)

            fl_nr_loop = float(nr_loop)
            storage_loop = (
                storage_start_loop + (inflow * timestepsecs / fl_nr_loop) +
                (prec_av / fl_nr_loop / 1000.0) * LakeArea -
                (pet_av / fl_nr_loop / 1000.0) * LakeArea -
                (pcr.cover(outflow_loop, 0.0) * timestepsecs / fl_nr_loop) +
                (pcr.cover(outflow_linked, 0.0) * timestepsecs / fl_nr_loop))

            waterlevel_loop = pcr.ifthenelse(
                LakeStorFunc == 1,
                waterlevel_loop +
                (storage_loop - storage_start_loop) / LakeArea,
                lookupResFunc(LakeLocs, storage_loop, sh, "1-0"),
            )

            np_outflow_nz = np_outflow * 0.0
            with np.errstate(invalid="ignore"):
                np_outflow_nz[np_outflow > 0] = np_outflow[np_outflow > 0]
            _outflow.append(np_outflow_nz)

        outflow_av_temp = np.average(_outflow, 0)
        outflow_av_temp[np.isnan(outflow_av_temp)] = mv
        outflow_av = pcr.numpy2pcr(pcr.Scalar, outflow_av_temp, mv)

        #Add the discharge/waterlevel/storage from the loop to the one from puls approach
        outflow = pcr.ifthenelse(LakeOutflowFunc == 3, outflow, outflow_av)
        waterlevel = pcr.ifthenelse(LakeOutflowFunc == 3, waterlevel,
                                    waterlevel_loop)
        storage = pcr.ifthenelse(LakeOutflowFunc == 3, storage, storage_loop)

    return waterlevel, outflow, prec_av, pet_av, storage
Example #34
0
    def __init__(self, clone_map_file,\
                       input_thickness_netcdf_file,\
                       input_thickness_var_name   ,\
                       margat_aquifers,\
                       tmp_directory,
                       landmask = None,
                       arcdegree = True):

        object.__init__(self)

        # aquifer table from Margat and van der Gun 
        self.margat_aquifers = margat_aquifers

        # clone map
        self.clone_map_file = clone_map_file
        self.clone_map_attr = vos.getMapAttributesALL(self.clone_map_file)
        if arcdegree == True:
            self.clone_map_attr['cellsize'] = round(self.clone_map_attr['cellsize'] * 360000.)/360000.
        xmin = self.clone_map_attr['xUL']
        xmax = xmin + self.clone_map_attr['cols'] * self.clone_map_attr['cellsize']
        ymax = self.clone_map_attr['yUL']
        ymin = ymax - self.clone_map_attr['rows'] * self.clone_map_attr['cellsize']
        pcr.setclone(self.clone_map_file)

        # temporary directory 
        self.tmp_directory = tmp_directory

        # thickness approximation (unit: m; netcdf file with variable name = "average")
        self.approx_thick = vos.netcdf2PCRobjCloneWithoutTime(input_thickness_netcdf_file,\
                                                              input_thickness_var_name,\
                                                              self.clone_map_file)
        # set minimum value to 0.1 mm
        self.approx_thick = pcr.max(0.0001, self.approx_thick)

        # rasterize the shape file 
        #               -        
        # save current directory and move to temporary directory
        current_dir = str(os.getcwd()+"/")
        os.chdir(str(self.tmp_directory))
        #
        cmd_line  = 'gdal_rasterize -a MARGAT '                                     # attribute to burn = MARGAT
        cmd_line += '-te '+str(xmin)+' '+str(ymin)+' '+str(xmax)+' '+str(ymax)+ ' '       
        cmd_line += '-tr '+str(self.clone_map_attr['cellsize'])+' '+str(self.clone_map_attr['cellsize'])+' '
        cmd_line += str(margat_aquifers['shapefile'])+' '
        cmd_line += 'tmp.tif'
        print(cmd_line); os.system(cmd_line)
        #
        # make it nominal
        cmd_line = 'pcrcalc tmp.map = "nominal(tmp.tif)"' 
        print(cmd_line); os.system(cmd_line)
        #
        # make sure that the clone map is correct
        cmd_line = 'mapattr -c '+str(self.clone_map_file)+' tmp.map'
        print(cmd_line); os.system(cmd_line)
        #
        # read the map
        self.margat_aquifer_map = pcr.nominal(pcr.readmap("tmp.map"))
        #
        # clean temporary directory and return to the original directory
        vos.clean_tmp_dir(self.tmp_directory)
        os.chdir(current_dir)
        
        # extend the extent of each aquifer
        self.margat_aquifer_map = pcr.cover(self.margat_aquifer_map, 
                                  pcr.windowmajority(self.margat_aquifer_map, 1.25))

        # assign aquifer thickness, unit: m (lookuptable operation) 
        self.margat_aquifer_thickness = pcr.lookupscalar(margat_aquifers['txt_table'], self.margat_aquifer_map)
        self.margat_aquifer_thickness = pcr.ifthen(self.margat_aquifer_thickness > 0., \
                                                   self.margat_aquifer_thickness)
        #~ pcr.report(self.margat_aquifer_thickness,"thick.map"); os.system("aguila thick.map")

        # aquifer map
        self.margat_aquifer_map       = pcr.ifthen(self.margat_aquifer_thickness > 0., self.margat_aquifer_map)        
        
        # looping per aquifer: correcting or rescaling
        aquifer_ids = np.unique(pcr.pcr2numpy(pcr.scalar(self.margat_aquifer_map), vos.MV))
        aquifer_ids = aquifer_ids[aquifer_ids > 0]
        aquifer_ids = aquifer_ids[aquifer_ids < 10000]
        self.rescaled_thickness = None
        for aquifer_id in aquifer_ids:
            rescaled_thickness = self.correction_per_aquifer(aquifer_id)
            if self.rescaled_thickness is None:
                self.rescaled_thickness = rescaled_thickness
            else:
                self.rescaled_thickness = pcr.cover(self.rescaled_thickness, rescaled_thickness)
        
        # integrating: fill gaps in the rescaled thickness with the global
        # approximation, interpolating in ln-space so thickness stays positive
        ln_aquifer_thickness  = self.mapFilling( pcr.ln(self.rescaled_thickness), pcr.ln(self.approx_thick) )
        self.aquifer_thickness = pcr.exp(ln_aquifer_thickness)
        #~ pcr.report(self.aquifer_thickness,"thick.map"); os.system("aguila thick.map")

        # cropping only in the landmask region
        if landmask is None: landmask = self.clone_map_file
        self.landmask = pcr.defined(vos.readPCRmapClone(landmask,self.clone_map_file,self.tmp_directory))
        #~ pcr.report(self.landmask,"test.map"); os.system("aguila test.map")

        self.aquifer_thickness = pcr.ifthen(self.landmask, self.aquifer_thickness)
Example #35
0
def readPCRmapClone(v,
                    cloneMapFileName,
                    tmpDir,
                    absolutePath=None,
                    isLddMap=False,
                    cover=None,
                    isNomMap=False,
                    inputEPSG="EPSG:4326",
                    outputEPSG="EPSG:4326",
                    method="near"):
    # v: inputMapFileName or floating values
    # cloneMapFileName: If the inputMap and cloneMap have different clones,
    #                   resampling will be done.
    logger.debug('read file/values: ' + str(v))
    if v == "None":
        PCRmap = str("None")
    elif not re.match(r"[0-9.-]*$", v):
        if absolutePath is not None: v = getFullPath(v, absolutePath)
        # print(v)
        sameClone = isSameClone(v, cloneMapFileName)
        if sameClone:
            PCRmap = pcr.readmap(v)
        else:
            # resample using GDAL:
            output = tmpDir + 'temp.map'
            # if no re-projection needed:
            if inputEPSG == outputEPSG or outputEPSG is None:
                warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir,
                                   isLddMap, isNomMap)
            else:
                warp = gdalwarpPCR(v, output, cloneMapFileName, tmpDir,
                                   isLddMap, isNomMap, inputEPSG, outputEPSG,
                                   method)
            # read from temporary file and delete the temporary file:
            PCRmap = pcr.readmap(output)
            if isLddMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) < 10., PCRmap)
                PCRmap = pcr.ldd(PCRmap)
            if isNomMap:
                PCRmap = pcr.ifthen(pcr.scalar(PCRmap) > 0., PCRmap)
                PCRmap = pcr.nominal(PCRmap)
            if os.path.isdir(tmpDir):
                shutil.rmtree(tmpDir)
            os.makedirs(tmpDir)
    else:
        PCRmap = pcr.scalar(float(v))
    if cover is not None:
        PCRmap = pcr.cover(PCRmap, cover)
    return PCRmap
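
# A minimal usage sketch (hypothetical file names; the first argument may be
# either a map filename or a string holding a constant value):
#
#   ksat  = readPCRmapClone("input/ksat.map", "clone.map", "tmp/", cover=0.0)
#   depth = readPCRmapClone("1.5", "clone.map", "tmp/")  # uniform map of 1.5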
Example #36
0
File: meteo.py  Project: wk1984/glofrim
    def read_forcings(self, currTimeStep):
        # reading precipitation:
        self.precipitation = vos.netcdf2PCRobjClone(\
                                  self.preFileNC,'precipitation',\
                                  str(currTimeStep.fulldate),
                                  useDoy = None,
                                  cloneMapFileName=self.cloneMap,\
                                  LatitudeLongitude = True)

        precipitationCorrectionFactor = pcr.scalar(
            1.0
        )  # Since 19 Feb 2014, Edwin removed the support for correcting precipitation.
        self.precipitation = pcr.max(0.,self.precipitation*\
                precipitationCorrectionFactor)
        self.precipitation = pcr.cover(self.precipitation, 0.0)

        # ignore very small values of precipitation (less than 0.00001 m/day or less than 0.01 kg.m-2.day-1 )
        if self.usingDailyTimeStepForcingData:
            self.precipitation = pcr.rounddown(
                self.precipitation * 100000.) / 100000.

        # reading temperature
        self.temperature = vos.netcdf2PCRobjClone(\
                                 self.tmpFileNC,'temperature',\
                                 str(currTimeStep.fulldate),
                                 useDoy = None,
                                  cloneMapFileName=self.cloneMap,\
                                  LatitudeLongitude = True)

        # Downscaling precipitation and temperature
        if self.downscalePrecipitationOption:
            self.downscalePrecipitation(currTimeStep)
        if self.downscaleTemperatureOption:
            self.downscaleTemperature(currTimeStep)

        # calculate or obtain referencePotET
        if self.refETPotMethod == 'Hamon':
            self.referencePotET = refPotET.HamonPotET(self.temperature,
                                                      currTimeStep.doy,
                                                      self.latitudes)
        if self.refETPotMethod == 'Input':
            self.referencePotET = vos.netcdf2PCRobjClone(\
                                     self.etpFileNC,'evapotranspiration',\
                                     str(currTimeStep.fulldate),
                                     useDoy = None,
                                      cloneMapFileName=self.cloneMap,\
                                      LatitudeLongitude = True)

        # Downscaling referenceETPot (based on temperature)
        if self.downscaleReferenceETPotOption: self.downscaleReferenceETPot()

        # smoothing:
        if self.forcingSmoothing == True:
            logger.info("Forcing data are smoothed.")
            self.precipitation = pcr.windowaverage(self.precipitation,
                                                   self.smoothingWindowsLength)
            self.temperature = pcr.windowaverage(self.temperature,
                                                 self.smoothingWindowsLength)
            self.referencePotET = pcr.windowaverage(
                self.referencePotET, self.smoothingWindowsLength)

        # define precipitation, temperature and referencePotET ONLY at landmask area (for reporting):
        self.precipitation = pcr.ifthen(self.landmask, self.precipitation)
        self.temperature = pcr.ifthen(self.landmask, self.temperature)
        self.referencePotET = pcr.ifthen(self.landmask, self.referencePotET)

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,\
                                          currTimeStep.month,\
                                          currTimeStep.day,\
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_dailyTot.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or currTimeStep.day == 1:
                        vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthTot.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC:

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or currTimeStep.day == 1:
                            vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                     currTimeStep.day
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthAvg.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthEnd.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or currTimeStep.doy == 1:
                        vars(self)[var + 'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaTot.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC:
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or currTimeStep.doy == 1:
                            vars(self)[var + 'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                     currTimeStep.doy
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaAvg.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaEnd.nc",\
                                         var,\
                          pcr.pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.annuaIdx-1)
Example #37
0
    def getParameterFiles(self,currTimeStep,cellArea,ldd,\
                               initial_condition_dictionary = None):

        # parameters for Water Bodies: fracWat              
        #                              waterBodyIds
        #                              waterBodyOut
        #                              waterBodyArea 
        #                              waterBodyTyp
        #                              waterBodyCap
        
        # cell surface area (m2) and ldd
        self.cellArea = cellArea
        ldd = pcr.ifthen(self.landmask, ldd)
        
        # date used for accessing/extracting water body information
        date_used = currTimeStep.fulldate
        year_used = currTimeStep.year
        if self.onlyNaturalWaterBodies == True:
            date_used = self.dateForNaturalCondition
            year_used = self.dateForNaturalCondition[0:4] 
        
        # fracWat = fraction of surface water bodies (dimensionless)
        self.fracWat = pcr.scalar(0.0)
        
        if self.useNetCDF:
            self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \
                           date_used, useDoy = 'yearly',\
                           cloneMapFileName = self.cloneMap)
        else:
            self.fracWat = vos.readPCRmapClone(\
                           self.fracWaterInp+str(year_used)+".map",
                           self.cloneMap,self.tmpDir,self.inputDir)
        
        self.fracWat = pcr.cover(self.fracWat, 0.0)
        self.fracWat = pcr.max(0.0,self.fracWat)
        self.fracWat = pcr.min(1.0,self.fracWat)
        
        self.waterBodyIds  = pcr.nominal(0)    # waterBody ids
        self.waterBodyOut  = pcr.boolean(0)    # waterBody outlets
        self.waterBodyArea = pcr.scalar(0.)    # waterBody surface areas

        # water body ids
        if self.useNetCDF:
            self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyIds = vos.readPCRmapClone(\
                self.waterBodyIdsInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)
        #
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.nominal(self.waterBodyIds))    

        # water body outlets (correcting outlet positions)
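        # (catchmenttotal of ones counts each cell's upstream cells; within a
        # water body the cell holding the areamaximum of that count is the most
        # downstream cell and is taken as the outlet)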
        wbCatchment = pcr.catchmenttotal(pcr.scalar(1),ldd)
        self.waterBodyOut = pcr.ifthen(wbCatchment ==\
                            pcr.areamaximum(wbCatchment, \
                            self.waterBodyIds),\
                            self.waterBodyIds)     # = outlet ids   
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            self.waterBodyOut)
        # TODO: Please also consider endorheic lakes!                    

        # correcting water body ids
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.subcatchment(ldd,self.waterBodyOut))
        
        # boolean map for water body outlets:   
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyOut) > 0.,\
                            pcr.boolean(1))

        # reservoir surface area (m2):
        if self.useNetCDF:
            resSfArea = 1000. * 1000. * \
                        vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \
                        date_used, useDoy = 'yearly',\
                        cloneMapFileName = self.cloneMap)
        else:
            resSfArea = 1000. * 1000. * vos.readPCRmapClone(
                   self.resSfAreaInp+str(year_used)+".map",\
                   self.cloneMap,self.tmpDir,self.inputDir)
        resSfArea = pcr.areaaverage(resSfArea,self.waterBodyIds)                        
        resSfArea = pcr.cover(resSfArea,0.)                        

        # water body surface area (m2): (lakes and reservoirs)
        self.waterBodyArea = pcr.max(pcr.areatotal(\
                             pcr.cover(\
                             self.fracWat*self.cellArea, 0.0), self.waterBodyIds),
                             pcr.areaaverage(\
                             pcr.cover(resSfArea, 0.0) ,       self.waterBodyIds))
        self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\
                             self.waterBodyArea)
                                
        # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
        self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.,
                            self.waterBodyIds)               
        self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                                   self.waterBodyOut)

        # water body types:
        # - 2 = reservoirs (regulated discharge)
        # - 1 = lakes (weirFormula)
        # - 0 = non lakes or reservoirs (e.g. wetland)
        self.waterBodyTyp = pcr.nominal(0)
        
        if self.useNetCDF:
            self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyTyp = vos.readPCRmapClone(
                self.waterBodyTypInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)

        # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs 
        #
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))    
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0,\
                            pcr.nominal(self.waterBodyTyp))    
        self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\
                                             self.waterBodyIds)     # choose only one type: either lake or reservoir                  
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))    
        self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                                   self.waterBodyTyp)

        # correcting lakes and reservoirs ids and outlets
        self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0,
                                                  self.waterBodyIds)               
        self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0,
                                                  self.waterBodyOut)

        # reservoir maximum capacity (m3):
        self.resMaxCap = pcr.scalar(0.0)
        self.waterBodyCap = pcr.scalar(0.0)

        if self.useNetCDF:
            self.resMaxCap = 1000. * 1000. * \
                             vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \
                             date_used, useDoy = 'yearly',\
                             cloneMapFileName = self.cloneMap)
        else:
            self.resMaxCap = 1000. * 1000. * vos.readPCRmapClone(\
                self.resMaxCapInp+str(year_used)+".map", \
                self.cloneMap,self.tmpDir,self.inputDir)

        self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\
                                    self.resMaxCap)
        self.resMaxCap = pcr.areaaverage(self.resMaxCap,\
                                         self.waterBodyIds)
                                         
        # water body capacity (m3): (lakes and reservoirs)
        self.waterBodyCap = pcr.cover(self.resMaxCap,0.0)               # Note: Most of lakes have capacities > 0.
        self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                                   self.waterBodyCap)
                                               
        # correcting water body types:                                  # Reservoirs that have zero capacities will be assumed as lakes.
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp) 
        self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\
                                           self.waterBodyTyp,\
                 pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\
                                           pcr.nominal(1),\
                                           self.waterBodyTyp)) 

        # final corrections:
        self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\
                                       self.waterBodyTyp)                     # make sure that all lakes and/or reservoirs have surface areas
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)                     # make sure that only types 1 and 2 will be considered in lake/reservoir functions
        self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                            self.waterBodyIds)                                # make sure that all lakes and/or reservoirs have ids
        self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\
                                                  self.waterBodyOut)          # make sure that all lakes and/or reservoirs have outlets
        
        
        # for a natural run (self.onlyNaturalWaterBodies == True) 
        # which uses only the year 1900, assume all reservoirs are lakes
        if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition:
            logger.info("Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes.")
            self.waterBodyTyp = \
             pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                        pcr.nominal(1))                         
        
        # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
        test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\
               pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds))
        a,b,c = vos.getMinMaxMean(pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
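        # cover(scalar(test), 1.0) - 1.0 is 0.0 where the test passes (or where
        # there is no water body) and -1.0 where a water body fails the test,
        # so a nonzero minimum or maximum flags missing information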
        threshold = 1e-3
        if abs(a) > threshold or abs(b) > threshold:
            logger.warning("Missing information in some lakes and/or reservoirs.")

        # at the beginning of simulation period (timeStepPCR = 1)
        # - we have to define/get the initial conditions 
        #
        if currTimeStep.timeStepPCR == 1:
            self.getICs(initial_condition_dictionary)
        
        # For each new reservoir (introduced at the beginning of the year)
        # initiating storage, average inflow and outflow
        #
        self.waterBodyStorage = pcr.cover(self.waterBodyStorage,0.0)
        self.avgInflow        = pcr.cover(self.avgInflow ,0.0)
        self.avgOutflow       = pcr.cover(self.avgOutflow,0.0)

        # cropping only in the landmask region:
        self.fracWat           = pcr.ifthen(self.landmask, self.fracWat         )
        self.waterBodyIds      = pcr.ifthen(self.landmask, self.waterBodyIds    ) 
        self.waterBodyOut      = pcr.ifthen(self.landmask, self.waterBodyOut    )
        self.waterBodyArea     = pcr.ifthen(self.landmask, self.waterBodyArea   )
        self.waterBodyTyp      = pcr.ifthen(self.landmask, self.waterBodyTyp    )  
        self.waterBodyCap      = pcr.ifthen(self.landmask, self.waterBodyCap    )
        self.waterBodyStorage  = pcr.ifthen(self.landmask, self.waterBodyStorage)
        self.avgInflow         = pcr.ifthen(self.landmask, self.avgInflow       )
        self.avgOutflow        = pcr.ifthen(self.landmask, self.avgOutflow      )
Example #38
0
    def checkWaterBalance(self, storesAtBeginning, storesAtEnd):
        # for the entire modules: snow + interception + soil + groundwater + waterDemand
        # except: river/routing

        irrGrossDemand = pcr.ifthen(self.landmask,
                                    self.landSurface.irrGrossDemand)  # unit: m

        nonIrrGrossDemand = \
            pcr.ifthen(self.landmask,
                       self.landSurface.nonIrrGrossDemand)     # unit: m

        precipitation = pcr.ifthen(self.landmask,
                                   self.meteo.precipitation)  # unit: m

        surfaceWaterInf = pcr.ifthen(self.landmask,
                                     self.groundwater.surfaceWaterInf)

        surfaceWaterAbstraction = \
            pcr.ifthen(self.landmask,
                       self.landSurface.actSurfaceWaterAbstract)

        nonFossilGroundwaterAbs = pcr.ifthen(
            self.landmask, self.groundwater.nonFossilGroundwaterAbs)

        unmetDemand = pcr.ifthen(
            self.landmask, self.groundwater.unmetDemand
        )  # PS: We assume that unmetDemand is extracted (only) to satisfy local demand.

        runoff = pcr.ifthen(self.landmask, self.routing.runoff)

        actualET = pcr.ifthen(self.landmask, self.landSurface.actualET)
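
        # The check below presumably verifies, per cell and within the given
        # threshold (m):
        #   precipitation + surfaceWaterInf + irrGrossDemand
        #     = actualET + runoff + nonFossilGroundwaterAbs
        #       + (storesAtEnd - storesAtBeginning)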

        vos.waterBalanceCheck(
            [precipitation, surfaceWaterInf, irrGrossDemand],
            [actualET, runoff, nonFossilGroundwaterAbs], [storesAtBeginning],
            [storesAtEnd],
            'all modules (including water demand), but except river/routing',
            True,
            self._modelTime.fulldate,
            threshold=1e-3)

        if self.landSurface.usingAllocSegments:

            allocSurfaceWaterAbstract = \
                pcr.ifthen(self.landmask,
                           self.landSurface.allocSurfaceWaterAbstract)

            allocNonFossilGroundwaterAbs = \
                pcr.ifthen(self.landmask,
                           self.groundwater.allocNonFossilGroundwater)

            # PS: We assume that unmetDemand is extracted (only) to satisfy local demand.
            allocUnmetDemand = unmetDemand

            segTotalDemand = pcr.areatotal(
                pcr.cover((irrGrossDemand + nonIrrGrossDemand) *
                          self.routing.cellArea, 0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocSurfaceWaterAbstract = pcr.areatotal(
                pcr.cover(allocSurfaceWaterAbstract * self.routing.cellArea,
                          0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocNonFossilGroundwaterAbs = pcr.areatotal(
                pcr.cover(allocNonFossilGroundwaterAbs * self.routing.cellArea,
                          0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            segAllocUnmetDemand = pcr.areatotal(
                pcr.cover(allocUnmetDemand * self.routing.cellArea, 0.0),
                self.landSurface.allocSegments) / self.landSurface.segmentArea

            vos.waterBalanceCheck(
                [segTotalDemand], [
                    segAllocSurfaceWaterAbstract,
                    segAllocNonFossilGroundwaterAbs, segAllocUnmetDemand
                ], [pcr.scalar(0.0)], [pcr.scalar(0.0)],
                'Water balance error in water allocation (per zone). The error here is most likely due to rounding in the 32-bit PCRaster implementation.',
                True,
                self._modelTime.fulldate,
                threshold=5e-3)
        else:

            vos.waterBalanceCheck([irrGrossDemand, nonIrrGrossDemand], [
                surfaceWaterAbstraction, nonFossilGroundwaterAbs, unmetDemand
            ], [pcr.scalar(0.0)], [pcr.scalar(0.0)],
                                  'Water balance error in water allocation.',
                                  True,
                                  self._modelTime.fulldate,
                                  threshold=1e-3)
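Each vos.waterBalanceCheck call above tests the same identity: the sum of incoming fluxes minus the sum of outgoing fluxes must equal the storage change, within a threshold. A plain-numpy sketch of that identity (names, shapes and values are illustrative, not the vos implementation):

# water balance identity: sum(ins) - sum(outs) == storesEnd - storesBegin
import numpy as np

def water_balance_ok(ins, outs, stores_begin, stores_end, threshold=1e-3):
    """ins/outs: lists of flux fields [m]; stores_*: lists of storage fields [m]."""
    delta = sum(stores_end) - sum(stores_begin)
    residual = sum(ins) - sum(outs) - delta
    return bool(np.all(np.abs(residual) <= threshold))

p = np.full((2, 2), 0.010)   # precipitation [m]
et = np.full((2, 2), 0.004)  # actual evapotranspiration [m]
q = np.full((2, 2), 0.003)   # runoff [m]
s0 = np.zeros((2, 2))        # storage at the beginning
s1 = np.full((2, 2), 0.003)  # storage at the end
assert water_balance_ok([p], [et, q], [s0], [s1])
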
Example #39
def main(
        source,
        destination,
        inifile,
        dem_in,
        rivshp,
        catchshp,
        gaugeshp=None,
        landuse=None,
        soil=None,
        lai=None,
        other_maps=None,
        logfilename="wtools_static_maps.log",
        verbose=True,
        clean=True,
        alltouch=False,
        outlets=([], []),
):
    # parse other maps into an array
    if other_maps is not None:
        if isinstance(other_maps, str):
            print(other_maps)
            other_maps = (other_maps.replace(" ", "").replace("[", "").replace(
                "]", "").split(","))

    source = os.path.abspath(source)
    clone_map = os.path.join(source, "mask.map")
    clone_shp = os.path.join(source, "mask.shp")
    clone_prj = os.path.join(source, "mask.prj")

    if None in (rivshp, catchshp, dem_in):
        msg = """The following files are compulsory:
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        # NOTE: assumes an argument `parser` object exists in the enclosing scope
        parser.print_help()
        sys.exit(1)
    if (inifile is not None) and (not os.path.exists(inifile)):
        print("path to ini file cannot be found")
        sys.exit(1)
    if not os.path.exists(rivshp):
        print("path to river shape cannot be found")
        sys.exit(1)
    if not os.path.exists(catchshp):
        print("path to catchment shape cannot be found")
        sys.exit(1)
    if not os.path.exists(dem_in):
        print("path to DEM cannot be found")
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose)

    # create directories # TODO: check if workdir is still necessary, try to
    # keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if os.path.isdir(destination) and destination != source:
        shutil.rmtree(destination)
    if destination != source:
        os.makedirs(destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            "Clone file {:s} not found. Please run create_grid first.".format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = wt.gdal_readmap(clone_map, "GTiff")
        trans = wt.get_geotransform(clone_map)
        extent = wt.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wt.get_projection(clone_map)
        unit_clone = srs.GetAttrValue("UNIT").lower()

    # READ CONFIG FILE
    # open config-file
    if inifile is None:
        config = ConfigParser.SafeConfigParser()
        config.optionxform = str
    else:
        config = wt.OpenConf(inifile)

    # read settings
    snapgaugestoriver = wt.configget(config,
                                     "settings",
                                     "snapgaugestoriver",
                                     True,
                                     datatype="boolean")
    burnalltouching = wt.configget(config,
                                   "settings",
                                   "burncatchalltouching",
                                   True,
                                   datatype="boolean")
    burninorder = wt.configget(config,
                               "settings",
                               "burncatchalltouching",  # NOTE: same key as burnalltouching above
                               False,
                               datatype="boolean")
    verticetollerance = wt.configget(config,
                                     "settings",
                                     "vertice_tollerance",
                                     0.0001,
                                     datatype="float")
    """ read parameters """
    burn_outlets = wt.configget(config,
                                "parameters",
                                "burn_outlets",
                                10000,
                                datatype="int")
    burn_rivers = wt.configget(config,
                               "parameters",
                               "burn_rivers",
                               200,
                               datatype="int")
    burn_connections = wt.configget(config,
                                    "parameters",
                                    "burn_connections",
                                    100,
                                    datatype="int")
    burn_gauges = wt.configget(config,
                               "parameters",
                               "burn_gauges",
                               100,
                               datatype="int")
    minorder = wt.configget(config,
                            "parameters",
                            "riverorder_min",
                            3,
                            datatype="int")
    try:
        percentiles = np.array(
            config.get("parameters", "statisticmaps",
                       "0, 100").replace(" ", "").split(","),
            dtype="float",
        )
    except ConfigParser.NoOptionError:
        percentiles = [0.0, 100.0]
    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == "degree":
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_degree",
                                   0.0005,
                                   datatype="float")
    elif (unit_clone == "metre") or (unit_clone == "meter"):
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_metre",
                                   50,
                                   datatype="float")

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(destination, "clone_highres.tif")
    # make a highres clone as well!
    wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")

    # read mask location (optional)
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123
    # in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info("Resampling dem from {:s} to {:s}".format(
        os.path.abspath(dem_in), os.path.join(destination, dem_map)))
    wt.gdal_warp(
        dem_in,
        clone_map,
        os.path.join(destination, dem_map),
        format="PCRaster",
        gdal_interp=gdalconst.GRA_Average,
    )
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wt.get_projection(dem_in)
    except Exception:
        logger.warning(
            "No projection found in DEM, assuming WGS 1984 lat long")
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():

    wt.windowstats(
        dem_in,
        len(yax),
        len(xax),
        trans,
        srs,
        destination,
        percentiles,
        transform=clone2dem_transform,
        logger=logger,
    )

    ## read catchment shape-file to create catchment map
    src = rasterio.open(clone_map)
    shapefile = fiona.open(catchshp, "r")
    catchment_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(catchment_shapes,
                               out_shape=src.shape,
                               all_touched=True,
                               transform=src.transform)
    catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0)

    ## read river shape-file and create burn layer
    shapefile = fiona.open(rivshp, "r")
    river_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(river_shapes,
                               out_shape=src.shape,
                               all_touched=False,
                               transform=src.transform)
    rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0)
    riverdem = pcr.scalar(rivers) * pcr.readmap(
        os.path.join(destination, dem_map))
    pcr.setglobaloption("lddin")
    riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35)

    riveroutlet = pcr.cover(
        pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0)
    burn_layer = pcr.cover(
        (pcr.scalar(
            pcr.ifthen(
                pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))) - 1)
        * 1000 + riveroutlet,
        0,
    )

    outlets_x, outlets_y = outlets
    n_outlets = len(outlets_x)
    logger.info("Number of outlets: {}".format(n_outlets))
    if n_outlets >= 1:
        outlets_map_numbered = tr.points_to_map(pcr.scalar(0), outlets_x,
                                                outlets_y, 0.5)
        outlets_map = pcr.boolean(outlets_map_numbered)
        # snap outlets to closest river (max 1 cell closer to river)
        outlets_map = pcr.boolean(
            pcr.cover(tr.snaptomap(pcr.ordinal(outlets_map), rivers), 0))

    ## create ldd per catchment
    logger.info("Calculating ldd")
    ldddem = pcr.scalar(clone_map)

    # per subcatchment, burn dem, then create modified dem that fits the ldd of the subcatchment
    # this ldd dem is merged over catchments, to create a global ldd that abides to the subcatchment boundaries
    for idx, shape in enumerate(catchment_shapes):
        logger.info("Computing ldd for catchment " + str(idx + 1) + "/" +
                    str(len(catchment_shapes)))
        image = features.rasterize([shape],
                                   out_shape=src.shape,
                                   all_touched=True,
                                   transform=src.transform)
        catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0)
        dem_burned_catchment = (
            pcr.readmap(os.path.join(destination, dem_map)) *
            pcr.scalar(catchment_domain) * catchment) - burn_layer
        # ldddem_catchment = pcr.lddcreatedem(
        #    dem_burned_catchment, 1e35, 1e35, 1e35, 1e35)
        ldddem = pcr.cover(ldddem, dem_burned_catchment)

    pcr.report(ldddem, os.path.join(destination, "ldddem.map"))

    wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    if n_outlets >= 1:
        # set outlets to pit
        wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd)
        wflow_ldd = pcr.lddrepair(wflow_ldd)

    pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map"))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(destination, dem_map))
    pcr.report(streamorder, os.path.join(destination, streamorder_map))
    pcr.report(river, os.path.join(destination, river_map))

    # deal with your catchments
    if gaugeshp is None:
        logger.info("No gauges defined, using outlets instead")
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see StaticMaps.py, line 492-499)

    # since the products here (river length fraction) are not yet used
    # this is disabled for now, as it also takes a lot of computation time
    if False:
        # report river length
        # make a high resolution empty map
        dem_hr_file = os.path.join(destination, "dem_highres.tif")
        burn_hr_file = os.path.join(destination, "burn_highres.tif")
        demburn_hr_file = os.path.join(destination, "demburn_highres.map")
        riv_hr_file = os.path.join(destination, "riv_highres.map")
        wt.gdal_warp(dem_in, clone_hr, dem_hr_file)
        # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
        # open the shape layer
        ds = ogr.Open(rivshp)
        lyr = ds.GetLayer(0)
        wt.ogr_burn(
            lyr,
            clone_hr,
            -100,
            file_out=burn_hr_file,
            format="GTiff",
            gdal_type=gdal.GDT_Float32,
            fill_value=0,
        )
        # read dem and burn values and add
        xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff")
        burn_hr[burn_hr == fill] = 0
        xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff")
        dem_hr[dem_hr == fill] = np.nan
        demburn_hr = dem_hr + burn_hr
        demburn_hr[np.isnan(demburn_hr)] = -9999
        wt.gdal_writemap(demburn_hr_file, "PCRaster", xax_hr, yax_hr,
                         demburn_hr, -9999.)
        pcr.setclone(demburn_hr_file)
        demburn_hr = pcr.readmap(demburn_hr_file)

        logger.info("Calculating ldd to determine river length")
        ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
        pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map"))
        pcr.setglobaloption("unitcell")
        riv_hr = pcr.scalar(
            pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
        pcr.report(riv_hr, riv_hr_file)
        pcr.setglobaloption("unittrue")
        pcr.setclone(clone_map)
        logger.info("Computing river length")
        wt.windowstats(
            riv_hr_file,
            len(yax),
            len(xax),
            trans,
            srs,
            destination,
            stat="fact",
            transform=False,
            logger=logger,
        )
        # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(
        pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)),
        os.path.join(destination, outlet_map),
    )

    # report subcatchment map
    subcatchment = pcr.subcatchment(wflow_ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(destination, subcatch_map))

    # Report land use map
    if landuse is None:
        logger.info(
            "No land use map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, landuse_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map))
    else:
        logger.info("Resampling land use from {:s} to {:s}".format(
            os.path.abspath(landuse),
            os.path.join(destination, os.path.abspath(landuse_map)),
        ))
        wt.gdal_warp(
            landuse,
            clone_map,
            os.path.join(destination, landuse_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    # report soil map
    if soil is None:
        logger.info("No soil map used. Preparing {:s} with only ones.".format(
            os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        logger.info("Resampling soil from {:s} to {:s}".format(
            os.path.abspath(soil),
            os.path.join(destination, os.path.abspath(soil_map)),
        ))
        wt.gdal_warp(
            soil,
            clone_map,
            os.path.join(destination, soil_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    if lai is None:
        logger.info(
            "No vegetation LAI maps used. Preparing default maps {:s} with only ones."
            .format(os.path.join(destination, soil_map)))
        # NOTE: the default map is written to soil_map; a dedicated default LAI map
        # would be expected here.
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        dest_lai = os.path.join(destination, "clim")
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   "LAI00000.{:03d}".format(month + 1))
            logger.info("Resampling vegetation LAI from {:s} to {:s}".format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            wt.gdal_warp(
                lai_in,
                clone_map,
                lai_out,
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Bilinear,
                gdal_type=gdalconst.GDT_Float32,
            )

    # report other maps
    if other_maps is None:
        logger.info("No other maps used. Skipping other maps.")
    else:
        logger.info("Resampling list of other maps...")
        for map_file in other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info("Resampling a map from {:s} to {:s}".format(
                os.path.abspath(map_file),
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
            ))
            wt.gdal_warp(
                map_file,
                clone_map,
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Mode,
                gdal_type=gdalconst.GDT_Float32,
            )

    if clean:
        wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "clim", "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")),
                      logger=logger)
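The catchment and river grids in main() follow one pattern: read geometries with fiona, rasterize them onto the clone grid with rasterio.features, and hand the result to PCRaster. A condensed sketch of that pattern, with hypothetical paths and assuming the PCRaster clone matches clone.tif:

# rasterize-to-PCRaster sketch (paths are hypothetical)
import fiona
import rasterio
from rasterio import features
import pcraster as pcr

pcr.setclone("clone.map")                 # assumed PCRaster clone matching clone.tif
src = rasterio.open("clone.tif")
with fiona.open("rivers.shp", "r") as shp:
    shapes = [feat["geometry"] for feat in shp]

img = features.rasterize(shapes, out_shape=src.shape,
                         all_touched=False, transform=src.transform)
rivers = pcr.numpy2pcr(pcr.Nominal, img.astype("int32"), 0)  # 0 = missing value
pcr.report(rivers, "rivers.map")
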
Example #40
def main():
    clone_map = os.path.join("mask", "mask.map")
    clone_shp = os.path.join("mask", "mask.shp")
    clone_prj = os.path.join("mask", "mask.prj")
    workdir = "work\\"
    resultdir = "staticmaps\\"
    ''' read commandline arguments '''
    argv = sys.argv
    clone_EPSG = False

    try:
        opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA')
    except getopt.error:
        print('error')
        Usage()
        sys.exit(1)

    inifile = None
    rivshp = None
    catchshp = None
    dem_in = None
    landuse = None
    soiltype = None
    clean = False
    gaugeshp = None
    alltouching = False

    for o, a in opts:
        if o == '-i': inifile = a
        if o == '-p': clone_EPSG = 'EPSG:' + a
        if o == '-r': rivshp = a
        if o == '-c': catchshp = a
        if o == '-d': dem_in = a
        if o == '-l': landuse = a
        if o == '-s': soiltype = a
        if o == '-C': clean = True
        if o == '-g': gaugeshp = a
        if o == '-A': alltouching = True

    if inifile is None or rivshp is None or catchshp is None or dem_in is None:
        print('the following files are compulsory:')
        print(' - ini-file')
        print(' - DEM (raster)')
        print(' - river (shape)')
        print(' - catchment (shape)')
        Usage()
        sys.exit(1)

    if landuse is None:
        print('no raster with landuse classifications is specified. 1 class will be applied for the entire domain')

    if soiltype is None:
        print('no raster with soil classifications is specified. 1 class will be applied for the entire domain')
    ''' read mask '''
    if not os.path.exists(clone_map):
        print('Mask not found. Make sure the file mask/mask.map exists')
        print('This file is usually created with the CreateGrid script')
        sys.exit(1)
    else:
        pcr.setclone(clone_map)
        ds = gdal.Open(clone_map, GA_ReadOnly)
        clone_trans = ds.GetGeoTransform()
        cellsize = clone_trans[1]
        clone_rows = ds.RasterYSize
        clone_columns = ds.RasterXSize
        extent_mask = [
            clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize,
            clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3]
        ]
        xmin, ymin, xmax, ymax = map(str, extent_mask)
        ds = None
        ones = pcr.scalar(pcr.readmap(clone_map))
        zeros = ones * 0
        empty = pcr.ifthen(ones == 0, pcr.scalar(0))
    ''' read projection from mask.shp '''
    # TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
    if not os.path.exists(clone_prj):
        print('please add prj-file to mask.shp')
        sys.exit(1)
    if os.path.exists(clone_shp):
        ds = ogr.Open(clone_shp)
        file_att = os.path.splitext(os.path.basename(clone_shp))[0]
        lyr = ds.GetLayerByName(file_att)
        spatialref = lyr.GetSpatialRef()
        if spatialref is not None:
            srs_clone = osr.SpatialReference()
            srs_clone.ImportFromWkt(spatialref.ExportToWkt())
            srs_clone.AutoIdentifyEPSG()
            unit_clone = False
            unit_clone = srs_clone.GetAttrValue('UNIT').lower()
            #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1)
            # TODO: fix hard EPSG code below
            clone_EPSG = 'EPSG:' + '4167'
            print('EPSG-code is read from mask.shp: ' + clone_EPSG)
            spatialref = None
    if not clone_EPSG:
        print('EPSG-code cannot be read from mask.shp')
        print('please add prj-file to mask.shp or specify on command line')
        print('e.g. -p EPSG:4326 (for WGS84 lat lon projection)')

    ds = None
    clone_EPSG_int = int(clone_EPSG[5:])
    ''' open config-file '''
    config = wt.OpenConf(inifile)
    ''' read settings '''
    snapgaugestoriver = bool(
        int(wt.configget(config, "settings", "snapgaugestoriver", "1")))
    burnalltouching = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "1")))
    burninorder = bool(
        int(wt.configget(config, "settings", "burncatchalltouching",
                         "0")))  # NOTE: reads the same key as burnalltouching
    verticetollerance = float(
        wt.configget(config, "settings", "vertice_tollerance", "0.0001"))
    ''' read parameters '''
    burn_outlets = int(
        wt.configget(config, "parameters", "burn_outlets", 10000))
    burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200))
    burn_connections = int(
        wt.configget(config, "parameters", "burn_connections", 100))
    burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100))
    minorder = int(wt.configget(config, "parameters", "riverorder_min", 3))
    # parse the percentile list without exec (mirrors the parsing in Example #39)
    percentile = np.array(
        str(wt.configget(config, "parameters", "statisticmaps",
                         "0, 100")).replace(" ", "").split(","),
        dtype="float")
    if not unit_clone:
        print('failed to read unit (meter or degree) from mask projection')
        unit_clone = str(wt.configget(config, "settings", "unit", 'meter'))
        print('unit read from settings: ' + unit_clone)
    if unit_clone == 'degree':
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_degree", 0.0005))
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_metre", 50))

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    ''' read staticmap locations '''
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")
    ''' read mask location (optional) '''
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)
    ''' create directories '''
    if os.path.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)
    os.makedirs(resultdir)
    ''' Preparation steps '''
    zero_map = workdir + "zero.map"
    zero_tif = workdir + "zero.tif"
    pcr.report(zeros, zero_map)
    # TODO: replace gdal_translate call
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, zero_tif))
    pcr.setglobaloption("lddin")
    ''' resample DEM '''
    dem_resample = workdir + "dem_resampled.tif"
    ds = gdal.Open(dem_in, GA_ReadOnly)
    band = ds.GetRasterBand(1)
    nodata = band.GetNoDataValue()
    proj = ds.GetGeoTransform()
    cellsize_dem = proj[1]
    ''' read DEM projection '''
    spatialref = None
    spatialref = ds.GetProjection()
    if spatialref:  # GetProjection() returns '' when no projection is defined
        srs = osr.SpatialReference()
        srs.ImportFromWkt(spatialref)
        srs.AutoIdentifyEPSG()
        dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
        print('EPSG-code is read from ' + os.path.basename(dem_in) + ': ' +
              dem_EPSG)
        spatialref = None
        dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)])
        srs_DEM = osr.SpatialReference()
        srs_DEM.ImportFromEPSG(dem_EPSG_int)
        clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM)
    else:
        dem_EPSG = clone_EPSG
        print('No projection defined for ' + os.path.basename(dem_in))
        print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    ds = None
    print('Resampling DEM...')
    if nodata is None:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin,
              xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata',
              str(-9999), '-r', 'cubic', dem_in, dem_resample))
    else:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-srcnodata', str(nodata), '-dstnodata',
              str(nodata), '-r', 'cubic', dem_in, dem_resample))
    ''' create dem.map and statistic maps '''
    dem_resample_map = resultdir + dem_map
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', dem_resample, dem_resample_map))
    print('Computing DEM statistics ....')
    stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans,
                           srs_clone, resultdir, percentile)
    ''' burn DEM '''
    ds = ogr.Open(rivshp)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    rivshp_EPSG = clone_EPSG
    print('No projection defined for ' + file_att + '.shp')
    print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    # strip rivers to nodes
    xminc = str(float(xmin) + 0.5 * cellsize)
    yminc = str(float(ymin) + 0.5 * cellsize)
    xmaxc = str(float(xmax) - 0.5 * cellsize)
    ymaxc = str(float(ymax) - 0.5 * cellsize)
    if rivshp_EPSG == clone_EPSG:
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivshp))
    else:
        rivprojshp = workdir + 'rivshape_proj.shp'
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, rivprojshp, rivshp))
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivprojshp))

    rivshp = rivclipshp

    #### BURNING BELOW ####

    # TODO: check if extraction can be done within memory and return a burn layer
    shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int,
                            cellsize * verticetollerance, workdir)

    outlets = shapes[1]
    connections = shapes[2]
    outlets_att = os.path.splitext(os.path.basename(outlets))[0]
    connections_att = os.path.splitext(os.path.basename(connections))[0]
    dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0]
    connections_tif = workdir + connections_att + ".tif"
    outlets_tif = workdir + outlets_att + ".tif"
    # TODO: make the burning in memory
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, connections_tif))
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets,
          outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections,
          connections_tif))

    # convert rivers to order
    rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
    rivers_tif = workdir + rivshp_att + ".tif"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, rivers_tif))
    if burninorder:  # make river shape with an order attribute
        OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                                  cellsize * verticetollerance, workdir)
        wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
    else:
        call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp,
              rivers_tif))

    # convert 2 maps
    connections_map = workdir + connections_att + ".map"
    rivers_map = workdir + rivshp_att + ".map"
    outlets_map = workdir + outlets_att + ".map"
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', connections_tif, connections_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', rivers_tif, rivers_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', outlets_tif, outlets_map))

    # burn the layers in DEM
    outletsburn = pcr.scalar(
        pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
    connectionsburn = pcr.scalar(
        pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
    riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
    ldddem = pcr.cover(dem_resample_map,
                       pcr.ifthen(riverburn > 0, pcr.scalar(0)))
    ldddem = ldddem - outletsburn - connectionsburn - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    pcr.report(ldddem, workdir + "dem_burn.map")
    ''' create ldd for multi-catchments '''
    ldd = pcr.ldd(empty)
    # reproject catchment shape-file
    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    catchshp_EPSG = clone_EPSG
    print('No projection defined for ' + file_att + '.shp')
    print('Assumed to be the same as model projection (' + clone_EPSG + ')')

    if catchshp_EPSG != clone_EPSG:
        catchprojshp = workdir + 'catchshape_proj.shp'
        call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
              catchprojshp, catchshp))
        catchshp = catchprojshp
    ds.Destroy()

    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)
    shp_driver = ogr.GetDriverByName("ESRI Shapefile")  # assumed shapefile output driver
    TEMP_out = shp_driver.CreateDataSource(workdir + "temp.shp")
    if srs is not None:
        TEMP_LYR = TEMP_out.CreateLayer("temp",
                                        srs,
                                        geom_type=ogr.wkbMultiPolygon)
    else:
        TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
    TEMP_LYR.CreateField(fieldDef)

    for i in range(lyr.GetFeatureCount()):
        orgfeature = lyr.GetFeature(i)
        geometry = orgfeature.geometry()
        feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
        feature.SetGeometry(geometry)
        feature.SetField("ID", str(i + 1))
        TEMP_LYR.CreateFeature(feature)
    TEMP_out.Destroy()
    ds.Destroy()

    # rasterize catchment map
    catchments_tif = workdir + "catchments.tif"
    catchments_map = workdir + "catchments.map"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map,
          catchments_tif))
    if alltouching:
        call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
              workdir + 'temp.shp', catchments_tif))
    else:
        call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp',
              catchments_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
          catchments_tif, catchments_map))
    catchments = pcr.readmap(catchments_map)
    riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
    rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
    #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0))
    catchments = pcr.cover(
        pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
        pcr.ifthen(
            riverburn > 0,
            pcr.ordinal(
                pcr.spreadzone(pcr.nominal(catchments),
                               pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
    rivercatch_map = workdir + "catchments_river.map"
    catchclip_map = workdir + "catchments_clip.map"
    pcr.report(rivercatch, rivercatch_map)
    pcr.report(catchments, catchclip_map)

    ds = ogr.Open(workdir + "temp.shp")
    lyr = ds.GetLayerByName("temp")

    print('calculating ldd')
    for i in range(lyr.GetFeatureCount()):
        feature = lyr.GetFeature(i)
        catch = int(feature.GetField("ID"))
        print("calculating ldd for catchment: " + str(i + 1) + "/" +
              str(lyr.GetFeatureCount()) + "....")
        ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                              catchments)) * 0 + 1 * ldddem
        ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                                   float("1E35"), float("1E35"))
        ldd = pcr.cover(ldd, ldd_select)
    pcr.report(ldd, resultdir + ldd_map)
    ds.Destroy()
    ''' report stream order, river and dem '''
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
    dem_resample_map = pcr.cover(dem_resample_map,
                                 pcr.scalar(river) * 0 + mindem)
    pcr.report(dem_resample_map, resultdir + dem_map)
    pcr.report(streamorder, resultdir + streamorder_map)
    pcr.report(river, resultdir + river_map)
    ''' deal with your catchments '''
    if gaugeshp is None:
        print('No gauges defined, using outlets instead')
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, resultdir + gauges_map)


#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
#
#    # reproject gauge shape if necessary
#    if not gaugeshp_EPSG == clone_EPSG:
#        gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#        call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp))
#        gaugeshp = gaugeprojshp
#
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    gaugestif = workdir + file_att + '.tif'
#    gaugesmap = workdir + file_att + '.map'
#    call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif))
#    call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif))
#    call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap))
#    gaugelocs = pcr.readmap(gaugesmap)
#    snapgaugestoriver = True
#
#    if snapgaugestoriver:
#        print "Snapping gauges to river"
#        gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#        gauges= wt.snaptomap(pcr.ordinal(gauges),river)
#
#    gaugesmap = pcr.ifthen(gauges > 0, gauges)
    ''' report riverlengthfrac '''
    riv_hr = workdir + 'river_highres.tif'
    wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
    print('Computing river length...')
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns,
                                 clone_trans, srs_clone, resultdir, 'frac')
    ''' report outlet map '''
    pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
               resultdir + outlet_map)
    ''' report  map '''
    catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
    pcr.report(catchment, resultdir + catchment_map)
    ''' report subcatchment map '''
    subcatchment = pcr.subcatchment(ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)
    ''' report landuse map '''
    if landuse is None:
        pcr.report(pcr.nominal(ones), resultdir + landuse_map)
    else:
        landuse_resample = workdir + 'landuse.tif'
        landuse_map = resultdir + landuse_map
        transform = wt.GetRasterTranform(landuse, srs_clone)
        if not transform[0]:
            call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        else:
            call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              landuse_resample, landuse_map))
        landuse_work = pcr.readmap(landuse_map)
        pcr.report(pcr.nominal(landuse_work), landuse_map)
    ''' report soil map '''
    if soiltype is None:
        pcr.report(pcr.nominal(ones), resultdir + soil_map)
    else:
        soiltype_resample = workdir + 'soiltype.tif'
        soil_map = resultdir + soil_map
        #transform = wt.GetRasterTranform(soiltype,srs_clone)
        #        if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
              clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-r', 'mode', soiltype, soiltype_resample))
        #        else:
        #        call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              soiltype_resample, soil_map))
        soiltype_work = pcr.readmap(soil_map)
        pcr.report(pcr.nominal(soiltype_work), soil_map)

    if clean:
        wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
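The loop over catchment features above is the heart of the multi-catchment LDD: derive a drainage network per catchment on the burned DEM, then merge the pieces with pcr.cover. A distilled sketch with hypothetical map names and catchment ids:

# per-catchment LDD merge sketch (map names and ids are hypothetical)
import pcraster as pcr

pcr.setclone("mask/mask.map")                            # assumed clone
catchments = pcr.ordinal(pcr.readmap("catchments.map"))  # catchment ids
ldddem = pcr.readmap("dem_burn.map")                     # burned DEM

ldd = None
for catch_id in (1, 2, 3):          # example ids; normally taken from the shapefile
    dem_catch = pcr.ifthen(catchments == catch_id, ldddem)
    ldd_catch = pcr.lddcreate(dem_catch, 1e35, 1e35, 1e35, 1e35)
    ldd = ldd_catch if ldd is None else pcr.cover(ldd, ldd_catch)

pcr.report(pcr.lddrepair(ldd), "wflow_ldd.map")
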
Example #41
    def read_forcings(self, currTimeStep):

        # reading precipitation:
        if self.precipitation_set_per_year:
            #~ print currTimeStep.year
            nc_file_per_year = self.preFileNC % (float(
                currTimeStep.year), float(currTimeStep.year))
            self.precipitation = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, 'precipitation',\
                                      str(currTimeStep.fulldate),
                                      useDoy = None,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            # reading precipitation when ISI-MIP forcing is used
            precipitation = vos.netcdf2PCRobjClone(\
                                      self.preFileNC, 'pr',\
                                      str(currTimeStep.fulldate),
                                      useDoy = None,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
            # unit conversion: kg m-2 s-1 to m d-1
            # (density of water taken as 998 kg/m3)
            self.precipitation = (precipitation / 998.0) * (60. * 60. * 24)

        # Since 19 Feb 2014, Edwin removed the support for correcting precipitation.
        precipitationCorrectionFactor = pcr.scalar(1.0)
        self.precipitation = pcr.max(
            0., self.precipitation * precipitationCorrectionFactor)
        self.precipitation = pcr.cover(self.precipitation, 0.0)

        # ignore very small values of precipitation (less than 0.00001 m/day or less than 0.01 kg.m-2.day-1 )
        if self.usingDailyTimeStepForcingData:
            self.precipitation = pcr.rounddown(
                self.precipitation * 100000.) / 100000.

        # reading temperature
        if self.temperature_set_per_year:
            nc_file_per_year = self.tmpFileNC % (int(
                currTimeStep.year), int(currTimeStep.year))
            self.temperature = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, 'temperature',\
                                      str(currTimeStep.fulldate),
                                      useDoy = None,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            # variable name is 'tas' when using the ISI-MIP projection
            temperature = vos.netcdf2PCRobjClone(\
                                 self.tmpFileNC,'tas',\
                                 str(currTimeStep.fulldate),
                                 useDoy = None,
                                 cloneMapFileName=self.cloneMap,\
                                 LatitudeLongitude = True)
            # unit conversion: K to deg C
            self.temperature = temperature - 273.15

        # Downscaling precipitation and temperature
        if self.downscalePrecipitationOption:
            self.downscalePrecipitation(currTimeStep)
        if self.downscaleTemperatureOption:
            self.downscaleTemperature(currTimeStep)

        # calculate or obtain referencePotET
        if self.refETPotMethod == 'Hamon':
            self.referencePotET = refPotET.HamonPotET(self.temperature,
                                                      currTimeStep.doy,
                                                      self.latitudes)
        if self.refETPotMethod == 'Input':
            if self.refETPotFileNC_set_per_year:
                nc_file_per_year = self.etpFileNC % (int(
                    currTimeStep.year), int(currTimeStep.year))
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, 'evapotranspiration',\
                                      str(currTimeStep.fulldate),
                                      useDoy = None,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
            else:
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      self.etpFileNC,'evapotranspiration',\
                                      str(currTimeStep.fulldate),
                                      useDoy = None,
                                      cloneMapFileName=self.cloneMap,\
                                      LatitudeLongitude = True)

        # Downscaling referenceETPot (based on temperature)
        if self.downscaleReferenceETPotOption: self.downscaleReferenceETPot()

        # smoothing:
        if self.forcingSmoothing == True:
            logger.debug("Forcing data are smoothed.")
            self.precipitation = pcr.windowaverage(self.precipitation,
                                                   self.smoothingWindowsLength)
            self.temperature = pcr.windowaverage(self.temperature,
                                                 self.smoothingWindowsLength)
            self.referencePotET = pcr.windowaverage(
                self.referencePotET, self.smoothingWindowsLength)

        # make sure precipitation is always positive:
        self.precipitation = pcr.max(0.0, self.precipitation)

        # rounding temperature values to minimize numerical errors (note only to minimize, not remove)
        self.temperature = pcr.roundoff(self.temperature * 1000.) / 1000.

        # ignore snow by setting temperature to 25 deg C
        if self.ignore_snow: self.temperature = pcr.spatial(pcr.scalar(25.))

        # define precipitation, temperature and referencePotET ONLY at landmask area (for reporting):
        self.precipitation = pcr.ifthen(self.landmask, self.precipitation)
        self.temperature = pcr.ifthen(self.landmask, self.temperature)
        self.referencePotET = pcr.ifthen(self.landmask, self.referencePotET)

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,\
                                          currTimeStep.month,\
                                          currTimeStep.day,\
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_dailyTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.day == 1:
                        vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC:

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.day == 1:
                            vars(self)[var + 'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                     currTimeStep.day
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.doy == 1:
                        vars(self)[var + 'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC:
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.doy == 1:
                            vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                     currTimeStep.doy
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.annuaIdx-1)
Example #42
    def __init__(self, iniItems, landmask, spinUp):
        object.__init__(self)

        self.cloneMap = iniItems.cloneMap
        self.tmpDir = iniItems.tmpDir
        self.inputDir = iniItems.globalOptions['inputDir']
        self.landmask = landmask

        # option to activate water balance check
        self.debugWaterBalance = True
        if iniItems.routingOptions['debugWaterBalance'] == "False":
            self.debugWaterBalance = False

        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign the recession coefficient parameter(s)
            self.recessionCoeff = vos.readPCRmapClone(
                iniItems.groundwaterOptions['recessionCoeff'],
                self.cloneMap, self.tmpDir, self.inputDir)
        else:
            groundwaterPropertiesNC = vos.getFullPath(
                iniItems.groundwaterOptions[
                    'groundwaterPropertiesNC'],
                self.inputDir)
            self.recessionCoeff = vos.netcdf2PCRobjCloneWithoutTime(
                groundwaterPropertiesNC, 'recessionCoeff',
                cloneMapFileName=self.cloneMap)

        # groundwater recession coefficient (unit: day-1)
        self.recessionCoeff = pcr.cover(self.recessionCoeff, 0.00)
        self.recessionCoeff = pcr.min(1.0000, self.recessionCoeff)
        #
        if 'minRecessionCoeff' in iniItems.groundwaterOptions.keys():
            minRecessionCoeff = float(
                iniItems.groundwaterOptions['minRecessionCoeff'])
        else:
            # This is the minimum value used in Van Beek et al. (2011).
            minRecessionCoeff = 1.0e-4
        self.recessionCoeff = pcr.max(minRecessionCoeff, self.recessionCoeff)

        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign aquifer specific yield
            self.specificYield = vos.readPCRmapClone(
                iniItems.groundwaterOptions['specificYield'],
                self.cloneMap, self.tmpDir, self.inputDir)
        else:
            self.specificYield = vos.netcdf2PCRobjCloneWithoutTime(
                groundwaterPropertiesNC, 'specificYield',
                cloneMapFileName=self.cloneMap)

        self.specificYield = pcr.cover(self.specificYield, 0.0)
        # TODO: TO BE CHECKED: The resample process of specificYield
        self.specificYield = pcr.max(0.010, self.specificYield)
        self.specificYield = pcr.min(1.000, self.specificYield)

        if iniItems.groundwaterOptions['groundwaterPropertiesNC'] == str(None):
            # assign aquifer saturated conductivity
            self.kSatAquifer = vos.readPCRmapClone(
                iniItems.groundwaterOptions['kSatAquifer'],
                self.cloneMap, self.tmpDir, self.inputDir)
        else:
            self.kSatAquifer = vos.netcdf2PCRobjCloneWithoutTime(
                groundwaterPropertiesNC, 'kSatAquifer',
                cloneMapFileName=self.cloneMap)

        self.kSatAquifer = pcr.cover(self.kSatAquifer, 0.0)
        self.kSatAquifer = pcr.max(0.010, self.kSatAquifer)

        # limitAbstraction options
        self.limitAbstraction = False
        if iniItems.landSurfaceOptions['limitAbstraction'] == "True":
            self.limitAbstraction = True

        # option for limiting fossil groundwater abstractions - this option is only defined for the IWMI project
        self.limitFossilGroundwaterAbstraction = False
        if self.limitAbstraction == False and\
           "extraOptionsforProjectWithIWMI" in iniItems.allSections and\
           iniItems.extraOptionsforProjectWithIWMI['limitFossilGroundWaterAbstraction'] == "True":

            #logger.info('Fossil groundwater abstraction limit is used (IWMI project).')
            self.limitFossilGroundwaterAbstraction = True

            # estimate of thickness (unit: m) of accessible groundwater: shallow and deep
            totalGroundwaterThickness = vos.readPCRmapClone(
                iniItems.extraOptionsforProjectWithIWMI['estimateOfTotalGroundwaterThickness'],
                self.cloneMap, self.tmpDir, self.inputDir)
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.windowaverage(totalGroundwaterThickness, 1.0))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.windowaverage(totalGroundwaterThickness, 1.5))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.windowaverage(totalGroundwaterThickness, 2.5))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.windowaverage(totalGroundwaterThickness, 5.0))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.windowaverage(totalGroundwaterThickness, 7.5))
            totalGroundwaterThickness = pcr.cover(totalGroundwaterThickness,
                                                  pcr.mapmaximum(totalGroundwaterThickness))

            # set minimum thickness to 50 m:
            totalGroundwaterThickness = pcr.max(
                50.0, totalGroundwaterThickness)

            # estimate of capacity (unit: m) of renewable groundwater (shallow)
            storGroundwaterCap = pcr.cover(
                vos.readPCRmapClone(
                    iniItems.extraOptionsforProjectWithIWMI['estimateOfRenewableGroundwaterCapacity'],
                    self.cloneMap, self.tmpDir, self.inputDir),
                0.0)

            # fossil groundwater capacity (unit: m)
            self.fossilWaterCap = pcr.max(0.0,
                                          totalGroundwaterThickness*self.specificYield - storGroundwaterCap)

        # option for limiting regional groundwater abstractions - this option is only defined for the IWMI project
        self.limitRegionalAnnualGroundwaterAbstraction = False
        if "extraOptionsforProjectWithIWMI" in iniItems.allSections and\
           iniItems.extraOptionsforProjectWithIWMI['limitRegionalAnnualGroundwaterAbstraction'] == "True":

            #logger.info('Limit for regional groundwater abstraction is used (IWMI project).')
            self.limitRegionalAnnualGroundwaterAbstraction = True

            region_ids = vos.readPCRmapClone(
                iniItems.extraOptionsforProjectWithIWMI['regionIds'],
                self.cloneMap, self.tmpDir, self.inputDir)
            self.region_ids = pcr.nominal(region_ids)
            self.region_ids = pcr.ifthen(self.landmask, self.region_ids)

            self.regionalAnnualGroundwaterAbstractionLimit = vos.readPCRmapClone(
                iniItems.extraOptionsforProjectWithIWMI['pumpingCapacity'],
                self.cloneMap, self.tmpDir, self.inputDir)
            self.regionalAnnualGroundwaterAbstractionLimit = pcr.roundup(
                self.regionalAnnualGroundwaterAbstractionLimit*1000.)/1000.
            self.regionalAnnualGroundwaterAbstractionLimit = pcr.cover(
                self.regionalAnnualGroundwaterAbstractionLimit, 0.0)

            self.regionalAnnualGroundwaterAbstractionLimit *= 1000. * \
                1000. * 1000.  # unit: m3/year
            self.regionalAnnualGroundwaterAbstractionLimit = pcr.ifthen(self.landmask,
                                                                        self.regionalAnnualGroundwaterAbstractionLimit)

        # zones at which water allocation (surface and groundwater allocation) is determined
        self.usingAllocSegments = False
        if iniItems.landSurfaceOptions['allocationSegmentsForGroundSurfaceWater'] != "None":
            self.usingAllocSegments = True

        # incorporating groundwater distribution network:
        if self.usingAllocSegments and self.limitAbstraction == False:

            self.allocSegments = vos.readPCRmapClone(
                iniItems.landSurfaceOptions['allocationSegmentsForGroundSurfaceWater'],
                self.cloneMap, self.tmpDir, self.inputDir, isLddMap=False, cover=None, isNomMap=True)
            self.allocSegments = pcr.ifthen(self.landmask, self.allocSegments)

            cellArea = vos.readPCRmapClone(
                iniItems.routingOptions['cellAreaMap'],
                self.cloneMap, self.tmpDir, self.inputDir)
            # TODO: integrate this one with the one coming from the routing module
            cellArea = pcr.ifthen(self.landmask, cellArea)

            self.segmentArea = pcr.areatotal(
                pcr.cover(cellArea, 0.0), self.allocSegments)
            self.segmentArea = pcr.ifthen(self.landmask, self.segmentArea)

        self.report = True
        try:
            self.outDailyTotNC = iniItems.groundwaterOptions['outDailyTotNC'].split(
                ",")
            self.outMonthTotNC = iniItems.groundwaterOptions['outMonthTotNC'].split(
                ",")
            self.outMonthAvgNC = iniItems.groundwaterOptions['outMonthAvgNC'].split(
                ",")
            self.outMonthEndNC = iniItems.groundwaterOptions['outMonthEndNC'].split(
                ",")
            self.outAnnuaTotNC = iniItems.groundwaterOptions['outAnnuaTotNC'].split(
                ",")
            self.outAnnuaAvgNC = iniItems.groundwaterOptions['outAnnuaAvgNC'].split(
                ",")
            self.outAnnuaEndNC = iniItems.groundwaterOptions['outAnnuaEndNC'].split(
                ",")
        except (KeyError, AttributeError):
            self.report = False
        if self.report == True:
            self.outNCDir = iniItems.outNCDir
            self.netcdfObj = PCR2netCDF(iniItems)
            #
            # daily output in netCDF files:
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_dailyTot.nc",
                                                var, "undefined")
            # MONTHly output in netCDF files:
            # - cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:
                    # initiating monthlyVarTot (accumulator variable):
                    vars(self)[var+'MonthTot'] = None
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_monthTot.nc",
                                                var, "undefined")
            # - average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # initiating monthlyVarTot (accumulator variable)
                    vars(self)[var+'MonthTot'] = None
                    # initiating monthlyVarAvg:
                    vars(self)[var+'MonthAvg'] = None
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_monthAvg.nc",
                                                var, "undefined")
            # - last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_monthEnd.nc",
                                                var, "undefined")
            # YEARly output in netCDF files:
            # - cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:
                    # initiating yearly accumulator variable:
                    vars(self)[var+'AnnuaTot'] = None
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_annuaTot.nc",
                                                var, "undefined")
            # - average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # initiating annualVarAvg:
                    vars(self)[var+'AnnuaAvg'] = None
                    # initiating annualVarTot (accumulator variable)
                    vars(self)[var+'AnnuaTot'] = None
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_annuaAvg.nc",
                                                var, "undefined")
            # - last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # creating the netCDF files:
                    self.netcdfObj.createNetCDF(str(self.outNCDir)+"/" +
                                                str(var)+"_annuaEnd.nc",
                                                var, "undefined")

        # get initial conditions
        self.getICs(iniItems, spinUp)
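
The chain of pcr.cover/pcr.windowaverage calls above extrapolates totalGroundwaterThickness into undefined cells using progressively wider windows, with the map maximum as a last resort; the same pattern recurs in readSoilMapOfFAO and readTopo below. A sketch of it as a reusable helper (hypothetical fill_gaps; assumes pcraster is initialised with a clone map; the window lengths are the ones used above):

import pcraster as pcr

def fill_gaps(field, window_lengths=(1.0, 1.5, 2.5, 5.0, 7.5)):
    # fill missing values with progressively wider window averages ...
    for length in window_lengths:
        field = pcr.cover(field, pcr.windowaverage(field, length))
    # ... and fall back to the map maximum for anything still undefined
    return pcr.cover(field, pcr.mapmaximum(field))
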
Example #43
File: meteo.py Project: wk1984/glofrim
    def forcingDownscalingOptions(self, iniItems):

        self.downscalePrecipitationOption = False
        self.downscaleTemperatureOption = False
        self.downscaleReferenceETPotOption = False

        if 'meteoDownscalingOptions' in iniItems.allSections:

            # downscaling options
            if iniItems.meteoDownscalingOptions[
                    'downscalePrecipitation'] == "True":
                self.downscalePrecipitationOption = True
                logger.info(
                    "Precipitation forcing will be downscaled to the cloneMap resolution."
                )

            if iniItems.meteoDownscalingOptions[
                    'downscaleTemperature'] == "True":
                self.downscaleTemperatureOption = True
                logger.info(
                    "Temperature forcing will be downscaled to the cloneMap resolution."
                )

            if iniItems.meteoDownscalingOptions[
                    'downscaleReferenceETPot'] == "True" and self.refETPotMethod != 'Hamon':
                self.downscaleReferenceETPotOption = True
                logger.info(
                    "Reference potential evaporation will be downscaled to the cloneMap resolution."
                )

            # Note that for the Hamon method: referencePotET will be calculated based on temperature,
            # therefore, we do not have to downscale it (particularly if temperature is already provided at high resolution).

        if self.downscalePrecipitationOption or\
           self.downscaleTemperatureOption   or\
           self.downscaleReferenceETPotOption:

            # creating anomaly DEM
            highResolutionDEM = vos.readPCRmapClone(\
               iniItems.meteoDownscalingOptions['highResolutionDEM'],
               self.cloneMap,self.tmpDir,self.inputDir)
            highResolutionDEM = pcr.cover(highResolutionDEM, 0.0)
            highResolutionDEM = pcr.max(highResolutionDEM, 0.0)
            self.meteoDownscaleIds = vos.readPCRmapClone(\
               iniItems.meteoDownscalingOptions['meteoDownscaleIds'],
               self.cloneMap,self.tmpDir,self.inputDir,isLddMap=False,cover=None,isNomMap=True)
            self.cellArea = vos.readPCRmapClone(\
               iniItems.routingOptions['cellAreaMap'],
               self.cloneMap,self.tmpDir,self.inputDir)
            lowResolutionDEM = pcr.areatotal(pcr.cover(highResolutionDEM*self.cellArea, 0.0),\
                                             self.meteoDownscaleIds)/\
                               pcr.areatotal(pcr.cover(self.cellArea, 0.0),\
                                             self.meteoDownscaleIds)
            self.anomalyDEM = highResolutionDEM - lowResolutionDEM  # unit: meter

            # temperature lapse rate (netCDF) file
            self.temperLapseRateNC = vos.getFullPath(iniItems.meteoDownscalingOptions[\
                                        'temperLapseRateNC'],self.inputDir)
            self.temperatCorrelNC  = vos.getFullPath(iniItems.meteoDownscalingOptions[\
                                        'temperatCorrelNC'],self.inputDir)                    # TODO: Remove this criteria.

            # precipitation lapse rate (netCDF) file
            self.precipLapseRateNC = vos.getFullPath(iniItems.meteoDownscalingOptions[\
                                        'precipLapseRateNC'],self.inputDir)
            self.precipitCorrelNC  = vos.getFullPath(iniItems.meteoDownscalingOptions[\
                                        'precipitCorrelNC'],self.inputDir)                    # TODO: Remove this criteria.

        else:
            logger.info("No forcing downscaling is implemented.")

        # forcing smoothing options - this is still experimental and must be tested.
        self.forcingSmoothing = False
        if 'meteoDownscalingOptions' in iniItems.allSections and \
           'smoothingWindowsLength' in iniItems.meteoDownscalingOptions.keys():

            if float(iniItems.meteoDownscalingOptions['smoothingWindowsLength']
                     ) > 0.0:
                self.forcingSmoothing = True
                self.smoothingWindowsLength = vos.readPCRmapClone(\
                   iniItems.meteoDownscalingOptions['smoothingWindowsLength'],
                   self.cloneMap,self.tmpDir,self.inputDir)
                msg = "Forcing data are smoothed with 'windowaverage' using the window length:" + str(
                    iniItems.meteoDownscalingOptions['smoothingWindowsLength'])
                logger.info(msg)
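
A sketch of how the anomaly DEM built above would be used for downscaling: a lapse-rate correction added to the coarse forcing once both are on the high-resolution grid. The function name and the example lapse rate are assumptions for illustration, not taken from the snippet:

def downscale_temperature(temperature, lapse_rate, anomaly_dem):
    # anomalyDEM = highResolutionDEM - lowResolutionDEM (unit: m), so with a
    # typical lapse rate of about -0.0065 K/m, cells above the coarse-cell
    # mean elevation are cooled and cells below it are warmed
    return temperature + lapse_rate * anomaly_dem
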
Example #44
def build_model(geojson_path, cellsize, model, timestep, name, case_template,
                case_path, fews, fews_config_path, dem_path, river_path,
                region_filter):
    """Prepare a simple WFlow model, anywhere, based on global datasets."""

    # lists below need to stay synchronized, not sure of a better way
    [
        geojson_path, model, timestep, name, case_template, case_path,
        fews_config_path, dem_path, river_path, region_filter
    ] = [
        encode_utf8(p) for p in [
            geojson_path, model, timestep, name, case_template, case_path,
            fews_config_path, dem_path, river_path, region_filter
        ]
    ]

    # fill in the dependent defaults
    if name is None:
        name = 'wflow_{}_case'.format(model)
    if case_template is None:
        case_template = 'wflow_{}_template'.format(model)
    if model == 'hbv':
        if timestep == 'hourly':
            case_template = 'wflow_{}_hourly_template'.format(model)
        else:
            case_template = 'wflow_{}_daily_template'.format(model)

    # assumes it is in decimal degrees, see Geod
    case = os.path.join(case_path, name)
    path_catchment = os.path.join(case, 'data/catchments/catchments.geojson')

    region = hydro_engine_geometry(geojson_path, region_filter)

    # get the centroid of the region, such that we have a point for unit conversion
    centroid = sg.shape(region).centroid
    x, y = centroid.x, centroid.y

    filter_upstream_gt = 1000
    crs = 'EPSG:4326'

    g = Geod(ellps='WGS84')
    # convert to meters in the center of the grid
    # Earth Engine expects meters
    _, _, crossdist_m = g.inv(x, y, x + cellsize, y + cellsize)
    cellsize_m = sqrt(0.5 * crossdist_m**2)

    # start by making case an exact copy of the template
    copycase(case_template, case)

    # create folder structure for data folder
    for d in ['catchments', 'dem', 'rivers']:
        dir_data = os.path.join(case, 'data', d)
        ensure_dir_exists(dir_data)

    # create grid
    path_log = 'wtools_create_grid.log'
    dir_mask = os.path.join(case, 'mask')
    projection = 'EPSG:4326'

    download_catchments(region,
                        path_catchment,
                        geojson_path,
                        region_filter=region_filter)
    cg_extent = path_catchment

    cg.main(path_log,
            dir_mask,
            cg_extent,
            projection,
            cellsize,
            locationid=name,
            snap=True)
    mask_tif = os.path.join(dir_mask, 'mask.tif')

    with rasterio.open(mask_tif) as ds:
        bbox = ds.bounds

    # create static maps
    dir_dest = os.path.join(case, 'staticmaps')
    # use custom inifile, default high res ldd takes too long
    path_inifile = os.path.join(case, 'data/staticmaps.ini')
    path_dem_in = os.path.join(case, 'data/dem/dem.tif')
    dir_lai = os.path.join(case, 'data/parameters/clim')

    if river_path is None:
        # download the global dataset
        river_data_path = os.path.join(case, 'data/rivers/rivers.geojson')
        # raise ValueError("User must supply river_path for now, see hydro-engine#14")
        download_rivers(region,
                        river_data_path,
                        filter_upstream_gt,
                        region_filter=region_filter)
    else:
        # take the local dataset, reproject and clip
        # command line equivalent of
        # ogr2ogr -t_srs EPSG:4326 -f GPKG -overwrite -clipdst xmin ymin xmax ymax rivers.gpkg rivers.shp
        river_data_path = os.path.join(case, 'data/rivers/rivers.gpkg')
        ogr2ogr.main([
            '', '-t_srs', 'EPSG:4326', '-f', 'GPKG', '-overwrite', '-clipdst',
            str(bbox.left),
            str(bbox.bottom),
            str(bbox.right),
            str(bbox.top), river_data_path, river_path
        ])

    if dem_path is None:
        # download the global dem
        download_raster(region,
                        path_dem_in,
                        'dem',
                        cellsize_m,
                        crs,
                        region_filter=region_filter)
    else:
        # warp the local dem onto model grid
        wt.warp_like(dem_path,
                     path_dem_in,
                     mask_tif,
                     format='GTiff',
                     co={'dtype': 'float32'},
                     resampling=warp.Resampling.med)

    other_maps = {
        'sbm': [
            'FirstZoneCapacity', 'FirstZoneKsatVer', 'FirstZoneMinCapacity',
            'InfiltCapSoil', 'M', 'PathFrac', 'WaterFrac', 'thetaS',
            'soil_type', 'landuse'
        ],
        'hbv': [
            'BetaSeepage', 'Cfmax', 'CFR', 'FC', 'K0', 'LP', 'Pcorr', 'PERC',
            'SFCF', 'TT', 'WHC'
        ]
    }

    # TODO rename these in hydro-engine
    newnames = {
        'FirstZoneKsatVer': 'KsatVer',
        'FirstZoneMinCapacity': 'SoilMinThickness',
        'FirstZoneCapacity': 'SoilThickness',
        # 'landuse': 'wflow_landuse',  # restore after issues caused by values in this map are fixed
        'soil_type': 'wflow_soil'
    }

    # destination paths
    path_other_maps = []
    for param in other_maps[model]:
        path = os.path.join(case, 'data/parameters',
                            newnames.get(param, param) + '.tif')
        path_other_maps.append(path)

    for param, path in zip(other_maps[model], path_other_maps):
        if model == 'sbm':
            download_raster(region,
                            path,
                            param,
                            cellsize_m,
                            crs,
                            region_filter=region_filter)
        elif model == 'hbv':
            # these are not yet in the earth engine, use local paths
            if timestep == 'hourly':
                path_staticmaps_global = r'p:\1209286-earth2observe\HBV-GLOBAL\staticmaps_hourly'
            else:
                path_staticmaps_global = r'p:\1209286-earth2observe\HBV-GLOBAL\staticmaps'
            path_in = os.path.join(path_staticmaps_global, param + '.tif')

            # warp the local staticmaps onto model grid
            wt.warp_like(path_in,
                         path,
                         mask_tif,
                         format='GTiff',
                         co={'dtype': 'float32'},
                         resampling=warp.Resampling.med)

    if model == 'sbm':
        ensure_dir_exists(dir_lai)
        for m in range(1, 13):
            mm = str(m).zfill(2)
            path = os.path.join(dir_lai, 'LAI00000.0{}'.format(mm))
            download_raster(region,
                            path,
                            'LAI{}'.format(mm),
                            cellsize_m,
                            crs,
                            region_filter=region_filter)
    else:
        # TODO this creates defaults in static_maps, disable this behavior?
        # or otherwise adapt static_maps for the other models
        dir_lai = None

    # create default folder structure for running wflow
    dir_inmaps = os.path.join(case, 'inmaps')
    ensure_dir_exists(dir_inmaps)
    dir_instate = os.path.join(case, 'instate')
    ensure_dir_exists(dir_instate)
    for d in [
            'instate', 'intbl', 'intss', 'outmaps', 'outstate', 'outsum',
            'runinfo'
    ]:
        dir_run = os.path.join(case, 'run_default', d)
        ensure_dir_exists(dir_run)

    # this is for coastal catchments only, if it is not coastal and no outlets
    # are found, then it will just be the pit of the ldd
    outlets = outlets_coords(path_catchment, river_data_path)

    sm.main(dir_mask,
            dir_dest,
            path_inifile,
            path_dem_in,
            river_data_path,
            path_catchment,
            lai=dir_lai,
            other_maps=path_other_maps,
            outlets=outlets)

    if fews:
        # save default state-files in FEWS-config
        dir_state = os.path.join(case, 'outstate')
        ensure_dir_exists(dir_state)
        if model == 'sbm':
            state_files = [
                'CanopyStorage.map', 'GlacierStore.map', 'ReservoirVolume.map',
                'SatWaterDepth.map', 'Snow.map', 'SnowWater.map',
                'SurfaceRunoff.map', 'SurfaceRunoffDyn.map', 'TSoil.map',
                'UStoreLayerDepth_0.map', 'WaterLevel.map', 'WaterLevelDyn.map'
            ]
        elif model == 'hbv':
            state_files = [
                'DrySnow.map', 'FreeWater.map', 'InterceptionStorage.map',
                'LowerZoneStorage.map', 'SoilMoisture.map',
                'SurfaceRunoff.map', 'UpperZoneStorage.map', 'WaterLevel.map'
            ]
        zip_name = name + '_GA_Historical default.zip'

        zip_loc = os.path.join(fews_config_path, 'ColdStateFiles', zip_name)
        path_csf = os.path.dirname(zip_loc)
        ensure_dir_exists(path_csf)

        mask = pcr.readmap(os.path.join(dir_mask, 'mask.map'))

        with zipfile.ZipFile(zip_loc, mode='w') as zf:
            for state_file in state_files:
                state_path = os.path.join(dir_state, state_file)
                pcr.report(pcr.cover(mask, pcr.scalar(0)), state_path)
                zf.write(state_path,
                         state_file,
                         compress_type=zipfile.ZIP_DEFLATED)
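
The Geod-based conversion of the cell size from decimal degrees to meters can be exercised on its own; a small usage sketch with illustrative coordinates, assuming the Geod import comes from pyproj as in this script:

from math import sqrt
from pyproj import Geod

g = Geod(ellps='WGS84')
x, y, cellsize = 5.0, 52.0, 0.05         # illustrative centroid and cell size (degrees)
_, _, crossdist_m = g.inv(x, y, x + cellsize, y + cellsize)
cellsize_m = sqrt(0.5 * crossdist_m**2)  # side of a square with diagonal crossdist_m
print(round(cellsize_m))                 # roughly 4600 m at this latitude
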
Example #45
    def getReservoirOutflow(self,\
        avgChannelDischarge,length_of_time_step,downstreamDemand):

        # avgOutflow (m3/s)
        avgOutflow = self.avgOutflow
        avgOutflow = pcr.ifthenelse(\
                     avgOutflow > 0.,\
                     avgOutflow,
                     pcr.max(avgChannelDischarge, self.avgInflow, 0.001)) # needed when new lakes/reservoirs are introduced (their avgOutflow is still zero)
        avgOutflow = pcr.areamaximum(avgOutflow, self.waterBodyIds)

        # calculate resvOutflow (unit: m3, based on reservoir storage and avgDischarge):
        # - using reductionFactor in such a way that:
        #   - if relativeCapacity < minResvrFrac : release is terminated
        #   - if relativeCapacity > maxResvrFrac : longterm average
        reductionFactor = \
         pcr.cover(\
         pcr.min(1.,
         pcr.max(0., \
          self.waterBodyStorage - self.minResvrFrac*self.waterBodyCap)/\
             ((self.maxResvrFrac - self.minResvrFrac)*self.waterBodyCap)), 0.0)
        #
        resvOutflow = reductionFactor * avgOutflow * length_of_time_step                      # unit: m3

        # maximum release <= average inflow (especially during dry condition)
        resvOutflow  = pcr.max(0, pcr.min(resvOutflow, self.avgInflow * length_of_time_step)) # unit: m3                                          

        # downstream demand (m3/s)
        # reduce demand if storage < lower limit
        reductionFactor  = vos.getValDivZero(downstreamDemand, self.minResvrFrac*self.waterBodyCap, vos.smallNumber)
        reductionFactor  = pcr.cover(reductionFactor, 0.0)
        downstreamDemand = pcr.min(
                           downstreamDemand,
                           downstreamDemand*reductionFactor)
        # resvOutflow > downstreamDemand
        resvOutflow  = pcr.max(resvOutflow, downstreamDemand * length_of_time_step)           # unit: m3       

        # floodOutflow: additional release if storage > upper limit
        ratioQBankfull = 2.3
        estmStorage  = pcr.max(0.,self.waterBodyStorage - resvOutflow)
        floodOutflow = \
           pcr.max(0.0, estmStorage - self.waterBodyCap) +\
           pcr.cover(\
           pcr.max(0.0, estmStorage - self.maxResvrFrac*\
                                      self.waterBodyCap)/\
              ((1.-self.maxResvrFrac)*self.waterBodyCap),0.0)*\
           pcr.max(0.0,ratioQBankfull*avgOutflow* vos.secondsPerDay()-\
                                      resvOutflow)
        floodOutflow = pcr.max(0.0,
                       pcr.min(floodOutflow,\
                       estmStorage - self.maxResvrFrac*\
                                     self.waterBodyCap*0.75)) # maximum limit of floodOutflow: bring the reservoir storages only to 3/4 of upper limit capacities
        
        # update resvOutflow after floodOutflow
        resvOutflow  = pcr.cover(resvOutflow , 0.0) +\
                       pcr.cover(floodOutflow, 0.0)                                            

        # maximum release if storage > upper limit : bring the reservoir storages only to 3/4 of upper limit capacities
        resvOutflow  = pcr.ifthenelse(self.waterBodyStorage > 
                       self.maxResvrFrac*self.waterBodyCap,\
                       pcr.min(resvOutflow,\
                       pcr.max(0,self.waterBodyStorage - \
                       self.maxResvrFrac*self.waterBodyCap*0.75)),
                       resvOutflow)                                            

        # if storage > upper limit : resvOutflow > avgInflow
        resvOutflow  = pcr.ifthenelse(self.waterBodyStorage > 
                       self.maxResvrFrac*self.waterBodyCap,\
                       pcr.max(0.0, resvOutflow, self.avgInflow),
                       resvOutflow)                                            
        
        # resvOutflow < waterBodyStorage
        resvOutflow = pcr.min(self.waterBodyStorage, resvOutflow)
        
        resvOutflow = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0., resvOutflow)
        resvOutflow = pcr.ifthen(pcr.scalar(self.waterBodyTyp) == 2, resvOutflow)
        return (resvOutflow) # unit: m3  
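
The reduction factor above is meant as a dimensionless ramp: zero release below minResvrFrac, the long-term average release above maxResvrFrac, linear in between. A scalar sketch of that formula (the fraction defaults here are illustrative, not taken from the snippet):

def reduction_factor(storage, capacity, min_frac=0.1, max_frac=0.75):
    # linear ramp between the lower and upper storage fractions, clamped to [0, 1]
    ramp = (storage - min_frac * capacity) / ((max_frac - min_frac) * capacity)
    return min(1.0, max(0.0, ramp))

assert reduction_factor(0.05e9, 1e9) == 0.0  # below the lower limit: release is terminated
assert reduction_factor(0.90e9, 1e9) == 1.0  # above the upper limit: long-term average
print(round(reduction_factor(0.425e9, 1e9), 3))  # halfway up the ramp -> 0.5
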
Example #46
    def readSoilMapOfFAO(self, iniItems, optionDict=None):

        # a dictionary/section of options that will be used
        if optionDict is None: optionDict = iniItems.landSurfaceOptions

        # soil variable names given either in the ini or netCDF file:
        soilParameters = [
            'airEntryValue1', 'airEntryValue2', 'poreSizeBeta1',
            'poreSizeBeta2', 'resVolWC1', 'resVolWC2', 'satVolWC1',
            'satVolWC2', 'KSat1', 'KSat2', 'percolationImp'
        ]
        if optionDict['soilPropertiesNC'] == str(None):
            for var in soilParameters:
                input = optionDict[str(var)]
                vars(self)[var] = \
                               vos.readPCRmapClone(input,self.cloneMap,\
                                             self.tmpDir,self.inputDir)
                vars(self)[var] = pcr.scalar(vars(self)[var])

                if input == "percolationImp":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 0.75))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        else:
            soilPropertiesNC = vos.getFullPath(\
                               optionDict['soilPropertiesNC'],
                                                self.inputDir)
            for var in soilParameters:
                vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(\
                                    soilPropertiesNC,var, \
                                    cloneMapFileName = self.cloneMap)

                if var == "percolationImp":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 0.75))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))
                vars(self)[var] = pcr.cover(
                    vars(self)[var], pcr.windowaverage(vars(self)[var], 1.00))

                vars(self)[var] = pcr.cover(vars(self)[var], 0.01)

        # make sure that resVolWC1 <= satVolWC1
        self.resVolWC1 = pcr.min(self.resVolWC1, self.satVolWC1)
        self.resVolWC2 = pcr.min(self.resVolWC2, self.satVolWC2)

        if self.numberOfLayers == 2:
            self.satVolMoistContUpp = self.satVolWC1  # saturated volumetric moisture content (m3.m-3)
            self.satVolMoistContLow = self.satVolWC2
            self.resVolMoistContUpp = self.resVolWC1  # residual volumetric moisture content (m3.m-3)
            self.resVolMoistContLow = self.resVolWC2
            self.airEntryValueUpp = self.airEntryValue1  # air entry value (m) according to soil water retention curve of Clapp & Hornberger (1978)
            self.airEntryValueLow = self.airEntryValue2
            self.poreSizeBetaUpp = self.poreSizeBeta1  # pore size distribution parameter according to Clapp & Hornberger (1978)
            self.poreSizeBetaLow = self.poreSizeBeta2
            self.kSatUpp = self.KSat1  # saturated hydraulic conductivity (m.day-1)
            self.kSatLow = self.KSat2

        if self.numberOfLayers == 3:
            self.satVolMoistContUpp000005 = self.satVolWC1
            self.satVolMoistContUpp005030 = self.satVolWC1
            self.satVolMoistContLow030150 = self.satVolWC2
            self.resVolMoistContUpp000005 = self.resVolWC1
            self.resVolMoistContUpp005030 = self.resVolWC1
            self.resVolMoistContLow030150 = self.resVolWC2
            self.airEntryValueUpp000005 = self.airEntryValue1
            self.airEntryValueUpp005030 = self.airEntryValue1
            self.airEntryValueLow030150 = self.airEntryValue2
            self.poreSizeBetaUpp000005 = self.poreSizeBeta1
            self.poreSizeBetaUpp005030 = self.poreSizeBeta1
            self.poreSizeBetaLow030150 = self.poreSizeBeta2
            self.kSatUpp000005 = self.KSat1
            self.kSatUpp005030 = self.KSat1
            self.kSatLow030150 = self.KSat2

        self.percolationImp = pcr.cover(
            self.percolationImp, 0.0
        )  # fractional area where percolation to groundwater store is impeded (dimensionless)

        # soil thickness and storage variable names
        # as given either in the ini or netCDF file:
        soilStorages = [
            'firstStorDepth', 'secondStorDepth', 'soilWaterStorageCap1',
            'soilWaterStorageCap2'
        ]
        if optionDict['soilPropertiesNC'] == str(None):
            for var in soilStorages:
                input = optionDict[str(var)]
                temp = str(var) + 'Inp'
                vars(self)[temp] = vos.readPCRmapClone(input,\
                                            self.cloneMap,
                                            self.tmpDir,self.inputDir)

                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 0.75))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)

        else:
            soilPropertiesNC = vos.getFullPath(\
                               optionDict['soilPropertiesNC'],
                                                self.inputDir)
            for var in soilStorages:
                temp = str(var) + 'Inp'
                vars(self)[temp] = vos.netcdf2PCRobjCloneWithoutTime(\
                                     soilPropertiesNC,var, \
                                     cloneMapFileName = self.cloneMap)
                # extrapolation
                # - TODO: Make a general extrapolation option as a function in the virtualOS.py
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 0.75))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(
                    vars(self)[temp],
                    pcr.windowaverage(vars(self)[temp], 1.05))
                vars(self)[temp] = pcr.cover(vars(self)[temp], 0.0)

        # layer thickness
        if self.numberOfLayers == 2:
            self.thickUpp = (0.30 / 0.30) * self.firstStorDepthInp
            self.thickLow = (1.20 / 1.20) * self.secondStorDepthInp
        if self.numberOfLayers == 3:
            self.thickUpp000005 = (0.05 / 0.30) * self.firstStorDepthInp
            self.thickUpp005030 = (0.25 / 0.30) * self.firstStorDepthInp
            self.thickLow030150 = (1.20 / 1.20) * self.secondStorDepthInp

        # soil storage
        if self.numberOfLayers == 2:
            #~ self.storCapUpp = (0.30/0.30)*self.soilWaterStorageCap1Inp
            #~ self.storCapLow = (1.20/1.20)*self.soilWaterStorageCap2Inp                     # 22 Feb 2014: We can calculate this based on thickness and porosity.
            self.storCapUpp = self.thickUpp * \
                             (self.satVolMoistContUpp - self.resVolMoistContUpp)
            self.storCapLow = self.thickLow * \
                             (self.satVolMoistContLow - self.resVolMoistContLow)
            self.rootZoneWaterStorageCap = self.storCapUpp + \
                                           self.storCapLow                                    # This is called as WMAX in the original pcrcalc script.
        if self.numberOfLayers == 3:
            self.storCapUpp000005 = self.thickUpp000005 * \
                             (self.satVolMoistContUpp000005 - self.resVolMoistContUpp000005)
            self.storCapUpp005030 = self.thickUpp005030 * \
                             (self.satVolMoistContUpp005030 - self.resVolMoistContUpp005030)
            self.storCapLow030150 = self.thickLow030150 * \
                             (self.satVolMoistContLow030150 - self.resVolMoistContLow030150)
            self.rootZoneWaterStorageCap = self.storCapUpp000005 + \
                                           self.storCapUpp005030 + \
                                           self.storCapLow030150
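
The storage capacities at the end of readSoilMapOfFAO are simply layer thickness times drainable porosity (saturated minus residual volumetric moisture content); a one-cell numeric sketch with illustrative values:

thickUpp = 0.30                   # m, upper layer thickness
satVolMoistContUpp = 0.45         # m3/m3, saturated volumetric moisture content
resVolMoistContUpp = 0.05         # m3/m3, residual volumetric moisture content
storCapUpp = thickUpp * (satVolMoistContUpp - resVolMoistContUpp)
print(storCapUpp)                 # about 0.12 m of storable water in the upper layer
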
Example #47

file_names = [
    '2-year_of_channel_storage.map', '5-year_of_channel_storage.map',
    '10-year_of_channel_storage.map', '25-year_of_channel_storage.map',
    '50-year_of_channel_storage.map', '100-year_of_channel_storage.map',
    '250-year_of_channel_storage.map', '500-year_of_channel_storage.map',
    '1000-year_of_channel_storage.map'
]
front_name = ""
if type_of_files != "normal": front_name = type_of_files + "_"
for i_file in range(0, len(file_names)):
    #~ for i_file in range(0, 1):
    file_name = file_names[i_file]
    complete_file_name = input_folder + "/" + front_name + file_name
    extreme_value_map = pcr.cover(
                        vos.readPCRmapClone(complete_file_name, \
                                            clone_map_file, \
                                            tmp_folder, \
                                            None, False, None, False), 0.0)
    # - focus only on the landmask area, so that only floods within the landmask are downscaled/routed.
    extreme_value_map = pcr.ifthen(landmask, extreme_value_map)
    #
    # - masking out reservoir storage
    if masking_out_reservoirs:
        cell_area = pcr.ifthen(landmask, \
                    pcr.cover(\
                    vos.readPCRmapClone(cell_area_file, \
                                        clone_map_file, \
                                        tmp_folder, \
                                        None, False, None, False), 0.0))
        # read the properties of permanent water bodies
        fracwat            = pcr.ifthen(landmask, \
Example #48
    def readTopo(self, iniItems, optionDict):

        # a dictionary/section of options that will be used
        if optionDict is None: optionDict = iniItems.landSurfaceOptions

        # maps of elevation attributes:
        topoParams = ['tanslope', 'slopeLength', 'orographyBeta']
        if optionDict['topographyNC'] == str(None):
            for var in topoParams:
                input = iniItems.landSurfaceOptions[str(var)]
                vars(self)[var] = vos.readPCRmapClone(input, self.cloneMap,
                                                      self.tmpDir,
                                                      self.inputDir)
                if var != "slopeLength":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
        else:
            topoPropertiesNC = vos.getFullPath(\
                               optionDict['topographyNC'],
                                                self.inputDir)
            for var in topoParams:
                vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(\
                                    topoPropertiesNC,var, \
                                    cloneMapFileName = self.cloneMap)
                if var != "slopeLength":
                    vars(self)[var] = pcr.cover(vars(self)[var], 0.0)

        #~ self.tanslope = pcr.max(self.tanslope, 0.00001)              # In principle, tanslope can be zero. Zero tanslope will provide zero TCL (no interflow)

        # covering slopeLength with its maximum value
        self.slopeLength = pcr.cover(self.slopeLength,
                                     pcr.mapmaximum(self.slopeLength))

        # maps of relative elevation above flood plains
        dzRel = [
            'dzRel0001', 'dzRel0005', 'dzRel0010', 'dzRel0020', 'dzRel0030',
            'dzRel0040', 'dzRel0050', 'dzRel0060', 'dzRel0070', 'dzRel0080',
            'dzRel0090', 'dzRel0100'
        ]
        if optionDict['topographyNC'] == str(None):
            for i in range(0, len(dzRel)):
                var = dzRel[i]
                input = optionDict[str(var)]
                vars(self)[var] = vos.readPCRmapClone(input, self.cloneMap,
                                                      self.tmpDir,
                                                      self.inputDir)
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
                if i > 0:
                    vars(self)[var] = pcr.max(
                        vars(self)[var],
                        vars(self)[dzRel[i - 1]])
        else:
            for i in range(0, len(dzRel)):
                var = dzRel[i]
                vars(self)[var] = vos.netcdf2PCRobjCloneWithoutTime(\
                                    topoPropertiesNC,var, \
                                    cloneMapFileName = self.cloneMap)
                vars(self)[var] = pcr.cover(vars(self)[var], 0.0)
                if i > 0:
                    vars(self)[var] = pcr.max(
                        vars(self)[var],
                        vars(self)[dzRel[i - 1]])
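
The pcr.max against the previous percentile map enforces that dzRel never decreases from one percentile to the next, i.e. a per-cell running maximum. The same idea sketched with numpy for a single cell:

import numpy as np

dz = np.array([0.0, 0.5, 0.4, 1.2, 1.1])  # relative elevations per percentile (one cell)
dz_monotone = np.maximum.accumulate(dz)   # -> [0.0, 0.5, 0.5, 1.2, 1.2]
print(dz_monotone)
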
Example #49
def waterAbstractionAndAllocation(water_demand_volume,available_water_volume,allocation_zones,\
                                  zone_area = None,
                                  high_volume_treshold = 1000000.,
                                  debug_water_balance = True,\
                                  extra_info_for_water_balance_reporting = "",
                                  ignore_small_values = True):

    logger.debug("Allocation of abstraction.")

    # demand volume in each cell (unit: m3)
    if ignore_small_values:  # ignore small values to avoid rounding errors
        cellVolDemand = pcr.rounddown(pcr.max(0.0, water_demand_volume))
    else:
        cellVolDemand = pcr.max(0.0, water_demand_volume)

    # total demand volume in each zone/segment (unit: m3)
    zoneVolDemand = pcr.areatotal(cellVolDemand, allocation_zones)

    # total available water volume in each cell
    if ignore_small_values:  # ignore small values to avoid rounding errors
        cellAvlWater = pcr.rounddown(pcr.max(0.00, available_water_volume))
    else:
        cellAvlWater = pcr.max(0.00, available_water_volume)

    # total available water volume in each zone/segment (unit: m3)
    # - to minimize numerical errors, separating cellAvlWater
    if high_volume_treshold is not None:
        # mask: 0 for small volumes ; 1 for large volumes (e.g. in lakes and reservoirs)
        mask = pcr.cover(\
               pcr.ifthen(cellAvlWater > high_volume_treshold, pcr.boolean(1)), pcr.boolean(0))
        zoneAvlWater = pcr.areatotal(pcr.ifthenelse(mask, 0.0, cellAvlWater),
                                     allocation_zones)
        zoneAvlWater += pcr.areatotal(pcr.ifthenelse(mask, cellAvlWater, 0.0),
                                      allocation_zones)
    else:
        zoneAvlWater = pcr.areatotal(cellAvlWater, allocation_zones)

    # total actual water abstraction volume in each zone/segment (unit: m3)
    # - limited to available water
    zoneAbstraction = pcr.min(zoneAvlWater, zoneVolDemand)

    # actual water abstraction volume in each cell (unit: m3)
    cellAbstraction = getValDivZero(\
                      cellAvlWater, zoneAvlWater, smallNumber)*zoneAbstraction
    cellAbstraction = pcr.min(cellAbstraction, cellAvlWater)
    if ignore_small_values:  # ignore small values to avoid rounding errors
        cellAbstraction = pcr.rounddown(pcr.max(0.00, cellAbstraction))
    # to minimize numerical errors, separating cellAbstraction
    if high_volume_treshold is not None:
        # mask: 0 for small volumes ; 1 for large volumes (e.g. in lakes and reservoirs)
        mask = pcr.cover(\
               pcr.ifthen(cellAbstraction > high_volume_treshold, pcr.boolean(1)), pcr.boolean(0))
        zoneAbstraction = pcr.areatotal(
            pcr.ifthenelse(mask, 0.0, cellAbstraction), allocation_zones)
        zoneAbstraction += pcr.areatotal(
            pcr.ifthenelse(mask, cellAbstraction, 0.0), allocation_zones)
    else:
        zoneAbstraction = pcr.areatotal(cellAbstraction, allocation_zones)

    # allocation water to meet water demand (unit: m3)
    cellAllocation  = getValDivZero(\
                      cellVolDemand, zoneVolDemand, smallNumber)*zoneAbstraction

    #~ # extraAbstraction to minimize numerical errors:
    #~ zoneDeficitAbstraction = pcr.max(0.0,\
    #~ pcr.areatotal(cellAllocation , allocation_zones) -\
    #~ pcr.areatotal(cellAbstraction, allocation_zones))
    #~ remainingCellAvlWater = pcr.max(0.0, cellAvlWater - cellAbstraction)
    #~ cellAbstraction      += zoneDeficitAbstraction * getValDivZero(\
    #~ remainingCellAvlWater,
    #~ pcr.areatotal(remainingCellAvlWater, allocation_zones),
    #~ smallNumber)
    #~ #
    #~ # extraAllocation to minimize numerical errors:
    #~ zoneDeficitAllocation = pcr.max(0.0,\
    #~ pcr.areatotal(cellAbstraction, allocation_zones) -\
    #~ pcr.areatotal(cellAllocation , allocation_zones))
    #~ remainingCellDemand = pcr.max(0.0, cellVolDemand - cellAllocation)
    #~ cellAllocation     += zoneDeficitAllocation * getValDivZero(\
    #~ remainingCellDemand,
    #~ pcr.areatotal(remainingCellDemand, allocation_zones),
    #~ smallNumber)

    if debug_water_balance and zone_area is not None:

        zoneAbstraction = pcr.cover(
            pcr.areatotal(cellAbstraction, allocation_zones) / zone_area, 0.0)
        zoneAllocation = pcr.cover(
            pcr.areatotal(cellAllocation, allocation_zones) / zone_area, 0.0)

        waterBalanceCheck([zoneAbstraction],\
                          [zoneAllocation],\
                          [pcr.scalar(0.0)],\
                          [pcr.scalar(0.0)],\
                          'abstraction - allocation per zone/segment (PS: Error here may be caused by rounding error.)' ,\
                           True,\
                           extra_info_for_water_balance_reporting,threshold=1e-4)

    return cellAbstraction, cellAllocation
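
Within one allocation zone the scheme above abstracts from each cell in proportion to its available water and allocates to each cell in proportion to its demand, so both sides sum to the same zone total. A plain-number sketch for a two-cell zone:

cell_demand    = [10.0, 30.0]    # m3 demanded per cell
cell_available = [5.0, 15.0]     # m3 available per cell
zone_demand    = sum(cell_demand)                    # 40 m3
zone_available = sum(cell_available)                 # 20 m3
zone_abstraction = min(zone_available, zone_demand)  # 20 m3

# abstraction follows availability, allocation follows demand
cell_abstraction = [a / zone_available * zone_abstraction for a in cell_available]
cell_allocation  = [d / zone_demand * zone_abstraction for d in cell_demand]
print(cell_abstraction)  # [5.0, 15.0] -- sums to the zone abstraction
print(cell_allocation)   # [5.0, 15.0] -- sums to the same total
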
Example #50
def agriZone_Ep_Sa_beta_frostSamax(self, k):
    """
    - Potential evaporation is decreased by energy used for interception evaporation
    - Formula for evaporation based on LP
    - Outgoing fluxes are determined based on (value in previous timestep + inflow),
      and if this leads to negative storage, the outgoing fluxes are corrected
      proportionally (by ratio) --> Eu is no longer taken into account for this correction
    - Qa is determined from overflow from Sa --> incorporation of beta function
    - Fa is based on storage in Sa
    - Fa is decreased in case of frozen soil
    - Code for ini-file: 12
    """

    JarvisCoefficients.calcEp(self, k)
    self.PotEvaporation = pcr.cover(
        pcr.ifthenelse(self.EpHour >= 0, self.EpHour, 0), 0)

    self.FrDur[k] = pcr.min(
        self.FrDur[k] +
        pcr.ifthenelse(self.Temperature > 0, self.ratFT[k] * self.Temperature,
                       self.Temperature) * self.dayDeg[k],
        0,
    )
    self.Ft = pcr.min(
        pcr.max(
            self.FrDur[k] / (self.FrDur1[k] - self.FrDur0[k]) -
            self.FrDur0[k] / (self.FrDur1[k] - self.FrDur0[k]),
            self.samin[k],
        ),
        1,
    )

    self.samax2 = self.samax[k] * pcr.scalar(self.catchArea) * self.Ft
    self.Qaadd = pcr.max(self.Sa_t[k] + self.Pe - self.samax2, 0)

    self.Sa[k] = self.Sa_t[k] + (self.Pe - self.Qaadd)
    self.SaN = pcr.min(self.Sa[k] / self.samax2, 1)
    self.SuN = self.Su[k] / self.sumax[k]

    self.Ea1 = pcr.max((self.PotEvaporation - self.Ei), 0) * pcr.min(
        self.Sa[k] / (self.samax2 * self.LP[k]), 1)
    self.Qa1 = (self.Pe - self.Qaadd) * (1 - (1 - self.SaN)**self.beta[k])

    self.Fa1 = pcr.ifthenelse(
        self.SaN > 0,
        self.Fmin[k] + (self.Fmax[k] - self.Fmin[k]) * e**(-self.decF[k] *
                                                           (1 - self.SaN)),
        0,
    )

    self.Sa[k] = self.Sa_t[k] + (self.Pe -
                                 self.Qaadd) - self.Qa1 - self.Fa1 - self.Ea1

    self.Sa_diff = pcr.ifthenelse(self.Sa[k] < 0, self.Sa[k], 0)
    self.Qa = (self.Qa1 +
               (self.Qa1 / pcr.ifthenelse(self.Fa1 + self.Ea1 + self.Qa1 > 0,
                                          self.Fa1 + self.Ea1 + self.Qa1, 1)) *
               self.Sa_diff)
    self.Fa = (self.Fa1 +
               (self.Fa1 / pcr.ifthenelse(self.Fa1 + self.Ea1 + self.Qa1 > 0,
                                          self.Fa1 + self.Ea1 + self.Qa1, 1)) *
               self.Sa_diff)
    self.Ea = (self.Ea1 +
               (self.Ea1 / pcr.ifthenelse(self.Fa1 + self.Ea1 + self.Qa1 > 0,
                                          self.Fa1 + self.Ea1 + self.Qa1, 1)) *
               self.Sa_diff)
    self.Sa[k] = self.Sa_t[k] + (self.Pe -
                                 self.Qaadd) - self.Ea - self.Fa - self.Qa
    self.Sa[k] = pcr.ifthenelse(self.Sa[k] < 0, 0, self.Sa[k])
    self.Sa_diff2 = pcr.ifthen(self.Sa[k] < 0, self.Sa[k])

    self.wbSa_[k] = (self.Pe - self.Ea - self.Qa - self.Qaadd - self.Fa -
                     self.Sa[k] + self.Sa_t[k])

    self.Ea_[k] = self.Ea
    self.Qa_[k] = self.Qa + self.Qaadd
    self.Fa_[k] = self.Fa
    self.Ft_[k] = self.Ft
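
A minimal, self-contained sketch of the flux correction used above (plain Python with hypothetical scalar values in place of PCRaster fields): when the provisional storage would go negative, each outgoing flux hands back its proportional share of the deficit, after which the storage balance closes at zero.

def correct_fluxes(sa_t, pe, qaadd, qa1, fa1, ea1):
    """Reduce outgoing fluxes proportionally when storage would go negative."""
    sa = sa_t + (pe - qaadd) - qa1 - fa1 - ea1   # provisional storage
    sa_diff = min(sa, 0.0)                       # negative part only
    total = qa1 + fa1 + ea1

    def corrected(flux):
        # each flux gives back its share of the deficit
        return flux + (flux / total if total > 0 else 0.0) * sa_diff

    qa, fa, ea = corrected(qa1), corrected(fa1), corrected(ea1)
    sa = max(sa_t + (pe - qaadd) - qa - fa - ea, 0.0)
    return sa, qa, fa, ea

# Storage 1.0 plus inflow 0.5 cannot support 2.0 of requested outflow:
print(correct_fluxes(sa_t=1.0, pe=0.5, qaadd=0.0, qa1=1.0, fa1=0.6, ea1=0.4))
# -> (0.0, 0.75, 0.45, 0.3): all fluxes scaled by 0.75, storage closes at zero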
예제 #51
0
    def readSoil(self, iniItems, optionDict=None):

        # a dictionary/section of options that will be used
        if optionDict is None: optionDict = iniItems.landSurfaceOptions

        # default values of soil parameters that are constant/uniform for the entire domain:
        self.clappAddCoeff = pcr.scalar(3.0)  # dimensionless
        self.matricSuctionFC = pcr.scalar(1.0)  # unit: m
        #~ self.matricSuction50 = pcr.scalar(10./3.)  # unit: m
        self.matricSuction50 = pcr.scalar(3.33)  # unit: m
        self.matricSuctionWP = pcr.scalar(156.0)  # unit: m
        self.maxGWCapRise = pcr.scalar(5.0)  # unit: m
        #
        # values defined in the ini/configuration file:
        soilParameterConstants = [
            'clappAddCoeff', 'matricSuctionFC', 'matricSuction50',
            'matricSuctionWP', 'maxGWCapRise'
        ]
        for var in soilParameterConstants:
            if var in list(iniItems.landSurfaceOptions.keys()):
                input_value = iniItems.landSurfaceOptions[str(var)]
                vars(self)[var] = vos.readPCRmapClone(input_value, self.cloneMap,
                                                      self.tmpDir, self.inputDir)

        # read soil parameter based on the FAO soil map:
        self.readSoilMapOfFAO(iniItems, optionDict)

        # assign Campbell's (1974) beta coefficient, as well as degree
        # of saturation at field capacity and corresponding unsaturated hydraulic conductivity
        #
        if self.numberOfLayers == 2:

            self.campbellBetaUpp = self.poreSizeBetaUpp*2.0 + \
                                      self.clappAddCoeff                       # Campbell's (1974) coefficient ; Rens's line: BCB = 2*BCH + BCH_ADD
            self.campbellBetaLow = self.poreSizeBetaLow*2.0 + \
                                      self.clappAddCoeff

            self.effSatAtFieldCapUpp = \
                     (self.matricSuctionFC / self.airEntryValueUpp)**\
                                        (-1.0/ self.poreSizeBetaUpp )          # saturation degree at field capacity       : THEFF_FC = (PSI_FC/PSI_A)**(-1/BCH)
            self.effSatAtFieldCapUpp = pcr.cover(self.effSatAtFieldCapUpp, 1.0)

            self.effSatAtFieldCapLow = \
                     (self.matricSuctionFC / self.airEntryValueLow)**\
                                        (-1.0/ self.poreSizeBetaLow )
            self.effSatAtFieldCapLow = pcr.cover(self.effSatAtFieldCapLow, 1.0)

            self.kUnsatAtFieldCapUpp = pcr.max(0., \
             (self.effSatAtFieldCapUpp ** self.campbellBetaUpp) * self.kSatUpp)  # unsaturated conductivity at field capacity: KTHEFF_FC = max(0,THEFF_FC[TYPE]**BCB*KS1)
            self.kUnsatAtFieldCapLow = pcr.max(0., \
             (self.effSatAtFieldCapLow ** self.campbellBetaLow) * self.kSatLow)
        #
        if self.numberOfLayers == 3:

            self.campbellBetaUpp000005 = self.poreSizeBetaUpp000005*2.0 + \
                                            self.clappAddCoeff
            self.campbellBetaUpp005030 = self.poreSizeBetaUpp005030*2.0 + \
                                            self.clappAddCoeff
            self.campbellBetaLow030150 = self.poreSizeBetaLow030150*2.0 + \
                                            self.clappAddCoeff

            self.effSatAtFieldCapUpp000005 = \
                     (self.matricSuctionFC / self.airEntryValueUpp000005)**\
                                        (-1.0/ self.poreSizeBetaUpp000005)
            self.effSatAtFieldCapUpp005030 = \
                     (self.matricSuctionFC / self.airEntryValueUpp005030)**\
                                        (-1.0/ self.poreSizeBetaUpp005030)
            self.effSatAtFieldCapLow030150 = \
                     (self.matricSuctionFC / self.airEntryValueLow030150)**\
                                        (-1.0/ self.poreSizeBetaLow030150)

            self.kUnsatAtFieldCapUpp000005 = pcr.max(0., \
             (self.effSatAtFieldCapUpp000005 ** self.campbellBetaUpp000005) * self.kSatUpp000005)
            self.kUnsatAtFieldCapUpp005030 = pcr.max(0., \
             (self.effSatAtFieldCapUpp005030 ** self.campbellBetaUpp005030) * self.kSatUpp005030)
            self.kUnsatAtFieldCapLow030150 = pcr.max(0., \
             (self.effSatAtFieldCapLow030150 ** self.campbellBetaLow030150) * self.kSatLow030150)

        # calculate degree of saturation at which transpiration is halved (50)
        # and at wilting point
        #
        if self.numberOfLayers == 2:
            self.effSatAt50Upp = (self.matricSuction50/self.airEntryValueUpp)**\
                                                    (-1.0/self.poreSizeBetaUpp)
            self.effSatAt50Upp = pcr.cover(self.effSatAt50Upp, 1.0)
            self.effSatAt50Low = (self.matricSuction50/self.airEntryValueLow)**\
                                                    (-1.0/self.poreSizeBetaLow)
            self.effSatAt50Low = pcr.cover(self.effSatAt50Low, 1.0)
            self.effSatAtWiltPointUpp = pcr.cover(\
                                 (self.matricSuctionWP/self.airEntryValueUpp)**\
                                                    (-1.0/self.poreSizeBetaUpp), 1.0)
            self.effSatAtWiltPointLow = pcr.cover(\
                                 (self.matricSuctionWP/self.airEntryValueLow)**\
                                                    (-1.0/self.poreSizeBetaLow), 1.0)
        if self.numberOfLayers == 3:
            self.effSatAt50Upp000005 = (self.matricSuction50/self.airEntryValueUpp000005)**\
                                                          (-1.0/self.poreSizeBetaUpp000005)
            self.effSatAt50Upp005030 = (self.matricSuction50/self.airEntryValueUpp005030)**\
                                                          (-1.0/self.poreSizeBetaUpp005030)
            self.effSatAt50Low030150 = (self.matricSuction50/self.airEntryValueLow030150)**\
                                                          (-1.0/self.poreSizeBetaLow030150)
            self.effSatAtWiltPointUpp000005 = \
                                       (self.matricSuctionWP/self.airEntryValueUpp000005)**\
                                                          (-1.0/self.poreSizeBetaUpp000005)
            self.effSatAtWiltPointUpp005030 = \
                                       (self.matricSuctionWP/self.airEntryValueUpp005030)**\
                                                          (-1.0/self.poreSizeBetaUpp005030)
            self.effSatAtWiltPointLow030150 = \
                                       (self.matricSuctionWP/self.airEntryValueLow030150)**\
                                                          (-1.0/self.poreSizeBetaLow030150)

        # calculate interflow parameter (TCL):
        #
        if self.numberOfLayers == 2:
            self.interflowConcTime = (self.kSatLow * self.tanslope*2.0) / \
                     (self.slopeLength * (1.- self.effSatAtFieldCapLow) * \
                    (self.satVolMoistContLow - self.resVolMoistContLow))    # TCL = Duration*(2*KS2*TANSLOPE)/(LSLOPE*(1-THEFF2_FC)*(THETASAT2-THETARES2))
        #
        if self.numberOfLayers == 3:
            self.interflowConcTime = (self.kSatLow030150 * self.tanslope*2.0) / \
                     (self.slopeLength * (1.-self.effSatAtFieldCapLow030150) * \
             (self.satVolMoistContLow030150 - self.resVolMoistContLow030150))

        self.interflowConcTime = pcr.max(
            0.0, pcr.cover(self.interflowConcTime, 0.0))
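
The retention relations above follow Clapp and Hornberger (1978) with Campbell (1974): effective saturation at matric suction psi is (psi / psi_a) ** (-1 / b), and unsaturated conductivity scales as kSat * effSat ** (2b + 3). A minimal sketch with illustrative parameter values (the real values come from the FAO soil map read in readSoilMapOfFAO):

def eff_sat(psi, psi_air_entry, pore_size_beta):
    # Clapp-Hornberger retention curve: THEFF = (PSI / PSI_A) ** (-1 / BCH)
    return min((psi / psi_air_entry) ** (-1.0 / pore_size_beta), 1.0)

def k_unsat(theff, k_sat, pore_size_beta, clapp_add_coeff=3.0):
    # Campbell (1974): BCB = 2 * BCH + BCB_ADD; KTHEFF = THEFF ** BCB * KS
    campbell_beta = 2.0 * pore_size_beta + clapp_add_coeff
    return max(theff ** campbell_beta * k_sat, 0.0)

# Illustrative soil: air entry 0.25 m, pore-size index 5.0, kSat 0.05 m/day
theff_fc = eff_sat(psi=1.0, psi_air_entry=0.25, pore_size_beta=5.0)   # ~0.76
print(theff_fc, k_unsat(theff_fc, k_sat=0.05, pore_size_beta=5.0))    # ~0.0014 m/day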
예제 #52
0
    def initial(self):
        """ initial part of the lakes module
        """

        # ************************************************************
        # ***** LAKES
        # ************************************************************
        settings = LisSettings.instance()
        option = settings.options
        binding = settings.binding
        maskinfo = MaskInfo.instance()

        if option['simulateLakes']:

            LakeSitesC = loadmap('LakeSites')
            LakeSitesC[LakeSitesC < 1] = 0
            LakeSitesC[self.var.IsChannel == 0] = 0
            # Get rid of any lakes that are not part of the channel network

            # mask lakes sites when using sub-catchments mask
            self.var.LakeSitesCC = np.compress(LakeSitesC > 0, LakeSitesC)
            self.var.LakeIndex = np.nonzero(LakeSitesC)[0]

            if self.var.LakeSitesCC.size == 0:
                warnings.warn(
                    LisfloodWarning(
                        'There are no lakes. Lakes simulation won\'t run'))
                option['simulateLakes'] = False
                option['repsimulateLakes'] = False
                return
            # break if no lakes

            self.var.IsStructureKinematic = np.where(
                LakeSitesC > 0, np.bool8(1), self.var.IsStructureKinematic)
            # Add lake locations to structures map (used to modify LddKinematic
            # and to calculate LddStructuresKinematic)

            # PCRaster part
            # -----------------------
            LakeSitePcr = loadmap('LakeSites', pcr=True)
            LakeSitePcr = pcraster.ifthen(
                (pcraster.defined(LakeSitePcr)
                 & pcraster.boolean(decompress(self.var.IsChannel))),
                LakeSitePcr)
            IsStructureLake = pcraster.boolean(LakeSitePcr)
            # additional structure map only for lakes to calculate water balance
            self.var.IsUpsOfStructureLake = pcraster.downstream(
                self.var.LddKinematic, pcraster.cover(IsStructureLake, 0))
            # Get all pixels just upstream of lakes
            # -----------------------

            self.var.LakeInflowOldCC = np.bincount(
                self.var.downstruct,
                weights=self.var.ChanQ)[self.var.LakeIndex]
            # for the Modified Puls Method the Q(inflow)1 has to be used;
            # it is assumed that this is the same as Q(inflow)2 for the first timestep.
            # Whether this works in forecasting mode still has to be checked!

            LakeArea = pcraster.lookupscalar(str(binding['TabLakeArea']),
                                             LakeSitePcr)
            LakeAreaC = compressArray(LakeArea)
            self.var.LakeAreaCC = np.compress(LakeSitesC > 0, LakeAreaC)

            # Surface area of each lake [m2]
            LakeA = pcraster.lookupscalar(str(binding['TabLakeA']),
                                          LakeSitePcr)
            LakeAC = compressArray(LakeA) * loadmap('LakeMultiplier')
            self.var.LakeACC = np.compress(LakeSitesC > 0, LakeAC)
            # Lake parameter A (suggested  value equal to outflow width in [m])
            # multiplied with the calibration parameter LakeMultiplier

            LakeInitialLevelValue = loadmap('LakeInitialLevelValue')
            if np.max(LakeInitialLevelValue) == -9999:
                LakeAvNetInflowEstimate = pcraster.lookupscalar(
                    str(binding['TabLakeAvNetInflowEstimate']), LakeSitePcr)
                LakeAvNetC = compressArray(LakeAvNetInflowEstimate)
                self.var.LakeAvNetCC = np.compress(LakeSitesC > 0, LakeAvNetC)

                LakeStorageIniM3CC = self.var.LakeAreaCC * np.sqrt(
                    self.var.LakeAvNetCC / self.var.LakeACC)
                # Initial lake storage [m3]  based on: S = LakeArea * H = LakeArea
                # * sqrt(Q/a)
                self.var.LakeLevelCC = LakeStorageIniM3CC / self.var.LakeAreaCC
            else:
                self.var.LakeLevelCC = np.compress(LakeSitesC > 0,
                                                   LakeInitialLevelValue)
                LakeStorageIniM3CC = self.var.LakeAreaCC * self.var.LakeLevelCC
                # Initial lake storage [m3]  based on: S = LakeArea * H

                self.var.LakeAvNetCC = np.compress(LakeSitesC > 0,
                                                   loadmap('PrevDischarge'))

            LakePrevInflowValue = loadmap('LakePrevInflowValue')
            if np.max(LakePrevInflowValue) == -9999:
                self.var.LakeInflowOldCC = np.bincount(
                    self.var.downstruct,
                    weights=self.var.ChanQ)[self.var.LakeIndex]
            else:
                self.var.LakeInflowOldCC = np.compress(LakeSitesC > 0,
                                                       LakePrevInflowValue)

            # Repeatedly used expressions in lake routine

            # NEW Lake Routine using Modified Puls Method (see Maniak, p.331ff)
            # (Qin1 + Qin2)/2 - (Qout1 + Qout2)/2 = (S2 - S1)/dtime
            # changed into:
            # (S2/dtime + Qout2/2) = (S1/dtime + Qout1/2) - Qout1 + (Qin1 + Qin2)/2
            # outgoing discharge (Qout) is linked to storage (S) by elevation.
            # Now some assumptions to make life easier:
            # 1.) storage volume increases proportionally with elevation: S = A * H
            #      H: elevation, A: area of lake
            # 2.) outgoing discharge = c * b * H ** 2.0 (c: weir constant, b: width)
            #      2.0 because it fits a parabolic cross section, see Aigner 2008
            #      (and it is much easier to calculate; that's the main reason)
            # c for a perfect weir with mu=0.577 and Poleni: 2/3 mu * sqrt(2*g) = 1.7
            # c for a parabolic weir: around 1.8
            # because it is an imperfect weir: C = c * 0.85 = 1.5
            # this results in the formula: Q = 1.5 * b * H ** 2 = a * H ** 2 -> H = sqrt(Q/a)
            self.var.LakeFactor = self.var.LakeAreaCC / (
                self.var.DtRouting * np.sqrt(self.var.LakeACC))

            #  solving the equation  (S2/dtime + Qout2/2) = (S1/dtime + Qout1/2) - Qout1 + (Qin1 + Qin2)/2
            #  SI = (S2/dtime + Qout2/2) = (A*H)/DtRouting + Q/2 = A/(DtRouting*sqrt(a)) * sqrt(Q) + Q/2
            #  -> replacement: A/(DtRouting*sqrt(a)) = Lakefactor, Y = sqrt(Q)
            #  Y**2 + 2*Lakefactor*Y-2*SI=0
            # solution of this quadratic equation:
            # Q=sqr(-LakeFactor+sqrt(sqr(LakeFactor)+2*SI))

            self.var.LakeFactorSqr = np.square(self.var.LakeFactor)
            # for faster calculation inside dynamic section

            LakeStorageIndicator = LakeStorageIniM3CC / self.var.DtRouting + self.var.LakeAvNetCC / 2
            # SI = S/dt + Q/2

            LakePrevOutflowValue = loadmap('LakePrevOutflowValue')
            if np.max(LakePrevOutflowValue) == -9999:
                self.var.LakeOutflowCC = np.square(
                    -self.var.LakeFactor +
                    np.sqrt(self.var.LakeFactorSqr + 2 * LakeStorageIndicator))
                # solution of the quadratic equation
                # it is as easy as this because:
                # 1. storage volume increases proportionally with elevation
                # 2. Q = a * H ** 2.0 (if you chose Q = a * H ** 1.5 you would
                #    have to solve a cubic via Cardano's formula)
            else:
                self.var.LakeOutflowCC = np.compress(LakeSitesC > 0,
                                                     LakePrevOutflowValue)

            self.var.LakeStorageM3CC = LakeStorageIniM3CC.copy()
            self.var.LakeStorageM3BalanceCC = LakeStorageIniM3CC.copy()

            self.var.LakeStorageIniM3 = maskinfo.in_zero()
            self.var.LakeLevel = maskinfo.in_zero()
            self.var.LakeInflowOld = maskinfo.in_zero()
            self.var.LakeOutflow = maskinfo.in_zero()
            np.put(self.var.LakeStorageIniM3, self.var.LakeIndex,
                   LakeStorageIniM3CC)
            self.var.LakeStorageM3 = self.var.LakeStorageIniM3.copy()
            np.put(self.var.LakeLevel, self.var.LakeIndex,
                   self.var.LakeLevelCC)
            np.put(self.var.LakeInflowOld, self.var.LakeIndex,
                   self.var.LakeInflowOldCC)
            np.put(self.var.LakeOutflow, self.var.LakeIndex,
                   self.var.LakeOutflowCC)

            self.var.EWLakeCUMM3 = maskinfo.in_zero()
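
A minimal numeric check of the Modified Puls solution derived in the comments above (numpy, hypothetical lake parameters): with S = A * H and Q = a * H ** 2, the storage indicator SI = S/dt + Q/2 satisfies Y ** 2 + 2 * LakeFactor * Y - 2 * SI = 0 for Y = sqrt(Q), so the positive root recovers the outflow.

import numpy as np

A, a, dt = 1.0e6, 50.0, 86400.0           # lake area [m2], weir parameter, timestep [s]
H = 2.0                                   # water level [m]
S, Q = A * H, a * H ** 2                  # storage [m3] and outflow [m3/s]

lake_factor = A / (dt * np.sqrt(a))
SI = S / dt + Q / 2.0                     # storage indicator
Q_solved = (-lake_factor + np.sqrt(lake_factor ** 2 + 2.0 * SI)) ** 2
print(Q, Q_solved)                        # both 200.0: the root recovers Q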
예제 #53
0
os.remove(tempCloneMap)
msg = ' all done'
logger.info(msg)
print()
print()

# set the global clone maps
clone_map_file = "/projects/0/dfguu/users/edwinhs/data/HydroSHEDS/hydro_basin_without_lakes/integrating_ldd/version_9_december_2016/merged_ldd.map"
pcr.setclone(clone_map_file)

# boolean maps to mask out permanent water bodies (lakes and reservoirs):
reservoirs_30sec_file = "/scratch/shared/edwinsut/reservoirs_and_lakes_30sec/grand_reservoirs_v1_1.boolean.map"
msg = "Set the (high resolution) reservoirs based on the file: " + str(
    reservoirs_30sec_file)
logger.info(msg)
reservoirs_30sec = pcr.cover(pcr.readmap(reservoirs_30sec_file),
                             pcr.boolean(0.0))
lakes_30sec_file = "/scratch/shared/edwinsut/reservoirs_and_lakes_30sec/glwd1_lakes.boolean.map"
msg = "Set the (high resolution) lakes based on the file: " + str(
    lakes_30sec_file)
logger.info(msg)
lakes_30sec = pcr.cover(pcr.readmap(lakes_30sec_file), pcr.boolean(0.0))
# cells that do not belong to lakes or reservoirs
non_permanent_water_bodies = pcr.ifthenelse(reservoirs_30sec, pcr.boolean(0.0),
                                            pcr.boolean(1.0))
non_permanent_water_bodies = pcr.ifthenelse(lakes_30sec, pcr.boolean(0.0),
                                            non_permanent_water_bodies)
#~ pcr.aguila(non_permanent_water_bodies)

# Convert the PCRaster files to a netCDF file:
msg = "Convert pcraster maps to a netcdf file."
logger.info(msg)
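
For readers without PCRaster at hand, the pcr.ifthenelse chain above is plain boolean masking; a numpy equivalent with tiny hypothetical arrays:

import numpy as np

reservoirs = np.array([[True, False], [False, False]])
lakes = np.array([[False, False], [True, False]])
# True wherever the cell is neither a reservoir nor a lake
non_permanent_water_bodies = ~(reservoirs | lakes)
print(non_permanent_water_bodies)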
예제 #54
0
def main():
    ### Read input arguments #####
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q', '--quiet',
                      dest='verbose', default=True, action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i', '--ini', dest='inifile',
                      default='hand_contour_inun.ini', nargs=1,
                      help='ini configuration file')
    parser.add_option('-f', '--flood_map',
                      nargs=1, dest='flood_map',
                      help='Flood map file (NetCDF point time series file)')
    parser.add_option('-v', '--flood_variable',
                      nargs=1, dest='flood_variable',
                      default='water_level',
                      help='variable name of flood water level')
    parser.add_option('-b', '--bankfull_map',
                      dest='bankfull_map', default='',
                      help='Map containing bank full level (is subtracted from flood map, in NetCDF)')
    parser.add_option('-c', '--catchment',
                      dest='catchment_strahler', default=7, type='int',
                      help='Strahler order threshold >= are selected as catchment boundaries')
    parser.add_option('-t', '--time',
                      dest='time', default='',
                      help='time in YYYYMMDDHHMMSS, overrides time in NetCDF input if set')
    # parser.add_option('-s', '--hand_strahler',
    #                   dest='hand_strahler', default=7, type='int',
    #                   help='Strahler order threshold >= selected as riverine')
    parser.add_option('-m', '--max_strahler',
                      dest = 'max_strahler', default=1000, type='int',
                      help='Maximum Strahler order to loop over')
    parser.add_option('-d', '--destination',
                      dest='dest_path', default='inun',
                      help='Destination path')
    parser.add_option('-H', '--hand_file_prefix',
                      dest='hand_file_prefix', default='',
                      help='optional HAND file prefix of already generated HAND files')
    (options, args) = parser.parse_args()

    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    options.dest_path = os.path.abspath(options.dest_path)

    if not(os.path.isdir(options.dest_path)):
        os.makedirs(options.dest_path)

    # set up the logger
    flood_name = os.path.split(options.flood_map)[1].split('.')[0]
    # case_name = 'inun_{:s}_hand_{:02d}_catch_{:02d}'.format(flood_name, options.hand_strahler, options.catchment_strahler)
    case_name = 'inun_{:s}_catch_{:02d}'.format(flood_name, options.catchment_strahler)
    logfilename = os.path.join(options.dest_path, 'hand_contour_inun.log')
    logger, ch = inun_lib.setlogger(logfilename, 'HAND_INUN', options.verbose)
    logger.info('$Id: $')
    logger.info('Flood map: {:s}'.format(options.flood_map))
    logger.info('Bank full map: {:s}'.format(options.bankfull_map))
    logger.info('Destination path: {:s}'.format(options.dest_path))
    # read out ini file
    ### READ CONFIG FILE
    # open config-file
    config = inun_lib.open_conf(options.inifile)
    
    # read settings
    options.dem_file = inun_lib.configget(config, 'HighResMaps',
                                  'dem_file',
                                  True)
    options.ldd_file = inun_lib.configget(config, 'HighResMaps',
                                'ldd_file',
                                 True)
    options.stream_file = inun_lib.configget(config, 'HighResMaps',
                                'stream_file',
                                 True)
    options.riv_length_fact_file = inun_lib.configget(config, 'wflowResMaps',
                                'riv_length_fact_file',
                                 True)
    options.ldd_wflow = inun_lib.configget(config, 'wflowResMaps',
                                'ldd_wflow',
                                True)
    options.riv_width_file = inun_lib.configget(config, 'wflowResMaps',
                                'riv_width_file',
                                 True)
    options.file_format = inun_lib.configget(config, 'file_settings',
                                'file_format', 0, datatype='int')
    options.out_format = inun_lib.configget(config, 'file_settings',
                                'out_format', 0, datatype='int')
    options.latlon = inun_lib.configget(config, 'file_settings',
                                 'latlon', 0, datatype='int')
    options.x_tile = inun_lib.configget(config, 'tiling',
                                  'x_tile', 10000, datatype='int')
    options.y_tile = inun_lib.configget(config, 'tiling',
                                  'y_tile', 10000, datatype='int')
    options.x_overlap = inun_lib.configget(config, 'tiling',
                                  'x_overlap', 1000, datatype='int')
    options.y_overlap = inun_lib.configget(config, 'tiling',
                                  'y_overlap', 1000, datatype='int')
    options.iterations = inun_lib.configget(config, 'inundation',
                                  'iterations', 20, datatype='int')
    options.initial_level = inun_lib.configget(config, 'inundation',
                                  'initial_level', 32., datatype='float')
    options.flood_volume_type = inun_lib.configget(config, 'inundation',
                                  'flood_volume_type', 0, datatype='int')

    # options.area_multiplier = inun_lib.configget(config, 'inundation',
    #                               'area_multiplier', 1., datatype='float')
    logger.info('DEM file: {:s}'.format(options.dem_file))
    logger.info('LDD file: {:s}'.format(options.ldd_file))
    logger.info('Columns per tile: {:d}'.format(options.x_tile))
    logger.info('Rows per tile: {:d}'.format(options.y_tile))
    logger.info('Columns overlap: {:d}'.format(options.x_overlap))
    logger.info('Rows overlap: {:d}'.format(options.y_overlap))
    metadata_global = {}
    # add metadata from the section [metadata_global]
    meta_keys = config.options('metadata_global')
    for key in meta_keys:
        metadata_global[key] = config.get('metadata_global', key)
    # add a number of metadata variables that are mandatory
    metadata_global['config_file'] = os.path.abspath(options.inifile)
    metadata_var = {}
    metadata_var['units'] = 'm'
    metadata_var['standard_name'] = 'water_surface_height_above_reference_datum'
    metadata_var['long_name'] = 'flooding'
    metadata_var['comment'] = 'water_surface_reference_datum_altitude is given in file {:s}'.format(options.dem_file)
    if not os.path.exists(options.dem_file):
        logger.error('path to dem file {:s} cannot be found'.format(options.dem_file))
        sys.exit(1)
    if not os.path.exists(options.ldd_file):
        logger.error('path to ldd file {:s} cannot be found'.format(options.ldd_file))
        sys.exit(1)

    # Read extent from a GDAL compatible file
    try:
        extent = inun_lib.get_gdal_extent(options.dem_file)
    except:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    try:
        x, y = inun_lib.get_gdal_axes(options.dem_file, logging=logger)
        srs = inun_lib.get_gdal_projection(options.dem_file, logging=logger)
    except:
        msg = 'Input file {:s} not a gdal compatible file'.format(options.dem_file)
        inun_lib.close_with_error(logger, ch, msg)
        sys.exit(1)

    # read history from flood file
    if options.file_format == 0:
        a = nc.Dataset(options.flood_map, 'r')
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), a.history)
        a.close()
    else:
        metadata_global['history'] = 'Created by: $Id: $, boundary conditions from {:s},\nhistory: {:s}'.format(os.path.abspath(options.flood_map), 'PCRaster file, no history')

    # first write subcatch maps and hand maps
    ############### TODO ######
    # setup a HAND file for each strahler order

    max_s = inun_lib.define_max_strahler(options.stream_file, logging=logger)
    stream_max = np.minimum(max_s, options.max_strahler)

    for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
        dem_name = os.path.split(options.dem_file)[1].split('.')[0]
        if os.path.isfile('{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)):
            hand_file = '{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)
        else:
            logger.info('No HAND files with HAND prefix were found, checking {:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
            hand_file = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
        if not os.path.isfile(hand_file):
            # hand file does not exist yet! Generate it; otherwise skip.
            logger.info('HAND file {:s} not found, start setting up...please wait...'.format(hand_file))
            hand_file_tmp = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif.tmp'.format(dem_name, hand_strahler))
            ds_hand, band_hand = inun_lib.prepare_gdal(hand_file_tmp, x, y, logging=logger, srs=srs)
            # band_hand = ds_hand.GetRasterBand(1)

            # Open terrain data for reading
            ds_dem, rasterband_dem = inun_lib.get_gdal_rasterband(options.dem_file)
            ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
            ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)
            n = 0
            for x_loop in range(0, len(x), options.x_tile):
                x_start = np.maximum(x_loop, 0)
                x_end = np.minimum(x_loop + options.x_tile, len(x))
                # determine actual overlap for cutting
                for y_loop in range(0, len(y), options.y_tile):
                    x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
                    x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
                    n += 1
                    # print('tile {:001d}:'.format(n))
                    y_start = np.maximum(y_loop, 0)
                    y_end = np.minimum(y_loop + options.y_tile, len(y))
                    y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
                    y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
                    # cut out DEM
                    logger.debug('Computing HAND for xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end,y_start, y_end))
                    terrain = rasterband_dem.ReadAsArray(x_start - x_overlap_min,
                                                         y_start - y_overlap_min,
                                                         (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                         (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                         )

                    drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                         y_start - y_overlap_min,
                                                         (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                         (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                         )
                    stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                           y_start - y_overlap_min,
                                                           (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                           (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                           )
                    # write to temporary file
                    terrain_temp_file = os.path.join(options.dest_path, 'terrain_temp.map')
                    drainage_temp_file = os.path.join(options.dest_path, 'drainage_temp.map')
                    stream_temp_file = os.path.join(options.dest_path, 'stream_temp.map')
                    if rasterband_dem.GetNoDataValue() is not None:
                        inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                          np.arange(0, terrain.shape[1]),
                                          np.arange(0, terrain.shape[0]),
                                          terrain, rasterband_dem.GetNoDataValue(),
                                          gdal_type=gdal.GDT_Float32,
                                          logging=logger)
                    else:
                        # in case no nodata value is found
                        logger.warning('No nodata value found in {:s}. assuming -9999'.format(options.dem_file))
                        inun_lib.gdal_writemap(terrain_temp_file, 'PCRaster',
                                          np.arange(0, terrain.shape[1]),
                                          np.arange(0, terrain.shape[0]),
                                          terrain, -9999.,
                                          gdal_type=gdal.GDT_Float32,
                                          logging=logger)

                    inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      drainage, rasterband_ldd.GetNoDataValue(),
                                      gdal_type=gdal.GDT_Int32,
                                      logging=logger)
                    inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                                      np.arange(0, terrain.shape[1]),
                                      np.arange(0, terrain.shape[0]),
                                      stream, rasterband_stream.GetNoDataValue(),
                                      gdal_type=gdal.GDT_Int32,
                                      logging=logger)
                    # read as pcr objects
                    pcr.setclone(terrain_temp_file)
                    terrain_pcr = pcr.readmap(terrain_temp_file)
                    drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
                    stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to scalar type map

                    # check if the highest stream order of the tile is below the hand_strahler;
                    # if so, DEM values are taken instead of HAND values.
                    max_stream_tile = inun_lib.define_max_strahler(stream_temp_file, logging=logger)
                    if max_stream_tile < hand_strahler:
                        hand_pcr = terrain_pcr
                        logger.info('For this tile, DEM values are used instead of HAND because there is no stream order larger than {:02d}'.format(hand_strahler))
                    else:
                        # compute streams
                        stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr) # generate streams
                        # compute basins
                        stream_ge_dummy, subcatch = inun_lib.subcatch_stream(drainage_pcr, options.catchment_strahler, stream=stream_pcr) # generate streams
                        basin = pcr.boolean(subcatch)
                        hand_pcr, dist_pcr = inun_lib.derive_HAND(terrain_pcr, drainage_pcr, 3000,
                                                                  rivers=pcr.boolean(stream_ge), basin=basin)
                    # convert to numpy
                    hand = pcr.pcr2numpy(hand_pcr, -9999.)
                    # cut relevant part
                    if y_overlap_max == 0:
                        y_overlap_max = -hand.shape[0]
                    if x_overlap_max == 0:
                        x_overlap_max = -hand.shape[1]
                    hand_cut = hand[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]

                    band_hand.WriteArray(hand_cut, x_start, y_start)
                    os.unlink(terrain_temp_file)
                    os.unlink(drainage_temp_file)
                    os.unlink(stream_temp_file)
                    band_hand.FlushCache()
            ds_dem = None
            ds_ldd = None
            ds_stream = None
            band_hand.SetNoDataValue(-9999.)
            ds_hand = None
            logger.info('Finalizing {:s}'.format(hand_file))
            # rename temporary file to final hand file
            os.rename(hand_file_tmp, hand_file)
        else:
            logger.info('HAND file {:s} already exists...skipping...'.format(hand_file))

    #####################################################################################
    #  HAND file has now been prepared, moving to flood mapping part                    #
    #####################################################################################
    # set the clone
    pcr.setclone(options.ldd_wflow)
    # read wflow ldd as pcraster object
    ldd_pcr = pcr.readmap(options.ldd_wflow)
    xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(options.riv_width_file, 'GTiff', logging=logger)

    # determine cell length in meters using ldd_pcr as clone (if latlon=True, degree values are converted to meters)
    x_res, y_res, reallength_wflow = pcrut.detRealCellLength(pcr.scalar(ldd_pcr), not(bool(options.latlon)))
    cell_surface_wflow = pcr.pcr2numpy(x_res * y_res, 0)

    if options.flood_volume_type == 0:
        # load the staticmaps needed to estimate volumes across all
        # xax, yax, riv_length, fill_value = inun_lib.gdal_readmap(options.riv_length_file, 'GTiff', logging=logger)
        # riv_length = np.ma.masked_where(riv_length==fill_value, riv_length)
        xax, yax, riv_width, fill_value = inun_lib.gdal_readmap(options.riv_width_file, 'GTiff', logging=logger)
        riv_width[riv_width == fill_value] = 0

        # read river length factor file (multiplier)
        xax, yax, riv_length_fact, fill_value = inun_lib.gdal_readmap(options.riv_length_fact_file, 'GTiff', logging=logger)
        riv_length_fact = np.ma.masked_where(riv_length_fact==fill_value, riv_length_fact)
        drain_length = wflow_lib.detdrainlength(ldd_pcr, x_res, y_res)

        # compute river length in each cell
        riv_length = pcr.pcr2numpy(drain_length, 0) * riv_length_fact
        # riv_length_pcr = pcr.numpy2pcr(pcr.Scalar, riv_length, 0)

    flood_folder = os.path.join(options.dest_path, case_name)
    flood_vol_map = os.path.join(flood_folder, '{:s}_vol.tif'.format(os.path.split(options.flood_map)[1].split('.')[0]))
    if not(os.path.isdir(flood_folder)):
        os.makedirs(flood_folder)
    if options.out_format == 0:
        inun_file_tmp = os.path.join(flood_folder, '{:s}.tif.tmp'.format(case_name))
        inun_file = os.path.join(flood_folder, '{:s}.tif'.format(case_name))
    else:
        inun_file_tmp = os.path.join(flood_folder, '{:s}.nc.tmp'.format(case_name))
        inun_file = os.path.join(flood_folder, '{:s}.nc'.format(case_name))

    hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
    drainage_temp_file = os.path.join(flood_folder, 'drainage_temp.map')
    stream_temp_file = os.path.join(flood_folder, 'stream_temp.map')
    flood_vol_temp_file = os.path.join(flood_folder, 'flood_warp_temp.tif')
    # load the data with river levels and compute the volumes
    if options.file_format == 0:
        # assume we need the maximum value in a NetCDF time series grid
        logger.info('Reading flood from {:s} NetCDF file'.format(options.flood_map))
        a = nc.Dataset(options.flood_map, 'r')
        if options.latlon == 0:
            xax = a.variables['x'][:]
            yax = a.variables['y'][:]
        else:
            xax = a.variables['lon'][:]
            yax = a.variables['lat'][:]
        if options.time == '':
            time_list = nc.num2date(a.variables['time'][:], units = a.variables['time'].units, calendar=a.variables['time'].calendar)
            time = [time_list[len(time_list) // 2]]
        else:
            time = [dt.datetime.strptime(options.time, '%Y%m%d%H%M%S')]

        flood_series = a.variables[options.flood_variable][:]
        flood_data = flood_series.max(axis=0)
        if np.ma.is_masked(flood_data):
            flood = flood_data.data
            flood[flood_data.mask] = 0
        else:
            flood = flood_data
        if yax[-1] > yax[0]:
            yax = np.flipud(yax)
            flood = np.flipud(flood)
        a.close()
    elif options.file_format == 1:
        logger.info('Reading flood from {:s} PCRaster file'.format(options.flood_map))
        xax, yax, flood, flood_fill_value = inun_lib.gdal_readmap(options.flood_map, 'PCRaster', logging=logger)
        flood = np.ma.masked_equal(flood, flood_fill_value)
        if options.time == '':
            options.time = '20000101000000'
        time = [dt.datetime.strptime(options.time, '%Y%m%d%H%M%S')]

        flood[flood==flood_fill_value] = 0.
    # load the bankfull depths
    if options.bankfull_map == '':
        bankfull = np.zeros(flood.shape)
    else:
        if options.file_format == 0:
            logger.info('Reading bankfull from {:s} NetCDF file'.format(options.bankfull_map))
            a = nc.Dataset(options.bankfull_map, 'r')
            xax = a.variables['x'][:]
            yax = a.variables['y'][:]

            bankfull_series = a.variables[options.flood_variable][:]
            bankfull_data = bankfull_series.max(axis=0)
            if np.ma.is_masked(bankfull_data):
                bankfull = bankfull_data.data
                bankfull[bankfull_data.mask] = 0
            else:
                bankfull = bankfull_data
            if yax[-1] > yax[0]:
                yax = np.flipud(yax)
                bankfull = np.flipud(bankfull)
            a.close()
        elif options.file_format == 1:
            logger.info('Reading bankfull from {:s} PCRaster file'.format(options.bankfull_map))
            xax, yax, bankfull, bankfull_fill_value = inun_lib.gdal_readmap(options.bankfull_map, 'PCRaster', logging=logger)
            bankfull = np.ma.masked_equal(bankfull, bankfull_fill_value)
#     flood = bankfull*2
    # res_x = 2000
    # res_y = 2000
    # subtract the bankfull water level to get flood levels (above bankfull)
    flood_vol = np.maximum(flood-bankfull, 0)
    if options.flood_volume_type == 0:
        flood_vol_m = riv_length*riv_width*flood_vol/cell_surface_wflow  # volume expressed as a water layer in meters
        flood_vol_m_pcr = pcr.numpy2pcr(pcr.Scalar, flood_vol_m, 0)
    else:
        flood_vol_m = flood_vol/cell_surface_wflow
    flood_vol_m_data = flood_vol_m.data
    flood_vol_m_data[flood_vol_m.mask] = -999.
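    # unit check for the water layer above (flood_volume_type == 0):
    # depth above bankfull [m] * riv_length [m] * riv_width [m] / cell surface [m2]
    # gives an equivalent water layer [m] over the cell, e.g. 0.5 m over a
    # 2000 m x 50 m river in a 1e6 m2 cell -> 0.05 m layer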
    logger.info('Saving water layer map to {:s}'.format(flood_vol_map))
    # write to a tiff file
    inun_lib.gdal_writemap(flood_vol_map, 'GTiff', xax, yax, np.maximum(flood_vol_m_data, 0), -999., logging=logger)
    # this is placed later in the hand loop
    # ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
    ds_ldd, rasterband_ldd = inun_lib.get_gdal_rasterband(options.ldd_file)
    ds_stream, rasterband_stream = inun_lib.get_gdal_rasterband(options.stream_file)

    logger.info('Preparing flood map in {:s} ...please wait...'.format(inun_file))
    if options.out_format == 0:
        ds_inun, band_inun = inun_lib.prepare_gdal(inun_file_tmp, x, y, logging=logger, srs=srs)
        # band_inun = ds_inun.GetRasterBand(1)
    else:
        ds_inun, band_inun = inun_lib.prepare_nc(inun_file_tmp, time, x, np.flipud(y), metadata=metadata_global,
                                                 metadata_var=metadata_var, logging=logger)
    # loop over all the tiles
    n = 0
    for x_loop in range(0, len(x), options.x_tile):
        x_start = np.maximum(x_loop, 0)
        x_end = np.minimum(x_loop + options.x_tile, len(x))
        # determine actual overlap for cutting
        for y_loop in range(0, len(y), options.y_tile):
            x_overlap_min = x_start - np.maximum(x_start - options.x_overlap, 0)
            x_overlap_max = np.minimum(x_end + options.x_overlap, len(x)) - x_end
            n += 1
            # print('tile {:001d}:'.format(n))
            y_start = np.maximum(y_loop, 0)
            y_end = np.minimum(y_loop + options.y_tile, len(y))
            y_overlap_min = y_start - np.maximum(y_start - options.y_overlap, 0)
            y_overlap_max = np.minimum(y_end + options.y_overlap, len(y)) - y_end
            x_tile_ax = x[x_start - x_overlap_min:x_end + x_overlap_max]
            y_tile_ax = y[y_start - y_overlap_min:y_end + y_overlap_max]
            # cut out DEM
            logger.debug('handling xmin: {:d} xmax: {:d} ymin {:d} ymax {:d}'.format(x_start, x_end, y_start, y_end))


            drainage = rasterband_ldd.ReadAsArray(x_start - x_overlap_min,
                                                 y_start - y_overlap_min,
                                                 (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                 (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                 )
            stream = rasterband_stream.ReadAsArray(x_start - x_overlap_min,
                                                   y_start - y_overlap_min,
                                                   (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                                   (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                                   )

            # stream_max = np.minimum(stream.max(), options.max_strahler)


            inun_lib.gdal_writemap(drainage_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              drainage, rasterband_ldd.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)
            inun_lib.gdal_writemap(stream_temp_file, 'PCRaster',
                              x_tile_ax,
                              y_tile_ax,
                              stream, rasterband_stream.GetNoDataValue(),
                              gdal_type=gdal.GDT_Int32,
                              logging=logger)


            # read as pcr objects
            pcr.setclone(stream_temp_file)
            drainage_pcr = pcr.lddrepair(pcr.ldd(pcr.readmap(drainage_temp_file)))  # convert to ldd type map
            stream_pcr = pcr.scalar(pcr.readmap(stream_temp_file))  # convert to scalar type map

            # warp of flood volume to inundation resolution
            inun_lib.gdal_warp(flood_vol_map, stream_temp_file, flood_vol_temp_file, gdal_interp=gdalconst.GRA_NearestNeighbour) # ,
            x_tile_ax, y_tile_ax, flood_meter, fill_value = inun_lib.gdal_readmap(flood_vol_temp_file, 'GTiff', logging=logger)
            # make sure that the option unittrue is on !! (in case unitcell is used in another function)
            x_res_tile, y_res_tile, reallength = pcrut.detRealCellLength(pcr.scalar(stream_pcr), not(bool(options.latlon)))
            cell_surface_tile = pcr.pcr2numpy(x_res_tile * y_res_tile, 0)

            # convert meter depth to volume [m3]
            flood_vol = pcr.numpy2pcr(pcr.Scalar, flood_meter*cell_surface_tile, fill_value)
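            # the line above inverts the conversion used when saving flood_vol_map:
            # water layer [m] * cell surface [m2] -> flood volume [m3] per high-res cell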

            # first prepare a basin map, belonging to the lowest order we are looking at
            inundation_pcr = pcr.scalar(stream_pcr) * 0
            for hand_strahler in range(options.catchment_strahler, stream_max + 1, 1):
                # hand_temp_file = os.path.join(flood_folder, 'hand_temp.map')
                if os.path.isfile(os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))):
                    hand_file = os.path.join(options.dest_path, '{:s}_hand_strahler_{:02d}.tif'.format(dem_name, hand_strahler))
                else:
                    hand_file = '{:s}_{:02d}.tif'.format(options.hand_file_prefix, hand_strahler)
                ds_hand, rasterband_hand = inun_lib.get_gdal_rasterband(hand_file)
                hand = rasterband_hand.ReadAsArray(x_start - x_overlap_min,
                                             y_start - y_overlap_min,
                                             (x_end + x_overlap_max) - (x_start - x_overlap_min),
                                             (y_end + y_overlap_max) - (y_start - y_overlap_min)
                                             )
                print('len x-ax: {:d} len y-ax {:d} x-shape {:d} y-shape {:d}'.format(len(x_tile_ax), len(y_tile_ax), hand.shape[1], hand.shape[0]))

                inun_lib.gdal_writemap(hand_temp_file, 'PCRaster',
                          x_tile_ax,
                          y_tile_ax,
                          hand, rasterband_hand.GetNoDataValue(),
                          gdal_type=gdal.GDT_Float32,
                          logging=logger)

                hand_pcr = pcr.readmap(hand_temp_file)

                stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, options.catchment_strahler, stream=stream_pcr)
                # stream_ge_hand, subcatch_hand = inun_lib.subcatch_stream(drainage_pcr, hand_strahler, stream=stream_pcr)
                stream_ge, subcatch = inun_lib.subcatch_stream(drainage_pcr,
                                                               options.catchment_strahler,
                                                               stream=stream_pcr,
                                                               basin=pcr.boolean(pcr.cover(subcatch_hand, 0)),
                                                               assign_existing=True,
                                                               min_strahler=hand_strahler,
                                                               max_strahler=hand_strahler) # generate subcatchments, only within basin for HAND
                flood_vol_strahler = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)), flood_vol, 0) # mask the flood volume map with the created subcatch map for strahler order = hand_strahler

                inundation_pcr_step = inun_lib.volume_spread(drainage_pcr, hand_pcr,
                                                             pcr.subcatchment(drainage_pcr, subcatch), # to make sure backwater effects can occur from higher order rivers to lower order rivers
                                                             flood_vol_strahler,
                                                             volume_thres=0.,
                                                             iterations=options.iterations,
                                                             cell_surface=pcr.numpy2pcr(pcr.Scalar, cell_surface_tile, -9999),
                                                             logging=logger,
                                                             order=hand_strahler) # 1166400000.
                # use maximum value of inundation_pcr_step and new inundation for higher strahler order
                inundation_pcr = pcr.max(inundation_pcr, inundation_pcr_step)
            inundation = pcr.pcr2numpy(inundation_pcr, -9999.)
            # cut relevant part
            if y_overlap_max == 0:
                y_overlap_max = -inundation.shape[0]
            if x_overlap_max == 0:
                x_overlap_max = -inundation.shape[1]
            inundation_cut = inundation[0+y_overlap_min:-y_overlap_max, 0+x_overlap_min:-x_overlap_max]
            # inundation_cut
            if options.out_format == 0:
                band_inun.WriteArray(inundation_cut, x_start, y_start)
                band_inun.FlushCache()
            else:
                # with netCDF, data is stored upside-down.
                inun_lib.write_tile_nc(band_inun, inundation_cut, x_start, y_start)
            # clean up
            os.unlink(flood_vol_temp_file)
            os.unlink(drainage_temp_file)
            os.unlink(hand_temp_file)
            os.unlink(stream_temp_file)     #also remove temp stream file from output folder

            # if n == 35:
            #     band_inun.SetNoDataValue(-9999.)
            #     ds_inun = None
            #     sys.exit(0)
    # os.unlink(flood_vol_map)

    logger.info('Finalizing {:s}'.format(inun_file))
    # add the metadata to the file and band
    # band_inun.SetNoDataValue(-9999.)
    # ds_inun.SetMetadata(metadata_global)
    # band_inun.SetMetadata(metadata_var)
    if options.out_format == 0:
        ds_inun = None
        ds_hand = None
    else:
        ds_inun.close()

    ds_ldd = None
    # rename temporary file to final hand file
    if os.path.isfile(inun_file):
        # remove an old result if available
        os.unlink(inun_file)
    os.rename(inun_file_tmp, inun_file)

    logger.info('Done! Thank you for using hand_contour_inun.py')
    logger, ch = inun_lib.closeLogger(logger, ch)
    del logger, ch
    sys.exit(0)
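
Both tile loops above compute each tile's read window by expanding the core tile with the configured overlap and clamping at the raster edge. A minimal standalone sketch of that index arithmetic (pure Python, illustrative sizes):

def tile_bounds(n_cells, tile_size, overlap):
    """Yield (start, end, pad_before, pad_after) per tile along one axis.

    pad_* are the overlap cells actually available on each side, mirroring
    the x_overlap_min / x_overlap_max logic in the loops above.
    """
    for start in range(0, n_cells, tile_size):
        end = min(start + tile_size, n_cells)
        pad_before = start - max(start - overlap, 0)
        pad_after = min(end + overlap, n_cells) - end
        yield start, end, pad_before, pad_after

# 25 columns in tiles of 10 with an overlap of 3:
for bounds in tile_bounds(25, 10, 3):
    print(bounds)   # (0, 10, 0, 3), (10, 20, 3, 3), (20, 25, 3, 0)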
예제 #55
0
def main():

    # output folder
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # tmp folder
    tmp_folder = out_folder + "/tmp/"
    if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
    os.makedirs(tmp_folder)

    # set the clone map
    print("set the clone map")
    pcr.setclone(global_clone_map_file)

    # read ldd map
    print("define the ldd")
    # ~ ldd_map = pcr.readmap(global_ldd_inp_file)
    ldd_map = pcr.lddrepair(pcr.ldd(vos.readPCRmapClone(v=global_ldd_inp_file,
                                                        cloneMapFileName=global_clone_map_file,
                                                        tmpDir=tmp_folder,
                                                        absolutePath=None,
                                                        isLddMap=True,
                                                        cover=None,
                                                        isNomMap=False)))

    # ~ # - extend ldd (not needed)
    # ~ ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))

    # define the landmask
    print("define the landmask based on the ldd input")
    # ~ landmask = pcr.defined(pcr.readmap(global_ldd_inp_file))
    landmask = pcr.defined(ldd_map)
    landmask = pcr.ifthen(landmask, landmask)

    # save ldd files used
    # - global ldd
    cmd = "cp " + str(global_ldd_inp_file) + " ."
    print(cmd)
    os.system(cmd)
    # - ldd map that is used
    pcr.report(ldd_map, "lddmap_used.map")

    # make catchment map
    print("make catchment map")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))

    # read global subdomain file
    print("read global subdomain file")
    global_subdomain_map = vos.readPCRmapClone(
        v=global_subdomain_file,
        cloneMapFileName=global_clone_map_file,
        tmpDir=tmp_folder,
        absolutePath=None,
        isLddMap=False,
        cover=None,
        isNomMap=True)

    # set initial subdomain
    print("assign subdomains to all catchments")
    subdomains_initial = pcr.areamajority(global_subdomain_map, catchment_map)
    subdomains_initial = pcr.ifthen(landmask, subdomains_initial)

    pcr.aguila(subdomains_initial)

    pcr.report(subdomains_initial, "global_subdomains_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    print("Checking all subdomains, avoid too large subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))
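    # subdomains_final starts as an all-missing (MV) nominal map: the condition
    # above is never true, so values are only pasted in via pcr.cover below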

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))

        process_this_clone = False
        if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(mask_selected_boolean)), 1,
                         1)[0] > 0:
            process_this_clone = True

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        # - initial check value
        check_ok = True

        if process_this_clone:
            xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
            area_in_degree2 = (xmax - xmin) * (ymax - ymin)

            # ~ print(str(area_in_degree2))

            # check whether the size of bounding box is ok
            reference_area_in_degree2 = 2500.
            if area_in_degree2 > 1.50 * reference_area_in_degree2:
                check_ok = False
            if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False
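            # heuristic used here: split a subdomain further if its bounding
            # box exceeds 1.5 x 2500 square degrees or is more than 10 times
            # wider than it is tall (too large / too elongated to process)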

        if check_ok and process_this_clone:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if not check_ok and process_this_clone:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum clump id values
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])

                if check_if_empty == 0.0:

                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                else:

                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")

    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    print("Making the clone and landmask maps for all subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])

    # clone and mask folders
    clone_folder = out_folder + "/clone/"
    if os.path.exists(clone_folder): shutil.rmtree(clone_folder)
    os.makedirs(clone_folder)
    mask_folder = out_folder + "/mask/"
    if os.path.exists(mask_folder): shutil.rmtree(mask_folder)
    os.makedirs(mask_folder)

    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the subdomain %s" % (str(nr))
        print(msg)

        # set the global clone
        pcr.setclone(global_clone_map_file)

        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))

        mask_selected_nominal = pcr.ifthen(subdomains_final == nr,
                                           pcr.nominal(nr))
        mask_file = "mask/mask_%s.map" % (str(nr))
        pcr.report(mask_selected_nominal, mask_file)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))

        # cellsize in arcdegree
        cellsize = cellsize_in_arcmin / 60.

        # number of rows and cols
        num_rows = int(round((ymax - ymin) / cellsize))
        num_cols = int(round((xmax - xmin) / cellsize))

        # make the clone map using mapattr
        clonemap_mask_file = "clone/clonemap_mask_%s.map" % (str(nr))
        cmd = "mapattr -s -R %s -C %s -B -P yb2t -x %s -y %s -l %s %s" % (
            str(num_rows), str(num_cols), str(xmin), str(ymax), str(cellsize),
            clonemap_mask_file)
        print(cmd)
        os.system(cmd)

        # set the local landmask for the clump
        pcr.setclone(clonemap_mask_file)
        local_mask = vos.readPCRmapClone(v=mask_file,
                                         cloneMapFileName=clonemap_mask_file,
                                         tmpDir=tmp_folder,
                                         absolutePath=None,
                                         isLddMap=False,
                                         cover=None,
                                         isNomMap=True)
        local_mask_boolean = pcr.defined(local_mask)
        local_mask_boolean = pcr.ifthen(local_mask_boolean, local_mask_boolean)
        pcr.report(local_mask_boolean, mask_file)

    print("")
    print("")
    print("")

    print(num_of_masks)
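
The script above relies on a boundingBox helper that is not shown in this
example. A minimal sketch of what it presumably does, assuming a geographic
(lon/lat) clone and that it returns the extent of the non-missing cells as
(xmin, ymin, xmax, ymax):

import numpy as np
import pcraster as pcr

def boundingBox(pcr_map):
    # hypothetical reconstruction, not the original helper: coordinates of the
    # cell centres where pcr_map is defined, reduced to their min/max extent
    x = pcr.pcr2numpy(pcr.xcoordinate(pcr.defined(pcr_map)), np.nan)
    y = pcr.pcr2numpy(pcr.ycoordinate(pcr.defined(pcr_map)), np.nan)
    return np.nanmin(x), np.nanmin(y), np.nanmax(x), np.nanmax(y)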
Example #56
0
 def dynamic(self):
     #####################
     # * dynamic section #
     #####################
     #-evaluation of the current date: return current month and the time step used
     #-reading in fluxes over land and water area for current time step [m/d]
     # and read in reservoir demand and surface water extraction [m3]
     try:
         self.landSurfaceQ = clippedRead.get(
             pcrm.generateNameT(landSurfaceQFileName,
                                self.currentTimeStep()))
     except:
         pass
     try:
         self.potWaterSurfaceQ = clippedRead.get(
             pcrm.generateNameT(waterSurfaceQFileName,
                                self.currentTimeStep()))
     except:
         pass
     #-surface water extraction and reservoir demand currently set to zero, should
     # be computed automatically and updated to reservoirs
     self.potSurfaceWaterExtraction = pcr.spatial(pcr.scalar(0.))
     #self.waterBodies.demand=  #self.reservoirDemandTSS.assignID(self.waterBodies.ID,self.currentTimeStep(),0.)*self.timeSec
     #-initialization of cumulative values of actual water extractions
     self.actWaterSurfaceQ = pcr.spatial(pcr.scalar(0.))
     self.actSurfaceWaterExtraction = pcr.spatial(pcr.scalar(0.))
     #-definition of sub-loop for routing scheme - explicit scheme has to satisfy Courant condition
     timeLimit= pcr.cellvalue(pcr.mapminimum((pcr.cover(pcr.ifthen(self.waterBodies.distribution == 0,\
      self.channelLength/self.flowVelocity),\
       self.timeSec/self.nrIterDefault)*self.timeSec/self.nrIterDefault)**0.5),1)[0]
     nrIter = int(self.timeSec / timeLimit)
     nrIter = min(nrIter, int(self.timeSec / 300.))
     nrIter = max(1, nrIter)
     while float(self.timeSec / nrIter) % 1 != 0:
         nrIter += 1
     deltaTime = self.timeSec / nrIter
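     # the loop above makes deltaTime an exact divisor of the model time step;
     # e.g. timeSec = 86400 s and timeLimit = 1300 s give nrIter = 66, raised
     # to 72 so that deltaTime = 1200 s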
     #-sub-loop for current time step
     if self.currentDate.day == 1 or nrIter >= 24:
         print('\n*\tprocessing %s, currently using %d substeps of %d seconds\n' %
               (self.currentDate.date(), nrIter, deltaTime))
     #-update discharge and storage
     for nrICur in range(nrIter):
         #-initializing discharge for the current sub-timestep and fill in values
         # for channels and at outlets of waterbodies
         # * channels *
         estQ= pcr.ifthenelse((self.actualStorage > 0.) & (self.waterBodies.distribution == 0) ,\
          (self.wettedArea/self.alphaQ)**(1./self.betaQ),0.)
         #estQ= pcr.ifthenelse((self.actualStorage > 0.) & (self.waterBodies.distribution == 0) ,\
         #0.5*(self.Q+(self.wettedArea/self.alphaQ)**(1./self.betaQ)),0.)
         #estQ= pcr.min(estQ,self.actualStorage/deltaTime)
         self.report(estQ, 'results/qest')
         self.Q = pcr.spatial(pcr.scalar(0.))
         self.Q= pcr.ifthenelse(self.waterBodies.distribution == 0,\
          pcr.kinematic(self.channelLDD,estQ,0.,self.alphaQ,\
           self.betaQ,1,deltaTime,self.channelLength),self.Q)
         # * water bodies *
         self.waterBodies.dischargeUpdate()
         self.Q = self.waterBodies.returnMapValue(self.Q,
                                                  self.waterBodies.actualQ)
         #-fluxes and resulting change in storage: first the local fluxes are evaluated
         # and aggregated over the water bodies where applicable; this includes the specific runoff [m/day/m2]
         # from input and the estimated extraction from surface water as volume per day [m3/day];
         # specific runoff from the land surface is always positive whereas the fluxes over the water surface
          # are potential, including discharge, and are adjusted to match the available storage; to this end,
         # surface water storage and fluxes over water bodies are totalized and assigned to the outlet;
         # discharge is updated in a separate step, after vertical fluxes are compared to the actual storage
         deltaActualStorage= ((self.landFraction*self.landSurfaceQ+\
          self.waterFraction*self.potWaterSurfaceQ)*self.cellArea-\
          self.potSurfaceWaterExtraction)*float(self.duration)/nrIter
         deltaActualStorage= pcr.ifthenelse(self.waterBodies.distribution != 0,\
          pcr.ifthenelse(self.waterBodies.location != 0,\
           pcr.areatotal(deltaActualStorage,self.waterBodies.distribution),0),\
            deltaActualStorage)
         adjustmentRatio= pcr.ifthenelse(deltaActualStorage < 0.,\
          pcr.min(1.,-self.actualStorage/deltaActualStorage),1.)
         self.actWaterSurfaceQ += adjustmentRatio * self.potWaterSurfaceQ
         self.actSurfaceWaterExtraction += adjustmentRatio * self.potSurfaceWaterExtraction
         deltaActualStorage *= adjustmentRatio
         #-local water balance check
         if testLocalWaterBalance:
             differenceActualStorage = self.actualStorage
             differenceActualStorage += deltaActualStorage
         #-overall water balance check: net input
         self.cumulativeDeltaStorage += pcr.catchmenttotal(
             deltaActualStorage, self.LDD)
         #-update storage first with local changes, then balance discharge with storage and update storage
         # with lateral flow and return value to water bodies
         self.actualStorage += deltaActualStorage
         self.actualStorage = pcr.max(0., self.actualStorage)
         self.Q = pcr.min(self.Q, self.actualStorage / deltaTime)
         deltaActualStorage = (-self.Q +
                               pcr.upstream(self.LDD, self.Q)) * deltaTime
         deltaActualStorage= pcr.ifthenelse(self.waterBodies.distribution != 0,\
          pcr.ifthenelse(self.waterBodies.location != 0,\
           pcr.areatotal(deltaActualStorage,self.waterBodies.distribution),0),\
            deltaActualStorage)
         self.actualStorage += deltaActualStorage
         self.actualStorage = pcr.max(0., self.actualStorage)
         self.waterBodies.actualStorage = self.waterBodies.retrieveMapValue(
             self.actualStorage)
         #-flooded fraction returned
         floodedFraction,floodedDepth,\
           self.wettedArea,self.alphaQ= self.kinAlphaComposite(self.actualStorage,self.floodplainMask)
         self.wettedArea= self.waterBodies.returnMapValue(self.wettedArea,\
          self.waterBodies.channelWidth+2.*self.waterBodies.updateWaterHeight())
         self.waterFraction= pcr.ifthenelse(self.waterBodies.distribution == 0,\
          pcr.max(self.waterFractionMask,floodedFraction),self.waterFractionMask)
         self.landFraction = pcr.max(0., 1. - self.waterFraction)
         self.flowVelocity = pcr.ifthenelse(self.wettedArea > 0,
                                            self.Q / self.wettedArea, 0.)
         #-local water balance check
         if testLocalWaterBalance:
             differenceActualStorage += deltaActualStorage
             differenceActualStorage -= self.actualStorage
             totalDifference = pcr.cellvalue(
                 pcr.maptotal(differenceActualStorage), 1)[0]
             minimumDifference = pcr.cellvalue(
                 pcr.mapminimum(differenceActualStorage), 1)[0]
             maximumDifference = pcr.cellvalue(
                 pcr.mapmaximum(differenceActualStorage), 1)[0]
             if abs(totalDifference) > 1.e-3:
                 print('water balance error: total %e; min %e; max %e' %
                       (totalDifference, minimumDifference, maximumDifference))
                 if reportLocalWaterBalance:
                     pcr.report(differenceActualStorage,
                                'mbe_%s.map' % self.currentDate.date())
         #-overall water balance check: updating cumulative discharge and total storage [m3]
         self.totalDischarge += self.Q * deltaTime
         self.totalStorage = pcr.catchmenttotal(self.actualStorage,
                                                self.LDD)
     #-check on occurrence of last day and report mass balance
     if self.currentDate == self.endDate:
         #-report initial maps
         pcr.report(self.Q, self.QIniMap)
         pcr.report(self.actualStorage, self.actualStorageIniMap)
         #-return relative and absolute water balance error per cell and
         # as total at basin outlets
         self.totalDischarge= pcr.ifthen((self.waterBodies.distribution == 0) | \
          (self.waterBodies.location != 0),self.totalDischarge)
         self.cumulativeDeltaStorage= pcr.ifthen((self.waterBodies.distribution == 0) | \
          (self.waterBodies.location != 0),self.cumulativeDeltaStorage)
         massBalanceError= self.totalStorage+self.totalDischarge-\
          self.cumulativeDeltaStorage
         relMassBalanceError = 1. + pcr.ifthenelse(
             self.cumulativeDeltaStorage != 0.,
             massBalanceError / self.cumulativeDeltaStorage, 0.)
         totalMassBalanceError= pcr.cellvalue(pcr.maptotal(pcr.ifthen(self.basinOutlet,\
          massBalanceError)),1)[0]
         totalCumulativeDeltaStorage= pcr.cellvalue(pcr.maptotal(pcr.ifthen(self.basinOutlet,\
          self.cumulativeDeltaStorage)),1)[0]
         if totalCumulativeDeltaStorage > 0:
             totalRelativeMassBalanceError = 1. + totalMassBalanceError / totalCumulativeDeltaStorage
         else:
             totalRelativeMassBalanceError = 1.
         #-report maps and echo value
         pcr.report(massBalanceError, mbeFileName)
         pcr.report(relMassBalanceError, mbrFileName)
         print('\n*\ttotal global mass balance error [m3]: %8.3g' % totalMassBalanceError)
         print('\n*\trelative global mass balance error [-]: %5.3f' % totalRelativeMassBalanceError)
         #-echo to screen: total mass balance error and completion of run
         print('\trun completed')
     #-end of day: return states and fluxes
     #-get surface water attributes?
     if getSurfaceWaterAttributes:
         #-compute the following secondary variables:
         # surface water area [m2]: area given dynamic surface water fraction
         # residence time [days]: volume over discharge, assigned -1 in case discharge is zero
          # surface water depth [m], weighted by channel and floodplain volume
         surfaceWaterArea = self.waterFraction * self.cellArea
         surfaceWaterArea= pcr.ifthenelse(self.waterBodies.distribution != 0,\
           pcr.ifthenelse(self.waterBodies.location != 0,\
            pcr.areatotal(surfaceWaterArea,self.waterBodies.distribution),0),\
             surfaceWaterArea)
         surfaceWaterResidenceTime = pcr.ifthenelse(
             self.Q > 0., self.actualStorage / (self.Q * self.timeSec), -1)
         surfaceWaterDepth= pcr.ifthenelse(self.actualStorage > 0.,\
          pcr.max(0.,self.actualStorage-self.channelStorageCapacity)**2/\
           (self.actualStorage*surfaceWaterArea),0.)
         surfaceWaterDepth+= pcr.ifthenelse(self.actualStorage > 0.,\
          pcr.min(self.channelStorageCapacity,self.actualStorage)**2/(self.waterFractionMask*\
          self.cellArea*self.actualStorage),0.)
         #-reports: values at outlet of lakes or reservoirs are assigned to their full extent
         self.report(pcr.ifthenelse(self.waterBodies.distribution != 0,\
          pcr.areamaximum(surfaceWaterArea,self.waterBodies.distribution),surfaceWaterArea),\
           surfaceWaterAreaFileName)
         self.report(pcr.ifthenelse(self.waterBodies.distribution != 0,\
          pcr.areamaximum(surfaceWaterResidenceTime,self.waterBodies.distribution),surfaceWaterResidenceTime),\
           surfaceWaterResidenceTimeFileName)
         self.report(pcr.ifthenelse(self.waterBodies.distribution != 0,\
          pcr.areamaximum(surfaceWaterDepth,self.waterBodies.distribution),surfaceWaterDepth),\
           surfaceWaterDepthFileName)
     #-reports on standard output: values at outlet of lakes or reservoirs are assigned to their full extent
     self.report(
         pcr.ifthenelse(
             self.waterBodies.distribution != 0,
             pcr.areamaximum(self.flowVelocity,
                             self.waterBodies.distribution),
             self.flowVelocity), flowVelocityFileName)
     self.report(
         pcr.ifthenelse(
             self.waterBodies.distribution != 0,
             pcr.areamaximum(self.Q, self.waterBodies.distribution),
             self.Q), QFileName)
     self.report(pcr.ifthenelse(self.waterBodies.distribution == 0,\
      floodedFraction,0.),floodedFractionFileName)
     self.report(pcr.ifthenelse(self.waterBodies.distribution == 0,\
      floodedDepth,0.),floodedDepthFileName)
     self.report(self.actualStorage, actualStorageFileName)
     #-update date for time step and report relevant daily output
     self.currentDate = self.currentDate + datetime.timedelta(self.duration)
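
The storage update above is plain mass bookkeeping along the LDD: each cell
loses Q*deltaTime and gains the outflow of its upstream neighbours, with Q
first capped by the available storage. A toy 1-D numpy sketch of that step,
with hypothetical names and independent of PCRaster:

import numpy as np

def route_step(storage, q, downstream_idx, dt):
    # storage [m3], q [m3/s]; downstream_idx gives each cell's downstream
    # neighbour, with -1 marking the basin outlet
    q = np.minimum(q, storage / dt)          # discharge limited by storage
    delta = -q * dt                          # every cell loses its own outflow
    for i, d in enumerate(downstream_idx):
        if d >= 0:
            delta[d] += q[i] * dt            # and gains its upstream inflow
    return np.maximum(0.0, storage + delta)

# two cells draining 0 -> 1 -> outlet: [100., 50.] becomes [90., 55.]
print(route_step(np.array([100., 50.]), np.array([1., 0.5]), np.array([1, -1]), 10.))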
Example #57
0
    def update(self, landSurface, routing, currTimeStep):

        if self.debugWaterBalance:
            preStorGroundwater = self.storGroundwater
            preStorGroundwaterFossil = self.storGroundwaterFossil

        # get riverbed infiltration from the previous time step (from routing)
        self.surfaceWaterInf = routing.riverbedExchange/routing.cellArea     # m
        self.storGroundwater += self.surfaceWaterInf

        # get net recharge (percolation-capRise) and update storage:
        self.storGroundwater = pcr.max(0.,
                                       self.storGroundwater + landSurface.gwRecharge)

        # potential groundwater abstraction (unit: m)
        potGroundwaterAbstract = landSurface.totalPotentialGrossDemand -\
            landSurface.allocSurfaceWaterAbstract

        if not self.usingAllocSegments or self.limitAbstraction:

            # Note: For simplicity, no network for a run with limitAbstraction.

            #logger.info("Groundwater abstraction is only to satisfy local demand. No network for distributing groundwater.")

            # nonFossil groundwater abstraction (unit: m) to fulfill water demand
            # - assumption: Groundwater is only abstracted to satisfy local demand.
            self.nonFossilGroundwaterAbs = \
                pcr.max(0.0,
                        pcr.min(self.storGroundwater,
                                potGroundwaterAbstract))
            #
            self.allocNonFossilGroundwater = self.nonFossilGroundwaterAbs

        if self.usingAllocSegments and not self.limitAbstraction:

            # Note: Incorporating distribution network of groundwater source is possible only if limitAbstraction = False.

            #logger.info("Using groundwater source allocation.")

            # gross/potential demand volume in each cell (unit: m3)
            cellVolGrossDemand = potGroundwaterAbstract*routing.cellArea

            # total gross demand volume in each segment/zone (unit: m3)
            segTtlGrossDemand = pcr.areatotal(
                cellVolGrossDemand, self.allocSegments)

            # total available groundwater volume in each cell - ignore small values (less than 1 m3)
            cellAvlGroundwater = pcr.max(
                0.00, self.storGroundwater * routing.cellArea)
            cellAvlGroundwater = pcr.rounddown(cellAvlGroundwater/1.)*1.

            # total available groundwater volume in each segment/zone (unit: m3)
            segAvlGroundwater = pcr.areatotal(
                cellAvlGroundwater, self.allocSegments)
            segAvlGroundwater = pcr.max(0.00,  segAvlGroundwater)

            # total actual groundwater abstraction volume in each segment/zone (unit: m3)
            #
            # - not limited to available water - ignore small values (less than 1 m3)
            segActGroundwaterAbs = pcr.max(0.0,
                                           pcr.rounddown(segTtlGrossDemand))
            #
            # - limited to available water
            segActGroundwaterAbs = pcr.min(
                segAvlGroundwater, segActGroundwaterAbs)

            # actual groundwater abstraction volume in each cell (unit: m3)
            volActGroundwaterAbstract = vos.getValDivZero(
                cellAvlGroundwater, segAvlGroundwater, vos.smallNumber) * \
                segActGroundwaterAbs
            volActGroundwaterAbstract = pcr.min(
                cellAvlGroundwater, volActGroundwaterAbstract)              # unit: m3

            # actual non fossil groundwater abstraction volume in meter (unit: m)
            self.nonFossilGroundwaterAbs = pcr.ifthen(self.landmask, volActGroundwaterAbstract) /\
                routing.cellArea                        # unit: m

            # allocation non fossil groundwater abstraction volume to each cell (unit: m3)
            self.volAllocGroundwaterAbstract = vos.getValDivZero(
                cellVolGrossDemand, segTtlGrossDemand, vos.smallNumber) *\
                segActGroundwaterAbs                                          # unit: m3

            # allocation of non fossil groundwater abstraction in meter (unit: m)
            self.allocNonFossilGroundwater = pcr.ifthen(self.landmask, self.volAllocGroundwaterAbstract) /\
                routing.cellArea                    # unit: m

            if self.debugWaterBalance:

                abstraction = pcr.cover(pcr.areatotal(
                    self.nonFossilGroundwaterAbs * routing.cellArea, self.allocSegments)/self.segmentArea, 0.0)
                allocation = pcr.cover(pcr.areatotal(
                    self.allocNonFossilGroundwater*routing.cellArea, self.allocSegments)/self.segmentArea, 0.0)

                vos.waterBalanceCheck([abstraction],
                                      [allocation],
                                      [pcr.scalar(0.0)],
                                      [pcr.scalar(0.0)],
                                      'non fossil groundwater abstraction - allocation per zone/segment (PS: Error here may be caused by rounding error.)',
                                      True,
                                      "", threshold=5e-4)

        # update storGroundwater after self.nonFossilGroundwaterAbs
        self.storGroundwater = pcr.max(
            0., self.storGroundwater - self.nonFossilGroundwaterAbs)

        # unmetDemand (m), satisfied by fossil gwAbstractions           # TODO: Include desalinization
        self.unmetDemand = pcr.max(0.0,
                                   potGroundwaterAbstract -
                                   self.allocNonFossilGroundwater)              # m (equal to zero if limitAbstraction = True)

        if self.limitAbstraction:
            pass
            #logger.info("No fossil groundwater abstraction is allowed")
            # TODO: check that self.unmetDemand = 0.0

        # correcting unmetDemand with available fossil groundwater
        # Note: For simplicity, limitFossilGroundwaterAbstraction can only be combined with local source assumption
        if not self.usingAllocSegments and self.limitFossilGroundwaterAbstraction:
            self.unmetDemand = pcr.min(
                pcr.max(0.0, self.storGroundwaterFossil), self.unmetDemand)

        # calculate the average groundwater abstraction (m/day) from the last 365 days:
        totalAbstraction = self.unmetDemand + self.nonFossilGroundwaterAbs
        deltaAbstraction = totalAbstraction - self.avgAbstraction
        self.avgAbstraction = self.avgAbstraction +\
            deltaAbstraction /\
            pcr.min(365., pcr.max(1.0, routing.timestepsToAvgDischarge))
        self.avgAbstraction = pcr.max(0.0, self.avgAbstraction)
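        # this is a running mean with an effective window of at most 365 days:
        # avg_t = avg_{t-1} + (x_t - avg_{t-1}) / min(365, max(1, n)); e.g. for
        # avg = 0.0020 and x = 0.0040 m/day with n >= 365, avg_new ~= 0.0020055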

        # update storGroundwaterFossil after unmetDemand
        self.storGroundwaterFossil -= self.unmetDemand

        # calculate baseflow and update storage:
        self.baseflow = pcr.max(0.,
                                pcr.min(self.storGroundwater,
                                        self.recessionCoeff *
                                        self.storGroundwater))
        self.storGroundwater = pcr.max(0.,
                                       self.storGroundwater - self.baseflow)
        # PS: baseflow must be calculated at the end (to ensure the availability of storGroundwater to support nonFossilGroundwaterAbs)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([self.surfaceWaterInf,
                                   landSurface.gwRecharge],
                                  [self.baseflow,
                                   self.nonFossilGroundwaterAbs],
                                  [preStorGroundwater],
                                  [self.storGroundwater],
                                  'storGroundwater',
                                  True,
                                  currTimeStep.fulldate, threshold=1e-4)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([pcr.scalar(0.0)],
                                  [self.unmetDemand],
                                  [preStorGroundwaterFossil],
                                  [self.storGroundwaterFossil],
                                  'storGroundwaterFossil',
                                  True,
                                  currTimeStep.fulldate, threshold=1e-3)

        if self.debugWaterBalance and self.limitFossilGroundwaterAbstraction:
            vos.waterBalanceCheck([pcr.scalar(0.0)],
                                  [self.unmetDemand],
                                  [pcr.max(0.0,  preStorGroundwaterFossil)],
                                  [pcr.max(0.0, self.storGroundwaterFossil)],
                                  'storGroundwaterFossil (with limitFossilGroundwaterAbstraction)',
                                  True,
                                  currTimeStep.fulldate, threshold=1e-3)

        if self.debugWaterBalance and landSurface.limitAbstraction:
            vos.waterBalanceCheck([potGroundwaterAbstract],
                                  [self.nonFossilGroundwaterAbs],
                                  [pcr.scalar(0.)],
                                  [pcr.scalar(0.)],
                                  'non fossil groundwater abstraction',
                                  True,
                                  currTimeStep.fulldate, threshold=1e-4)

        if self.debugWaterBalance:
            vos.waterBalanceCheck([self.unmetDemand, self.allocNonFossilGroundwater, landSurface.allocSurfaceWaterAbstract],
                                  [landSurface.totalPotentialGrossDemand],
                                  [pcr.scalar(0.)],
                                  [pcr.scalar(0.)],
                                  'water demand allocation (from surface water, groundwater and unmetDemand)',
                                  True,
                                  currTimeStep.fulldate, threshold=1e-4)

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,
                                          currTimeStep.month,
                                          currTimeStep.day,
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                               str(var)+"_dailyTot.nc",
                                               var,
                                               pcr2numpy(
                                                   self.__getattribute__(var), vos.MV),
                                               timeStamp, timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.day == 1:
                        vars(self)[var+'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var+'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_monthTot.nc",
                                                   var,
                                                   pcr2numpy(self.__getattribute__(var+'MonthTot'),
                                                             vos.MV), timeStamp, currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC:

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.day == 1:
                            vars(self)[var+'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var+'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot'] /\
                            currTimeStep.day
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_monthAvg.nc",
                                                   var,
                                                   pcr2numpy(self.__getattribute__(var+'MonthAvg'),
                                                             vos.MV), timeStamp, currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_monthEnd.nc",
                                                   var,
                                                   pcr2numpy(
                                                       self.__getattribute__(var), vos.MV),
                                                   timeStamp, currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.doy == 1:
                        vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var+'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_annuaTot.nc",
                                                   var,
                                                   pcr2numpy(self.__getattribute__(var+'AnnuaTot'),
                                                             vos.MV), timeStamp, currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC:
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.doy == 1:
                            vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var+'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot'] /\
                            currTimeStep.doy
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_annuaAvg.nc",
                                                   var,
                                                   pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),
                                                             vos.MV), timeStamp, currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/" +
                                                   str(var)+"_annuaEnd.nc",
                                                   var,
                                                   pcr2numpy(
                                                       self.__getattribute__(var), vos.MV),
                                                   timeStamp, currTimeStep.annuaIdx-1)
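
The segment-based branch above totals demand and availability per allocation
zone with pcr.areatotal, limits the zonal abstraction to what is available,
and then shares it over cells in proportion to their availability (and the
allocation in proportion to their demand). A small numpy sketch of that
proportional scheme, with hypothetical names and independent of PCRaster:

import numpy as np

def allocate_by_zone(demand, available, zone_id):
    # demand, available: per-cell volumes [m3]; zone_id: allocation segments
    abstraction = np.zeros_like(available)
    allocation = np.zeros_like(demand)
    for z in np.unique(zone_id):
        m = zone_id == z
        seg_abs = min(demand[m].sum(), available[m].sum())  # limited to supply
        if available[m].sum() > 0:
            abstraction[m] = available[m] / available[m].sum() * seg_abs
        if demand[m].sum() > 0:
            allocation[m] = demand[m] / demand[m].sum() * seg_abs
    return abstraction, allocation

By construction, abstraction and allocation sum to the same volume within each
zone, which is exactly what the waterBalanceCheck call above verifies.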
Example #58
0
    def read_forcings(self, currTimeStep):

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 hard-coded references to the variable names
        # precipitation, temperature and evapotranspiration have been replaced
        # by the variable names used in the netCDF files and passed from the ini file
        #-----------------------------------------------------------------------

        # method for finding time indexes in the precipitation netCDF file:
        # - the default one
        method_for_time_index = None
        # - based on the ini/configuration file (if given)
        if 'time_index_method_for_precipitation_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                           self.iniItems.meteoOptions['time_index_method_for_precipitation_netcdf'] != "None":
            method_for_time_index = self.iniItems.meteoOptions[
                'time_index_method_for_precipitation_netcdf']

        # reading precipitation:
        if self.precipitation_set_per_year:
            #~ print currTimeStep.year
            nc_file_per_year = self.preFileNC % (float(
                currTimeStep.year), float(currTimeStep.year))
            self.precipitation = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.preVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            self.precipitation = vos.netcdf2PCRobjClone(\
                                      self.preFileNC, self.preVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 added to automatically update precipitation
        self.precipitation = self.preConst + self.preFactor * pcr.ifthen(
            self.landmask, self.precipitation)
        #-----------------------------------------------------------------------

        # make sure that precipitation is always positive
        self.precipitation = pcr.max(0., self.precipitation)
        self.precipitation = pcr.cover(self.precipitation, 0.0)

        # ignore very small values of precipitation (less than 0.00001 m/day or less than 0.01 kg.m-2.day-1 )
        if self.usingDailyTimeStepForcingData:
            self.precipitation = pcr.rounddown(
                self.precipitation * 100000.) / 100000.
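        # worked example of the truncation above (values illustrative):
        #   1.49e-5 m/day * 1e5 = 1.49 -> rounddown -> 1.0 -> 1.0e-5 m/day
        #   0.90e-5 m/day * 1e5 = 0.90 -> rounddown -> 0.0 -> treated as dry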

        # method for finding the time index in the temperature netCDF file:
        # - the default one
        method_for_time_index = None
        # - based on the ini/configuration file (if given)
        if 'time_index_method_for_temperature_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                         self.iniItems.meteoOptions['time_index_method_for_temperature_netcdf'] != "None":
            method_for_time_index = self.iniItems.meteoOptions[
                'time_index_method_for_temperature_netcdf']

        # reading temperature
        if self.temperature_set_per_year:
            nc_file_per_year = self.tmpFileNC % (int(
                currTimeStep.year), int(currTimeStep.year))
            self.temperature = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.tmpVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
        else:
            self.temperature = vos.netcdf2PCRobjClone(\
                                 self.tmpFileNC,self.tmpVarName,\
                                 str(currTimeStep.fulldate),
                                 useDoy = method_for_time_index,
                                 cloneMapFileName=self.cloneMap,\
                                 LatitudeLongitude = True)

        #-----------------------------------------------------------------------
        # NOTE: RvB 13/07/2016 added to automatically update temperature
        self.temperature = self.tmpConst + self.tmpFactor * pcr.ifthen(
            self.landmask, self.temperature)
        #-----------------------------------------------------------------------

        # Downscaling precipitation and temperature
        if self.downscalePrecipitationOption:
            self.downscalePrecipitation(currTimeStep)
        if self.downscaleTemperatureOption:
            self.downscaleTemperature(currTimeStep)

        # calculate or obtain referencePotET
        if self.refETPotMethod == 'Hamon':
            self.referencePotET = refPotET.HamonPotET(self.temperature,
                                                      currTimeStep.doy,
                                                      self.latitudes)
        if self.refETPotMethod == 'Input':

            # method for finding time indexes in the reference potential ET netCDF file:
            # - the default one
            method_for_time_index = None
            # - based on the ini/configuration file (if given)
            if 'time_index_method_for_ref_pot_et_netcdf' in list(self.iniItems.meteoOptions.keys()) and\
                                                            self.iniItems.meteoOptions['time_index_method_for_ref_pot_et_netcdf'] != "None":
                method_for_time_index = self.iniItems.meteoOptions[
                    'time_index_method_for_ref_pot_et_netcdf']

            if self.refETPotFileNC_set_per_year:
                nc_file_per_year = self.etpFileNC % (int(
                    currTimeStep.year), int(currTimeStep.year))
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      nc_file_per_year, self.refETPotVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName = self.cloneMap,\
                                      LatitudeLongitude = True)
            else:
                self.referencePotET = vos.netcdf2PCRobjClone(\
                                      self.etpFileNC,self.refETPotVarName,\
                                      str(currTimeStep.fulldate),
                                      useDoy = method_for_time_index,
                                      cloneMapFileName=self.cloneMap,\
                                      LatitudeLongitude = True)
            #-----------------------------------------------------------------------
            # NOTE: RvB 13/07/2016 added to automatically update reference potential evapotranspiration
            self.referencePotET = self.refETPotConst + self.refETPotFactor * pcr.ifthen(
                self.landmask, self.referencePotET)
            #-----------------------------------------------------------------------

        # Downscaling referenceETPot (based on temperature)
        if self.downscaleReferenceETPotOption: self.downscaleReferenceETPot()

        # smoothing:
        if self.forcingSmoothing == True:
            logger.debug("Forcing data are smoothed.")
            self.precipitation = pcr.windowaverage(self.precipitation,
                                                   self.smoothingWindowsLength)
            self.temperature = pcr.windowaverage(self.temperature,
                                                 self.smoothingWindowsLength)
            self.referencePotET = pcr.windowaverage(
                self.referencePotET, self.smoothingWindowsLength)

        # rounding temperature values to minimize numerical errors (note only to minimize, not remove)
        self.temperature = pcr.roundoff(self.temperature * 1000.) / 1000.

        # ignore snow by setting temperature to 25 deg C
        if self.ignore_snow: self.temperature = pcr.spatial(pcr.scalar(25.))

        # define precipitation, temperature and referencePotET ONLY at landmask area (for reporting):
        self.precipitation = pcr.ifthen(self.landmask, self.precipitation)
        self.temperature = pcr.ifthen(self.landmask, self.temperature)
        self.referencePotET = pcr.ifthen(self.landmask, self.referencePotET)

        # make sure precipitation and referencePotET are always positive:
        self.precipitation = pcr.max(0.0, self.precipitation)
        self.referencePotET = pcr.max(0.0, self.referencePotET)

        if self.report == True:
            timeStamp = datetime.datetime(currTimeStep.year,\
                                          currTimeStep.month,\
                                          currTimeStep.day,\
                                          0)
            # writing daily output to netcdf files
            timestepPCR = currTimeStep.timeStepPCR
            if self.outDailyTotNC[0] != "None":
                for var in self.outDailyTotNC:
                    self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_dailyTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,timestepPCR-1)

            # writing monthly output to netcdf files
            # -cumulative
            if self.outMonthTotNC[0] != "None":
                for var in self.outMonthTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the month
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.day == 1:
                        vars(self)[var+'MonthTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthTot'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            # -average
            if self.outMonthAvgNC[0] != "None":
                for var in self.outMonthAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outMonthTotNC:

                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the month
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.day == 1:
                            vars(self)[var+'MonthTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'MonthTot'] += vars(self)[var]

                    # calculating average & reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        vars(self)[var+'MonthAvg'] = vars(self)[var+'MonthTot']/\
                                                     currTimeStep.day
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'MonthAvg'),\
                           vos.MV),timeStamp,currTimeStep.monthIdx-1)
            #
            # -last day of the month
            if self.outMonthEndNC[0] != "None":
                for var in self.outMonthEndNC:
                    # reporting at the end of the month:
                    if currTimeStep.endMonth == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_monthEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.monthIdx-1)

            # writing yearly output to netcdf files
            # -cumulative
            if self.outAnnuaTotNC[0] != "None":
                for var in self.outAnnuaTotNC:

                    # introduce variables at the beginning of simulation or
                    #     reset variables at the beginning of the year
                    if currTimeStep.timeStepPCR == 1 or \
                       currTimeStep.doy == 1:
                        vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)

                    # accumulating
                    vars(self)[var + 'AnnuaTot'] += vars(self)[var]

                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaTot.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaTot'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            # -average
            if self.outAnnuaAvgNC[0] != "None":
                for var in self.outAnnuaAvgNC:
                    # only if an accumulator variable has not been defined:
                    if var not in self.outAnnuaTotNC:
                        # introduce accumulator at the beginning of simulation or
                        #     reset accumulator at the beginning of the year
                        if currTimeStep.timeStepPCR == 1 or \
                           currTimeStep.doy == 1:
                            vars(self)[var+'AnnuaTot'] = pcr.scalar(0.0)
                        # accumulating
                        vars(self)[var + 'AnnuaTot'] += vars(self)[var]
                    #
                    # calculating average & reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        vars(self)[var+'AnnuaAvg'] = vars(self)[var+'AnnuaTot']/\
                                                     currTimeStep.doy
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaAvg.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var+'AnnuaAvg'),\
                           vos.MV),timeStamp,currTimeStep.annuaIdx-1)
            #
            # -last day of the year
            if self.outAnnuaEndNC[0] != "None":
                for var in self.outAnnuaEndNC:
                    # reporting at the end of the year:
                    if currTimeStep.endYear == True:
                        self.netcdfObj.data2NetCDF(str(self.outNCDir)+"/"+ \
                                         str(var)+"_annuaEnd.nc",\
                                         var,\
                          pcr2numpy(self.__getattribute__(var),vos.MV),\
                                         timeStamp,currTimeStep.annuaIdx-1)
Example #59
0
landmask = pcr.ifthen(pcr.defined(ldd), pcr.boolean(1.0))
# - cell area
cell_area = vos.readPCRmapClone(input_files['cell_area_05min'],
                                clone_map_file,
                                output_files['tmp_folder'])


# read the hydrological year 
msg = "Reading the hydrological year types" + ":"
logger.info(msg)
hydro_year_type = pcr.nominal(\
                  vos.readPCRmapClone(input_files['hydro_year_05min'],
                                      input_files['clone_map_05min'],
                                      output_files['tmp_folder'],
                                      None, False, None, True))
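# cells without a defined hydrological year type default to type 1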
hydro_year_type = pcr.cover(hydro_year_type, pcr.nominal(1.0))




# loop through every year
msg = "Merging two hydrologyical years for the following variables:"
logger.info(msg)
#
for i_year in range(str_year, end_year + 1):
    
    for var in variable_names:
        
        msg = "Merging for the variable " + str(var) + " for the year " + str(i_year)
        logger.info(msg)
        
Example #60
0
owners_lut = pd.DataFrame(data=data)


# succession restrictions
veg_friction = lookup_scalar(veg_class, veg_lut[['nr', 'friction']])
flow_path_friction = pcr.ifthenelse(flow_path == 1, pcr.scalar(3), pcr.scalar(1))

# implementation restrictions
hwat_free_friction = 10 * pcr.scalar(pcr.boolean(hwat_free))
buildings_friction = 20 * pcr.scalar(pcr.boolean(building_area))
directives_friction = 5 * pcr.ifthenelse(pcr.scalar(directives) < 2.5, 
                                         pcr.scalar(1), pcr.scalar(2))
waste_friction = 20 * pcr.scalar(waste_area)
owner_friction = lookup_scalar(owner_type, owners_lut[['owner_type', 'friction']])

msr_friction = pcr.cover(hwat_free_friction, 0) +\
               pcr.cover(buildings_friction, 0) +\
               pcr.cover(waste_friction, 0) +\
               pcr.cover(directives_friction, 0) +\
               pcr.cover(owner_friction, 0)
msr_friction = pcr.ifthen(pcr.defined(outline), msr_friction)

pcr.report(veg_friction, 'veg_friction.map')
pcr.report(flow_path_friction, 'flow_path_friction.map')
pcr.report(buildings_friction, 'buildings_friction.map')
pcr.report(directives_friction, 'directives_friction.map')
pcr.report(waste_friction, 'waste_friction.map')
pcr.report(owner_friction, 'owner_friction.map')
pcr.report(msr_friction, 'msr_friction.map')
pcr.aguila(msr_friction)
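
Example #60 calls a lookup_scalar helper that is not shown. A minimal sketch of
what it presumably does, assuming the lookup table is a two-column pandas
DataFrame (class id, value) and that unmatched classes get zero friction:

import pcraster as pcr

def lookup_scalar(class_map, lut):
    # hypothetical reconstruction, not the original helper: translate a nominal
    # class map into a scalar map via a two-column lookup table
    result = pcr.spatial(pcr.scalar(0.0))
    for key, value in zip(lut.iloc[:, 0], lut.iloc[:, 1]):
        result = pcr.ifthenelse(class_map == pcr.nominal(int(key)),
                                pcr.scalar(float(value)), result)
    return result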