def volume_spread(ldd,
                  hand,
                  subcatch,
                  volume,
                  volume_thres=0.,
                  cell_surface=1.,
                  iterations=15,
                  logging=logging,
                  order=0):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        cell_surface=1. -- scalar or pcraster object float32, cell surface area (m2); set this if the maps are not in m2
        iterations=15 -- number of bisection iterations to use
        logging=logging -- logger used to report progress for each iteration
        order=0 -- counter used only to name optional (commented-out) debug reports
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand,
                              subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch) * pcr.areaaverage(
        cell_surface, subcatch)  # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch / surface  # meters water disc averaged over subcatchment
    # pcr.report(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # arbitrarily high upper bound for the inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max) / 2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0),
                                              subcatch)
        error = pcr.cover((depth_catch - average_depth_catch) / depth_catch,
                          depth_catch * 0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
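
A minimal usage sketch for volume_spread; the map file names and the cell surface value below are hypothetical placeholders:

import logging
import pcraster as pcr

logging.basicConfig(level=logging.DEBUG)

pcr.setclone("hand.map")                              # hypothetical clone / HAND map
ldd = pcr.readmap("wflow_ldd.map")                    # local drain directions
hand = pcr.readmap("hand.map")                        # height above nearest drain
subcatch = pcr.ordinal(pcr.readmap("subcatch.map"))   # subcatchment ids
volume = pcr.readmap("flood_volume.map")              # out-of-bank volume per cell (m3)

# cell_surface can be a scalar (m2) or a map when cell sizes vary
inundation = volume_spread(ldd, hand, subcatch, volume,
                           volume_thres=0., cell_surface=900., iterations=15)
pcr.report(inundation, "inundation.map")
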
Example #2
    def downscalePrecipitation(self,
                               currTimeStep,
                               useFactor=True,
                               minCorrelationCriteria=0.85):

        preSlope = 0.001 * vos.netcdf2PCRobjClone(\
                           self.precipLapseRateNC, 'precipitation',\
                           currTimeStep.month, useDoy = "Yes",\
                           cloneMapFileName=self.cloneMap,\
                           LatitudeLongitude = True)
        preSlope = pcr.cover(preSlope, 0.0)
        preSlope = pcr.max(0., preSlope)

        preCriteria = vos.netcdf2PCRobjClone(\
                     self.precipitCorrelNC, 'precipitation',\
                     currTimeStep.month, useDoy = "Yes",\
                     cloneMapFileName=self.cloneMap,\
                     LatitudeLongitude = True)
        preSlope = pcr.ifthenelse(preCriteria > minCorrelationCriteria,\
                   preSlope, 0.0)
        preSlope = pcr.cover(preSlope, 0.0)

        if useFactor == True:
            factor = pcr.max(0.,
                             self.precipitation + preSlope * self.anomalyDEM)
            factor = factor / \
                     pcr.areaaverage(factor, self.meteoDownscaleIds)
            factor = pcr.cover(factor, 1.0)
            self.precipitation = factor * self.precipitation
        else:
            self.precipitation = self.precipitation + preSlope * self.anomalyDEM

        self.precipitation = pcr.max(0.0, self.precipitation)
Example #3
    def downscaleTemperature(self,
                             currTimeStep,
                             useFactor=False,
                             maxCorrelationCriteria=-0.75,
                             zeroCelciusInKelvin=273.15):

        tmpSlope = 1.000 * vos.netcdf2PCRobjClone(\
                           self.temperLapseRateNC, 'temperature',\
                           currTimeStep.month, useDoy = "Yes",\
                           cloneMapFileName=self.cloneMap,\
                           LatitudeLongitude = True)
        tmpSlope = pcr.min(0., tmpSlope)  # must be negative
        tmpCriteria = vos.netcdf2PCRobjClone(\
                      self.temperatCorrelNC, 'temperature',\
                      currTimeStep.month, useDoy = "Yes",\
                      cloneMapFileName=self.cloneMap,\
                      LatitudeLongitude = True)
        tmpSlope = pcr.ifthenelse(tmpCriteria < maxCorrelationCriteria,\
                   tmpSlope, 0.0)
        tmpSlope = pcr.cover(tmpSlope, 0.0)

        if useFactor == True:
            temperatureInKelvin = self.temperature + zeroCelciusInKelvin
            factor = pcr.max(0.0,
                             temperatureInKelvin + tmpSlope * self.anomalyDEM)
            factor = factor / \
                     pcr.areaaverage(factor, self.meteoDownscaleIds)
            factor = pcr.cover(factor, 1.0)
            self.temperature = factor * temperatureInKelvin - zeroCelciusInKelvin
        else:
            self.temperature = self.temperature + tmpSlope * self.anomalyDEM
Example #4
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., cell_surface=1., iterations=15, logging=logging, order=0, neg_HAND=None):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        cell_surface=1. -- scalar or pcraster object float32, cell surface area (m2); set this if the maps are not in m2
        iterations=15 -- number of bisection iterations to use
        logging=logging -- logger used to report progress for each iteration
        order=0 -- counter used only to name optional (commented-out) debug reports
        neg_HAND -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
        stream (for example when there are natural embankments)
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*pcr.areaaverage(cell_surface, subcatch) # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch/surface  # meters water disc averaged over subcatchment
    # pcr.report(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    if neg_HAND == 1:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(-32.))  # arbitrarily high upper bound for the inundation depth
        dem_min = pcr.scalar(-32.)
    else:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0.))  # arbitrarily high upper bound for the inundation depth
        dem_min = pcr.scalar(0.)
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
Example #5
    def downscaleReferenceETPot(self, zeroCelciusInKelvin=273.15):

        temperatureInKelvin = self.temperature + zeroCelciusInKelvin
        factor = pcr.max(0.0, temperatureInKelvin)
        factor = factor / \
                 pcr.areaaverage(factor, self.meteoDownscaleIds)
        factor = pcr.cover(factor, 1.0)
        self.referencePotET = pcr.max(0.0, factor * self.referencePotET)
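
The downscaling methods above (precipitation, temperature and reference ET) all follow the same pattern: build a high-resolution factor, normalise it by its areaaverage over the downscaling ids so that its mean within each coarse cell is 1, and multiply the coarse field. A minimal sketch of that core step, with hypothetical map names:

import pcraster as pcr

pcr.setclone("clone.map")                               # hypothetical clone map
coarse_field = pcr.readmap("precipitation.map")         # coarse meteorological field
anomaly_dem = pcr.readmap("anomalyDEM.map")             # high-resolution DEM anomaly
lapse_rate = pcr.readmap("lapse_rate.map")              # change of the field per metre of elevation
ids = pcr.nominal(pcr.readmap("downscale_ids.map"))     # ids of the coarse cells

factor = pcr.max(0.0, coarse_field + lapse_rate * anomaly_dem)
factor = factor / pcr.areaaverage(factor, ids)          # mean of factor is 1.0 within each id
factor = pcr.cover(factor, 1.0)                         # leave cells without a factor unchanged
downscaled = pcr.max(0.0, factor * coarse_field)
pcr.report(downscaled, "precipitation_downscaled.map")
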
Example #6
def volume_spread(ldd,
                  hand,
                  subcatch,
                  volume,
                  volume_thres=0.,
                  area_multiplier=1.,
                  iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand,
                              subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch) * area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch / surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # arbitrarily high upper bound for the inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max) / 2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0),
                                              subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch - average_depth_catch) / depth_catch,
                          depth_catch * 0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
Example #7
def spatialInterpolation2PCR(fieldArray, pcrType, MV):
    #-interpolates the field array to the full extent
    field = pcr.numpy2pcr(pcrType, fieldArray, MV)
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    if pcrType == pcr.Scalar:
        field = pcr.areaaverage(field, zoneID)
    else:
        field = pcr.areamajority(field, zoneID)
    return field
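
A quick sketch of driving the helper above from a numpy array with missing values; the clone map, missing-value code and sample cells are illustrative only:

import numpy as np
import pcraster as pcr

pcr.setclone("clone.map")             # hypothetical clone map defining rows, cols and cell size
MV = -999.0
field = np.full((pcr.clone().nrRows(), pcr.clone().nrCols()), MV, dtype=np.float32)
field[10, 20] = 3.5                   # a few known cells; everything else is missing
field[40, 15] = 7.1

# every missing cell receives the value of the nearest defined cell (nearest-neighbour fill)
filled = spatialInterpolation2PCR(field, pcr.Scalar, MV)
pcr.report(filled, "filled.map")
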
Example #8
    def sample(self, expression):
        """
    Sampling the current values of 'expression' at the given locations for the current timestep
    """

        arrayRowPos = self._userModel.currentTimeStep(
        ) - self._userModel.firstTimeStep()

        #if isinstance(expression, float):
        #  expression = pcraster.scalar(expression)

        try:
            # store the data type for tss file header
            if self._spatialDatatype == None:
                self._spatialDatatype = str(expression.dataType())
        except AttributeError as e:
            datatype, sep, tail = str(e).partition(" ")
            msg = "Argument must be a PCRaster map, type %s given. If necessary use data conversion functions like scalar()" % (
                datatype)
            raise AttributeError(msg)

        if self._spatialIdGiven:
            if expression.dataType() == pcraster.Scalar or expression.dataType(
            ) == pcraster.Directional:
                tmp = pcraster.areaaverage(pcraster.spatial(expression),
                                           pcraster.spatial(self._spatialId))
            else:
                tmp = pcraster.areamajority(pcraster.spatial(expression),
                                            pcraster.spatial(self._spatialId))

            col = 0
            for cellIndex in self._sampleAddresses:
                value, valid = pcraster.cellvalue(tmp, cellIndex)
                if not valid:
                    value = Decimal("NaN")

                self._sampleValues[arrayRowPos][col] = value
                col += 1
        else:
            if expression.dataType() == pcraster.Scalar or expression.dataType(
            ) == pcraster.Directional:
                tmp = pcraster.maptotal(pcraster.spatial(expression))\
                      / pcraster.maptotal(pcraster.scalar(pcraster.defined(pcraster.spatial(expression))))
            else:
                tmp = pcraster.mapmaximum(pcraster.maptotal(pcraster.areamajority(pcraster.spatial(expression),\
                      pcraster.spatial(pcraster.nominal(1)))))

            value, valid = pcraster.cellvalue(tmp, 1)
            if not valid:
                value = Decimal("NaN")

            self._sampleValues[arrayRowPos] = value

        if self._userModel.currentTimeStep() == self._userModel.nrTimeSteps():
            self._writeTssFile()
Example #9
def xyzWAQUA2Pcr(xyzFile, gridIDMap, cloneFile):
    """read an WAQUA .xyz file into a raster map

    xyzFile:    x,y,z,m,n,id column file with a one line header
    gridIDMap:  pcraster map with the rgf grid cell IDs
    cloneFile:  pcraster clonefile to be used in col2map
    """

    xyz = np.loadtxt(xyzFile, delimiter=',', skiprows=1)
    xyzPointMap = col2map(xyz, cloneFile, args='-S -a -m -999999')
    xyzAreaMap = pcr.areaaverage(xyzPointMap, pcr.nominal(gridIDMap))
    return xyzAreaMap
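
A usage sketch, assuming the WAQUA export, grid-id map and clone file exist under the hypothetical names shown, and that col2map is available from the same module:

import pcraster as pcr

pcr.setclone("clone.map")                        # hypothetical clone map
grid_ids = pcr.readmap("rgf_grid_ids.map")       # WAQUA rgf grid cell ids
bed_level = xyzWAQUA2Pcr("waqua_export.xyz", grid_ids, "clone.map")
pcr.report(bed_level, "bed_level.map")
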
Example #10
  def sample(self, expression):
    """
    Sampling the current values of 'expression' at the given locations for the current timestep
    """

    arrayRowPos = self._userModel.currentTimeStep() - self._userModel.firstTimeStep()

    #if isinstance(expression, float):
    #  expression = pcraster.scalar(expression)

    try:
      # store the data type for tss file header
      if self._spatialDatatype == None:
        self._spatialDatatype = str(expression.dataType())
    except AttributeError as e:
      datatype, sep, tail = str(e).partition(" ")
      msg = "Argument must be a PCRaster map, type %s given. If necessary use data conversion functions like scalar()" % (datatype)
      raise AttributeError(msg)

    if self._spatialIdGiven:
      if expression.dataType() == pcraster.Scalar or expression.dataType() == pcraster.Directional:
        tmp = pcraster.areaaverage(pcraster.spatial(expression), pcraster.spatial(self._spatialId))
      else:
        tmp = pcraster.areamajority(pcraster.spatial(expression), pcraster.spatial(self._spatialId))

      col = 0
      for cellIndex in self._sampleAddresses:
        value, valid = pcraster.cellvalue(tmp, cellIndex)
        if not valid:
          value = Decimal("NaN")

        self._sampleValues[arrayRowPos][col] = value
        col += 1
    else:
      if expression.dataType() == pcraster.Scalar or expression.dataType() == pcraster.Directional:
         tmp = pcraster.maptotal(pcraster.spatial(expression))\
               / pcraster.maptotal(pcraster.scalar(pcraster.defined(pcraster.spatial(expression))))
      else:
         tmp = pcraster.mapmaximum(pcraster.maptotal(pcraster.areamajority(pcraster.spatial(expression),\
               pcraster.spatial(pcraster.nominal(1)))))

      value, valid = pcraster.cellvalue(tmp, 1)
      if not valid:
        value = Decimal("NaN")

      self._sampleValues[arrayRowPos] = value

    if self._userModel.currentTimeStep() == self._userModel.nrTimeSteps():
       self._writeTssFile()
Example #11
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., area_multiplier=1., iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch/surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # arbitrarily high upper bound for the inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
Example #12
def interpolategauges(inputmap, method):
    """"
    Interpolate time series gauge data onto a grid using different methods
    inputmap: map with points data for a single timestep
    method: string indicating the method
        inv
        pol
        
    input: inputmap, method
    returns: interpolated map
    """

    if method == "inv":
        result = pcr.inversedistance(1, inputmap, 3, 0, 0)
    elif method == "pol":
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)
    else:
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)

    return result
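
A small usage sketch, assuming a hypothetical gauge_values.map that holds values only at the gauge cells:

import pcraster as pcr

pcr.setclone("clone.map")                          # hypothetical clone map
gauges = pcr.readmap("gauge_values.map")           # defined only at gauge locations

p_inv = interpolategauges(gauges, "inv")           # inverse distance weighting
p_pol = interpolategauges(gauges, "pol")           # Thiessen-polygon style fill
pcr.report(p_inv, "precip_inv.map")
pcr.report(p_pol, "precip_pol.map")
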
Example #13
def interpolategauges(inputmap, method):
    """"
    Interpolate time series gauge data onto a grid using different methods
    inputmap: map with points data for a single timestep
    method: string indicating the method
        inv
        pol
        
    input: inputmap, method
    returns: interpolated map
    """

    if method == "inv":
        result = pcr.inversedistance(1, inputmap, 3, 0, 0)
    elif method == "pol":
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)
    else:
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)

    return result
Example #14
def areastat(Var, Area):
    """
    Calculate several statistics of *Var* for each unique id in *Area*

    Input:
        - Var
        - Area

    Output:
        - Standard_Deviation,Average,Max,Min

    """
    Avg = pcr.areaaverage(Var, Area)
    Sq = (Var - Avg) ** 2
    N = pcr.areatotal(pcr.spatial(pcr.cellarea()), Area) / pcr.cellarea()
    Sd = (pcr.areatotal(Sq, Area) / N) ** 0.5
    Max = pcr.areamaximum(Var, Area)
    Min = pcr.areaminimum(Var, Area)

    return Sd, Avg, Max, Min
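
A brief usage sketch for areastat, assuming hypothetical dem.map and subcatch.map inputs:

import pcraster as pcr

pcr.setclone("subcatch.map")                       # hypothetical clone map
dem = pcr.readmap("dem.map")
subcatch = pcr.nominal(pcr.readmap("subcatch.map"))

sd, avg, vmax, vmin = areastat(dem, subcatch)
pcr.report(sd, "dem_sd.map")                       # per-subcatchment standard deviation
pcr.report(avg, "dem_avg.map")                     # per-subcatchment mean
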
Example #15
def areastat(Var, Area):
    """
    Calculate several statistics of *Var* for each unique id in *Area*

    Input:
        - Var
        - Area

    Output:
        - Standard_Deviation,Average,Max,Min

    """
    Avg = pcr.areaaverage(Var, Area)
    Sq = (Var - Avg) ** 2
    N = pcr.areatotal(pcr.spatial(pcr.cellarea()), Area) / pcr.cellarea()
    Sd = (pcr.areatotal(Sq, Area) / N) ** 0.5
    Max = pcr.areamaximum(Var, Area)
    Min = pcr.areaminimum(Var, Area)

    return Sd, Avg, Max, Min
Example #16
def area_percentile(inmap, area, n, order, percentile):
    """
  calculates percentile of inmap per area
  n is the number of points in each area,
  order, the sorted order (rank) of inmap per area (output of
  areaorder(inmap,area))
  n is the output of pcr.areatotal(pcr.spatial(pcr.scalar(1.0)),area)

  Input:
      - inmap
      - area map
      - n
      - order (riverorder)
      - percentile

  Output:
      - percentile map

  """
    i = pcr.rounddown((n * percentile) / 100.0 + 0.5)  # index in order map
    perc = pcr.ifthen(i == order, inmap)

    return pcr.areaaverage(perc, area)
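
The docstring above spells out how n and order are obtained; a sketch putting those pieces together (map names are placeholders):

import pcraster as pcr

pcr.setclone("area.map")                                 # hypothetical clone map
inmap = pcr.readmap("dem.map")
area = pcr.nominal(pcr.readmap("area.map"))

n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), area)    # number of cells per area
order = pcr.areaorder(inmap, area)                       # rank of each cell within its area
p90 = area_percentile(inmap, area, n, order, 90.0)       # per-area 90th percentile
pcr.report(p90, "dem_p90.map")
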
Example #17
def area_percentile(inmap, area, n, order, percentile):
    """
  calculates percentile of inmap per area
  n is the number of points in each area,
  order, the sorted order (rank) of inmap per area (output of
  areaorder(inmap,area))
  n is the output of pcr.areatotal(pcr.spatial(pcr.scalar(1.0)),area)

  Input:
      - inmap
      - area map
      - n
      - order (riverorder)
      - percentile

  Output:
      - percentile map

  """
    i = pcr.rounddown((n * percentile) / 100.0 + 0.5)  # index in order map
    perc = pcr.ifthen(i == order, inmap)

    return pcr.areaaverage(perc, area)
Example #18
    def getParameterFiles(self,currTimeStep,cellArea,ldd,\
                               initial_condition_dictionary = None,\
                               currTimeStepInDateTimeFormat = False):

        # parameters for Water Bodies: fracWat
        #                              waterBodyIds
        #                              waterBodyOut
        #                              waterBodyArea
        #                              waterBodyTyp
        #                              waterBodyCap

        # cell surface area (m2) and ldd
        self.cellArea = cellArea
        ldd = pcr.ifthen(self.landmask, ldd)

        # date used for accessing/extracting water body information
        if currTimeStepInDateTimeFormat:
            date_used = currTimeStep
            year_used = currTimeStep.year
        else:
            date_used = currTimeStep.fulldate
            year_used = currTimeStep.year
        if self.onlyNaturalWaterBodies == True:
            date_used = self.dateForNaturalCondition
            year_used = self.dateForNaturalCondition[0:4]

        # fracWat = fraction of surface water bodies (dimensionless)
        self.fracWat = pcr.scalar(0.0)

        if self.useNetCDF:
            self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \
                           date_used, useDoy = 'yearly',\
                           cloneMapFileName = self.cloneMap)
        else:
            self.fracWat = vos.readPCRmapClone(\
                           self.fracWaterInp+str(year_used)+".map",
                           self.cloneMap,self.tmpDir,self.inputDir)

        self.fracWat = pcr.cover(self.fracWat, 0.0)
        self.fracWat = pcr.max(0.0, self.fracWat)
        self.fracWat = pcr.min(1.0, self.fracWat)

        self.waterBodyIds = pcr.nominal(0)  # waterBody ids
        self.waterBodyOut = pcr.boolean(0)  # waterBody outlets
        self.waterBodyArea = pcr.scalar(0.)  # waterBody surface areas

        # water body ids
        if self.useNetCDF:
            self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyIds = vos.readPCRmapClone(\
                self.waterBodyIdsInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)
        #
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.nominal(self.waterBodyIds))

        # water body outlets (correcting outlet positions)
        wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd)
        self.waterBodyOut = pcr.ifthen(wbCatchment ==\
                            pcr.areamaximum(wbCatchment, \
                            self.waterBodyIds),\
                            self.waterBodyIds) # = outlet ids           # This may give more than two outlets, particularly if there are more than one cells that have largest upstream areas
        # - make sure that there is only one outlet for each water body
        self.waterBodyOut = pcr.ifthen(\
                            pcr.areaorder(pcr.scalar(self.waterBodyOut), \
                            self.waterBodyOut) == 1., self.waterBodyOut)
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            self.waterBodyOut)

        # TODO: Please also consider endorheic lakes!

        # correcting water body ids
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.subcatchment(ldd,self.waterBodyOut))

        # boolean map for water body outlets:
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyOut) > 0.,\
                            pcr.boolean(1))

        # reservoir surface area (m2):
        if self.useNetCDF:
            resSfArea = 1000. * 1000. * \
                        vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \
                        date_used, useDoy = 'yearly',\
                        cloneMapFileName = self.cloneMap)
        else:
            resSfArea = 1000. * 1000. * vos.readPCRmapClone(
                   self.resSfAreaInp+str(year_used)+".map",\
                   self.cloneMap,self.tmpDir,self.inputDir)
        resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds)
        resSfArea = pcr.cover(resSfArea, 0.)

        # water body surface area (m2): (lakes and reservoirs)
        self.waterBodyArea = pcr.max(pcr.areatotal(\
                             pcr.cover(\
                             self.fracWat*self.cellArea, 0.0), self.waterBodyIds),
                             pcr.areaaverage(\
                             pcr.cover(resSfArea, 0.0) ,       self.waterBodyIds))
        self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\
                             self.waterBodyArea)

        # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
        self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.,
                                       self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyOut)

        # water body types:
        # - 2 = reservoirs (regulated discharge)
        # - 1 = lakes (weirFormula)
        # - 0 = non lakes or reservoirs (e.g. wetland)
        self.waterBodyTyp = pcr.nominal(0)

        if self.useNetCDF:
            self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        else:
            self.waterBodyTyp = vos.readPCRmapClone(
                self.waterBodyTypInp+str(year_used)+".map",\
                self.cloneMap,self.tmpDir,self.inputDir,False,None,True)

        # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs
        #
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\
                                             self.waterBodyIds)     # choose only one type: either lake or reservoir
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyTyp)

        # correcting lakes and reservoirs ids and outlets
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut)

        # reservoir maximum capacity (m3):
        self.resMaxCap = pcr.scalar(0.0)
        self.waterBodyCap = pcr.scalar(0.0)

        if self.useNetCDF:
            self.resMaxCap = 1000. * 1000. * \
                             vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \
                             date_used, useDoy = 'yearly',\
                             cloneMapFileName = self.cloneMap)
        else:
            self.resMaxCap = 1000. * 1000. * vos.readPCRmapClone(\
                self.resMaxCapInp+str(year_used)+".map", \
                self.cloneMap,self.tmpDir,self.inputDir)

        self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\
                                    self.resMaxCap)
        self.resMaxCap = pcr.areaaverage(self.resMaxCap,\
                                         self.waterBodyIds)

        # water body capacity (m3): (lakes and reservoirs)
        self.waterBodyCap = pcr.cover(
            self.resMaxCap, 0.0)  # Note: Most of lakes have capacities > 0.
        self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyCap)

        # correcting water body types:                                  # Reservoirs that have zero capacities will be assumed as lakes.
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)
        self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\
                                           self.waterBodyTyp,\
                 pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\
                                           pcr.nominal(1),\
                                           self.waterBodyTyp))

        # final corrections:
        self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\
                                       self.waterBodyTyp)                     # make sure that all lakes and/or reservoirs have surface areas
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)                     # make sure that only types 1 and 2 will be considered in lake/reservoir functions
        self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                            self.waterBodyIds)                                # make sure that all lakes and/or reservoirs have ids
        self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\
                                                  self.waterBodyOut)          # make sure that all lakes and/or reservoirs have outlets

        # for a natural run (self.onlyNaturalWaterBodies == True)
        # which uses only the year 1900, assume all reservoirs are lakes
        if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition:
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.waterBodyTyp = \
             pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                        pcr.nominal(1))

        # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
        test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\
               pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds))
        a, b, c = vos.getMinMaxMean(
            pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
        threshold = 1e-3
        if abs(a) > threshold or abs(b) > threshold:
            logger.warning(
                "Missing information in some lakes and/or reservoirs.")

        # at the beginning of simulation period (timeStepPCR = 1)
        # - we have to define/get the initial conditions
        #
        if initial_condition_dictionary != None and currTimeStep.timeStepPCR == 1:
            self.getICs(initial_condition_dictionary)

        # For each new reservoir (introduced at the beginning of the year)
        # initiating storage, average inflow and outflow
        # PS: THIS IS NOT NEEDED FOR OFFLINE MODFLOW RUN!
        #
        try:
            self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0)
            self.avgInflow = pcr.cover(self.avgInflow, 0.0)
            self.avgOutflow = pcr.cover(self.avgOutflow, 0.0)
            self.waterBodyStorage = pcr.ifthen(self.landmask,
                                               self.waterBodyStorage)
            self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow)
            self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow)
        except:
            # PS: FOR OFFLINE MODFLOW RUN!
            pass
        # TODO: Remove try and except

        # cropping only in the landmask region:
        self.fracWat = pcr.ifthen(self.landmask, self.fracWat)
        self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut)
        self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea)
        self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp)
        self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
Example #19
def complexreservoir(
    waterlevel,
    ReserVoirLocs,
    LinkedReserVoirLocs,
    ResArea,
    ResThreshold,
    ResStorFunc,
    ResOutflowFunc,
    sh,
    hq,
    res_b,
    res_e,
    inflow,
    precip,
    pet,
    ReservoirComplexAreas,
    JDOY,
    timestepsecs=86400,
):

    mv = -999.0

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.ifthen(
        pcr.boolean(ReserVoirLocs), pcr.areaaverage(precip, ReservoirComplexAreas)
    )
    pet_av = pcr.ifthen(
        pcr.boolean(ReserVoirLocs), pcr.areaaverage(pet, ReservoirComplexAreas)
    )

    np_reslocs = pcr.pcr2numpy(ReserVoirLocs, 0.0)
    np_linkedreslocs = pcr.pcr2numpy(LinkedReserVoirLocs, 0.0)

    _outflow = []
    nr_loop = np.max([int(timestepsecs / 21600), 1])
    for n in range(0, nr_loop):
        np_waterlevel = pcr.pcr2numpy(waterlevel, np.nan)
        np_waterlevel_lower = np_waterlevel.copy()

        for val in np.unique(np_linkedreslocs):
            if val > 0:
                np_waterlevel_lower[np_linkedreslocs == val] = np_waterlevel[
                    np.where(np_reslocs == val)
                ]

        diff_wl = np_waterlevel - np_waterlevel_lower
        diff_wl[np.isnan(diff_wl)] = mv
        np_waterlevel_lower[np.isnan(np_waterlevel_lower)] = mv

        pcr_diff_wl = pcr.numpy2pcr(pcr.Scalar, diff_wl, mv)
        pcr_wl_lower = pcr.numpy2pcr(pcr.Scalar, np_waterlevel_lower, mv)

        storage_start = pcr.ifthenelse(
            ResStorFunc == 1,
            ResArea * waterlevel,
            lookupResFunc(ReserVoirLocs, waterlevel, sh, "0-1"),
        )

        outflow = pcr.ifthenelse(
            ResOutflowFunc == 1,
            lookupResRegMatr(ReserVoirLocs, waterlevel, hq, JDOY),
            pcr.ifthenelse(
                pcr_diff_wl >= 0,
                pcr.max(res_b * (waterlevel - ResThreshold) ** res_e, 0),
                pcr.min(-1 * res_b * (pcr_wl_lower - ResThreshold) ** res_e, 0),
            ),
        )

        np_outflow = pcr.pcr2numpy(outflow, np.nan)
        np_outflow_linked = np_reslocs * 0.0

        with np.errstate(invalid="ignore"):
            if np_outflow[np_outflow < 0].size > 0:  # only act when some reservoir outflow is negative
                np_outflow_linked[
                    np.in1d(np_reslocs, np_linkedreslocs[np_outflow < 0]).reshape(
                        np_linkedreslocs.shape
                    )
                ] = np_outflow[np_outflow < 0]

        outflow_linked = pcr.numpy2pcr(pcr.Scalar, np_outflow_linked, 0.0)

        fl_nr_loop = float(nr_loop)
        storage = (
            storage_start
            + (inflow * timestepsecs / fl_nr_loop)
            + (prec_av / fl_nr_loop / 1000.0) * ResArea
            - (pet_av / fl_nr_loop / 1000.0) * ResArea
            - (pcr.cover(outflow, 0.0) * timestepsecs / fl_nr_loop)
            + (pcr.cover(outflow_linked, 0.0) * timestepsecs / fl_nr_loop)
        )

        waterlevel = pcr.ifthenelse(
            ResStorFunc == 1,
            waterlevel + (storage - storage_start) / ResArea,
            lookupResFunc(ReserVoirLocs, storage, sh, "1-0"),
        )

        np_outflow_nz = np_outflow * 0.0
        with np.errstate(invalid="ignore"):
            np_outflow_nz[np_outflow > 0] = np_outflow[np_outflow > 0]
        _outflow.append(np_outflow_nz)

    outflow_av_temp = np.average(_outflow, 0)
    outflow_av_temp[np.isnan(outflow_av_temp)] = mv
    outflow_av = pcr.numpy2pcr(pcr.Scalar, outflow_av_temp, mv)

    return waterlevel, outflow_av, prec_av, pet_av, storage
Example #20
def simplereservoir(
    storage,
    inflow,
    ResArea,
    maxstorage,
    target_perc_full,
    maximum_Q,
    demand,
    minimum_full_perc,
    ReserVoirLocs,
    precip,
    pet,
    ReservoirSimpleAreas,
    timestepsecs=86400,
):
    """

    :param storage: initial storage m^3
    :param inflow: inflow m^3/s
    :param maxstorage: maximum storage (above which water is spilled) m^3
    :param target_perc_full: target fraction full (of max storage) -
    :param maximum_Q: maximum Q to release m^3/s if below spillway
    :param demand: water demand (all combined) m^3/s
    :param minimum_full_perc: target minimum full fraction (of max storage) -
    :param ReserVoirLocs: map with reservoir locations
    :param precip: precipitation (mm per timestep)
    :param pet: potential evapotranspiration (mm per timestep)
    :param ReservoirSimpleAreas: area ids used to average precip and pet per reservoir
    :param timestepsecs: timestep of the model in seconds (default = 86400)
    :return: storage (m^3), outflow (m^3/s), PercentageFull (0-1), average precipitation (mm),
        average pet (mm), Release for demand (m^3/s)
    """

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(precip, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )
    pet_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(pet, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )

    oldstorage = storage
    storage = (
        storage
        + (inflow * timestepsecs)
        + (prec_av / 1000.0) * ResArea
        - (pet_av / 1000.0) * ResArea
    )

    percfull = ((storage + oldstorage) * 0.5) / maxstorage
    # first determine minimum (environmental) flow using a simple sigmoid curve to scale for target level
    fac = sCurve(percfull, a=minimum_full_perc, c=30.0)
    demandRelease = pcr.min(fac * demand * timestepsecs, storage)
    storage = storage - demandRelease

    # Re-determine percfull
    percfull = ((storage + oldstorage) * 0.5) / maxstorage

    wantrel = pcr.max(0.0, storage - (maxstorage * target_perc_full))
    # Assume extra maximum Q if spilling
    overflowQ = (percfull - 1.0) * (storage - maxstorage)
    torelease = pcr.min(wantrel, overflowQ + maximum_Q * timestepsecs)
    storage = storage - torelease
    outflow = (torelease + demandRelease) / timestepsecs
    percfull = storage / maxstorage

    return storage, outflow, percfull, prec_av, pet_av, demandRelease / timestepsecs
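
A single-timestep call sketch for simplereservoir; every map name and scalar below is an illustrative placeholder, and the sCurve helper used inside the function is assumed to be importable from the same module:

import pcraster as pcr

pcr.setclone("reservoir_locs.map")                       # hypothetical clone map
res_locs = pcr.readmap("reservoir_locs.map")             # reservoir location ids
res_areas = pcr.nominal(pcr.readmap("reservoir_areas.map"))

storage, outflow, percfull, prec_av, pet_av, release = simplereservoir(
    storage=pcr.readmap("storage_m3.map"),
    inflow=pcr.readmap("inflow_m3s.map"),
    ResArea=pcr.readmap("reservoir_surface_m2.map"),
    maxstorage=pcr.readmap("maxstorage_m3.map"),
    target_perc_full=0.85,
    maximum_Q=pcr.readmap("max_release_m3s.map"),
    demand=pcr.readmap("demand_m3s.map"),
    minimum_full_perc=0.2,
    ReserVoirLocs=res_locs,
    precip=pcr.readmap("precip_mm.map"),
    pet=pcr.readmap("pet_mm.map"),
    ReservoirSimpleAreas=res_areas,
    timestepsecs=86400,
)
pcr.report(outflow, "reservoir_outflow.map")
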
                                      pcr.max(0.0, lake_reservoir_volume - reservoir_capacity), 0.0)
     #~ pcr.aguila(lake_reservoir_overbank_volume)
     #
     # transfer 75% of overbank volume to the downstream (several cells downstream)
     transfer_to_downstream = pcr.cover(\
                              pcr.ifthen(pcr.scalar(water_body_outlet) > 0., lake_reservoir_overbank_volume * 0.50), 0.0)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution, transfer_to_downstream)
     extreme_value_map      = transfer_to_downstream + \
                              pcr.ifthenelse(pcr.cover(pcr.scalar(water_body_id), 0.0) > 0.00, 0.00, extreme_value_map) 
     #
     # the remaining overbank volume (50%) will be distributed to the shores
     lake_reservoir_overbank_volume = lake_reservoir_overbank_volume * 0.50                         
     land_area = cell_area * pcr.max(0.0, 1.0 - fracwat)
     land_area_average = pcr.areaaverage(land_area, water_body_id) 
     land_area_weight  = pcr.ifthenelse( land_area < land_area_average, 0.0, land_area_average)
     distributed_lake_reservoir_overbank_volume = pcr.cover(\
                                                  lake_reservoir_overbank_volume * land_area_weight / pcr.max(0.00, pcr.areatotal(land_area_weight, water_body_id)), 0.0)
     extreme_value_map = pcr.ifthenelse(pcr.cover(pcr.scalar(water_body_id), 0.0) > 0.00, distributed_lake_reservoir_overbank_volume, extreme_value_map)
 #
 # - cover the rests to zero (so they will not contribute to any flood/inundation)
 extreme_value_map = pcr.cover(extreme_value_map, 0.0)
 #
 # - make sure that we have positive extreme values - this is not necessary, but to make sure
 extreme_value_map = pcr.max(extreme_value_map, 0.0)
 #
 # - make sure that extreme value maps increasing over return period - this is not necessary, but to make sure
 if i_file >  0: extreme_value_map = pcr.max(previous_return_period_map, extreme_value_map) 
 previous_return_period_map = extreme_value_map
 #
class TimeoutputTimeseries(object):
    """
  Class to create pcrcalc timeoutput style timeseries
  """
    def __init__(self, tssFilename, model, idMap=None, noHeader=False):
        """

    """

        if not isinstance(tssFilename, str):
            raise Exception(
                "timeseries output filename must be of type string")

        self._outputFilename = tssFilename
        self._maxId = 1
        self._spatialId = None
        self._spatialDatatype = None
        self._spatialIdGiven = False
        self._userModel = model
        self._writeHeader = not noHeader
        # array to store the timestep values
        self._sampleValues = None

        _idMap = False
        if isinstance(idMap, str) or isinstance(idMap,
                                                pcraster._pcraster.Field):
            _idMap = True

        nrRows = self._userModel.nrTimeSteps() - self._userModel.firstTimeStep(
        ) + 1

        if _idMap:
            self._spatialId = idMap
            if isinstance(idMap, str):
                self._spatialId = pcraster.readmap(idMap)

            _allowdDataTypes = [
                pcraster.Nominal, pcraster.Ordinal, pcraster.Boolean
            ]
            if self._spatialId.dataType() not in _allowdDataTypes:
                raise Exception(
                    "idMap must be of type Nominal, Ordinal or Boolean")

            if self._spatialId.isSpatial():
                self._maxId, valid = pcraster.cellvalue(
                    pcraster.mapmaximum(pcraster.ordinal(self._spatialId)), 1)
            else:
                self._maxId = 1

            # cell indices of the sample locations
            self._sampleAddresses = []
            for cellId in range(1, self._maxId + 1):
                self._sampleAddresses.append(self._getIndex(cellId))

            self._spatialIdGiven = True
            nrCols = self._maxId
            self._sampleValues = [[Decimal("NaN")] * nrCols
                                  for _ in [0] * nrRows]
        else:
            self._sampleValues = [[Decimal("NaN")] * 1 for _ in [0] * nrRows]

    def _getIndex(self, cellId):
        """
    returns the cell index of a sample location
    """
        nrCells = pcraster.clone().nrRows() * pcraster.clone().nrCols()
        found = False
        cell = 1
        index = 0

        while found == False:
            if pcraster.cellvalue(self._spatialId,
                                  cell)[1] == True and pcraster.cellvalue(
                                      self._spatialId, cell)[0] == cellId:
                index = cell
                found = True
            cell += 1
            if cell > nrCells:
                raise RuntimeError(
                    "could not find a cell with the index number %d" %
                    (cellId))

        return index

    def sample(self, expression):
        """
    Sampling the current values of 'expression' at the given locations for the current timestep
    """

        arrayRowPos = self._userModel.currentTimeStep(
        ) - self._userModel.firstTimeStep()

        #if isinstance(expression, float):
        #  expression = pcraster.scalar(expression)

        try:
            # store the data type for tss file header
            if self._spatialDatatype == None:
                self._spatialDatatype = str(expression.dataType())
        except AttributeError as e:
            datatype, sep, tail = str(e).partition(" ")
            msg = "Argument must be a PCRaster map, type %s given. If necessary use data conversion functions like scalar()" % (
                datatype)
            raise AttributeError(msg)

        if self._spatialIdGiven:
            if expression.dataType() == pcraster.Scalar or expression.dataType(
            ) == pcraster.Directional:
                tmp = pcraster.areaaverage(pcraster.spatial(expression),
                                           pcraster.spatial(self._spatialId))
            else:
                tmp = pcraster.areamajority(pcraster.spatial(expression),
                                            pcraster.spatial(self._spatialId))

            col = 0
            for cellIndex in self._sampleAddresses:
                value, valid = pcraster.cellvalue(tmp, cellIndex)
                if not valid:
                    value = Decimal("NaN")

                self._sampleValues[arrayRowPos][col] = value
                col += 1
        else:
            if expression.dataType() == pcraster.Scalar or expression.dataType(
            ) == pcraster.Directional:
                tmp = pcraster.maptotal(pcraster.spatial(expression))\
                      / pcraster.maptotal(pcraster.scalar(pcraster.defined(pcraster.spatial(expression))))
            else:
                tmp = pcraster.mapmaximum(pcraster.maptotal(pcraster.areamajority(pcraster.spatial(expression),\
                      pcraster.spatial(pcraster.nominal(1)))))

            value, valid = pcraster.cellvalue(tmp, 1)
            if not valid:
                value = Decimal("NaN")

            self._sampleValues[arrayRowPos] = value

        if self._userModel.currentTimeStep() == self._userModel.nrTimeSteps():
            self._writeTssFile()
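
A sketch of the usual pcraster.framework pattern for this class: instantiate it in initial() and call sample() every timestep. The class above (and its module-level imports) is assumed to be in scope; clone.map, samples.map and the flux map are placeholders:

import pcraster
from pcraster.framework import DynamicModel, DynamicFramework

class RunoffModel(DynamicModel):
    def __init__(self, cloneMap):
        DynamicModel.__init__(self)
        pcraster.setclone(cloneMap)

    def initial(self):
        # sample at the locations marked in the (hypothetical) samples.map
        self.runoffTss = TimeoutputTimeseries("runoff.tss", self, "samples.map", noHeader=False)

    def dynamic(self):
        runoff = pcraster.spatial(pcraster.scalar(1.0))  # placeholder for a real flux map
        self.runoffTss.sample(runoff)  # appends one row; the .tss file is written at the last timestep

dynModel = RunoffModel("clone.map")
dynFrw = DynamicFramework(dynModel, lastTimeStep=10)
dynFrw.run()
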
Example #23
    def set_river_package(self, discharge, currTimeStep):

        logger.info("Set the river package.")
        
        # - surface water river bed/bottom elevation and conductance 
        need_to_define_surface_water_bed = False
        if currTimeStep == None:
            # this is for a steady state simulation (no currTimeStep define)
            need_to_define_surface_water_bed = True
        else:    
            # only at the first month of the model simulation or the first month of the year
            if self.firstMonthOfSimulation or currTimeStep.month == 1:
                need_to_define_surface_water_bed = True
                self.firstMonthOfSimulation = False          # This part becomes False as we don't need it anymore. 

        if need_to_define_surface_water_bed:

            logger.info("Estimating the surface water bed elevation and surface water bed conductance.")
        
            #~ # - for lakes and reservoirs, alternative 1: make the bottom elevation deep --- Shall we do this?
            #~ additional_depth = 500.
            #~ surface_water_bed_elevation = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                                     #~ self.dem_riverbed - additional_depth)
            #
            # - for lakes and reservoirs, estimate bed elevation from dem and bankfull depth
            surface_water_bed_elevation  = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, self.dem_average)
            surface_water_bed_elevation  = pcr.areaaverage(surface_water_bed_elevation, self.WaterBodies.waterBodyIds)
            surface_water_bed_elevation -= pcr.areamaximum(self.bankfull_depth, self.WaterBodies.waterBodyIds) 
            #
            surface_water_bed_elevation  = pcr.cover(surface_water_bed_elevation, self.dem_riverbed)
            #~ surface_water_bed_elevation = self.dem_riverbed # This is an alternative, if we do not want to introduce very deep bottom elevations of lakes and/or reservoirs.   
            #
            # rounding values for surface_water_bed_elevation
            self.surface_water_bed_elevation = pcr.roundup(surface_water_bed_elevation * 1000.)/1000.
            #
            # - river bed condutance (unit: m2/day)
            bed_surface_area = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                                     self.WaterBodies.fracWat * self.cellAreaMap)   # TODO: Incorporate the concept of dynamicFracWat # I have problem with the convergence if I use this one. 
            bed_surface_area = pcr.min(bed_surface_area,\
                               pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                          pcr.areaaverage(self.bankfull_width * self.channelLength, self.WaterBodies.waterBodyIds)))
            bed_surface_area = pcr.cover(bed_surface_area, \
                                         self.bankfull_width * self.channelLength)
            #~ bed_surface_area = self.bankfull_width * self.channelLength
            bed_conductance = (1.0/self.bed_resistance) * bed_surface_area
            bed_conductance = pcr.ifthenelse(bed_conductance < 1e-20, 0.0, \
                                             bed_conductance) 
            self.bed_conductance = pcr.cover(bed_conductance, 0.0)
             

            logger.info("Estimating outlet widths of lakes and/or reservoirs.")
            # - 'channel width' for lakes and reservoirs 
            channel_width = pcr.areamaximum(self.bankfull_width, self.WaterBodies.waterBodyIds)
            self.channel_width = pcr.cover(channel_width, self.bankfull_width)
        

        logger.info("Estimating surface water elevation.")
        
        # - convert discharge value to surface water elevation (m)
        # Manning's equation for a wide channel, rearranged for water depth
        river_water_height = (self.channel_width ** (-3.0 / 5.0)) * (discharge ** (3.0 / 5.0)) * \
                             (self.gradient ** (-3.0 / 10.0)) * (self.manningsN ** (3.0 / 5.0))
        surface_water_elevation = self.dem_riverbed + \
                                  river_water_height
        #
        # - calculating water level (unit: m) above the flood plain   # TODO: Improve this concept (using Rens's latest inundation scheme)
        #----------------------------------------------------------
        water_above_fpl  = pcr.max(0.0, surface_water_elevation - self.dem_floodplain)  # unit: m, water level above the floodplain (not distributed)
        water_above_fpl *= self.bankfull_depth * self.bankfull_width / self.cellAreaMap  # unit: m, water level above the floodplain (distributed within the cell)
        # TODO: Improve this concept using Rens's latest scheme
        #
        # - corrected surface water elevation
        surface_water_elevation = pcr.ifthenelse(surface_water_elevation > self.dem_floodplain, \
                                                                           self.dem_floodplain + water_above_fpl, \
                                                                           surface_water_elevation)
        # - surface water elevation for lakes and reservoirs:
        lake_reservoir_water_elevation = pcr.ifthen(self.WaterBodies.waterBodyOut, surface_water_elevation)
        lake_reservoir_water_elevation = pcr.areamaximum(lake_reservoir_water_elevation, self.WaterBodies.waterBodyIds)
        lake_reservoir_water_elevation = pcr.cover(lake_reservoir_water_elevation, \
                                         pcr.areaaverage(surface_water_elevation, self.WaterBodies.waterBodyIds))
        # - maximum and minimum values for lake_reservoir_water_elevation
        lake_reservoir_water_elevation = pcr.min(self.dem_floodplain, lake_reservoir_water_elevation)
        lake_reservoir_water_elevation = pcr.max(self.surface_water_bed_elevation, lake_reservoir_water_elevation)
        # - smoothing
        lake_reservoir_water_elevation = pcr.areaaverage(surface_water_elevation, self.WaterBodies.waterBodyIds)
        # 
        # - merge lake and reservoir water elevation
        surface_water_elevation = pcr.cover(lake_reservoir_water_elevation, surface_water_elevation)
        #
        # - covering the missing values and rounding
        surface_water_elevation = pcr.cover(surface_water_elevation, self.surface_water_bed_elevation)
        surface_water_elevation = pcr.rounddown(surface_water_elevation * 1000.)/1000.
        #
        # - make sure that HRIV >= RBOT ; no infiltration if HRIV = RBOT (and h < RBOT)  
        self.surface_water_elevation = pcr.max(surface_water_elevation, self.surface_water_bed_elevation)
        #
        # - pass the values to the RIV package 
        self.pcr_modflow.setRiver(self.surface_water_elevation, self.surface_water_bed_elevation, self.bed_conductance, 1)
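
The discharge-to-depth step above is Manning's equation for a wide, roughly rectangular channel rearranged for water depth; a scalar sanity check of that rearrangement with arbitrary example values:

# Manning: Q = (1/n) * w * h**(5/3) * sqrt(S)   =>   h = (Q * n / (w * sqrt(S))) ** (3/5)
Q, n, w, S = 120.0, 0.03, 50.0, 1.0e-4  # discharge, roughness, width, gradient (arbitrary values)

h_direct = (Q * n / (w * S ** 0.5)) ** (3.0 / 5.0)
h_as_coded = (w ** (-3.0 / 5.0)) * (Q ** (3.0 / 5.0)) * (S ** (-3.0 / 10.0)) * (n ** (3.0 / 5.0))
assert abs(h_direct - h_as_coded) < 1e-9
print(h_direct)  # about 3.3 m of water above the river bed for these numbers
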
Example #24
# make sure that the clone of input DEM is consistent with the aforementioned clone:
ids_3sec = output_file
dem_3sec = output_folder + "/" + tile_code + ".map"
cmd = "mapattr -c " + ids_3sec + " " + dem_3sec
print(cmd)
os.system(cmd)
# - check
cmd = "mapattr -p " + ids_3sec + " " + dem_3sec
print(cmd)
os.system(cmd)

# do the upscaling/averaging from 3 arc second DEM to 30 arc second values:
pcr.setclone(ids_3sec)
msg = "Upscaling in progress for the tile " + tile_code
print(msg)
dem_30sec = pcr.areaaverage(dem_3sec, ids_3sec)
output_file = output_folder + "/" + tile_code + ".30sec.3sec.map"
pcr.report(dem_30sec, output_file)
# - The cell size will be still 3 arc second.

# then resample (using gdalwarp) to 30 arc second file:
input_file = output_file
output_file = output_folder + "/" + tile_code + ".30sec.tif"
cmd = "gdalwarp -tr " + cellsize_30sec + " " + cellsize_30sec + " " + input_file + " " + output_file
print(cmd)
os.system(cmd)
# - this is still a tif file
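# Note: gdalwarp resamples with nearest neighbour by default; that is adequate here only because
# every 3 arc second cell already carries its 30 arc second block average (assuming the target
# grid lines up with the blocks defined by ids_3sec).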

# convert it to pcraster
input_file = output_file
output_file = output_folder + "/" + tile_code + ".30sec.map"
Exemple #25
0
    def getParameterFiles(
        self, currTimeStep, cellArea, ldd, initial_condition_dictionary=None
    ):

        # parameters for Water Bodies: fracWat
        #                              waterBodyIds
        #                              waterBodyOut
        #                              waterBodyArea
        #                              waterBodyTyp
        #                              waterBodyCap

        # cell surface area (m2) and ldd
        self.cellArea = cellArea
        ldd = pcr.ifthen(self.landmask, ldd)

        # date used for accessing/extracting water body information
        date_used = currTimeStep.fulldate
        year_used = currTimeStep.year
        if self.onlyNaturalWaterBodies == True:
            date_used = self.dateForNaturalCondition
            year_used = self.dateForNaturalCondition[0:4]

        # fracWat = fraction of surface water bodies (dimensionless)
        self.fracWat = pcr.scalar(0.0)

        if self.useNetCDF:
            self.fracWat = vos.netcdf2PCRobjClone(
                self.ncFileInp,
                "fracWaterInp",
                date_used,
                useDoy="yearly",
                cloneMapFileName=self.cloneMap,
            )
        else:
            self.fracWat = vos.readPCRmapClone(
                self.fracWaterInp + str(year_used) + ".map",
                self.cloneMap,
                self.tmpDir,
                self.inputDir,
            )

        self.fracWat = pcr.cover(self.fracWat, 0.0)
        self.fracWat = pcr.max(0.0, self.fracWat)
        self.fracWat = pcr.min(1.0, self.fracWat)

        self.waterBodyIds = pcr.nominal(0)  # waterBody ids
        self.waterBodyOut = pcr.boolean(0)  # waterBody outlets
        self.waterBodyArea = pcr.scalar(0.0)  # waterBody surface areas

        # water body ids
        if self.useNetCDF:
            self.waterBodyIds = vos.netcdf2PCRobjClone(
                self.ncFileInp,
                "waterBodyIds",
                date_used,
                useDoy="yearly",
                cloneMapFileName=self.cloneMap,
            )
        else:
            self.waterBodyIds = vos.readPCRmapClone(
                self.waterBodyIdsInp + str(year_used) + ".map",
                self.cloneMap,
                self.tmpDir,
                self.inputDir,
                False,
                None,
                True,
            )
        #
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0.0, pcr.nominal(self.waterBodyIds)
        )

        # water body outlets (correcting outlet positions)
        wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd)
        self.waterBodyOut = pcr.ifthen(
            wbCatchment == pcr.areamaximum(wbCatchment, self.waterBodyIds),
            self.waterBodyIds,
        )  # = outlet ids
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0.0, self.waterBodyOut
        )
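        # Explanation: pcr.catchmenttotal(1, ldd) counts the upstream cells draining into each
        # cell, so within each water body the cell with the area-maximum count is the most
        # downstream cell; that cell is taken as the (corrected) outlet of the body.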
        # TODO: Please also consider endorheic lakes!

        # correcting water body ids
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0.0,
            pcr.subcatchment(ldd, self.waterBodyOut),
        )

        # boolean map for water body outlets:
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyOut) > 0.0, pcr.boolean(1)
        )

        # reservoir surface area (m2):
        if self.useNetCDF:
            resSfArea = (
                1000.0
                * 1000.0
                * vos.netcdf2PCRobjClone(
                    self.ncFileInp,
                    "resSfAreaInp",
                    date_used,
                    useDoy="yearly",
                    cloneMapFileName=self.cloneMap,
                )
            )
        else:
            resSfArea = (
                1000.0
                * 1000.0
                * vos.readPCRmapClone(
                    self.resSfAreaInp + str(year_used) + ".map",
                    self.cloneMap,
                    self.tmpDir,
                    self.inputDir,
                )
            )
        resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds)
        resSfArea = pcr.cover(resSfArea, 0.0)

        # water body surface area (m2): (lakes and reservoirs)
        self.waterBodyArea = pcr.max(
            pcr.areatotal(
                pcr.cover(self.fracWat * self.cellArea, 0.0), self.waterBodyIds
            ),
            pcr.areaaverage(pcr.cover(resSfArea, 0.0), self.waterBodyIds),
        )
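        # i.e. the water body area is the larger of (a) the area implied by the surface water
        # fraction map and (b) the reported reservoir surface area, both aggregated per
        # water body id.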
        self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.0, self.waterBodyArea)

        # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
        self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.0, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(
            pcr.boolean(self.waterBodyIds), self.waterBodyOut
        )

        # water body types:
        # - 2 = reservoirs (regulated discharge)
        # - 1 = lakes (weirFormula)
        # - 0 = others, i.e. not lakes or reservoirs (e.g. wetlands)
        self.waterBodyTyp = pcr.nominal(0)

        if self.useNetCDF:
            self.waterBodyTyp = vos.netcdf2PCRobjClone(
                self.ncFileInp,
                "waterBodyTyp",
                date_used,
                useDoy="yearly",
                cloneMapFileName=self.cloneMap,
            )
        else:
            self.waterBodyTyp = vos.readPCRmapClone(
                self.waterBodyTypInp + str(year_used) + ".map",
                self.cloneMap,
                self.tmpDir,
                self.inputDir,
                False,
                None,
                True,
            )

        # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs
        #
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, pcr.nominal(self.waterBodyTyp)
        )
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0, pcr.nominal(self.waterBodyTyp)
        )
        self.waterBodyTyp = pcr.areamajority(
            self.waterBodyTyp, self.waterBodyIds
        )  # choose only one type: either lake or reservoir
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, pcr.nominal(self.waterBodyTyp)
        )
        self.waterBodyTyp = pcr.ifthen(
            pcr.boolean(self.waterBodyIds), self.waterBodyTyp
        )

        # correcting lakes and reservoirs ids and outlets
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds
        )
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut
        )

        # reservoir maximum capacity (m3):
        self.resMaxCap = pcr.scalar(0.0)
        self.waterBodyCap = pcr.scalar(0.0)

        if self.useNetCDF:
            self.resMaxCap = (
                1000.0
                * 1000.0
                * vos.netcdf2PCRobjClone(
                    self.ncFileInp,
                    "resMaxCapInp",
                    date_used,
                    useDoy="yearly",
                    cloneMapFileName=self.cloneMap,
                )
            )
        else:
            self.resMaxCap = (
                1000.0
                * 1000.0
                * vos.readPCRmapClone(
                    self.resMaxCapInp + str(year_used) + ".map",
                    self.cloneMap,
                    self.tmpDir,
                    self.inputDir,
                )
            )

        self.resMaxCap = pcr.ifthen(self.resMaxCap > 0, self.resMaxCap)
        self.resMaxCap = pcr.areaaverage(self.resMaxCap, self.waterBodyIds)

        # water body capacity (m3): (lakes and reservoirs)
        self.waterBodyCap = pcr.cover(
            self.resMaxCap, 0.0
        )  # Note: Most lakes have capacities > 0.
        self.waterBodyCap = pcr.ifthen(
            pcr.boolean(self.waterBodyIds), self.waterBodyCap
        )

        # correcting water body types:                                  # Reservoirs that have zero capacities will be assumed as lakes.
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyTyp
        )
        self.waterBodyTyp = pcr.ifthenelse(
            self.waterBodyCap > 0.0,
            self.waterBodyTyp,
            pcr.ifthenelse(
                pcr.scalar(self.waterBodyTyp) == 2, pcr.nominal(1), self.waterBodyTyp
            ),
        )

        # final corrections:
        self.waterBodyTyp = pcr.ifthen(
            self.waterBodyArea > 0.0, self.waterBodyTyp
        )  # make sure that all lakes and/or reservoirs have surface areas
        self.waterBodyTyp = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyTyp
        )  # make sure that only types 1 and 2 will be considered in lake/reservoir functions
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0.0, self.waterBodyIds
        )  # make sure that all lakes and/or reservoirs have ids
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0.0, self.waterBodyOut
        )  # make sure that all lakes and/or reservoirs have outlets

        # for a natural run (self.onlyNaturalWaterBodies == True)
        # which uses only the year 1900, assume all reservoirs are lakes
        if (
            self.onlyNaturalWaterBodies == True
            and date_used == self.dateForNaturalCondition
        ):
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.waterBodyTyp = pcr.ifthen(
                pcr.scalar(self.waterBodyTyp) > 0.0, pcr.nominal(1)
            )

        # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
        test = (
            pcr.defined(self.waterBodyTyp)
            & pcr.defined(self.waterBodyArea)
            & pcr.defined(self.waterBodyIds)
            & pcr.boolean(
                pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds)
            )
        )
        a, b, c = vos.getMinMaxMean(pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
        threshold = 1e-3
        if abs(a) > threshold or abs(b) > threshold:
            logger.warning("Missing information in some lakes and/or reservoirs.")

        # at the beginning of simulation period (timeStepPCR = 1)
        # - we have to define/get the initial conditions
        #
        if currTimeStep.timeStepPCR == 1:
            self.getICs(initial_condition_dictionary)

        # For each new reservoir (introduced at the beginning of the year)
        # initiating storage, average inflow and outflow
        #
        self.waterBodyStorage = pcr.cover(self.waterBodyStorage, 0.0)
        self.avgInflow = pcr.cover(self.avgInflow, 0.0)
        self.avgOutflow = pcr.cover(self.avgOutflow, 0.0)

        # cropping only in the landmask region:
        self.fracWat = pcr.ifthen(self.landmask, self.fracWat)
        self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut)
        self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea)
        self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp)
        self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
        self.waterBodyStorage = pcr.ifthen(self.landmask, self.waterBodyStorage)
        self.avgInflow = pcr.ifthen(self.landmask, self.avgInflow)
        self.avgOutflow = pcr.ifthen(self.landmask, self.avgOutflow)
Exemple #26
0
def main():
    """
        
    :ivar masterdem: digital elevation model
    :ivar dem: digital elevation model
    :ivar river: optional river map
    """

    # Default values
    strRiver = 8
    masterdem = "dem.map"
    step1dir = "step1"
    step2dir = "step2"
    workdir = "."
    inifile = "wflow_prepare.ini"
    recreate = False
    snapgaugestoriver = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f",['version'])
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    pcr.setglobaloption("unitcell")
    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    masterdem = configget(config, "files", "masterdem", "dem.map")
    pcr.setclone(masterdem)

    strRiver = int(configget(config, "settings", "riverorder", "4"))

    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    # upscalefactor = float(config.get("settings","upscalefactor"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35")
    )
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(configget(config, "settings", "lddoutflowdepth", "1E35"))

    initialscale = int(configget(config, "settings", "initialscale", "1"))
    csize = float(configget(config, "settings", "cellsize", "1"))

    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1"))
    )
    lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout")
    pcr.setglobaloption(lddglobaloption)
    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    # X/Y coordinates of the gauges in the system
    X = np.fromstring(gauges_x, sep=',')
    Y = np.fromstring(gauges_y, sep=',')

    tr.Verbose = 1

    # make the directories to save results in
    if not os.path.isdir(step1dir + "/"):
        os.makedirs(step1dir)
    if not os.path.isdir(step2dir):
        os.makedirs(step2dir)

    if initialscale > 1:
        print("Initial scaling of DEM...")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + masterdem
            + " "
            + step1dir
            + "/dem_scaled.map"
        )
        print("Reading dem...")
        dem = pcr.readmap(step1dir + "/dem_scaled.map")
        ldddem = dem
    else:
        print ("Reading dem...")
        dem = pcr.readmap(masterdem)
        ldddem = dem

    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask...")
    else:
        print("clipping DEM with mask.....")
        mask = pcr.readmap(catchmask)
        ldddem = pcr.ifthen(pcr.boolean(mask), ldddem)
        dem = pcr.ifthen(pcr.boolean(mask), dem)

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
        outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
    else:
        print("river file specified.....")
        try:
            outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
            outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
        except:
            print(
                "Need to specify the river outletpoint (a point at the end of the river within the current map)"
            )
            exit(1)

        outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5)
        pcr.report(outletpointmap, step1dir + "/outletpoint.map")
        # rivshpattr = config.get("files","riverattr")
        pcr.report(dem * 0.0, step1dir + "/nilmap.map")
        thestr = (
            "gdal_translate -of GTiff "
            + step1dir
            + "/nilmap.map "
            + step1dir
            + "/riverburn.tif"
        )
        os.system(thestr)
        rivshpattr = os.path.splitext(os.path.basename(rivshp))[0]
        os.system(
            "gdal_rasterize -burn 1 -l "
            + rivshpattr
            + " "
            + rivshp
            + " "
            + step1dir
            + "/riverburn.tif"
        )
        thestr = (
            "gdal_translate -of PCRaster "
            + step1dir
            + "/riverburn.tif "
            + step1dir
            + "/riverburn.map"
        )
        os.system(thestr)
        riverburn = pcr.readmap(step1dir + "/riverburn.map")
        # Determine regional slope assuming that is the way the river should run
        # pcr.setglobaloption("unitcell")
        # demregional=pcr.windowaverage(dem,100)
        ldddem = pcr.ifthenelse(riverburn >= 1.0, dem - 1000, dem)
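        # "Stream burning": lowering the DEM by 1000 m along the rasterised river cells forces
        # the drain directions derived below to follow the mapped river network.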

    pcr.setglobaloption("unittrue")
    upscalefactor = int(csize / pcr.celllength())

    print("Creating ldd...")
    ldd = tr.lddcreate_save(
        step1dir + "/ldd.map",
        ldddem,
        recreate,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    print("Determining streamorder...")
    stro = pcr.streamorder(ldd)
    pcr.report(stro, step1dir + "/streamorder.map")
    strdir = pcr.ifthen(stro >= strRiver, stro)
    pcr.report(strdir, step1dir + "/streamorderrive.map")
    pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)), step1dir + "/rivers.map")

    pcr.setglobaloption("unittrue")
    # outlet (and other gauges if given)
    # TODO: check if x/y are set; if not, skip this
    print("Outlet...")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)

    if snapgaugestoriver:
        print("Snapping gauges to nearest river cells...")
        pcr.report(outlmap, step1dir + "/orggauges.map")
        outlmap = tr.snaptomap(outlmap, strdir)

    # noutletmap = tr.points_to_map(dem,XX,YY,0.5)
    # pcr.report(noutletmap,'noutlet.map')

    pcr.report(outlmap, step1dir + "/gauges.map")

    # check if there is a pre-defined catchment map
    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask, finding outlet")
        # Find catchment (overall)
        outlet = tr.find_outlet(ldd)
        sub = tr.subcatch(ldd, outlet)
        pcr.report(sub, step1dir + "/catchment_overall.map")
    else:
        print("reading and converting catchment mask.....")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + catchmask
            + " "
            + step1dir
            + "/catchment_overall.map"
        )
        sub = pcr.readmap(step1dir + "/catchment_overall.map")

    print("Scatch...")
    sd = tr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap))
    pcr.report(sd, step1dir + "/scatch.map")

    pcr.setglobaloption("unitcell")
    print("Upscalefactor: " + str(upscalefactor))

    if upscalefactor > 1:
        gc.collect()
        print("upscale river length1 (checkerboard map)...")
        ck = tr.checkerboard(dem, upscalefactor)
        pcr.report(ck, step1dir + "/ck.map")
        pcr.report(dem, step1dir + "/demck.map")
        print("upscale river length2...")
        fact = tr.area_riverlength_factor(ldd, ck, upscalefactor)
        pcr.report(fact, step1dir + "/riverlength_fact.map")

        # print("make dem statistics...")
        dem_ = pcr.areaaverage(dem, ck)
        pcr.report(dem_, step1dir + "/demavg.map")

        print("Create DEM statistics...")
        dem_ = pcr.areaminimum(dem, ck)
        pcr.report(dem_, step1dir + "/demmin.map")
        dem_ = pcr.areamaximum(dem, ck)
        pcr.report(dem_, step1dir + "/demmax.map")
        # calculate percentiles
        order = pcr.areaorder(dem, ck)
        n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck)
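        # pcr.areaorder ranks the cells within each checkerboard block and n is the number of
        # cells per block; tr.area_percentile presumably uses rank/n to select the value closest
        # to the requested percentile in each block.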
        #: calculate 25 percentile
        perc = tr.area_percentile(dem, ck, n, order, 25.0)
        pcr.report(perc, step1dir + "/dem25.map")
        perc = tr.area_percentile(dem, ck, n, order, 10.0)
        pcr.report(perc, step1dir + "/dem10.map")
        perc = tr.area_percentile(dem, ck, n, order, 50.0)
        pcr.report(perc, step1dir + "/dem50.map")
        perc = tr.area_percentile(dem, ck, n, order, 33.0)
        pcr.report(perc, step1dir + "/dem33.map")
        perc = tr.area_percentile(dem, ck, n, order, 66.0)
        pcr.report(perc, step1dir + "/dem66.map")
        perc = tr.area_percentile(dem, ck, n, order, 75.0)
        pcr.report(perc, step1dir + "/dem75.map")
        perc = tr.area_percentile(dem, ck, n, order, 90.0)
        pcr.report(perc, step1dir + "/dem90.map")
    else:
        print("No fancy scaling done. Going strait to step2....")
        pcr.report(dem, step1dir + "/demavg.map")
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
        gdalstr = (
            "gdal_translate  -projwin "
            + str(Xul)
            + " "
            + str(Yul)
            + " "
            + str(Xlr)
            + " "
            + str(Ylr)
            + " -of PCRaster  "
        )
        # gdalstr = "gdal_translate  -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster  "
        print(gdalstr)
        pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map")
        # Now use gdal to convert the maps
        os.system(
            gdalstr
            + step1dir
            + "/wflow_riverlength_fact.map"
            + " "
            + step2dir
            + "/wflow_riverlength_fact.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_dem.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmin.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmax.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_gauges.map"
        )
        os.system(
            gdalstr + step1dir + "/rivers.map" + " " + step2dir + "/wflow_river.map"
        )
        os.system(
            gdalstr
            + step1dir
            + "/streamorder.map"
            + " "
            + step2dir
            + "/wflow_streamorder.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_outlet.map"
        )
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_catchment.map"
        )
        os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir + "/wflow_ldd.map")
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_subcatch.map"
        )

        if lu_water:
            os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map")

        if lu_paved:
            os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map")

        try:
            lumap = config.get("files", "landuse")
        except:
            print("no landuse map...creating uniform map")
            # clone=pcr.readmap(step2dir + "/wflow_dem.map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + lumap
                + " "
                + step2dir
                + "/wflow_landuse.map"
            )

        try:
            soilmap = config.get("files", "soil")
        except:
            print("no soil map..., creating uniform map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + soilmap
                + " "
                + step2dir
                + "/wflow_soil.map"
            )
Exemple #27
0
    def getParameterFiles(self, date_given, cellArea, ldd):

        # parameters for Water Bodies: fracWat
        #                              waterBodyIds
        #                              waterBodyOut
        #                              waterBodyArea
        #                              waterBodyTyp
        #                              waterBodyCap

        # cell surface area (m2) and ldd
        self.cellArea = cellArea
        ldd = pcr.ifthen(self.landmask, ldd)

        # date and year used for accessing/extracting water body information
        date_used = date_given
        if self.onlyNaturalWaterBodies == True:
            date_used = self.dateForNaturalCondition
        year_used = date_used[0:4]

        # fracWat = fraction of surface water bodies (dimensionless)
        self.fracWat = pcr.scalar(0.0)

        if self.useNetCDF:
            self.fracWat = vos.netcdf2PCRobjClone(self.ncFileInp,'fracWaterInp', \
                           date_used, useDoy = 'yearly',\
                           cloneMapFileName = self.cloneMap)
        self.fracWat = pcr.cover(self.fracWat, 0.0)
        self.fracWat = pcr.max(0.0, self.fracWat)
        self.fracWat = pcr.min(1.0, self.fracWat)

        self.waterBodyIds = pcr.nominal(0)  # waterBody ids
        self.waterBodyOut = pcr.boolean(0)  # waterBody outlets
        self.waterBodyArea = pcr.scalar(0.)  # waterBody surface areas

        # water body ids
        if self.useNetCDF:
            self.waterBodyIds = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyIds', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.nominal(self.waterBodyIds))

        # water body outlets (correcting outlet positions)
        wbCatchment = pcr.catchmenttotal(pcr.scalar(1), ldd)
        self.waterBodyOut = pcr.ifthen(wbCatchment ==\
                            pcr.areamaximum(wbCatchment, \
                            self.waterBodyIds),\
                            self.waterBodyIds)     # = outlet ids
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            self.waterBodyOut)
        # TODO: Please also consider endorheic lakes!

        # correcting water body ids
        self.waterBodyIds = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0.,\
                            pcr.subcatchment(ldd,self.waterBodyOut))

        # boolean map for water body outlets:
        self.waterBodyOut = pcr.ifthen(\
                            pcr.scalar(self.waterBodyOut) > 0.,\
                            pcr.boolean(1))

        # reservoir surface area (m2):
        if self.useNetCDF:
            resSfArea = 1000. * 1000. * \
                        vos.netcdf2PCRobjClone(self.ncFileInp,'resSfAreaInp', \
                        date_used, useDoy = 'yearly',\
                        cloneMapFileName = self.cloneMap)
        resSfArea = pcr.areaaverage(resSfArea, self.waterBodyIds)
        resSfArea = pcr.cover(resSfArea, 0.)

        # water body surface area (m2): (lakes and reservoirs)
        self.waterBodyArea = pcr.max(pcr.areatotal(\
                             pcr.cover(\
                             self.fracWat*self.cellArea, 0.0), self.waterBodyIds),
                             pcr.areaaverage(\
                             pcr.cover(resSfArea, 0.0) ,       self.waterBodyIds))
        self.waterBodyArea = pcr.ifthen(self.waterBodyArea > 0.,\
                             self.waterBodyArea)

        # correcting water body ids and outlets (exclude all water bodies with surfaceArea = 0)
        self.waterBodyIds = pcr.ifthen(self.waterBodyArea > 0.,
                                       self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyOut)

        # water body types:
        # - 2 = reservoirs (regulated discharge)
        # - 1 = lakes (weirFormula)
        # - 0 = others, i.e. not lakes or reservoirs (e.g. wetlands)
        self.waterBodyTyp = pcr.nominal(0)

        if self.useNetCDF:
            self.waterBodyTyp = vos.netcdf2PCRobjClone(self.ncFileInp,'waterBodyTyp', \
                                date_used, useDoy = 'yearly',\
                                cloneMapFileName = self.cloneMap)

        # excluding wetlands (waterBodyTyp = 0) in all functions related to lakes/reservoirs
        #
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyIds) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.areamajority(self.waterBodyTyp,\
                                             self.waterBodyIds)     # choose only one type: either lake or reservoir
        self.waterBodyTyp = pcr.ifthen(\
                            pcr.scalar(self.waterBodyTyp) > 0,\
                            pcr.nominal(self.waterBodyTyp))
        self.waterBodyTyp = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyTyp)

        # correcting lakes and reservoirs ids and outlets
        self.waterBodyIds = pcr.ifthen(
            pcr.scalar(self.waterBodyTyp) > 0, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(
            pcr.scalar(self.waterBodyIds) > 0, self.waterBodyOut)

        # reservoir maximum capacity (m3):
        self.resMaxCap = pcr.scalar(0.0)
        self.waterBodyCap = pcr.scalar(0.0)

        if self.useNetCDF:
            self.resMaxCap = 1000. * 1000. * \
                             vos.netcdf2PCRobjClone(self.ncFileInp,'resMaxCapInp', \
                             date_used, useDoy = 'yearly',\
                             cloneMapFileName = self.cloneMap)
        self.resMaxCap = pcr.ifthen(self.resMaxCap > 0,\
                                    self.resMaxCap)
        self.resMaxCap = pcr.areaaverage(self.resMaxCap,\
                                         self.waterBodyIds)

        # water body capacity (m3): (lakes and reservoirs)
        self.waterBodyCap = pcr.cover(
            self.resMaxCap, 0.0)  # Note: Most lakes have capacities > 0.
        self.waterBodyCap = pcr.ifthen(pcr.boolean(self.waterBodyIds),
                                       self.waterBodyCap)

        # correcting water body types:                                  # Reservoirs that have zero capacities will be assumed as lakes.
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)
        self.waterBodyTyp = pcr.ifthenelse(self.waterBodyCap > 0.,\
                                           self.waterBodyTyp,\
                 pcr.ifthenelse(pcr.scalar(self.waterBodyTyp) == 2,\
                                           pcr.nominal(1),\
                                           self.waterBodyTyp))

        # final corrections:
        self.waterBodyTyp = pcr.ifthen(self.waterBodyArea > 0.,\
                                       self.waterBodyTyp)                     # make sure that all lakes and/or reservoirs have surface areas
        self.waterBodyTyp = \
                 pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                                       self.waterBodyTyp)                     # make sure that only types 1 and 2 will be considered in lake/reservoir functions
        self.waterBodyIds = pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                            self.waterBodyIds)                                # make sure that all lakes and/or reservoirs have ids
        self.waterBodyOut = pcr.ifthen(pcr.scalar(self.waterBodyIds) > 0.,\
                                                  self.waterBodyOut)          # make sure that all lakes and/or reservoirs have outlets

        # for a natural run (self.onlyNaturalWaterBodies == True)
        # which uses only the year 1900, assume all reservoirs are lakes
        if self.onlyNaturalWaterBodies == True and date_used == self.dateForNaturalCondition:
            logger.info(
                "Using only natural water bodies identified in the year 1900. All reservoirs in 1900 are assumed as lakes."
            )
            self.waterBodyTyp = \
             pcr.ifthen(pcr.scalar(self.waterBodyTyp) > 0.,\
                        pcr.nominal(1))

        # check that all lakes and/or reservoirs have types, ids, surface areas and outlets:
        test = pcr.defined(self.waterBodyTyp) & pcr.defined(self.waterBodyArea) &\
               pcr.defined(self.waterBodyIds) & pcr.boolean(pcr.areamaximum(pcr.scalar(self.waterBodyOut), self.waterBodyIds))
        a, b, c = vos.getMinMaxMean(
            pcr.cover(pcr.scalar(test), 1.0) - pcr.scalar(1.0))
        threshold = 1e-3
        if abs(a) > threshold or abs(b) > threshold:
            logger.warning(
                "Missing information in some lakes and/or reservoirs.")

        # cropping only in the landmask region:
        self.fracWat = pcr.ifthen(self.landmask, self.fracWat)
        self.waterBodyIds = pcr.ifthen(self.landmask, self.waterBodyIds)
        self.waterBodyOut = pcr.ifthen(self.landmask, self.waterBodyOut)
        self.waterBodyArea = pcr.ifthen(self.landmask, self.waterBodyArea)
        self.waterBodyTyp = pcr.ifthen(self.landmask, self.waterBodyTyp)
        self.waterBodyCap = pcr.ifthen(self.landmask, self.waterBodyCap)
    def dynamic(self):
        """ dynamic part of the water use module
            init water use before sub step routing
        """
        settings = LisSettings.instance()
        option = settings.options
        binding = settings.binding
        maskinfo = MaskInfo.instance()
        if option['wateruse']:
            # ************************************************************
            # ***** READ WATER DEMAND DATA *****************************
            # ************************************************************

            if option['TransientWaterDemandChange']:
                if option['readNetcdfStack']:
                    if option['useWaterDemandAveYear']:
                        # using average year in NetCDF file format
                        self.var.DomesticDemandMM = readnetcdf(
                            binding['DomesticDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest',
                            averageyearflag=True) * self.var.DtDay
                        self.var.IndustrialDemandMM = readnetcdf(
                            binding['IndustrialDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest',
                            averageyearflag=True) * self.var.DtDay
                        self.var.LivestockDemandMM = readnetcdf(
                            binding['LivestockDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest',
                            averageyearflag=True) * self.var.DtDay
                        self.var.EnergyDemandMM = readnetcdf(
                            binding['EnergyDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest',
                            averageyearflag=True) * self.var.DtDay
                    else:
                        # Read from stack of maps in NetCDF format. Get time step corresponding to model step.
                        # added management for sub-daily model time steps
                        self.var.DomesticDemandMM = readnetcdf(
                            binding['DomesticDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest') * self.var.DtDay
                        self.var.IndustrialDemandMM = readnetcdf(
                            binding['IndustrialDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest') * self.var.DtDay
                        self.var.LivestockDemandMM = readnetcdf(
                            binding['LivestockDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest') * self.var.DtDay
                        self.var.EnergyDemandMM = readnetcdf(
                            binding['EnergyDemandMaps'],
                            self.var.currentTimeStep(),
                            timestampflag='closest') * self.var.DtDay
                else:
                    # Read from stack of maps in Pcraster format
                    self.var.DomesticDemandMM = readmapsparse(
                        binding['DomesticDemandMaps'],
                        self.var.currentTimeStep(),
                        self.var.DomesticDemandMM) * self.var.DtDay
                    self.var.IndustrialDemandMM = readmapsparse(
                        binding['IndustrialDemandMaps'],
                        self.var.currentTimeStep(),
                        self.var.IndustrialDemandMM) * self.var.DtDay
                    self.var.LivestockDemandMM = readmapsparse(
                        binding['LivestockDemandMaps'],
                        self.var.currentTimeStep(),
                        self.var.LivestockDemandMM) * self.var.DtDay
                    self.var.EnergyDemandMM = readmapsparse(
                        binding['EnergyDemandMaps'],
                        self.var.currentTimeStep(),
                        self.var.EnergyDemandMM) * self.var.DtDay

            # ************************************************************
            # ***** LIVESTOCK ********************************************
            # ************************************************************

            self.var.LivestockAbstractionMM = self.var.LivestockDemandMM
            self.var.LivestockConsumptiveUseMM = self.var.LivestockAbstractionMM * self.var.LivestockConsumptiveUseFraction
            # the amount that is not returned to the hydrological cycle

            LivestockAbstractionFromGroundwaterM3 = np.where(
                self.var.GroundwaterBodies > 0,
                self.var.FractionGroundwaterUsed *
                self.var.LivestockConsumptiveUseMM * self.var.MMtoM3,
                maskinfo.in_zero())
            LivestockAbstractionFromNonConventionalWaterM3 = self.var.FractionNonConventionalWaterUsed * self.var.LivestockConsumptiveUseMM * self.var.MMtoM3
            LivestockAbstractionFromSurfaceWaterM3 = self.var.LivestockConsumptiveUseMM * self.var.MMtoM3 - LivestockAbstractionFromGroundwaterM3 - LivestockAbstractionFromNonConventionalWaterM3

            self.var.TotalLivestockAbstractionM3 += LivestockAbstractionFromGroundwaterM3 + LivestockAbstractionFromSurfaceWaterM3 + LivestockAbstractionFromNonConventionalWaterM3

            # ************************************************************
            # ***** DOMESTIC *********************************************
            # ************************************************************

            self.var.DomesticAbstractionMM = self.var.DomesticDemandMM * self.var.DomesticWaterSavingConstant * self.var.DomesticLeakageConstant
            # Domestic Water Abstraction (mm per day), already taking into account water saving in households and leakage of the supply network
            # Domestic water abstraction is larger if there is leakage, but is smaller if there is water savings
            self.var.LeakageMM = (
                self.var.DomesticLeakageConstant - 1
            ) * self.var.DomesticDemandMM * self.var.DomesticWaterSavingConstant
            # Leakage in mm per day
            self.var.LeakageLossMM = self.var.LeakageMM * self.var.LeakageWaterLossFraction
            # The leakage amount that is lost (evaporated)
            self.var.LeakageSoilMM = self.var.LeakageMM - self.var.LeakageLossMM
            self.var.DomesticConsumptiveUseMM = self.var.DomesticDemandMM * self.var.DomesticWaterSavingConstant * self.var.DomesticConsumptiveUseFraction + self.var.LeakageLossMM
            # DomesticConsumptiveUseMM is the amount that disappears from the waterbalance
            # Assumption here is that leakage is partially lost/evaporated (LeakageWaterLoss fraction)
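            # Worked example (illustrative numbers): with DomesticDemandMM = 1.0 mm,
            # DomesticWaterSavingConstant = 0.9 and DomesticLeakageConstant = 1.2:
            #   abstraction = 1.0 * 0.9 * 1.2 = 1.08 mm, leakage = (1.2 - 1) * 1.0 * 0.9 = 0.18 mm;
            #   with LeakageWaterLossFraction = 0.5, 0.09 mm evaporates and 0.09 mm reaches the soil.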

            DomAbstractionFromGroundwaterM3 = np.where(
                self.var.GroundwaterBodies > 0,
                self.var.FractionGroundwaterUsed *
                self.var.DomesticConsumptiveUseMM * self.var.MMtoM3,
                maskinfo.in_zero())
            DomAbstractionFromNonConventionalWaterM3 = self.var.FractionNonConventionalWaterUsed * self.var.DomesticConsumptiveUseMM * self.var.MMtoM3
            DomAbstractionFromSurfaceWaterM3 = self.var.DomesticConsumptiveUseMM * self.var.MMtoM3 - DomAbstractionFromGroundwaterM3 - DomAbstractionFromNonConventionalWaterM3

            # ************************************************************
            # ***** INDUSTRY *********************************************
            # ************************************************************

            self.var.IndustrialAbstractionMM = self.var.IndustrialDemandMM * (
                1 - self.var.WaterReUseFraction)
            self.var.IndustrialConsumptiveUseMM = self.var.IndustrialAbstractionMM * self.var.IndustryConsumptiveUseFraction
            # IndustrialAbstractionMM = scalar(timeinputsparse(IndustrialAbstractionMaps)) * (1-WaterReUseFraction);
            # Industrial Water Demand (mm per day)
            # WaterReUseFraction: fraction of water re-used in industry (e.g. 0.5 means half of the water is re-used, i.e. used twice; baseline = 0, maximum = 1)
            # IndustrialConsumptiveUseMM is the amount that evaporates etc
            # only 1 map so this one is loaded in initial!

            IndustrialWaterAbstractionM3 = self.var.IndustrialConsumptiveUseMM * self.var.MMtoM3
            IndustrialAbstractionFromGroundwaterM3 = np.where(
                self.var.GroundwaterBodies > 0,
                self.var.FractionGroundwaterUsed *
                IndustrialWaterAbstractionM3, maskinfo.in_zero())
            IndustrialAbstractionFromNonConventionalWaterM3 = self.var.FractionNonConventionalWaterUsed * IndustrialWaterAbstractionM3
            IndustrialAbstractionFromSurfaceWaterM3 = IndustrialWaterAbstractionM3 - IndustrialAbstractionFromGroundwaterM3 - IndustrialAbstractionFromNonConventionalWaterM3

            # ************************************************************
            # ***** ENERGY ***********************************************
            # ************************************************************

            self.var.EnergyAbstractionMM = self.var.EnergyDemandMM
            self.var.EnergyConsumptiveUseMM = self.var.EnergyAbstractionMM * self.var.EnergyConsumptiveUseFraction
            # EnergyConsumptiveUseMM is the amount that evaporates etc

            EnergyAbstractionFromSurfaceWaterM3 = self.var.EnergyConsumptiveUseMM * self.var.MMtoM3
            # all taken from surface water

            # ************************************************************
            # ***** IRRIGATION *******************************************
            # ************************************************************

            # water demand from loop3 = irrigated zone
            self.var.Ta[2] = np.maximum(
                np.minimum(self.var.RWS[2] * self.var.TranspirMaxCorrected,
                           self.var.W1[2] - self.var.WWP1[2]),
                maskinfo.in_zero())

            IrrigationWaterDemandMM = (
                self.var.TranspirMaxCorrected -
                self.var.Ta[2]) * self.var.IrrigationMult
            # a factor (IrrigationMult) adds some water (to prevent salinisation)
            # irrigationWaterNeed assumed to be equal to potential transpiration minus actual transpiration
            # in mm here, assumed for the entire pixel, thus later to be corrected with IrrigationFraction
            # IrrigationType (value between 0 and 1) is used here to distinguish between additional adding water until fieldcapacity (value set to 1) or not (value set to 0)
            IrrigationWaterDemandMM = np.where(
                self.var.FrostIndex > self.var.FrostIndexThreshold,
                maskinfo.in_zero(), IrrigationWaterDemandMM)
            # IrrigationWaterDemand is 0 when soil is frozen

            IrrigationWaterAbstractionMM = np.where(
                (self.var.IrrigationEfficiency * self.var.ConveyanceEfficiency)
                > 0, IrrigationWaterDemandMM * self.var.IrrigationFraction /
                (self.var.IrrigationEfficiency *
                 self.var.ConveyanceEfficiency), maskinfo.in_zero())
            self.var.IrrigationWaterAbstractionM3 = np.maximum(
                IrrigationWaterAbstractionMM * self.var.MMtoM3,
                maskinfo.in_zero())
            # irrigation efficiency max 1, ~0.90 drip irrigation, ~0.75 sprinkling
            # conveyance efficiency, around 0.80 for average channel
            # multiplied by actual irrigated area (fraction) and cellsize(MMtoM3) in M3 per pixel
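            # Worked example (illustrative numbers): a net demand of 3.0 mm over an irrigated
            # fraction of 0.4, with IrrigationEfficiency = 0.75 and ConveyanceEfficiency = 0.8,
            # gives 3.0 * 0.4 / (0.75 * 0.8) = 2.0 mm of gross abstraction over the pixel.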

            IrrigationAbstractionFromGroundwaterM3 = np.where(
                self.var.GroundwaterBodies > 0,
                self.var.FractionGroundwaterUsed *
                self.var.IrrigationWaterAbstractionM3, maskinfo.in_zero())
            IrrigationAbstractionFromSurfaceWaterM3 = np.maximum(
                self.var.IrrigationWaterAbstractionM3 -
                IrrigationAbstractionFromGroundwaterM3, maskinfo.in_zero())

            # ************************************************************
            # ***** TOTAL ABSTRACTIONS (DEMANDED) ************************
            # ************************************************************

            self.var.TotalAbstractionFromGroundwaterM3 = IrrigationAbstractionFromGroundwaterM3 + DomAbstractionFromGroundwaterM3 + LivestockAbstractionFromGroundwaterM3 + IndustrialAbstractionFromGroundwaterM3
            self.var.TotalAbstractionFromSurfaceWaterM3 = IrrigationAbstractionFromSurfaceWaterM3 + self.var.PaddyRiceWaterAbstractionFromSurfaceWaterM3 + DomAbstractionFromSurfaceWaterM3 + LivestockAbstractionFromSurfaceWaterM3 + IndustrialAbstractionFromSurfaceWaterM3 + EnergyAbstractionFromSurfaceWaterM3

            PaddyRiceWaterAbstractionFromSurfaceWaterMM = self.var.PaddyRiceWaterAbstractionFromSurfaceWaterM3 * self.var.M3toMM
            # taken from paddy rice routine

            self.var.TotalDemandM3 = (
                self.var.LivestockAbstractionMM +
                self.var.DomesticAbstractionMM + IrrigationWaterAbstractionMM +
                PaddyRiceWaterAbstractionFromSurfaceWaterMM +
                self.var.IndustrialAbstractionMM +
                self.var.EnergyAbstractionMM) * self.var.MMtoM3

            self.var.TotalIrrigationAbstractionM3 += IrrigationAbstractionFromGroundwaterM3 + IrrigationAbstractionFromSurfaceWaterM3
            self.var.TotalPaddyRiceIrrigationAbstractionM3 += self.var.PaddyRiceWaterAbstractionFromSurfaceWaterM3
            # totals calculated for reporting, for comparing with national reported values and possible calibration

            # ************************************************************
            # ***** ABSTRACTION FROM GROUNDWATER *************************
            # ************************************************************

            self.var.LZ = self.var.LZ - self.var.TotalAbstractionFromGroundwaterM3 * self.var.M3toMM
            self.var.IrriLossCUM = self.var.IrriLossCUM + self.var.TotalAbstractionFromGroundwaterM3
            # Abstraction is taken from lower groundwater zone
            # for mass balance calculation also summed up in IrrilossCUM (in M3)

            # ***********************************************************************
            # ***** ABSTRACTION SUPPLIED BY NONCONVENTIONAL SOURCES (DESALINATION) **
            # ***********************************************************************

            self.var.NonConventionalWaterM3 = DomAbstractionFromNonConventionalWaterM3 + LivestockAbstractionFromNonConventionalWaterM3 + IndustrialAbstractionFromNonConventionalWaterM3
            # Non-conventional water produced is not abstracted from surface water

            # ************************************************************
            # ***** ABSTRACTION FROM LAKES AND RESERVOIRS ****************
            # ************************************************************

            if option['simulateReservoirs']:
                # PotentialAbstractionFromReservoirsM3 = np.minimum(0.02 * self.var.ReservoirStorageM3, 0.01*self.var.TotalReservoirStorageM3C) #original
                PotentialAbstractionFromReservoirsM3 = np.minimum(
                    0.02 * self.var.ReservoirStorageM3,
                    0.01 * self.var.TotalReservoirStorageM3C) * self.var.DtDay

                PotentialAbstractionFromReservoirsM3 = np.where(
                    np.isnan(PotentialAbstractionFromReservoirsM3), 0,
                    PotentialAbstractionFromReservoirsM3)
            else:
                PotentialAbstractionFromReservoirsM3 = maskinfo.in_zero()

            if option['simulateLakes']:
                # CM
                # PotentialAbstractionFromLakesM3 = 0.10 * self.var.LakeStorageM3  #original
                PotentialAbstractionFromLakesM3 = 0.10 * self.var.LakeStorageM3 * self.var.DtDay

                PotentialAbstractionFromLakesM3 = np.where(
                    np.isnan(PotentialAbstractionFromLakesM3), 0,
                    PotentialAbstractionFromLakesM3)
            else:
                PotentialAbstractionFromLakesM3 = maskinfo.in_zero()

            if option['simulateReservoirs'] or option['simulateLakes']:
                PotentialAbstractionFromLakesAndReservoirsM3 = PotentialAbstractionFromLakesM3 + PotentialAbstractionFromReservoirsM3
                # potential total m3 that can be extracted from all lakes and reservoirs in a pixel
            else:
                PotentialAbstractionFromLakesAndReservoirsM3 = maskinfo.in_zero()

            AreatotalPotentialAbstractionFromLakesAndReservoirsM3 = np.take(
                np.bincount(
                    self.var.WUseRegionC,
                    weights=PotentialAbstractionFromLakesAndReservoirsM3),
                self.var.WUseRegionC)
            # potential total m3 that can be extracted from all lakes and reservoirs in the water region
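            # The np.take(np.bincount(regions, weights=x), regions) idiom sums x per water-use
            # region and broadcasts the regional total back to every pixel of that region, e.g.
            # regions = [0, 0, 1], x = [2., 3., 7.] -> bincount = [5., 7.] -> result = [5., 5., 7.].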

            AreatotalWaterAbstractionFromAllSurfaceSourcesM3 = np.take(
                np.bincount(
                    self.var.WUseRegionC,
                    weights=self.var.TotalAbstractionFromSurfaceWaterM3),
                self.var.WUseRegionC)
            # the total amount that needs to be extracted from surface water, lakes and reservoirs in the water region
            # self.var.FractionAllSurfaceWaterUsed = np.maximum(1 - self.var.FractionGroundwaterUsed - self.var.FractionNonConventionalWaterUsed,maskinfo.in_zero())
            # self.var.FractionSurfaceWaterUsed = np.maximum(1 - self.var.FractionGroundwaterUsed - self.var.FractionNonConventionalWaterUsed-self.var.FractionLakeReservoirWaterUsed,maskinfo.in_zero())
            # AreatotalWaterToBeAbstractedfromLakesReservoirsM3 = np.where( (self.var.FractionSurfaceWaterUsed+self.var.FractionLakeReservoirWaterUsed)> 0, (self.var.FractionLakeReservoirWaterUsed / (self.var.FractionSurfaceWaterUsed+self.var.FractionLakeReservoirWaterUsed)) * AreatotalWaterAbstractionFromAllSurfaceSourcesM3,maskinfo.in_zero())
            AreatotalWaterToBeAbstractedfromLakesReservoirsM3 = self.var.FractionLakeReservoirWaterUsed * AreatotalWaterAbstractionFromAllSurfaceSourcesM3
            self.var.AreatotalWaterAbstractedfromLakesReservoirsM3 = np.minimum(
                AreatotalWaterToBeAbstractedfromLakesReservoirsM3,
                AreatotalPotentialAbstractionFromLakesAndReservoirsM3)
            # total amount of m3 abstracted from all lakes and reservoirs in the water regions
            FractionAbstractedByLakesReservoirs = np.where(
                AreatotalWaterAbstractionFromAllSurfaceSourcesM3 > 0,
                self.var.AreatotalWaterAbstractedfromLakesReservoirsM3 /
                AreatotalWaterAbstractionFromAllSurfaceSourcesM3,
                maskinfo.in_zero())

            self.var.TotalAbstractionFromSurfaceWaterM3 = self.var.TotalAbstractionFromSurfaceWaterM3 * (
                1 - FractionAbstractedByLakesReservoirs)
            # the original surface water abstraction amount is corrected for what is now already abstracted by lakes and reservoirs

            FractionLakesReservoirsEmptying = np.where(
                AreatotalPotentialAbstractionFromLakesAndReservoirsM3 > 0,
                self.var.AreatotalWaterAbstractedfromLakesReservoirsM3 /
                AreatotalPotentialAbstractionFromLakesAndReservoirsM3,
                maskinfo.in_zero())

            self.var.LakeAbstractionM3 = PotentialAbstractionFromLakesM3 * FractionLakesReservoirsEmptying
            if option['simulateLakes']:
                self.var.LakeStorageM3 = self.var.LakeStorageM3 - self.var.LakeAbstractionM3

            self.var.ReservoirAbstractionM3 = PotentialAbstractionFromReservoirsM3 * FractionLakesReservoirsEmptying
            if option['simulateReservoirs']:
                self.var.ReservoirStorageM3 = self.var.ReservoirStorageM3 - self.var.ReservoirAbstractionM3
                # subtract abstracted water from lakes and reservoir storage

            # ************************************************************
            # ***** Abstraction from channels ****************************
            # ***** average abstraction taken from entire waterregion ****
            # ***** limited by available channel water and e-flow minimum*
            # ************************************************************

            AreaTotalDemandedAbstractionFromSurfaceWaterM3 = np.maximum(
                np.take(
                    np.bincount(
                        self.var.WUseRegionC,
                        weights=self.var.TotalAbstractionFromSurfaceWaterM3),
                    self.var.WUseRegionC), 0)

            PixelAvailableWaterFromChannelsM3 = np.maximum(
                self.var.ChanM3Kin - self.var.EFlowThreshold * self.var.DtSec,
                0) * (1 - self.var.WUsePercRemain)
            # respecting e-flow

            AreaTotalAvailableWaterFromChannelsM3 = np.maximum(
                np.take(
                    np.bincount(self.var.WUseRegionC,
                                weights=PixelAvailableWaterFromChannelsM3),
                    self.var.WUseRegionC), 0)
            AreaTotalDemandedWaterFromChannelsM3 = np.minimum(
                AreaTotalAvailableWaterFromChannelsM3,
                AreaTotalDemandedAbstractionFromSurfaceWaterM3)

            self.var.FractionAbstractedFromChannels = np.where(
                AreaTotalAvailableWaterFromChannelsM3 > 0,
                np.minimum(
                    AreaTotalDemandedWaterFromChannelsM3 /
                    AreaTotalAvailableWaterFromChannelsM3, 1), 0)
            # IS THE DEFINITION OF AreaTotalDemandedWaterFromChannelsM3 REDUNDANT WITH np.minimum(...) ABOVE?
            # fraction that is abstracted from channels (should be 0-1)
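            # note: AreaTotalDemandedWaterFromChannelsM3 is already capped at the available
            # volume, so the ratio above cannot exceed 1 and the np.minimum(..., 1) is only
            # a redundant safeguard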
            self.var.WUseAddM3 = self.var.FractionAbstractedFromChannels * PixelAvailableWaterFromChannelsM3
            # pixel abstracted water in m3

            self.var.WUseAddM3Dt = self.var.WUseAddM3 * self.var.InvNoRoutSteps
            # splitting water use per timestep into water use per sub time step

            self.var.wateruseCum += self.var.WUseAddM3
            # summing up for water balance calculation
            # If report wateruse
            if (option['repwateruseGauges']) or (option['repwateruseSites']):
                self.var.WUseSumM3 = accuflux(
                    self.var.Ldd,
                    decompress(self.var.WUseAddM3) * self.var.InvDtSec)

            # totalAdd = areatotal(decompress(WUseAddM3),self.var.WUseRegion);
            self.var.totalAddM3 = np.take(
                np.bincount(self.var.WUseRegionC, weights=self.var.WUseAddM3),
                self.var.WUseRegionC)

            self.var.WaterUseShortageM3 = self.var.TotalAbstractionFromSurfaceWaterM3 - self.var.WUseAddM3
            # amount of M3 that cannot be extracted from any source, including the channels

            self.var.PotentialSurfaceWaterAvailabilityForIrrigationM3 = np.maximum(
                PixelAvailableWaterFromChannelsM3 -
                self.var.TotalAbstractionFromSurfaceWaterM3 +
                IrrigationAbstractionFromSurfaceWaterM3 +
                self.var.PaddyRiceWaterAbstractionFromSurfaceWaterM3, 0.0)
            # available water excluding the surface water irrigation needs

            # ************************************************************
            # ***** Water Allocation *************************************
            # ***** average abstraction taken from entire waterregion ****
            # ***** limited by available channel water and e-flow minimum*
            # ************************************************************

            # totalAbstr = areatotal(decompress(TotalAbstractionFromSurfaceWaterM3),self.var.WUseRegion)
            self.var.AreaTotalAbstractionFromSurfaceWaterM3 = np.take(
                np.bincount(
                    self.var.WUseRegionC,
                    weights=self.var.TotalAbstractionFromSurfaceWaterM3 -
                    self.var.WUseAddM3), self.var.WUseRegionC)
            self.var.AreaTotalAbstractionFromGroundwaterM3 = np.take(
                np.bincount(
                    self.var.WUseRegionC,
                    weights=self.var.TotalAbstractionFromGroundwaterM3),
                self.var.WUseRegionC)

            # demand
            self.var.AreaTotalDemandM3 = np.take(
                np.bincount(self.var.WUseRegionC,
                            weights=self.var.TotalDemandM3),
                self.var.WUseRegionC)

            # totalEne = areatotal(decompress(self.var.EnergyConsumptiveUseMM*self.var.MMtoM3),self.var.WUseRegion)
            AreatotalIrriM3 = np.take(
                np.bincount(
                    self.var.WUseRegionC,
                    weights=IrrigationAbstractionFromSurfaceWaterM3 +
                    self.var.PaddyRiceWaterAbstractionFromSurfaceWaterM3),
                self.var.WUseRegionC)
            # AreatotalDomM3 = np.take(np.bincount(self.var.WUseRegionC, weights=DomAbstractionFromSurfaceWaterM3),
            #                          self.var.WUseRegionC)
            # AreatotalLiveM3 = np.take(np.bincount(self.var.WUseRegionC, weights=LivestockAbstractionFromSurfaceWaterM3),
            #                           self.var.WUseRegionC)
            # AreatotalIndM3 = np.take(np.bincount(self.var.WUseRegionC, weights=IndustrialAbstractionFromSurfaceWaterM3),
            #                          self.var.WUseRegionC)
            # AreatotalEneM3 = np.take(np.bincount(self.var.WUseRegionC, weights=EnergyAbstractionFromSurfaceWaterM3),
            #                          self.var.WUseRegionC)

            # Allocation rule: Domestic ->  Energy -> Livestock -> Industry -> Irrigation
            self.var.AreatotalIrrigationShortageM3 = np.take(
                np.bincount(self.var.WUseRegionC,
                            weights=self.var.WaterUseShortageM3),
                self.var.WUseRegionC)
            self.var.AreatotalIrrigationUseM3 = np.maximum(
                AreatotalIrriM3 - self.var.AreatotalIrrigationShortageM3, 0.0)

            with np.errstate(all='ignore'):
                fractionIrrigationAvailability = np.where(
                    AreatotalIrriM3 > 0,
                    self.var.AreatotalIrrigationUseM3 / AreatotalIrriM3, 1.0)

                self.var.IrrigationWaterAbstractionM3 = fractionIrrigationAvailability * IrrigationAbstractionFromSurfaceWaterM3 + IrrigationAbstractionFromGroundwaterM3
                # real irrigation is percentage of avail/demand for waterregion * old surface + old groundwater abstraction
                IrrigationWaterDemand = self.var.IrrigationWaterAbstractionM3 * self.var.M3toMM
                IrrigationWaterDemand = np.where(
                    self.var.IrrigationFraction > 0,
                    IrrigationWaterDemand / self.var.IrrigationFraction, 0.0)
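                # dividing by IrrigationFraction converts the abstraction from a cell-average
                # water depth (mm) into a depth over the irrigated fraction of the cell only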

            # for mass balance calculate the loss of irrigation water
            # ---------------------------------------------------------
            # updating soil in loop3=irrigation
            # ---------------------------------------------------------

            Wold = self.var.W1[2]
            IrrigationDemandW1b = np.maximum(
                IrrigationWaterDemand - (self.var.WFilla - self.var.W1a[2]), 0)
            self.var.W1a[2] = np.where(
                self.var.W1a[2] >= self.var.WFilla, self.var.W1a[2],
                np.minimum(self.var.WFilla,
                           self.var.W1a[2] + IrrigationWaterDemand))
            self.var.W1b[2] = np.where(
                self.var.W1b[2] >= self.var.WFillb, self.var.W1b[2],
                np.minimum(self.var.WFillb,
                           self.var.W1b[2] + IrrigationDemandW1b))
            self.var.W1[2] = np.add(self.var.W1a[2], self.var.W1b[2])
            # if irrigated soil is less than Pf3 then fill up to Pf3 (if there is water demand)
            # if more than Pf3 the additional water is transpirated
            # there is already no water demand if the soil is frozen
            Wdiff = self.var.W1[2] - Wold
            self.var.Ta[2] = self.var.Ta[2] + IrrigationWaterDemand - Wdiff

            self.var.IrriLossCUM = self.var.IrriLossCUM - self.var.IrrigationWaterAbstractionM3 * self.var.IrrigationEfficiency * self.var.ConveyanceEfficiency - Wdiff * self.var.MMtoM3 * self.var.IrrigationFraction

            # added to Ta; also, for the mass balance, calculate the loss of irrigation water
            # AdR: irrigation demand added to W1 and Ta; so assumption here that soil moisture stays the same
            # we could also abstract more water to satisfy Ta and bring soil moisture up to pF2 or so; for later consideration
            # self.var.Ta[2] = np.where(self.var.FrostIndex > self.var.FrostIndexThreshold, maskinfo.in_zero(), self.var.Ta[2])
            # transpiration is 0 when soil is frozen

            # ---------------------------------------------------------
            # E-flow
            # ---------------------------------------------------------

            self.var.EFlowIndicator = np.where(
                self.var.ChanQ <= self.var.EFlowThreshold,
                maskinfo.in_zero() + 1.0, maskinfo.in_zero())
            # if ChanQ is less than EflowThreshold, EFlowIndicator becomes 1

            # ************************************************************
            # ***** update state variables                             ***
            # ************************************************************
            # CM Update state variables for changes to W1a[2] and W1b[2]
            self.var.Theta1a[2] = self.var.W1a[2] / self.var.SoilDepth1a[2]
            self.var.Theta1b[2] = self.var.W1b[2] / self.var.SoilDepth1b[2]

            # ************************************************************
            # ***** smooth lower zone with correction                  ***
            # ************************************************************

            if option['groundwaterSmooth']:
                LZPcr = decompress(self.var.LZ)

                Range = self.var.LZSmoothRange * celllength()

                LZTemp1 = ifthen(self.var.GroundwaterBodiesPcr == 1, LZPcr)
                LZTemp2 = ifthen(self.var.GroundwaterBodiesPcr == 1,
                                 windowtotal(LZTemp1, Range))
                LZTemp3 = windowtotal(LZTemp1 * 0 + 1, Range)
                LZSmooth = ifthenelse(LZTemp3 == 0, 0.0,
                                      pcrDiv(LZTemp2, LZTemp3))
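                # LZTemp2 / LZTemp3 is a moving-window mean of LZ restricted to groundwater-body
                # cells: windowtotal of the values divided by windowtotal of the cell count (ones)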
                LZPcr = ifthenelse(self.var.GroundwaterBodiesPcr == 0, LZPcr,
                                   0.9 * LZPcr + 0.1 * LZSmooth)

                diffCorr = 0.1 * areaaverage(LZSmooth - LZTemp1,
                                             self.var.groundwaterCatch)
                # error of the 0.1 * LZSmooth operation (same factor of 0.1 as above)
                LZPcr -= cover(diffCorr, 0)
                # correction of LZ by the average error from smoothing operation

                self.var.LZ = compressArray(LZPcr)
     # transfer 50% of the overbank volume to the downstream (several cells downstream)
     transfer_to_downstream = pcr.cover(\
                              pcr.ifthen(pcr.scalar(water_body_outlet) > 0., lake_reservoir_overbank_volume * 0.50), 0.0)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution,
                                           transfer_to_downstream)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution,
                                           transfer_to_downstream)
     transfer_to_downstream = pcr.upstream(ldd_map_low_resolution,
                                           transfer_to_downstream)
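     # each pcr.upstream call assigns to a cell the sum of the values of its upstream
     # neighbours, i.e. it pushes the volume one cell further down the drainage network;
     # after three calls the volume sits three cells downstream of the water body outlet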
     extreme_value_map      = transfer_to_downstream + \
                              pcr.ifthenelse(pcr.cover(pcr.scalar(water_body_id), 0.0) > 0.00, 0.00, extreme_value_map)
     #
     # the remaining overbank volume (50%) will be distributed to the shores
     lake_reservoir_overbank_volume = lake_reservoir_overbank_volume * 0.50
     land_area = cell_area * pcr.max(0.0, 1.0 - fracwat)
     land_area_average = pcr.areaaverage(land_area, water_body_id)
     land_area_weight = pcr.ifthenelse(land_area < land_area_average, 0.0,
                                       land_area_average)
     distributed_lake_reservoir_overbank_volume = pcr.cover(\
                                                  lake_reservoir_overbank_volume * land_area_weight / pcr.max(0.00, pcr.areatotal(land_area_weight, water_body_id)), 0.0)
     extreme_value_map = pcr.ifthenelse(
         pcr.cover(pcr.scalar(water_body_id), 0.0) > 0.00,
         distributed_lake_reservoir_overbank_volume, extreme_value_map)
 #
 # - cover the rests to zero (so they will not contribute to any flood/inundation)
 extreme_value_map = pcr.cover(extreme_value_map, 0.0)
 #
 # - make sure that extreme value maps increasing over return period
 if i_file == 0: previous_return_period_map = extreme_value_map
 if i_file > 0:
     extreme_value_map = pcr.max(previous_return_period_map,
                                 extreme_value_map)
Exemple #30
def complexreservoir(
    waterlevel,
    ReserVoirLocs,
    LinkedReserVoirLocs,
    ResArea,
    ResThreshold,
    ResStorFunc,
    ResOutflowFunc,
    sh,
    hq,
    res_b,
    res_e,
    inflow,
    precip,
    pet,
    ReservoirComplexAreas,
    JDOY,
    timestepsecs=86400,
):

    mv = -999.0

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.ifthen(
        pcr.boolean(ReserVoirLocs), pcr.areaaverage(precip, ReservoirComplexAreas)
    )
    pet_av = pcr.ifthen(
        pcr.boolean(ReserVoirLocs), pcr.areaaverage(pet, ReservoirComplexAreas)
    )

    np_reslocs = pcr.pcr2numpy(ReserVoirLocs, 0.0)
    np_linkedreslocs = pcr.pcr2numpy(LinkedReserVoirLocs, 0.0)

    _outflow = []
    nr_loop = np.max([int(timestepsecs / 21600), 1])
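    # split the model timestep into sub-steps of at most 6 hours (21600 s) for the iteration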
    for n in range(0, nr_loop):
        np_waterlevel = pcr.pcr2numpy(waterlevel, np.nan)
        np_waterlevel_lower = np_waterlevel.copy()

        for val in np.unique(np_linkedreslocs):
            if val > 0:
                np_waterlevel_lower[np_linkedreslocs == val] = np_waterlevel[
                    np.where(np_reslocs == val)
                ]

        diff_wl = np_waterlevel - np_waterlevel_lower
        diff_wl[np.isnan(diff_wl)] = mv
        np_waterlevel_lower[np.isnan(np_waterlevel_lower)] = mv

        pcr_diff_wl = pcr.numpy2pcr(pcr.Scalar, diff_wl, mv)
        pcr_wl_lower = pcr.numpy2pcr(pcr.Scalar, np_waterlevel_lower, mv)

        storage_start = pcr.ifthenelse(
            ResStorFunc == 1,
            ResArea * waterlevel,
            lookupResFunc(ReserVoirLocs, waterlevel, sh, "0-1"),
        )

        outflow = pcr.ifthenelse(
            ResOutflowFunc == 1,
            lookupResRegMatr(ReserVoirLocs, waterlevel, hq, JDOY),
            pcr.ifthenelse(
                pcr_diff_wl >= 0,
                pcr.max(res_b * (waterlevel - ResThreshold) ** res_e, 0),
                pcr.min(-1 * res_b * (pcr_wl_lower - ResThreshold) ** res_e, 0),
            ),
        )

        np_outflow = pcr.pcr2numpy(outflow, np.nan)
        np_outflow_linked = np_reslocs * 0.0

        with np.errstate(invalid="ignore"):
            if np_outflow[np_outflow < 0].size > 0:
                np_outflow_linked[
                    np.in1d(np_reslocs, np_linkedreslocs[np_outflow < 0]).reshape(
                        np_linkedreslocs.shape
                    )
                ] = np_outflow[np_outflow < 0]

        outflow_linked = pcr.numpy2pcr(pcr.Scalar, np_outflow_linked, 0.0)

        fl_nr_loop = float(nr_loop)
        storage = (
            storage_start
            + (inflow * timestepsecs / fl_nr_loop)
            + (prec_av / fl_nr_loop / 1000.0) * ResArea
            - (pet_av / fl_nr_loop / 1000.0) * ResArea
            - (pcr.cover(outflow, 0.0) * timestepsecs / fl_nr_loop)
            + (pcr.cover(outflow_linked, 0.0) * timestepsecs / fl_nr_loop)
        )

        waterlevel = pcr.ifthenelse(
            ResStorFunc == 1,
            waterlevel + (storage - storage_start) / ResArea,
            lookupResFunc(ReserVoirLocs, storage, sh, "1-0"),
        )

        np_outflow_nz = np_outflow * 0.0
        with np.errstate(invalid="ignore"):
            np_outflow_nz[np_outflow > 0] = np_outflow[np_outflow > 0]
        _outflow.append(np_outflow_nz)

    outflow_av_temp = np.average(_outflow, 0)
    outflow_av_temp[np.isnan(outflow_av_temp)] = mv
    outflow_av = pcr.numpy2pcr(pcr.Scalar, outflow_av_temp, mv)

    return waterlevel, outflow_av, prec_av, pet_av, storage
    def set_river_package(self, discharge):

        logger.info("Set the river package.")

        # specify the river package
        #
        # - surface water river bed/bottom elevation
        #
        # - for lakes and reservoirs, make the bottom elevation deep --- Shall we do this?
        #~ additional_depth = 500.
        #~ surface_water_bed_elevation = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
        #~ self.dem_riverbed - additional_depth)
        #~ surface_water_bed_elevation = pcr.cover(surface_water_bed_elevation, self.dem_riverbed)
        #
        surface_water_bed_elevation = self.dem_riverbed  # This is an alternative, if we do not want to introduce very deep bottom elevations of lakes and/or reservoirs.
        #
        # rounding values for surface_water_bed_elevation
        self.surface_water_bed_elevation = pcr.roundup(
            surface_water_bed_elevation * 1000.) / 1000.
        #
        # - river bed conductance (unit: m2/day)
        bed_surface_area = pcr.ifthen(pcr.scalar(self.WaterBodies.waterBodyIds) > 0.0, \
                                                 self.WaterBodies.fracWat * self.cellAreaMap)   # TODO: Incorporate the concept of dynamicFracWat
        bed_surface_area = pcr.cover(bed_surface_area, \
                                     self.bankfull_width * self.channelLength)
        bed_surface_area = self.bankfull_width * self.channelLength
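        # note that the line above overrides the lake/reservoir-based estimate, so the
        # bankfull geometry is used for the bed surface area everywhere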
        bed_conductance = (1.0 / self.bed_resistance) * bed_surface_area
        bed_conductance = pcr.ifthenelse(bed_conductance < 1e-20, 0.0, \
                                         bed_conductance)
        self.bed_conductance = pcr.cover(bed_conductance, 0.0)
        #
        # - 'channel width' for lakes and reservoirs
        channel_width = pcr.areamaximum(self.bankfull_width,
                                        self.WaterBodies.waterBodyIds)
        channel_width = pcr.cover(channel_width, self.bankfull_width)
        #
        # - convert discharge value to surface water elevation (m)
        river_water_height = (channel_width**(-3 / 5)) * (discharge**(
            3 / 5)) * ((self.gradient)**(-3 / 10)) * (self.manningsN**(3 / 5))
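        # this follows from Manning's equation for a wide rectangular channel,
        # Q = (1/n) * w * h**(5/3) * S**(1/2), solved for the water height:
        # h = n**(3/5) * Q**(3/5) * w**(-3/5) * S**(-3/10)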
        surface_water_elevation = self.dem_riverbed + \
                                  river_water_height
        #
        # - calculating water level (unit: m) above the flood plain   # TODO: Improve this concept (using Rens's latest inundation scheme)
        #----------------------------------------------------------
        water_above_fpl = pcr.max(
            0.0, surface_water_elevation - self.dem_floodplain
        )  # unit: m, water level above the floodplain (not distributed)
        water_above_fpl *= self.bankfull_depth * self.bankfull_width / self.cellAreaMap  # unit: m, water level above the floodplain (distributed within the cell)
        # TODO: Improve this concept using Rens's latest scheme
        #
        # - corrected surface water elevation
        surface_water_elevation = pcr.ifthenelse(surface_water_elevation > self.dem_floodplain, \
                                                                           self.dem_floodplain + water_above_fpl, \
                                                                           surface_water_elevation)
        # - surface water elevation for lakes and reservoirs:
        lake_reservoir_water_elevation = pcr.ifthen(
            self.WaterBodies.waterBodyOut, surface_water_elevation)
        lake_reservoir_water_elevation = pcr.areamaximum(
            lake_reservoir_water_elevation, self.WaterBodies.waterBodyIds)
        lake_reservoir_water_elevation = pcr.cover(lake_reservoir_water_elevation, \
                                         pcr.areaaverage(surface_water_elevation, self.WaterBodies.waterBodyIds))
        # - maximum and minimum values for lake_reservoir_water_elevation
        lake_reservoir_water_elevation = pcr.min(
            self.dem_floodplain, lake_reservoir_water_elevation)
        lake_reservoir_water_elevation = pcr.max(
            surface_water_bed_elevation, lake_reservoir_water_elevation)
        # - smoothing
        lake_reservoir_water_elevation = pcr.areaaverage(
            surface_water_elevation, self.WaterBodies.waterBodyIds)
        #
        # - merge lake and reservoir water elevation
        surface_water_elevation = pcr.cover(lake_reservoir_water_elevation,
                                            surface_water_elevation)
        #
        # - pass values to the river package
        surface_water_elevation = pcr.cover(surface_water_elevation,
                                            self.surface_water_bed_elevation)
        surface_water_elevation = pcr.rounddown(
            surface_water_elevation * 1000.) / 1000.
        #
        # - make sure that HRIV >= RBOT ; no infiltration if HRIV = RBOT (and h < RBOT)
        self.surface_water_elevation = pcr.max(
            surface_water_elevation, self.surface_water_bed_elevation)
        #
        # - pass the values to the RIV package
        self.pcr_modflow.setRiver(self.surface_water_elevation, \
                                  self.surface_water_bed_elevation, self.bed_conductance, 2)
Exemple #32
def naturalLake(
    waterlevel,
    LakeLocs,
    LinkedLakeLocs,
    LakeArea,
    LakeThreshold,
    LakeStorFunc,
    LakeOutflowFunc,
    sh,
    hq,
    lake_b,
    lake_e,
    inflow,
    precip,
    pet,
    LakeAreasMap,
    JDOY,
    timestepsecs=86400,
):
    
    """
    Run Natural Lake module to compute the new waterlevel and outflow.
    Solves lake water balance with linearisation and iteration procedure,
    for any rating and storage curve.
    For the case where storage curve is S = AH and Q=b(H-Ho)^2, uses the direct
    solution from the Modified Puls Approach (LISFLOOD).


    :ivar waterlevel: water level H in the lake
    :ivar LakeLocs: location of lake's outlet
    :ivar LinkedLakeLocs: ID of linked lakes
    :ivar LakeArea: total lake area
    :ivar LakeThreshold: water level threshold Ho under which outflow is zero
    :ivar LakeStorFunc: type of lake storage curve
                        1: S = AH
                        2: S = f(H) from lake data and interpolation
    :ivar LakeOutflowFunc: type of lake rating curve
                           1: Q = f(H) from lake data and interpolation
                           2: General Q = b(H - Ho)^e
                           3: Case of Puls Approach Q = b(H - Ho)^2
    :ivar sh: data for storage curve
    :ivar hq: data for rating curve
    :ivar lake_b: rating curve coefficient
    :ivar lake_e: rating curve exponent
    :ivar inflow: inflow to the lake (surface runoff + river discharge + seepage)
    :ivar precip: precipitation map
    :ivar pet: PET map
    :ivar LakeAreasMap: lake extent map (for filtering P and PET)
    :ivar JDOY: Julian Day of Year to read storage/rating curve from data
    :ivar timestepsecs: model timestep in seconds

    :returns: waterlevel, outflow, prec_av, pet_av, storage
    """

    mv = -999.0
    LakeZeros = LakeArea * 0.0
    
    waterlevel_start = waterlevel

    inflow = pcr.ifthen(pcr.boolean(LakeLocs), inflow)

    prec_av = pcr.ifthen(
        pcr.boolean(LakeLocs), pcr.areaaverage(precip, LakeAreasMap)
    )
    pet_av = pcr.ifthen(
        pcr.boolean(LakeLocs), pcr.areaaverage(pet, LakeAreasMap)
    )
    
    
    ### Modified Puls Approach (Burek et al., 2013, LISFLOOD) ###
    #ResOutflowFunc = 3 
    
    #Calculate lake factor and SI parameter
    LakeFactor = pcr.ifthenelse(
            LakeOutflowFunc == 3,
            LakeArea / (timestepsecs * (lake_b) ** 0.5),
            mv
            )
    
    storage_start = pcr.ifthenelse(
            LakeStorFunc == 1,
            LakeArea * waterlevel_start,
            lookupResFunc(LakeLocs, waterlevel_start, sh, "0-1"),
            )

    SIFactor = pcr.ifthenelse(
            LakeOutflowFunc == 3,
            ((storage_start + (prec_av-pet_av)*LakeArea/1000.0) / timestepsecs 
             + inflow),
            mv
            )
    #Adjust SIFactor for LakeThreshold != 0
    SIFactorAdj = SIFactor - LakeArea * LakeThreshold / timestepsecs
    
    #Calculate the new lake outflow/waterlevel/storage
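    # sketch of the direct solution used below: substituting S = A*H and Q = b*(H - Ho)**2
    # into a Modified Puls balance of the form S2/dt + Q2/2 = SI gives a quadratic in
    # sqrt(Q2); its positive root, with LakeFactor = A/(dt*sqrt(b)) and
    # SIFactorAdj = SI - A*Ho/dt, is sqrt(Q2) = -LakeFactor + sqrt(LakeFactor**2 + 2*SIFactorAdj)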
    outflow = pcr.ifthenelse(
            LakeOutflowFunc == 3,
            pcr.ifthenelse(
                    SIFactorAdj > 0.0,
                    (-LakeFactor + (LakeFactor**2 + 2*SIFactorAdj) ** 0.5) ** 2,
                    0.0),
            LakeZeros
            )
    storage = pcr.ifthenelse(
            LakeOutflowFunc == 3,
            (SIFactor - outflow) * timestepsecs,
            LakeZeros
            )
    waterlevel = pcr.ifthenelse(
            LakeOutflowFunc == 3,
            storage / LakeArea,
            LakeZeros
            )
    
    ### Linearisation and iteration for specific storage/rating curves ###
    np_lakeoutflowfunc = pcr.pcr2numpy(LakeOutflowFunc, 0.0)
    if ((bool(np.isin(1, np.unique(np_lakeoutflowfunc)))) or 
        (bool(np.isin(2, np.unique(np_lakeoutflowfunc))))):
        
        np_lakelocs = pcr.pcr2numpy(LakeLocs, 0.0)
        np_linkedlakelocs = pcr.pcr2numpy(LinkedLakeLocs, 0.0)
        waterlevel_loop = waterlevel_start
    
        _outflow = []
        nr_loop = np.max([int(timestepsecs / 21600), 1])
        for n in range(0, nr_loop):
            np_waterlevel = pcr.pcr2numpy(waterlevel_loop, np.nan)
            np_waterlevel_lower = np_waterlevel.copy()
    
            for val in np.unique(np_linkedlakelocs):
                if val > 0:
                    np_waterlevel_lower[np_linkedlakelocs == val] = np_waterlevel[
                        np.where(np_lakelocs == val)
                    ]
    
            diff_wl = np_waterlevel - np_waterlevel_lower
            diff_wl[np.isnan(diff_wl)] = mv
            np_waterlevel_lower[np.isnan(np_waterlevel_lower)] = mv
    
            pcr_diff_wl = pcr.numpy2pcr(pcr.Scalar, diff_wl, mv)
            pcr_wl_lower = pcr.numpy2pcr(pcr.Scalar, np_waterlevel_lower, mv)
    
            storage_start_loop = pcr.ifthenelse(
                LakeStorFunc == 1,
                LakeArea * waterlevel_loop,
                lookupResFunc(LakeLocs, waterlevel_loop, sh, "0-1"),
            )
    
            outflow_loop = pcr.ifthenelse(
                LakeOutflowFunc == 1,
                lookupResRegMatr(LakeLocs, waterlevel_loop, hq, JDOY),
                pcr.ifthenelse(
                    pcr_diff_wl >= 0,
                    pcr.max(lake_b * (waterlevel_loop - LakeThreshold) ** lake_e, 0),
                    pcr.min(-1 * lake_b * (pcr_wl_lower - LakeThreshold) ** lake_e, 0),
                ),
            )
    
            np_outflow = pcr.pcr2numpy(outflow_loop, np.nan)
            np_outflow_linked = np_lakelocs * 0.0
    
            with np.errstate(invalid="ignore"):
                if np_outflow[np_outflow < 0].size > 0:
                    np_outflow_linked[
                        np.in1d(np_lakelocs, np_linkedlakelocs[np_outflow < 0]).reshape(
                            np_linkedlakelocs.shape
                        )
                    ] = np_outflow[np_outflow < 0]
    
            outflow_linked = pcr.numpy2pcr(pcr.Scalar, np_outflow_linked, 0.0)
    
            fl_nr_loop = float(nr_loop)
            storage_loop = (
                storage_start_loop
                + (inflow * timestepsecs / fl_nr_loop)
                + (prec_av / fl_nr_loop / 1000.0) * LakeArea
                - (pet_av / fl_nr_loop / 1000.0) * LakeArea
                - (pcr.cover(outflow_loop, 0.0) * timestepsecs / fl_nr_loop)
                + (pcr.cover(outflow_linked, 0.0) * timestepsecs / fl_nr_loop)
            )
    
            waterlevel_loop = pcr.ifthenelse(
                LakeStorFunc == 1,
                waterlevel_loop + (storage_loop - storage_start_loop) / LakeArea,
                lookupResFunc(LakeLocs, storage_loop, sh, "1-0"),
            )
    
            np_outflow_nz = np_outflow * 0.0
            with np.errstate(invalid="ignore"):
                np_outflow_nz[np_outflow > 0] = np_outflow[np_outflow > 0]
            _outflow.append(np_outflow_nz)
    
        outflow_av_temp = np.average(_outflow, 0)
        outflow_av_temp[np.isnan(outflow_av_temp)] = mv
        outflow_av = pcr.numpy2pcr(pcr.Scalar, outflow_av_temp, mv)
        
        #Merge the discharge/waterlevel/storage: keep the Puls-approach result where LakeOutflowFunc == 3, otherwise use the result from the iterative loop
        outflow = pcr.ifthenelse(
                LakeOutflowFunc == 3,
                outflow,
                outflow_av
                )
        waterlevel = pcr.ifthenelse(
                LakeOutflowFunc == 3,
                waterlevel,
                waterlevel_loop
                )
        storage = pcr.ifthenelse(
                LakeOutflowFunc == 3,
                storage,
                storage_loop
                )

    return waterlevel, outflow, prec_av, pet_av, storage
    def dynamic(self):

        # re-calculate current model time using current pcraster timestep value
        self.modelTime.update(self.currentTimeStep())
        msg = "\n\n\n Processing the date " + self.modelTime.fulldate + "\n\n\n"
        logger.info(msg)

        # read netcdf file
        logger.info("Reading netcdf file.")
        # - set the clone to the netcdf file extent
        pcr.setclone(self.inputClone)
        # - read netcdf file
        input_pcr = vos.netcdf2PCRobjClone(ncFile = self.netcdf_input_file, \
                                           varName = "automatic", \
                                           dateInput = self.modelTime.fulldate, \
                                           useDoy = None, \
                                           cloneMapFileName  = self.inputClone, \
                                           LatitudeLongitude = True, \
                                           specificFillValue = None)

        # reprojection
        logger.info("Reprojection.")
        #
        # - save it to a pcraster file in the temporary folder
        tmp_input_pcr_file = self.tmpDir + "/" + "tmp_input_pcr.map"
        pcr.report(input_pcr, tmp_input_pcr_file)
        # - convert it to tif
        tmp_input_tif_file = self.tmpDir + "/" + "tmp_input_pcr.tif"
        cmd = 'gdal_translate ' + tmp_input_pcr_file + " " + tmp_input_tif_file
        logger.debug(cmd)
        os.system(cmd)
        # - re-projection to the outputProjection
        tmp_reprj_tif_file = self.tmpDir + "/" + "tmp_reprj_tif.tif"
        bound_box = self.x_min_output + " " + self.y_min_output + " " + self.x_max_output + " " + self.y_max_output
        cell_size = self.cell_length + " " + self.cell_length
        cmd = 'gdalwarp '+\
              '-s_srs ' + '"' + self.inputProjection  +'" '+\
              '-t_srs ' + '"' + self.outputProjection +'" '+\
              '-te ' + bound_box + " " +\
              '-tr ' + cell_size + " " +\
              '-r '+ self.resample_method + " " +\
              '-srcnodata -3.4028234663852886e+38 -dstnodata -3.4028234663852886e+38 '+\
              tmp_input_tif_file + " "+\
              tmp_reprj_tif_file
        logger.debug(cmd)
        os.system(cmd)
        # - convert it back to pcraster map
        tmp_reprj_map_file = self.tmpDir + "/" + "tmp_reprj_map.map"
        cmd = 'gdal_translate -of PCRaster ' + tmp_reprj_tif_file + " " + tmp_reprj_map_file
        logger.debug(cmd)
        os.system(cmd)
        # - make sure that it has a valid mapattr
        cmd = 'mapattr -c ' + self.outputClone + " " + tmp_reprj_map_file
        logger.debug(cmd)
        os.system(cmd)

        # read the re-projected file
        logger.info(
            "Read the re-projected file, including unit conversion/correction."
        )
        # - set the clone to the output clone
        pcr.setclone(self.outputClone)
        output_pcr = pcr.readmap(tmp_reprj_map_file)
        # - unit conversion
        output_pcr = output_pcr * self.unit_conversion_factor + self.unit_conversion_offset
        #~ pcr.aguila(output_pcr)
        #~ raw_input("Press Enter to continue...")

        # perform area operation
        logger.info("Performing area operation.")
        output_area_pcr = pcr.areaaverage(output_pcr, self.area_class)
        #~ pcr.aguila(output_area_pcr)
        #~ raw_input("Press Enter to continue...")

        # save it to a daily tss file
        logger.info("Saving daily value to a tss file.")
        self.tss_daily_reporting.sample(output_area_pcr)

        # calculate 10 day average
        # - initiate/reset counter and accumulator
        if self.modelTime.day == 1 or self.modelTime.day == 11 or self.modelTime.day == 21:
            self.day_counter = pcr.scalar(0.0)
            self.cummulative_per_ten_days = pcr.scalar(0.0)
            self.average_per_ten_days = pcr.scalar(0.0)
        # - accumulating
        self.day_counter = self.day_counter + 1.0
        self.cummulative_per_ten_days = self.cummulative_per_ten_days + output_area_pcr
        # - calculate 10 day average and reporting
        if self.modelTime.day == 10 or self.modelTime.day == 20 or self.modelTime.isLastDayOfMonth(
        ):
            logger.info(
                'Calculating/saving 10 day average value to a tss file.')
            average_per_ten_days = self.cummulative_per_ten_days / pcr.ifthen(
                self.landmask, self.day_counter)
            #~ pcr.aguila(average_per_ten_days)
            #~ raw_input("Press Enter to continue...")
            if self.report_10day_pcr_files:
                logger.info('Saving 10 day average value to pcraster file.')
                cwd = os.getcwd()
                os.chdir(self.mapDir)
                self.report(average_per_ten_days, "dcd")
                os.chdir(cwd)
        else:
            average_per_ten_days = pcr.scalar(-9999.99)
        self.tss_10day_reporting.sample(average_per_ten_days)

        # clean the temporary folder
        cmd = 'rm -r ' + self.tmpDir + "/*"
        print(cmd)
        os.system(cmd)

        # change directory to the output folder so that the tss file will be stored there
        os.chdir(self.output_folder)
Exemple #34
def main():
    """
        
    :ivar masterdem: digital elevation model
    :ivar dem: digital elevation model
    :ivar river: optional river map
    """

    # Default values
    strRiver = 8
    masterdem = "dem.map"
    step1dir = "step1"
    step2dir = "step2"
    workdir = "."
    inifile = "wflow_prepare.ini"
    recreate = False
    snapgaugestoriver = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f")
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True

    pcr.setglobaloption("unitcell")
    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    masterdem = configget(config, "files", "masterdem", "dem.map")
    pcr.setclone(masterdem)

    strRiver = int(configget(config, "settings", "riverorder", "4"))

    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    # upscalefactor = float(config.get("settings","upscalefactor"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35"))
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(
        configget(config, "settings", "lddoutflowdepth", "1E35"))

    initialscale = int(configget(config, "settings", "initialscale", "1"))
    csize = float(configget(config, "settings", "cellsize", "1"))

    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1")))
    lddglobaloption = configget(config, "settings", "lddglobaloption",
                                "lddout")
    pcr.setglobaloption(lddglobaloption)
    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    # X/Y coordinates of the gauges in the system
    exec("X=tr.array(" + gauges_x + ")")
    exec("Y=tr.array(" + gauges_y + ")")

    tr.Verbose = 1

    # make the directories to save results in
    mkoutputdirs(step1dir, step2dir)

    ldddem = readdem(initialscale, masterdem, step1dir)
    dem = ldddem

    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask...")
    else:
        print("clipping DEM with mask.....")
        mask = pcr.readmap(catchmask)
        ldddem = pcr.ifthen(pcr.boolean(mask), ldddem)
        dem = pcr.ifthen(pcr.boolean(mask), dem)

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        outletpointX = float(
            configget(config, "settings", "outflowpointX", "0.0"))
        outletpointY = float(
            configget(config, "settings", "outflowpointY", "0.0"))
    else:
        print("river file specified.....")
        try:
            outletpointX = float(
                configget(config, "settings", "outflowpointX", "0.0"))
            outletpointY = float(
                configget(config, "settings", "outflowpointY", "0.0"))
        except:
            print(
                "Need to specify the river outletpoint (a point at the end of the river within the current map)"
            )
            exit(1)

        outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5)
        pcr.report(outletpointmap, step1dir + "/outletpoint.map")
        rivshpattr = config.get("files", "riverattr")
        pcr.report(dem * 0.0, step1dir + "/nilmap.map")
        thestr = ("gdal_translate -of GTiff " + step1dir + "/nilmap.map " +
                  step1dir + "/riverburn.tif")
        os.system(thestr)
        os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp +
                  " " + step1dir + "/riverburn.tif")
        thestr = ("gdal_translate -of PCRaster " + step1dir +
                  "/riverburn.tif " + step1dir + "/riverburn.map")
        os.system(thestr)
        riverburn = pcr.readmap(step1dir + "/riverburn.map")
        # Determine regional slope assuming that is the way the river should run
        pcr.setglobaloption("unitcell")
        demregional = pcr.windowaverage(dem, 100)
        ldddem = pcr.ifthenelse(riverburn >= 1.0, demregional - 1000, dem)

    pcr.setglobaloption("unittrue")
    upscalefactor = int(csize / pcr.celllength())

    print("Creating ldd...")
    ldd = tr.lddcreate_save(
        step1dir + "/ldd.map",
        ldddem,
        recreate,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    print("Determining streamorder...")
    stro = pcr.streamorder(ldd)
    pcr.report(stro, step1dir + "/streamorder.map")
    strdir = pcr.ifthen(stro >= strRiver, stro)
    pcr.report(strdir, step1dir + "/streamorderrive.map")
    pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)),
               step1dir + "/rivers.map")

    pcr.setglobaloption("unittrue")
    # outlet (and other gauges if given)
    # TODO: check if x/y are set; if not, skip this
    print("Outlet...")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)

    if snapgaugestoriver:
        print("Snapping gauges to nearest river cells...")
        pcr.report(outlmap, step1dir + "/orggauges.map")
        outlmap = tr.snaptomap(outlmap, strdir)

    # noutletmap = tr.points_to_map(dem,XX,YY,0.5)
    # pcr.report(noutletmap,'noutlet.map')

    pcr.report(outlmap, step1dir + "/gauges.map")

    # check if there is a pre-define catchment map
    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask, finding outlet")
        # Find catchment (overall)
        outlet = tr.find_outlet(ldd)
        sub = pcr.subcatch(ldd, outlet)
        pcr.report(sub, step1dir + "/catchment_overall.map")
    else:
        print("reading and converting catchment mask.....")
        os.system("resample -r " + str(initialscale) + " " + catchmask + " " +
                  step1dir + "/catchment_overall.map")
        sub = pcr.readmap(step1dir + "/catchment_overall.map")

    print("Scatch...")
    sd = pcr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap))
    pcr.report(sd, step1dir + "/scatch.map")

    pcr.setglobaloption("unitcell")
    print("Upscalefactor: " + str(upscalefactor))

    if upscalefactor > 1:
        gc.collect()
        print("upscale river length1 (checkerboard map)...")
        ck = tr.checkerboard(dem, upscalefactor)
        pcr.report(ck, step1dir + "/ck.map")
        pcr.report(dem, step1dir + "/demck.map")
        print("upscale river length2...")
        fact = tr.area_riverlength_factor(ldd, ck, upscalefactor)
        pcr.report(fact, step1dir + "/riverlength_fact.map")

        # print("make dem statistics...")
        dem_ = pcr.areaaverage(dem, ck)
        pcr.report(dem_, step1dir + "/demavg.map")

        print("Create DEM statistics...")
        dem_ = pcr.areaminimum(dem, ck)
        pcr.report(dem_, step1dir + "/demmin.map")
        dem_ = pcr.areamaximum(dem, ck)
        pcr.report(dem_, step1dir + "/demmax.map")
        # calculate percentiles
        order = pcr.areaorder(dem, ck)
        n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck)
        #: calculate 25 percentile
        perc = tr.area_percentile(dem, ck, n, order, 25.0)
        pcr.report(perc, step1dir + "/dem25.map")
        perc = tr.area_percentile(dem, ck, n, order, 10.0)
        pcr.report(perc, step1dir + "/dem10.map")
        perc = tr.area_percentile(dem, ck, n, order, 50.0)
        pcr.report(perc, step1dir + "/dem50.map")
        perc = tr.area_percentile(dem, ck, n, order, 33.0)
        pcr.report(perc, step1dir + "/dem33.map")
        perc = tr.area_percentile(dem, ck, n, order, 66.0)
        pcr.report(perc, step1dir + "/dem66.map")
        perc = tr.area_percentile(dem, ck, n, order, 75.0)
        pcr.report(perc, step1dir + "/dem75.map")
        perc = tr.area_percentile(dem, ck, n, order, 90.0)
        pcr.report(perc, step1dir + "/dem90.map")
    else:
        print("No fancy scaling done. Going strait to step2....")
        pcr.report(dem, step1dir + "/demavg.map")
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
        gdalstr = ("gdal_translate  -projwin " + str(Xul) + " " + str(Yul) +
                   " " + str(Xlr) + " " + str(Ylr) + " -of PCRaster  ")
        # gdalstr = "gdal_translate  -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster  "
        print(gdalstr)
        pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map")
        # Now use gdal to convert the maps
        os.system(gdalstr + step1dir + "/wflow_riverlength_fact.map" + " " +
                  step2dir + "/wflow_riverlength_fact.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_dem.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_demmin.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_demmax.map")
        os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir +
                  "/wflow_gauges.map")
        os.system(gdalstr + step1dir + "/rivers.map" + " " + step2dir +
                  "/wflow_river.map")
        os.system(gdalstr + step1dir + "/streamorder.map" + " " + step2dir +
                  "/wflow_streamorder.map")
        os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir +
                  "/wflow_outlet.map")
        os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir +
                  "/wflow_catchment.map")
        os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir +
                  "/wflow_ldd.map")
        os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir +
                  "/wflow_subcatch.map")

        if lu_water:
            os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map")

        if lu_paved:
            os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map")

        try:
            lumap = config.get("files", "landuse")
        except:
            print("no landuse map...creating uniform map")
            # clone=pcr.readmap(step2dir + "/wflow_dem.map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map")
        else:
            os.system("resample --clone " + step2dir + "/wflow_dem.map " +
                      lumap + " " + step2dir + "/wflow_landuse.map")

        try:
            soilmap = config.get("files", "soil")
        except:
            print("no soil map..., creating uniform map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map")
        else:
            os.system("resample --clone " + step2dir + "/wflow_dem.map " +
                      soilmap + " " + step2dir + "/wflow_soil.map")

    ##################################
    # Step 2 starts here
    ##################################

    pcr.setclone(step2dir + "/cutout.map")

    strRiver = int(configget(config, "settings", "riverorder_step2", "4"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35"))
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(
        configget(config, "settings", "lddoutflowdepth", "1E35"))
    lddmethod = configget(config, "settings", "lddmethod", "dem")
    lddglobaloption = configget(config, "settings", "lddglobaloption",
                                "lddout")
    pcr.setglobaloption(lddglobaloption)

    nrrow = round(abs(Yul - Ylr) / csize)
    nrcol = round(abs(Xlr - Xul) / csize)
    mapstr = ("mapattr -s -S -R " + str(nrrow) + " -C " + str(nrcol) + " -l " +
              str(csize) + " -x " + str(Xul) + " -y " + str(Yul) +
              " -P yb2t " + step2dir + "/cutout.map")

    os.system(mapstr)
    pcr.setclone(step2dir + "/cutout.map")

    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    if lu_water:
        os.system("resample --clone " + step2dir + "/cutout.map " + lu_water +
                  " " + step2dir + "/wflow_waterfrac.map")

    if lu_paved:
        os.system("resample --clone " + step2dir + "/cutout.map " + lu_paved +
                  " " + step2dir + "/PathFrac.map")

    #
    try:
        lumap = config.get("files", "landuse")
    except:
        print("no landuse map...creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_landuse.map")
    else:
        os.system("resample --clone " + step2dir + "/cutout.map " + lumap +
                  " " + step2dir + "/wflow_landuse.map")

    try:
        soilmap = config.get("files", "soil")
    except:
        print("no soil map..., creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_soil.map")
    else:
        os.system("resample --clone " + step2dir + "/cutout.map " + soilmap +
                  " " + step2dir + "/wflow_soil.map")

    resamplemaps(step1dir, step2dir)

    dem = pcr.readmap(step2dir + "/wflow_dem.map")
    demmin = pcr.readmap(step2dir + "/wflow_demmin.map")
    demmax = pcr.readmap(step2dir + "/wflow_demmax.map")
    catchcut = pcr.readmap(step2dir + "/catchment_cut.map")
    # now apply the area of interest (catchcut) to the DEM
    # dem=pcr.ifthen(catchcut >=1 , dem)
    #

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
    else:
        print("river file speficied.....")
        rivshpattr = config.get("files", "riverattr")
        pcr.report(dem * 0.0, step2dir + "/nilmap.map")
        thestr = ("gdal_translate -of GTiff " + step2dir + "/nilmap.map " +
                  step2dir + "/wflow_riverburnin.tif")
        os.system(thestr)
        os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp +
                  " " + step2dir + "/wflow_riverburnin.tif")
        thestr = ("gdal_translate -of PCRaster " + step2dir +
                  "/wflow_riverburnin.tif " + step2dir +
                  "/wflow_riverburnin.map")
        os.system(thestr)
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
        # ldddem = pcr.ifthenelse(riverburn >= 1.0, dem -1000 , dem)

    # Only burn within the original catchment
    riverburn = pcr.ifthen(pcr.scalar(catchcut) >= 1, riverburn)
    # Now setup a very high wall around the catchment that is scale
    # based on the distance to the catchment so that it slopes away from the
    # catchment
    if lddmethod != "river":
        print("Burning in highres-river ...")
        disttocatch = pcr.spread(pcr.nominal(catchcut), 0.0, 1.0)
        demmax = pcr.ifthenelse(
            pcr.scalar(catchcut) >= 1.0,
            demmax,
            demmax + (pcr.celllength() * 100.0) / disttocatch,
        )
        pcr.setglobaloption("unitcell")
        demregional = pcr.windowaverage(demmin, 100)
        demburn = pcr.cover(
            pcr.ifthen(pcr.boolean(riverburn), demregional - 100.0), demmax)
    else:
        print("using average dem..")
        demburn = dem

    ldd = tr.lddcreate_save(
        step2dir + "/ldd.map",
        demburn,
        True,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    # Find catchment (overall)
    outlet = tr.find_outlet(ldd)
    sub = pcr.subcatch(ldd, outlet)
    pcr.report(sub, step2dir + "/wflow_catchment.map")
    pcr.report(outlet, step2dir + "/wflow_outlet.map")

    # make river map
    strorder = pcr.streamorder(ldd)
    pcr.report(strorder, step2dir + "/wflow_streamorder.map")

    river = pcr.ifthen(pcr.boolean(strorder >= strRiver), strorder)
    pcr.report(river, step2dir + "/wflow_river.map")

    # make subcatchments
    # os.system("col2map --clone " + step2dir + "/cutout.map gauges.col " + step2dir + "/wflow_gauges.map")
    exec("X=tr.array(" + gauges_x + ")")
    exec("Y=tr.array(" + gauges_y + ")")

    pcr.setglobaloption("unittrue")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)
    pcr.report(outlmap, step2dir + "/wflow_gauges_.map")

    if snapgaugestoriver:
        print("Snapping gauges to river")
        pcr.report(outlmap, step2dir + "/wflow_orggauges.map")
        outlmap = tr.snaptomap(outlmap, river)

    outlmap = pcr.ifthen(outlmap > 0, outlmap)
    pcr.report(outlmap, step2dir + "/wflow_gauges.map")

    scatch = pcr.subcatch(ldd, outlmap)
    pcr.report(scatch, step2dir + "/wflow_subcatch.map")
Exemple #35
def simplereservoir(
    storage,
    inflow,
    ResArea,
    maxstorage,
    target_perc_full,
    maximum_Q,
    demand,
    minimum_full_perc,
    ReserVoirLocs,
    precip,
    pet,
    ReservoirSimpleAreas,
    timestepsecs=86400,
):
    """

    :param storage: initial storage m^3
    :param inflow: inflow m^3/s
    :param maxstorage: maximum storage (above which water is spilled) m^3
    :param target_perc_full: target fraction full (of max storage) -
    :param maximum_Q: maximum Q to release m^3/s if below spillway
    :param demand: water demand (all combined) m^3/s
    :param minimum_full_perc: target minimum full fraction (of max storage) -
    :param ReserVoirLocs: map with reservoir locations
    :param timestepsecs: timestep of the model in seconds (default = 86400)
    :return: storage (m^3), outflow (m^3/s), PercentageFull (0-1), Release (m^3/sec)
    """

    inflow = pcr.ifthen(pcr.boolean(ReserVoirLocs), inflow)

    prec_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(precip, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )
    pet_av = pcr.cover(
        pcr.ifthen(
            pcr.boolean(ReserVoirLocs), pcr.areaaverage(pet, ReservoirSimpleAreas)
        ),
        pcr.scalar(0.0),
    )

    oldstorage = storage
    storage = (
        storage
        + (inflow * timestepsecs)
        + (prec_av / 1000.0) * ResArea
        - (pet_av / 1000.0) * ResArea
    )

    percfull = ((storage + oldstorage) * 0.5) / maxstorage
    # first determine minimum (environmental) flow using a simple sigmoid curve to scale for target level
    fac = sCurve(percfull, a=minimum_full_perc, c=30.0)
    demandRelease = pcr.min(fac * demand * timestepsecs, storage)
    storage = storage - demandRelease

    # Re-determine percfull
    percfull = ((storage + oldstorage) * 0.5) / maxstorage

    wantrel = pcr.max(0.0, storage - (maxstorage * target_perc_full))
    # Assume extra maximum Q if spilling
    overflowQ = (percfull - 1.0) * (storage - maxstorage)
    torelease = pcr.min(wantrel, overflowQ + maximum_Q * timestepsecs)
    storage = storage - torelease
    outflow = (torelease + demandRelease) / timestepsecs
    percfull = storage / maxstorage

    return storage, outflow, percfull, prec_av, pet_av, demandRelease / timestepsecs