Code example #1
 def spatial(self):
     """Computes requruired biosafe output for a spatial domain"""
     
     #-determine a representative point for each floodplain section
     points = pcrr.representativePoint(self.sections)
     clone = pcr.defined(self.sections)
     pcr.setglobaloption('unittrue')
     xcoor = pcr.xcoordinate(clone)
     ycoor = pcr.ycoordinate(clone)
     geoDf = pcrr.getCellValues(points, \
                             mapList = [points, xcoor, ycoor],\
                             columns = ['ID', 'xcoor', 'ycoor'])        
     geoDf.set_index('ID', inplace=True, drop=False)
     geoDf.drop(['rowIdx', 'colIdx', 'ID'], axis=1, inplace=True)
     
     #-compute the required biosafe parameters for all sections
     sectionIDs = np.unique(pcr.pcr2numpy(self.sections,-9999))[1:]
     ll = []
     for sectionID in sectionIDs:
         ll.append(self.sectionScores(sectionID))
     paramLL = list(zip(*ll))  # list() so it can be indexed below (works in Python 2 and 3)
             
     dfParamLL = []
     for ii in range(len(self.params)):
         bsScores = pd.concat(paramLL[ii], axis=1).T
         bsScores = bsScores.join(geoDf)
         bsScores.index.name = 'ID'
         bsScores.columns.name = self.params[ii]
         dfParamLL.append(bsScores)
     
     return dfParamLL
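The regrouping above hinges on transposing a list of per-section results into per-parameter groups with zip(*ll). A minimal, self-contained sketch of that pattern with plain pandas objects (the parameter names and section IDs are made up for illustration):

import pandas as pd

# Hypothetical per-section results: one Series per parameter for each section.
params = ['TFI', 'FI']
sections = [1, 2]
ll = [[pd.Series({'speciesA': float(sec)}, name=sec) for _ in params] for sec in sections]

# Transpose: paramLL[i] holds the Series of parameter i for all sections.
paramLL = list(zip(*ll))
for i, param in enumerate(params):
    scores = pd.concat(paramLL[i], axis=1).T   # one row per section
    scores.index.name = 'ID'
    scores.columns.name = param
    print(scores)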
Code example #2
    def initial(self):

        """
    *Required*
    
    Initial part of the model, executed only once. It reads all static model
    information (parameters) and sets-up the variables used in modelling.
    
    This function is required. The contents is free. However, in order to
    easily connect to other models it is advised to adhere to the directory
    structure used in the other models.
    
    """
        #: pcraster option to calculate with units or cells. Not really an issue
        #: in this model but always good to keep in mind.
        pcr.setglobaloption("unittrue")

        self.timestepsecs = int(
            configget(self.config, "model", "timestepsecs", "86400")
        )
        self.basetimestep = 86400
        # Reads all parameter from disk
        self.wf_updateparameters()
        self.wf_multparameters()    # needed so parameters can be altered in the inifile
        self.logger.info("Starting Dynamic run...")
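configget above returns an entry from the ini file, falling back to the given default when the section or option is missing. The snippet below is a simplified stand-in (not the actual wflow implementation) that matches the call pattern used here:

import configparser

def configget(config, section, option, default):
    """Return config[section][option] as a string, or default if absent."""
    try:
        return config.get(section, option)
    except (configparser.NoSectionError, configparser.NoOptionError):
        return default

config = configparser.ConfigParser()
config.read_string("[model]\ntimestepsecs = 3600\n")
timestepsecs = int(configget(config, "model", "timestepsecs", "86400"))  # -> 3600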
Code example #3
def lattometres(lat):
    """"
    Determines the length of one degree lat/long at a given latitude (in meter).
    Code taken from http:www.nga.mil/MSISiteContent/StaticFiles/Calculators/degree.html
    Input: map with lattitude values for each cell
    Returns: length of a cell lat, length of a cell long
    """
    # radlat = pcr.spatial(lat * ((2.0 * math.pi)/360.0))
    # radlat = lat * (2.0 * math.pi)/360.0
    pcr.setglobaloption("degrees")
    radlat = pcr.spatial(lat)  # pcraster cos/sin work in degrees!

    m1 = 111132.92  # latitude calculation term 1
    m2 = -559.82  # latitude calculation term 2
    m3 = 1.175  # latitude calculation term 3
    m4 = -0.0023  # latitude calculation term 4
    p1 = 111412.84  # longitude calculation term 1
    p2 = -93.5  # longitude calculation term 2
    p3 = 0.118  # longitude calculation term 3
    # # Calculate the length of a degree of latitude and longitude in meters

    latlen = (m1 + (m2 * pcr.cos(2.0 * radlat)) +
              (m3 * pcr.cos(4.0 * radlat)) + (m4 * pcr.cos(6.0 * radlat)))
    longlen = ((p1 * pcr.cos(radlat)) + (p2 * pcr.cos(3.0 * radlat)) +
               (p3 * pcr.cos(5.0 * radlat)))

    return latlen, longlen
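The series expansion can be evaluated without PCRaster to sanity-check the coefficients. A NumPy version of the same formula (numpy trigonometry works in radians, hence the conversion); at 52 degrees latitude it gives roughly 111.3 km per degree of latitude and 68.7 km per degree of longitude:

import numpy as np

def lattometres_np(lat_deg):
    """Length of one degree of latitude and longitude (m) at latitude lat_deg."""
    lat = np.radians(np.asarray(lat_deg, dtype=float))
    latlen = (111132.92 - 559.82 * np.cos(2 * lat)
              + 1.175 * np.cos(4 * lat) - 0.0023 * np.cos(6 * lat))
    longlen = (111412.84 * np.cos(lat) - 93.5 * np.cos(3 * lat)
               + 0.118 * np.cos(5 * lat))
    return latlen, longlen

print(lattometres_np(52.0))  # approx (111267.3, 68677.7)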
Code example #4
File: pcrut.py    Project: openstreams/wflow
def lattometres(lat):
    """"
    Determines the length of one degree lat/long at a given latitude (in meter).
    Code taken from http:www.nga.mil/MSISiteContent/StaticFiles/Calculators/degree.html
    Input: map with lattitude values for each cell
    Returns: length of a cell lat, length of a cell long
    """
    # radlat = pcr.spatial(lat * ((2.0 * math.pi)/360.0))
    # radlat = lat * (2.0 * math.pi)/360.0
    pcr.setglobaloption("degrees")
    radlat = pcr.spatial(lat)  # pcraster cos/sin work in degrees!

    m1 = 111132.92  # latitude calculation term 1
    m2 = -559.82  # latitude calculation term 2
    m3 = 1.175  # latitude calculation term 3
    m4 = -0.0023  # latitude calculation term 4
    p1 = 111412.84  # longitude calculation term 1
    p2 = -93.5  # longitude calculation term 2
    p3 = 0.118  # longitude calculation term 3
    # # Calculate the length of a degree of latitude and longitude in meters

    latlen = (
        m1
        + (m2 * pcr.cos(2.0 * radlat))
        + (m3 * pcr.cos(4.0 * radlat))
        + (m4 * pcr.cos(6.0 * radlat))
    )
    longlen = (
        (p1 * pcr.cos(radlat))
        + (p2 * pcr.cos(3.0 * radlat))
        + (p3 * pcr.cos(5.0 * radlat))
    )

    return latlen, longlen
Code example #5
def getCellValues(pointMap, mapList=[], columns=[]):
    """ Get the cell values of the maps in mapList at the locations of pointMap
    """
    #-determine where the indices are True
    arr = pcr.pcr2numpy(pcr.boolean(pointMap), 0).astype('bool')
    indices = np.where(arr)

    #-loop over the points in pointMap
    pcr.setglobaloption('unitcell')
    ll = []
    for rowIdx, colIdx in zip(indices[0], indices[1]):
        line = []
        line.append(rowIdx)
        line.append(colIdx)
        for pcrMap in mapList:
            line.append(
                pcr.cellvalue(pcrMap, int(rowIdx + 1),
                              int(colIdx + 1))[0])
        ll.append(line)

    #-optionally add column names
    if len(columns) == len(mapList):
        columnNames = ['rowIdx', 'colIdx'] + columns
    else:
        columnNames = ['rowIdx', 'colIdx'] + \
                    ['map' + str(ii) for ii in range(1, 1 + len(mapList), 1)]

    #-return as Pandas DataFrame
    return pd.DataFrame(np.array(ll), columns=columnNames)
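A self-contained illustration of what getCellValues assembles, with plain NumPy arrays standing in for the PCRaster maps (pcr.cellvalue uses 1-based row/column indices, hence the +1 in the function above):

import numpy as np
import pandas as pd

point_arr = np.array([[0, 1], [1, 0]], dtype=bool)    # stand-in for pointMap
value_arr = np.array([[10., 20.], [30., 40.]])         # stand-in for one map in mapList

rows, cols = np.where(point_arr)
records = [[r, c, value_arr[r, c]] for r, c in zip(rows, cols)]
df = pd.DataFrame(records, columns=['rowIdx', 'colIdx', 'value'])
print(df)   # two rows: (0, 1, 20.0) and (1, 0, 30.0)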
Code example #6
File: wflow_lib.py    Project: ninjach/wflow
def getgridparams():
    """ return grid parameters in a python friendly way

    Output:
        [xul, yul, xsize, ysize, rows, cols, xlr, ylr]

        - xul - x upper left centre
        - yul - y upper left centre
        - xsize - size of a cell in x direction
        - ysize - size of a cell in y direction
        - cols - number of columns
        - rows - number of rows
        - xlr -  x lower right centre
        - ylr -  y lower right centre
    """
    # This is the default, but add for safety...
    pcr.setglobaloption("coorcentre")
    # x and y cell sizes are assumed equal for now
    xy = pcr.pcr2numpy(pcr.celllength(), np.nan)[0, 0]
    xu = pcr.pcr2numpy(pcr.xcoordinate(1), np.nan)[0, 0]
    yu = pcr.pcr2numpy(pcr.ycoordinate(1), np.nan)[0, 0]
    ylr = pcr.pcr2numpy(pcr.ycoordinate(1), np.nan)[getrows() - 1, getcols() - 1]
    xlr = pcr.pcr2numpy(pcr.xcoordinate(1), np.nan)[getrows() - 1, getcols() - 1]

    return [xu, yu, xy, xy, getrows(), getcols(), xlr, ylr]
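The lower-right cell centre follows directly from the upper-left centre, the cell size and the grid dimensions. A quick check with plain Python (the numbers are illustrative, not from a real clone map):

# 10 rows x 8 columns of 100 m cells, upper-left cell centre at (1050, 9950)
xul, yul, cellsize, rows, cols = 1050.0, 9950.0, 100.0, 10, 8
xlr = xul + (cols - 1) * cellsize   # 1750.0
ylr = yul - (rows - 1) * cellsize   # 9050.0
print(xlr, ylr)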
Code example #7
def zonalSumArea(nominalMap, areaClass):
    """Memory efficient method to sum up the surface area of the different 
        classes in the nominal map, separated by the regions in areaClass.
        
        input:
            nominalMap: nominal map, e.g. ecotope map
            areaClass: regions to compute surface areas over
    """ 
    #-create a pointMap of the output locations, one for each areaClass
    outputPointMap = pcrr.pointPerClass(areaClass)
    
    #-initiate output DataFrame
    dfInit = pcrr.getCellValues(outputPointMap, mapList = [areaClass], columns = ['areaClass'])
 
    #-loop over the classes in nominalMap and compute the summed area per areaClass
    IDs = np.unique(pcr.pcr2numpy(nominalMap, -9999))[1:]
    dfList = []
    for ID in IDs[:]:
        pcrID = pcr.nominal(ID)
        pcr.setglobaloption('unittrue')
        IDArea = pcr.ifthen(nominalMap == pcrID, pcr.cellarea())
        sectionSum = pcr.areatotal(IDArea, areaClass)
        df = pcrr.getCellValues(outputPointMap, [sectionSum], [ID])
        # df columns = rowIdx, colIdx, ID
        df = df.drop(['rowIdx', 'colIdx'], axis=1)
        dfList.append(df)
        
    dfOut = dfInit.join(dfList)
    #return dfInit, df, dfOut, dfList
    return dfOut
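Conceptually the function builds a table of summed surface area per nominal class and per region. A self-contained (but memory-hungry) pandas analogue with synthetic arrays, useful for checking results on small rasters; all names and values below are illustrative:

import numpy as np
import pandas as pd

ecotope = np.array([[1, 1, 2], [2, 2, 3]])        # stand-in for nominalMap
region = np.array([[10, 10, 10], [20, 20, 20]])   # stand-in for areaClass
cell_area = 25.0                                   # m2 per cell (uniform here)

df = pd.DataFrame({'ecotope': ecotope.ravel(), 'region': region.ravel()})
area_table = df.groupby(['region', 'ecotope']).size().unstack(fill_value=0) * cell_area
print(area_table)   # rows: regions, columns: ecotope classes, values: summed area in m2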
Code example #8
File: wflow_lib.py    Project: openstreams/wflow
def getgridparams():
    """ return grid parameters in a python friendly way

    Output:
        [xul, yul, xsize, ysize, rows, cols, xlr, ylr]

        - xul - x upper left centre
        - yul - y upper left centre
        - xsize - size of a cell in x direction
        - ysize - size of a cell in y direction
        - cols - number of columns
        - rows - number of rows
        - xlr -  x lower right centre
        - ylr -  y lower right centre
    """
    # This is the default, but add for safety...
    pcr.setglobaloption("coorcentre")
    # x and y cell sizes are assumed equal for now
    xy = pcr.pcr2numpy(pcr.celllength(), np.nan)[0, 0]
    xu = pcr.pcr2numpy(pcr.xcoordinate(1), np.nan)[0, 0]
    yu = pcr.pcr2numpy(pcr.ycoordinate(1), np.nan)[0, 0]
    ylr = pcr.pcr2numpy(pcr.ycoordinate(1), np.nan)[getrows() - 1, getcols() - 1]
    xlr = pcr.pcr2numpy(pcr.xcoordinate(1), np.nan)[getrows() - 1, getcols() - 1]

    return [xu, yu, xy, xy, getrows(), getcols(), xlr, ylr]
Code example #9
def volume_spread(ldd,
                  hand,
                  subcatch,
                  volume,
                  volume_thres=0.,
                  area_multiplier=1.,
                  iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand,
                              subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch) * area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch / surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # bizarre high inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max) / 2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0),
                                              subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch - average_depth_catch) / depth_catch,
                          depth_catch * 0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
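The loop above is a bisection on a water level above the nearest drain: the level is raised or lowered until the average ponded depth over the subcatchment matches the depth implied by the flood volume. A minimal NumPy sketch of the same idea for a single subcatchment with a synthetic HAND array:

import numpy as np

hand = np.array([0.0, 0.2, 0.5, 1.0, 2.0])    # synthetic HAND values (m) in one subcatchment
cell_area = 100.0                              # m2 per cell
volume = 120.0                                 # m3 of flood volume to spread
depth_target = volume / (hand.size * cell_area)  # average depth the level must reproduce

level_min, level_max = 0.0, 32.0
for _ in range(15):
    level = 0.5 * (level_min + level_max)
    avg_depth = np.maximum(level - hand, 0).mean()
    if avg_depth < depth_target:   # too little water spread -> raise the lower bound
        level_min = level
    else:                          # too much water spread -> lower the upper bound
        level_max = level

inundation = np.maximum(level - hand, 0)       # per-cell inundation depth estimate
print(level, inundation)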
Code example #10
File: wflow_lintul.py    Project: teije01/wflow
    def initial(self):
        """
        *Required*

        Initial part of the model, executed only once. It reads all static model
        information (parameters) and sets up the variables used in modelling.

        This function is required. The contents are free. However, in order to
        easily connect to other models it is advised to adhere to the directory
        structure used in the other models.

        """
        #: pcraster option to calculate with units or cells. Not really an issue
        #: in this model but always good to keep in mind.
        pcr.setglobaloption("unittrue")

        self.timestepsecs = int(
            configget(self.config, "model", "timestepsecs", "86400"))
        self.basetimestep = 86400

        # Reads all parameter from disk
        self.wf_updateparameters()
        self.logger.info("Starting LINTUL Dynamic Crop Growth Simulation...")

        # Read a static map of the rice area. To be replaced with real-time radar images of rice area in the future? Todo
        # Simulation is mostly restricted to the rice area (to be checked), which saves calculation time. Todo
        wflow_ricemask = configget(self.config, "model", "wflow_ricemask",
                                   "staticmaps/wflow_ricemask.map")
        self.ricemask = self.wf_readmap(os.path.join(self.Dir, wflow_ricemask),
                                        0.0,
                                        fail=True)
        # Create a PCRaster boolean map too:
        self.ricemask_BOOL = pcr.boolean(self.ricemask)
        self.Pausedays = self.Pause + 1

        # Calculate initial development stage (at the time of transplanting)
        self.DVSI = self.TSUMI / self.TSUMAN

        # Turn all interpolation tables (model parameters) into instances of the Interpol_Obj class
        self.RDRTB = Interpol_Obj(self.RDRTB)
        self.PHOTTB = Interpol_Obj(self.PHOTTB)
        self.SLACF = Interpol_Obj(self.SLACF)
        self.FRTTB = Interpol_Obj(self.FRTTB)
        self.FLVTB = Interpol_Obj(self.FLVTB)
        self.FSTTB = Interpol_Obj(self.FSTTB)
        self.FSOTB = Interpol_Obj(self.FSOTB)

        # Calculate the initial leaf area correction function as a function of development stage, DVS.
        SLACFI = self.SLACF.lookup_linear(self.DVSI)
        # Multiply with specific leaf area constant => initial specific leaf area
        ISLA = self.SLAC * SLACFI
        # Multiply with weight of green leaves to obtain initial LAI
        self.LAII = self.WLVGI * ISLA
        # Calculate total temperature sum from transplanting to crop maturity:
        self.TTSUM = self.TSUMAN + self.TSUMMT
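lookup_linear is, as far as this snippet shows, a piecewise-linear table lookup at the current development stage; the actual Interpol_Obj class is defined elsewhere in wflow_lintul. A stand-in using numpy.interp with made-up table values, only to illustrate the call:

import numpy as np

# Hypothetical (DVS, value) pairs of an interpolation table such as SLACF.
dvs_points = np.array([0.0, 0.5, 1.0, 2.0])
value_points = np.array([1.0, 0.9, 0.8, 0.7])

DVSI = 0.3                                          # development stage at transplanting
SLACFI = np.interp(DVSI, dvs_points, value_points)  # -> 0.94
print(SLACFI)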
Code example #11
 def testDirectionalArray2Raster(self):
   pcraster.setclone("boolean_Expr.map")
   pcraster.setglobaloption("degrees")
   try:
     a = numpy.array([ [math.radians(350),math.radians(0),math.radians(0.01)],\
        [20,math.radians(350),math.radians(21)],\
        [math.radians(359),math.radians(40),math.radians(0)] ])
     result = pcraster.numpy2pcr(pcraster.Directional, a, 20)
     self.failUnless(self.mapEqualsValidated(result, "directional_Result2.map"), "test1: %s" % ("Result and validated result are not the same"))
   except Exception as exception:
     self.failUnless(False, "test1: %s" % (str(exception)))
Code example #12
def representativePoint(nominalMap):
    """Select a representative point for a nominal map
    """
    pcr.setglobaloption('unitcell')
    filled = pcr.cover(nominalMap, 0)
    edges = pcr.windowdiversity(filled, 3) > 1
    edges = pcr.ifthen(pcr.defined(nominalMap), edges)
    edges = map_edges(nominalMap) | edges
    dist = pcr.spread(edges, 0, 1)
    dist = dist + pcr.uniform(pcr.defined(nominalMap))
    points = dist == pcr.areamaximum(dist, nominalMap)
    return pcr.ifthen(points, nominalMap)
Code example #13
def volume_spread(ldd,
                  hand,
                  subcatch,
                  volume,
                  volume_thres=0.,
                  cell_surface=1.,
                  iterations=15,
                  logging=logging,
                  order=0):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        cell_surface=1. -- pcraster object float32 or scalar, surface area of each cell (m2), used to convert cell counts to area
        iterations=15 -- number of iterations to use
        logging=logging -- logger used for the per-iteration debug messages
        order=0 -- index used only in the commented-out debug output
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand,
                              subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch) * pcr.areaaverage(
        cell_surface, subcatch)  # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch / surface  # meters water disc averaged over subcatchment
    # ilt(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # bizarre high inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max) / 2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0),
                                              subcatch)
        error = pcr.cover((depth_catch - average_depth_catch) / depth_catch,
                          depth_catch * 0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
Code example #14
def map_edges(clone):
    """Boolean map true map edges, false elsewhere"""

    pcr.setglobaloption('unittrue')
    xmin, xmax, ymin, ymax, nr_rows, nr_cols, cell_size = clone_attributes()
    # Both branches are true, so this yields a boolean map that is true everywhere;
    # the x/y coordinates below are then generated for every cell of the clone.
    clone = pcr.ifthenelse(pcr.defined(clone), pcr.boolean(1), pcr.boolean(1))
    x_coor = pcr.xcoordinate(clone)
    y_coor = pcr.ycoordinate(clone)
    north = y_coor > (ymax - cell_size)
    south = y_coor < (ymin + cell_size)
    west = x_coor < (xmin + cell_size)
    east = x_coor > (xmax - cell_size)
    edges = north | south | west | east
    return edges
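The same edge test can be reproduced against plain coordinate arrays; a small NumPy sketch mirroring the comparisons above (synthetic extent rather than a PCRaster clone):

import numpy as np

nr_rows, nr_cols, cell_size = 4, 5, 10.0
xmin, ymin = 0.0, 0.0
xmax, ymax = xmin + nr_cols * cell_size, ymin + nr_rows * cell_size

# Cell-centre coordinates, row 0 at the top as in a PCRaster map.
x = xmin + (np.arange(nr_cols) + 0.5) * cell_size
y = ymax - (np.arange(nr_rows) + 0.5) * cell_size
x_coor, y_coor = np.meshgrid(x, y)

edges = ((y_coor > ymax - cell_size) | (y_coor < ymin + cell_size) |
         (x_coor < xmin + cell_size) | (x_coor > xmax - cell_size))
print(edges.astype(int))   # ones on the outer ring of cells, zeros inside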
Code example #15
File: wflow_flood_lib.py    Project: Imme1992/wflow
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., area_multiplier=1., iterations=15):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        area_multiplier=1. -- in case the maps are not in m2, set a multiplier other than 1. to convert
        iterations=15 -- number of iterations to use
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unittrue")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    # pcr.report(dem_min, 'dem_min.map')
    dem_norm = hand - dem_min
    # pcr.report(dem_norm, 'dem_norm.map')
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*area_multiplier
    pcr.report(surface, 'surface.map')

    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    # pcr.report(volume_catch, 'volume_catch.map')

    depth_catch = volume_catch/surface
    pcr.report(depth_catch, 'depth_catch.map')

    dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0))  # bizarre high inundation depth
    dem_min = pcr.scalar(0.)
    for n in range(iterations):
        print('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # pcr.report(dem_av, 'dem_av00.{:03d}'.format(n + 1))
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        # pcr.report(average_depth_catch, 'depth_c0.{:03d}'.format(n + 1))
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        # pcr.report(error, 'error000.{:03d}'.format(n + 1))
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    # error_abs = np.abs(error)  # TODO: not needed probably, remove
    inundation = pcr.max(dem_av - dem_norm, 0)
    return inundation
Code example #16
def pcr2col(listOfMaps, MV, selection='ONE_TRUE'):
    """converts a set of maps to a column array: X, Y, map values
       selection can be set to ALL, ALL_TRUE, ONE_TRUE"""

    #-intersect all maps and get X and Y coordinates
    intersection = pcr.boolean(pcr.cover(listOfMaps[0], 0))
    for mapX in listOfMaps[1:]:
        intersection = intersection | pcr.boolean(pcr.cover(mapX, 0))
    pcr.setglobaloption("unittrue")
    xCoor = pcr.ifthen(intersection, pcr.xcoordinate(intersection))
    yCoor = pcr.ifthen(intersection, pcr.ycoordinate(intersection))
    pcr.setglobaloption("unitcell")

    #-initiate outArray with xCoor and yCoor
    xCoorArr = pcr.pcr2numpy(xCoor, MV)
    yCoorArr = pcr.pcr2numpy(yCoor, MV)
    nRows, nCols = xCoorArr.shape
    nrCells = nRows * nCols
    outArray = np.hstack((xCoorArr.reshape(nrCells,
                                           1), yCoorArr.reshape(nrCells, 1)))

    #-add subsequent maps
    for mapX in listOfMaps:
        arr = pcr.pcr2numpy(mapX, MV).reshape(nrCells, 1)
        outArray = np.hstack((outArray, arr))

    #-subset output based on selection criterion
    ll = []
    nrMaps = len(listOfMaps)
    if selection == 'ONE_TRUE':
        for line in outArray:
            nrMV = len(line[line == MV])
            if nrMV < nrMaps:
                ll.append(line)
            else:
                pass
        outArray = np.array(ll)
    elif selection == 'ALL_TRUE':
        for line in outArray:
            if MV not in line:
                ll.append(line)
            else:
                pass
        outArray = np.array(ll)
    elif selection == 'ALL':
        pass
    return outArray
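A compact NumPy illustration of the column layout produced above and of the ONE_TRUE versus ALL_TRUE filters, with -999 standing in for the missing value MV:

import numpy as np

MV = -999.0
# Columns: x, y, map1, map2 (as produced by pcr2col for two input maps).
out = np.array([[0., 0., 1., 2.],
                [1., 0., MV, 5.],
                [2., 0., MV, MV]])

maps = out[:, 2:]
one_true = out[(maps != MV).any(axis=1)]   # rows with at least one valid map value
all_true = out[(maps != MV).all(axis=1)]   # rows where every map has a value
print(len(one_true), len(all_true))        # 2 and 1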
Code example #17
File: wflow_flood_lib.py    Project: edwinkost/wflow
def volume_spread(ldd, hand, subcatch, volume, volume_thres=0., cell_surface=1., iterations=15, logging=logging, order=0, neg_HAND=None):
    """
    Estimate 2D flooding from a 1D simulation per subcatchment reach
    Input:
        ldd -- pcraster object direction, local drain directions
        hand -- pcraster object float32, elevation data normalised to nearest drain
        subcatch -- pcraster object ordinal, subcatchments with IDs
        volume -- pcraster object float32, scalar flood volume (i.e. m3 volume outside the river bank within subcatchment)
        volume_thres=0. -- scalar threshold, at least this amount of m3 of volume should be present in a catchment
        cell_surface=1. -- pcraster object float32 or scalar, surface area of each cell (m2), used to convert cell counts to area
        iterations=15 -- number of iterations to use
        logging=logging -- logger used for the per-iteration debug messages
        order=0 -- index used only in the commented-out debug output
        neg_HAND -- if set to 1, HAND maps can have negative values when elevation outside of stream is lower than
        stream (for example when there are natural embankments)
    Output:
        inundation -- pcraster object float32, scalar inundation estimate
    """
    #initial values
    pcr.setglobaloption("unitcell")
    dem_min = pcr.areaminimum(hand, subcatch)  # minimum elevation in subcatchments
    dem_norm = hand - dem_min
    # surface of each subcatchment
    surface = pcr.areaarea(subcatch)*pcr.areaaverage(cell_surface, subcatch) # area_multiplier
    error_abs = pcr.scalar(1e10)  # initial error (very high)
    volume_catch = pcr.areatotal(volume, subcatch)
    depth_catch = volume_catch/surface  # meters water disc averaged over subcatchment
    # ilt(depth_catch, 'depth_catch_{:02d}.map'.format(order))
    # pcr.report(volume, 'volume_{:02d}.map'.format(order))
    if neg_HAND == 1:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(-32.))  # bizarre high inundation depth☻
        dem_min = pcr.scalar(-32.)
    else:
        dem_max = pcr.ifthenelse(volume_catch > volume_thres, pcr.scalar(32.),
                             pcr.scalar(0.))  # bizarre high inundation depth☻
        dem_min = pcr.scalar(0.)
    for n in range(iterations):
        logging.debug('Iteration: {:02d}'.format(n + 1))
        #####while np.logical_and(error_abs > error_thres, dem_min < dem_max):
        dem_av = (dem_min + dem_max)/2
        # compute value at dem_av
        average_depth_catch = pcr.areaaverage(pcr.max(dem_av - dem_norm, 0), subcatch)
        error = pcr.cover((depth_catch-average_depth_catch)/depth_catch, depth_catch*0)
        dem_min = pcr.ifthenelse(error > 0, dem_av, dem_min)
        dem_max = pcr.ifthenelse(error <= 0, dem_av, dem_max)
    inundation = pcr.max(dem_av - dem_norm, 0)
    pcr.setglobaloption('unittrue')
    return inundation
Code example #18
File: test.py    Project: gaoshuai/pcraster
  def test_5(self):
      """ test windowaverage and kernel size larger than raster """
      filename = "windowaverage_Expr.map"
      pcraster.setclone(filename)
      raster = pcraster.readmap(filename)

      result1 = windowaverage(raster, 18)
      result2 = mapmaximum(result1)
      value, isValid = pcraster.cellvalue(result2, 1)
      self.assertEqual(isValid, True)
      self.assertAlmostEqual(value, 1.708333, places=6)

      pcraster.setglobaloption("unitcell")
      result1 = windowaverage(raster, 9)
      result2 = mapmaximum(result1)
      value, isValid = pcraster.cellvalue(result2, 1)
      self.assertEqual(isValid, True)
      self.assertAlmostEqual(value,  1.708333, places=6)
Code example #19
File: test.py    Project: timebridge/pcraster
    def test_5(self):
        """ test windowaverage and kernel size larger than raster """
        filename = "windowaverage_Expr.map"
        pcraster.setclone(filename)
        raster = pcraster.readmap(filename)

        result1 = windowaverage(raster, 18)
        result2 = mapmaximum(result1)
        value, isValid = pcraster.cellvalue(result2, 1)
        self.assertEqual(isValid, True)
        self.assertAlmostEqual(value, 1.708333, places=6)

        pcraster.setglobaloption("unitcell")
        result1 = windowaverage(raster, 9)
        result2 = mapmaximum(result1)
        value, isValid = pcraster.cellvalue(result2, 1)
        self.assertEqual(isValid, True)
        self.assertAlmostEqual(value, 1.708333, places=6)
Code example #20
def pcr_preprocess(dem_in, x, y, itile, tempdir, ldd_in=None,
                    test=False, create_ldd=True):
    """
    function to set pcr clone and translate DEM (terrain) and ldd numpy 2d arrays to pcr maps

    :param dem_in:      masked numpy 2d array with elevation data
    :param x:           numpy 1d array with x coordinates of elevation grid
    :param y:           numpy 1d array with y coordinates of elevation grid
    :param itile:       tile number, used in the name of the temporary clone map
    :param tempdir:     string with directory to temporarily save the clone pcrmap
    :param ldd_in:      numpy 2d array with ldd grid, make sure it uses the pcrmap definition of ldd
    :param test:        if True do not remove clone maps
    :param create_ldd:  if True derive an ldd from the dem (or repair ldd_in when given); if False return None for the ldd

    :return:            pcr maps for dem and ldd
    """
    # create clone in temp_dir
    fn_clone = os.path.join(tempdir, '_{:03d}_dem.map'.format(itile))
    cl.makeDir(tempdir)  # make dir if not exist
    # DEM
    gdal_writemap(fn_clone, 'PCRaster', x, y, dem_in, -9999)  # note: missing value needs conversion in python item
    pcr.setclone(fn_clone)
    pcr.setglobaloption("unitcell")
    dem = pcr.readmap(fn_clone)
    # cleanup
    if not test:
        os.unlink(fn_clone)  # cleanup clone file
    os.unlink(fn_clone+'.aux.xml')
    # LDD
    if create_ldd:
        if ldd_in is None:
            print('Calculating LDD')
            ldd = pcr.lddcreate(dem, 1E31, 1E31, 1E31, 1E31)
        else:
            # TODO note that np.nan is default NoDataValue for ldd file. check this when reading data
            # TODO: x and y axis got mixed up when translating with numpy2pcr. check if it works here!
            # in process_tile function in coastal_inun.py
            ldd = pcr.lddrepair(pcr.ldd(pcr.numpy2pcr(pcr.Ldd, ldd_in, np.nan)))
    else:
        ldd = None
    return dem, ldd
Code example #21
def pcr_coast(dem, points):
    """  project points to coast with nearest neighbourhood
    finds coastal cells based on dem with NoDataValues and the locations of boundary conditions at sea
    using pcr spread the a nearest neighbor interpolation of the point ids is done for coastal cells

    :param dem: pcr dem
    :param points: pcrmap with ids in cells

    :returns: pcr maps with location ids projected to the coastline, and with point ids mapped to the nearest sea cell
    """
    # clump areas based on NoDataValues in dem
    dem_NoDataValues = pcr.cover(pcr.ifthenelse(dem > -9999, pcr.boolean(0), pcr.boolean(1)), pcr.boolean(1))
    # find number of boundary conditions in area where dem_novalue
    pcr.setglobaloption("nondiagonal")  # only top, bottom, left, right
    area_nbounds = pcr.areatotal(pcr.scalar(points), pcr.clump(dem_NoDataValues)) * pcr.scalar(dem_NoDataValues)
    pcr.setglobaloption("diagonal")  # diagonal again
    # make sea (True) and land (False) mask
    if np.any(pcr.pcr2numpy(area_nbounds,-9999) > 0):
        sea = pcr.ifthenelse(area_nbounds > 0, pcr.boolean(1), pcr.boolean(0))
    else:
        sea = dem_NoDataValues
    # find coast based on sea in neighboring cells and at land (sea = 0)
    coast = pcr.ifthenelse((pcr.window4total(pcr.scalar(sea)) > pcr.scalar(0)) & (sea == pcr.boolean(0)),
                           pcr.boolean(1), pcr.boolean(0))

    # move points to nearest sea cell(s)
    point_dist = pcr.ifthenelse(sea, pcr.spread(points, 0, 1), 1E31)  # distance from each point for sea cells
    nnpoints = pcr.ifthenelse(sea, pcr.spreadzone(points, 0, 1), 0)  # closest point for sea cells
    dist2sea = pcr.areaminimum(point_dist, nnpoints)  # shortest distance to each point to sea
    points_in_sea = pcr.nominal(pcr.ifthenelse(dist2sea == point_dist, nnpoints, 0))  # map points to nearest sea cell

    # map point at sea to coastline according to shortest distance over sea
    res = pcr.ifthenelse((pcr.scalar(sea) + pcr.scalar(coast)) >= 1, pcr.scalar(1), 1E31)  # mask out non sea or coast cells
    ids_coastline = pcr.scalar(pcr.spreadzone(points_in_sea, 0, res)) * pcr.scalar(coast)

    return ids_coastline, points_in_sea
Code example #22
def main():
    """

    """
    workdir = "."
    inifile = "wflow_prepare.ini"

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f", ['version'])
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1"))
    )

    # make the directories to save results in
    if not os.path.isdir(step1dir + "/"):
        os.makedirs(step1dir)
    if not os.path.isdir(step2dir):
        os.makedirs(step2dir)

    ##first make the clone map
    try:
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
    except:
        print("Xul, Xul, Xlr and  Ylr are required entries in the ini file")
        sys.exit(1)

    csize = float(configget(config, "settings", "cellsize", "1"))
    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    strRiver = int(configget(config, "settings", "riverorder_step2", "4"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35")
    )
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(configget(config, "settings", "lddoutflowdepth", "1E35"))
    lddmethod = configget(config, "settings", "lddmethod", "dem")
    lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout")
    pcr.setglobaloption(lddglobaloption)

    nrrow = round(abs(Yul - Ylr) / csize)
    nrcol = round(abs(Xlr - Xul) / csize)
    mapstr = (
        "mapattr -s -S -R "
        + str(nrrow)
        + " -C "
        + str(nrcol)
        + " -l "
        + str(csize)
        + " -x "
        + str(Xul)
        + " -y "
        + str(Yul)
        + " -P yb2t "
        + step2dir
        + "/cutout.map"
    )

    os.system(mapstr)
    pcr.setclone(step2dir + "/cutout.map")

    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    if lu_water:
        os.system(
            "resample --clone "
            + step2dir
            + "/cutout.map "
            + lu_water
            + " "
            + step2dir
            + "/wflow_waterfrac.map"
        )

    if lu_paved:
        os.system(
            "resample --clone "
            + step2dir
            + "/cutout.map "
            + lu_paved
            + " "
            + step2dir
            + "/PathFrac.map"
        )

    #
    try:
        lumap = config.get("files", "landuse")
    except:
        print("no landuse map...creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_landuse.map")
    else:
        os.system(
            "resample --clone "
            + step2dir
            + "/cutout.map "
            + lumap
            + " "
            + step2dir
            + "/wflow_landuse.map"
        )

    try:
        soilmap = config.get("files", "soil")
    except:
        print("no soil map..., creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_soil.map")
    else:
        os.system(
            "resample --clone "
            + step2dir
            + "/cutout.map "
            + soilmap
            + " "
            + step2dir
            + "/wflow_soil.map"
        )

    resamplemaps(step1dir, step2dir)

    dem = pcr.readmap(step2dir + "/wflow_dem.map")
    demmin = pcr.readmap(step2dir + "/wflow_demmin.map")
    demmax = pcr.readmap(step2dir + "/wflow_demmax.map")
    # catchcut = pcr.readmap(step2dir + "/catchment_cut.map")
    catchcut = pcr.readmap(step2dir + "/cutout.map")
    # now apply the area of interest (catchcut) to the DEM
    # dem=pcr.ifthen(catchcut >=1 , dem)
    #

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
    else:
        print("river file speficied.....")
        # rivshpattr = config.get("files","riverattr")
        pcr.report(dem * 0.0, step2dir + "/nilmap.map")
        thestr = (
            "gdal_translate -of GTiff "
            + step2dir
            + "/nilmap.map "
            + step2dir
            + "/wflow_riverburnin.tif"
        )
        os.system(thestr)
        rivshpattr = os.path.splitext(os.path.basename(rivshp))[0]
        os.system(
            "gdal_rasterize -burn 1 -l "
            + rivshpattr
            + " "
            + rivshp
            + " "
            + step2dir
            + "/wflow_riverburnin.tif"
        )
        thestr = (
            "gdal_translate -of PCRaster "
            + step2dir
            + "/wflow_riverburnin.tif "
            + step2dir
            + "/wflow_riverburnin.map"
        )
        os.system(thestr)
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
        # ldddem = pcr.ifthenelse(riverburn >= 1.0, dem -1000 , dem)

    # Only burn within the original catchment
    riverburn = pcr.ifthen(pcr.scalar(catchcut) >= 1, riverburn)
    # Now set up a very high wall around the catchment that is scaled
    # based on the distance to the catchment so that it slopes away from the
    # catchment
    if lddmethod != "river":
        print("Burning in highres-river ...")
        disttocatch = pcr.spread(pcr.nominal(catchcut), 0.0, 1.0)
        demmax = pcr.ifthenelse(
            pcr.scalar(catchcut) >= 1.0,
            demmax,
            demmax + (pcr.celllength() * 100.0) / disttocatch,
        )
        pcr.setglobaloption("unitcell")
        # demregional=pcr.windowaverage(demmin,100)
        demburn = pcr.cover(pcr.ifthen(pcr.boolean(riverburn), demmin - 100.0), demmax)
    else:
        print("using average dem..")
        demburn = dem

    ldd = tr.lddcreate_save(
        step2dir + "/wflow_ldd.map",
        demburn,
        True,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    # Find catchment (overall)
    outlet = tr.find_outlet(ldd)
    sub = tr.subcatch(ldd, outlet)
    pcr.report(sub, step2dir + "/wflow_catchment.map")
    pcr.report(outlet, step2dir + "/wflow_outlet.map")

    # make river map
    strorder = pcr.streamorder(ldd)
    pcr.report(strorder, step2dir + "/wflow_streamorder.map")

    river = pcr.ifthen(pcr.boolean(strorder >= strRiver), strorder)
    pcr.report(river, step2dir + "/wflow_river.map")

    # make subcatchments
    # os.system("col2map --clone " + step2dir + "/cutout.map gauges.col " + step2dir + "/wflow_gauges.map")
    X = np.fromstring(gauges_x, sep=',')
    Y = np.fromstring(gauges_y, sep=',')

    pcr.setglobaloption("unittrue")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)
    pcr.report(outlmap, step2dir + "/wflow_gauges_.map")

    if snapgaugestoriver:
        print("Snapping gauges to river")
        pcr.report(outlmap, step2dir + "/wflow_orggauges.map")
        outlmap = tr.snaptomap(outlmap, river)

    outlmap = pcr.ifthen(outlmap > 0, outlmap)
    pcr.report(outlmap, step2dir + "/wflow_gauges.map")

    scatch = tr.subcatch(ldd, outlmap)
    pcr.report(scatch, step2dir + "/wflow_subcatch.map")
Code example #23
    def dynamic(self):
        """
          *Required*
          This is where all the time dependent functions are executed. Time dependent
          output should also be saved here.
        """
        # print 'useETPdata' , self.UseETPdata
        # Put the W3RA here. Stuff from W3RA_timestep_model.m
        # read meteo from file
        self.logger.debug("Running for: " + str(self.currentdatetime))
        self.PRECIP = pcr.cover(
            self.wf_readmap(self.PRECIP_mapstack, 0.0), pcr.scalar(0.0)
        )  # mm

        if self.UseETPdata == 1:
            self.TDAY = pcr.cover(
                self.wf_readmap(self.TDAY_mapstack, 10.0), pcr.scalar(10.0)
            )  # T in degC
            self.EPOT = pcr.cover(
                self.wf_readmap(self.EPOT_mapstack, 0.0), pcr.scalar(0.0)
            )  # mm
            self.WINDSPEED = pcr.cover(
                self.wf_readmap(self.WINDSPEED_mapstack, default=1.0), pcr.scalar(1.0)
            )
            self.AIRPRESS = pcr.cover(
                self.wf_readmap(self.AIRPRESS_mapstack, default=980.0),
                pcr.scalar(980.0),
            )
            # print "Using climatology for wind, air pressure and albedo."
        elif self.UseETPdata == 0:
            self.TMIN = pcr.cover(
                self.wf_readmap(self.TMIN_mapstack, 10.0), pcr.scalar(10.0)
            )  # T in degC
            self.TMAX = pcr.cover(
                self.wf_readmap(self.TMAX_mapstack, 10.0), pcr.scalar(10.0)
            )  # T in degC
            self.RAD = pcr.cover(
                self.wf_readmap(self.RAD_mapstack, 10.0), pcr.scalar(10.0)
            )  # W m-2 s-1
            self.WINDSPEED = pcr.cover(
                self.wf_readmap(self.WINDSPEED_mapstack, 10.0), pcr.scalar(10.0)
            )  # ms-1
            self.AIRPRESS = pcr.cover(
                self.wf_readmap(self.AIRPRESS_mapstack, 10.0), pcr.scalar(10.0)
            )  # Pa
            self.ALBEDO = pcr.cover(
                self.wf_readmapClimatology(self.ALBEDO_mapstack, default=0.1),
                pcr.scalar(0.1),
            )

        self.wf_multparameters()
        doy = self.currentdatetime.timetuple().tm_yday

        # conversion daylength
        pcr.setglobaloption("radians")
        m = pcr.scalar(1) - pcr.tan(
            (self.latitude * pcr.scalar(math.pi) / pcr.scalar(180))
        ) * pcr.tan(
            (
                (pcr.scalar(23.439) * pcr.scalar(math.pi) / pcr.scalar(180))
                * pcr.cos(
                    pcr.scalar(2)
                    * pcr.scalar(math.pi)
                    * (doy + pcr.scalar(9))
                    / pcr.scalar(365.25)
                )
            )
        )
        self.fday = pcr.min(
            pcr.max(
                pcr.scalar(0.02),
                pcr.scalar(
                    pcr.acos(
                        pcr.scalar(1)
                        - pcr.min(pcr.max(pcr.scalar(0), m), pcr.scalar(2))
                    )
                )
                / pcr.scalar(math.pi),
            ),
            pcr.scalar(1),
        )  # fraction daylength

        # Assign forcing and estimate effective meteorological variables

        Pg = self.PRECIP  # mm

        if self.UseETPdata == 1:
            Ta = self.TDAY  # T in degC
            T24 = self.TDAY  # T in degC
        elif self.UseETPdata == 0:
            Rg = pcr.max(
                self.RAD, pcr.scalar(0.0001)
            )  # already in W m-2 s-1; set a small minimum (0.0001) to avoid numerical problems
            Ta = self.TMIN + pcr.scalar(0.75) * (self.TMAX - self.TMIN)  # T in degC
            T24 = self.TMIN + pcr.scalar(0.5) * (self.TMAX - self.TMIN)  # T in degC
            pex = pcr.min(
                pcr.scalar(17.27) * (self.TMIN) / (pcr.scalar(237.3) + self.TMIN),
                pcr.scalar(10),
            )  # T in degC
            pe = pcr.min(
                pcr.scalar(610.8) * (pcr.exp(pex)), pcr.scalar(10000.0)
            )  # Mean actual vapour pressure, from dewpoint temperature
        # rescale factor because windspeed climatology is at 2m
        WindFactor = 1.0
        # u2 = pcr.scalar(WindFactor)*self.WINDSPEED*(pcr.scalar(1)-(pcr.scalar(1)-self.fday)*scalar(0.25))/self.fday
        self.u2 = (
            pcr.scalar(WindFactor)
            * self.WINDSPEED
            * (pcr.scalar(1) - (pcr.scalar(1) - self.fday) * pcr.scalar(0.25))
            / self.fday
        )
        pair = self.AIRPRESS  # already in Pa

        # diagnostic equations

        self.LAI1 = self.SLA1 * self.Mleaf1  # (5.3)
        self.LAI2 = self.SLA2 * self.Mleaf2  # (5.3)
        fveg1 = pcr.max(1 - pcr.exp(-self.LAI1 / self.LAIref1), 0.000001)  # (5.3)
        fveg2 = pcr.max(1 - pcr.exp(-self.LAI2 / self.LAIref2), 0.000001)

        # Vc = pcr.max(0,EVI-0.07)/fveg
        fsoil1 = 1 - fveg1
        fsoil2 = 1 - fveg2
        w01 = self.S01 / self.S0FC1  # (2.1)
        w02 = self.S02 / self.S0FC2
        ws1 = self.Ss1 / self.SsFC1  # (2.1)
        ws2 = self.Ss2 / self.SsFC2
        wd1 = self.Sd1 / self.SdFC1  # (2.1)
        wd2 = self.Sd2 / self.SdFC2  # (2.1)

        TotSnow1 = self.FreeWater1 + self.DrySnow1
        TotSnow2 = self.FreeWater2 + self.DrySnow2
        wSnow1 = self.FreeWater1 / (TotSnow1 + 1e-5)
        wSnow2 = self.FreeWater2 / (TotSnow2 + 1e-5)

        # Spatialise catchment fractions
        Sgfree = pcr.max(self.Sg, 0.0)
        # JS: Not sure if this is translated properly....
        # for i=1:par.Nhru
        fwater1 = pcr.min(0.005, (0.007 * self.Sr ** 0.75))
        fwater2 = pcr.min(0.005, (0.007 * self.Sr ** 0.75))
        fsat1 = pcr.min(
            1.0, pcr.max(pcr.min(0.005, 0.007 * self.Sr ** 0.75), Sgfree / self.Sgref)
        )
        fsat2 = pcr.min(
            1.0, pcr.max(pcr.min(0.005, 0.007 * self.Sr ** 0.75), Sgfree / self.Sgref)
        )
        Sghru1 = self.Sg
        Sghru2 = self.Sg

        # CALCULATION OF PET
        # Conversions and coefficients (3.1)
        pesx = pcr.min(
            (pcr.scalar(17.27) * Ta / (pcr.scalar(237.3) + Ta)), pcr.scalar(10)
        )
        pes = pcr.min(
            pcr.scalar((pcr.scalar(610.8)) * pcr.exp(pesx)), pcr.scalar(10000)
        )  # saturated vapour pressure
        # fRH = pe/pes  # relative air humidity                                  -------------- check
        cRE = 0.03449 + 4.27e-5 * Ta
        # Caero = self.fday*0.176*(1+Ta/209.1)*(pair-0.417*pe)*(1-fRH)         -------------- check
        # keps = 1.4e-3*((Ta/187)**2+Ta/107+1)*(6.36*pair+pe)/pes
        ga1 = self.ku2_1 * self.u2
        ga2 = self.ku2_2 * self.u2

        if self.UseETPdata == 1:
            self.E01 = pcr.max(self.EPOT, 0)
            self.E02 = pcr.max(self.EPOT, 0)
            keps = (
                0.655e-3 * pair / pes
            )  # See Appendix A3 (http://www.clw.csiro.au/publications/waterforahealthycountry/2010/wfhc-aus-water-resources-assessment-system.pdf) --------------------------------   check!

        elif self.UseETPdata == 0:
            # Aerodynamic conductance (3.7)

            ns_alb = self.ALBEDO
            Rgeff = Rg / self.fday
            # shortwave radiation balance (3.2)
            # alb_veg = 0.452*Vc
            # alb_soil = alb_wet+(alb_dry-alb_wet)*exp(-w0/w0ref_alb)
            # new equations for snow albedo
            alb_snow1 = 0.65 - 0.2 * wSnow1  # assumed; ideally some lit research needed
            alb_snow2 = 0.65 - 0.2 * wSnow2
            fsnow1 = pcr.min(
                1.0, 0.05 * TotSnow1
            )  # assumed; ideally some lit research needed
            fsnow2 = pcr.min(1.0, 0.05 * TotSnow2)
            # alb = fveg*alb_veg+(fsoil-fsnow)*alb_soil +fsnow*alb_snow
            # alb = albedo
            alb1 = (1 - fsnow1) * ns_alb + fsnow1 * alb_snow1
            alb2 = (1 - fsnow2) * ns_alb + fsnow2 * alb_snow2
            RSn1 = (1 - alb1) * Rgeff
            RSn2 = (1 - alb2) * Rgeff
            # long wave radiation balance (3.3 to 3.5)
            StefBolz = 5.67e-8
            Tkelv = Ta + 273.16
            self.RLin = (0.65 * (pe / Tkelv) ** 0.14) * StefBolz * Tkelv ** 4  # (3.3)
            RLout = StefBolz * Tkelv ** 4.0  # (3.4)
            self.RLn = self.RLin - RLout

            self.fGR1 = self.Gfrac_max1 * (1 - pcr.exp(-fsoil1 / self.fvegref_G1))
            self.fGR2 = self.Gfrac_max2 * (
                1 - pcr.exp(-fsoil2 / self.fvegref_G2)
            )  # (3.5)
            self.Rneff1 = (RSn1 + self.RLn) * (1 - self.fGR1)
            self.Rneff2 = (RSn2 + self.RLn) * (1 - self.fGR2)

            fRH = pe / pes  # relative air humidity
            Caero = (
                self.fday * 0.176 * (1 + Ta / 209.1) * (pair - 0.417 * pe) * (1 - fRH)
            )  # -------------- check
            keps = 1.4e-3 * ((Ta / 187) ** 2 + Ta / 107 + 1) * (6.36 * pair + pe) / pes

            #  Potential evaporation
            kalpha1 = 1 + Caero * ga1 / self.Rneff1
            kalpha2 = 1 + Caero * ga2 / self.Rneff2
            self.E01 = cRE * (1 / (1 + keps)) * kalpha1 * self.Rneff1 * self.fday
            self.E02 = cRE * (1 / (1 + keps)) * kalpha2 * self.Rneff2 * self.fday
            self.E01 = pcr.max(self.E01, 0)
            self.E02 = pcr.max(self.E02, 0)

        # CALCULATION OF ET FLUXES AND ROOT WATER UPTAKE
        # Root water uptake constraint (4.4)
        Usmax1 = pcr.max(
            0, self.Us01 * pcr.min(1, ws1 / self.wslimU1)
        )  ## zero values because ws1 contains zero values (see line 116)
        Usmax2 = pcr.max(
            0, self.Us02 * pcr.min(1, ws2 / self.wslimU2)
        )  ## zero values because ws2 contains zero values (see line 117)
        Udmax1 = pcr.max(
            0, self.Ud01 * pcr.min(1, wd1 / self.wdlimU1)
        )  ## zero values because wd1 contains zero values (see line 118)
        Udmax2 = pcr.max(
            0, self.Ud02 * pcr.min(1, wd2 / self.wdlimU2)
        )  ## zero values because wd2 contains zero values (see line 119)
        # U0max = pcr.max(0, Us0*min(1,w0/wslimU))
        U0max1 = pcr.scalar(0)
        U0max2 = pcr.scalar(0)
        Utot1 = pcr.max(Usmax1, pcr.max(Udmax1, U0max1))
        Utot2 = pcr.max(Usmax2, pcr.max(Udmax2, U0max2))

        # Maximum transpiration (4.3)
        Gsmax1 = self.cGsmax1 * self.Vc1
        gs1 = fveg1 * Gsmax1
        ft1 = 1 / (1 + (keps / (1 + keps)) * ga1 / gs1)
        Etmax1 = ft1 * self.E01
        Gsmax2 = self.cGsmax2 * self.Vc2
        gs2 = fveg2 * Gsmax2
        ft2 = 1 / (1 + (keps / (1 + keps)) * ga2 / gs2)
        Etmax2 = ft2 * self.E02

        # Actual transpiration (4.1)
        Et1 = pcr.min(Utot1, Etmax1)
        Et2 = pcr.min(Utot2, Etmax2)

        # # Root water uptake distribution (2.3)
        U01 = pcr.max(
            pcr.min((U0max1 / (U0max1 + Usmax1 + Udmax1)) * Et1, self.S01 - 1e-2), 0
        )
        Us1 = pcr.max(
            pcr.min((Usmax1 / (U0max1 + Usmax1 + Udmax1)) * Et1, self.Ss1 - 1e-2), 0
        )
        Ud1 = pcr.max(
            pcr.min((Udmax1 / (U0max1 + Usmax1 + Udmax1)) * Et1, self.Sd1 - 1e-2), 0
        )
        Et1 = U01 + Us1 + Ud1  # to ensure mass balance

        U02 = pcr.max(
            pcr.min((U0max2 / (U0max2 + Usmax2 + Udmax2)) * Et2, self.S02 - 1e-2), 0
        )
        Us2 = pcr.max(
            pcr.min((Usmax2 / (U0max2 + Usmax2 + Udmax2)) * Et2, self.Ss2 - 1e-2), 0
        )
        Ud2 = pcr.max(
            pcr.min((Udmax2 / (U0max2 + Usmax2 + Udmax2)) * Et2, self.Sd2 - 1e-2), 0
        )
        Et2 = U02 + Us2 + Ud2

        # Soil evaporation (4.5)
        self.S01 = pcr.max(0, self.S01 - U01)
        self.S02 = pcr.max(0, self.S02 - U02)
        w01 = self.S01 / self.S0FC1  # (2.1)
        w02 = self.S02 / self.S0FC2  # (2.1)
        fsoilE1 = self.FsoilEmax1 * pcr.min(1, w01 / self.w0limE1)
        fsoilE2 = self.FsoilEmax2 * pcr.min(1, w02 / self.w0limE2)
        Es1 = pcr.max(
            0, pcr.min(((1 - fsat1) * fsoilE1 * (self.E01 - Et1)), self.S01 - 1e-2)
        )
        Es2 = pcr.max(
            0, pcr.min(((1 - fsat2) * fsoilE2 * (self.E02 - Et2)), self.S02 - 1e-2)
        )
        # Groundwater evaporation (4.6)
        Eg1 = pcr.min((fsat1 - fwater1) * self.FsoilEmax1 * (self.E01 - Et1), Sghru1)
        Eg2 = pcr.min((fsat2 - fwater2) * self.FsoilEmax2 * (self.E02 - Et2), Sghru2)
        # Open water evaporation (4.7)
        Er1 = pcr.min(fwater1 * self.FwaterE1 * pcr.max(0, self.E01 - Et1), self.Sr)
        Er2 = pcr.min(fwater2 * self.FwaterE2 * pcr.max(0, self.E02 - Et2), self.Sr)
        # Rainfall interception evaporation (4.2)
        Sveg1 = self.S_sls1 * self.LAI1
        fER1 = self.ER_frac_ref1 * fveg1
        Pwet1 = -pcr.ln(1 - fER1 / fveg1) * Sveg1 / fER1
        Ei1 = pcr.scalar(Pg < Pwet1) * fveg1 * Pg + pcr.scalar(Pg >= Pwet1) * (
            fveg1 * Pwet1 + fER1 * (Pg - Pwet1)
        )

        Sveg2 = self.S_sls2 * self.LAI2
        fER2 = self.ER_frac_ref2 * fveg2
        Pwet2 = -pcr.ln(1 - fER2 / fveg2) * Sveg2 / fER2
        Ei2 = pcr.scalar(Pg < Pwet2) * fveg2 * Pg + pcr.scalar(Pg >= Pwet2) * (
            fveg2 * Pwet2 + fER2 * (Pg - Pwet2)
        )

        self.EACT1 = (Et1 + Es1 + Eg1 + Er1 + Ei1) * self.Fhru1
        self.EACT2 = (Et2 + Es2 + Eg2 + Er2 + Ei2) * self.Fhru2
        self.EACT = self.EACT1 + self.EACT2

        # HBV snow routine
        # Matlab: function [FreeWater,DrySnow,InSoil]=snow_submodel(Precipitation,Temperature,FreeWater,DrySnow)
        # derived from HBV-96 shared by Jaap Schellekens (Deltares) in May 2011
        # original in PCraster, adapted to Matlab by Albert van Dijk
        # HBV snow routine
        Pn1 = Pg - Ei1
        Pn2 = Pg - Ei2
        Precipitation1 = Pn1
        Precipitation2 = Pn2

        # Snow routine parameters
        # parameters
        # TODO: Check this, not sure if this works.......
        x = pcr.scalar(Pg)
        Cfmax1 = 0.6 * 3.75653 * pcr.scalar(x >= 0)
        Cfmax2 = 3.75653 * pcr.scalar(x >= 0)
        TT1 = -1.41934 * pcr.scalar(
            x >= 0
        )  # critical temperature for snowmelt and refreezing
        TT2 = -1.41934 * pcr.scalar(x >= 0)
        TTI1 = 1.00000 * pcr.scalar(
            x >= 0
        )  # defines interval in which precipitation falls as rainfall and snowfall
        TTI2 = 1.00000 * pcr.scalar(x >= 0)
        CFR1 = 0.05000 * pcr.scalar(
            x >= 0
        )  # refreezing efficiency constant in refreezing of freewater in snow
        CFR2 = 0.05000 * pcr.scalar(x >= 0)
        WHC1 = 0.10000 * pcr.scalar(x >= 0)
        WHC2 = 0.10000 * pcr.scalar(x >= 0)

        # Partitioning into fractions rain and snow
        Temperature = T24  # Dimmie, note: temporary line!!
        RainFrac1 = pcr.max(0, pcr.min((Temperature - (TT1 - TTI1 / 2)) / TTI1, 1))
        RainFrac2 = pcr.max(0, pcr.min((Temperature - (TT2 - TTI2 / 2)) / TTI2, 1))
        SnowFrac1 = 1 - RainFrac1  # fraction of precipitation which falls as snow
        SnowFrac2 = 1 - RainFrac2

        # Snowfall/melt calculations
        SnowFall1 = SnowFrac1 * Precipitation1  # snowfall depth
        SnowFall2 = SnowFrac2 * Precipitation2
        RainFall1 = RainFrac1 * Precipitation1  # rainfall depth
        RainFall2 = RainFrac2 * Precipitation2
        PotSnowMelt1 = Cfmax1 * pcr.max(
            0, Temperature - TT1
        )  # Potential snow melt, based on temperature
        PotSnowMelt2 = Cfmax2 * pcr.max(0, Temperature - TT2)
        PotRefreezing1 = (
            Cfmax1 * CFR1 * pcr.max(TT1 - Temperature, 0)
        )  # Potential refreezing, based on temperature
        PotRefreezing2 = Cfmax2 * CFR2 * pcr.max(TT2 - Temperature, 0)
        Refreezing1 = pcr.min(PotRefreezing1, self.FreeWater1)  # actual refreezing
        Refreezing2 = pcr.min(PotRefreezing2, self.FreeWater2)
        SnowMelt1 = pcr.min(PotSnowMelt1, self.DrySnow1)  # actual snow melt
        SnowMelt2 = pcr.min(PotSnowMelt2, self.DrySnow2)
        self.DrySnow1 = (
            self.DrySnow1 + SnowFall1 + Refreezing1 - SnowMelt1
        )  # dry snow content
        self.DrySnow2 = self.DrySnow2 + SnowFall2 + Refreezing2 - SnowMelt2
        self.FreeWater1 = self.FreeWater1 - Refreezing1  # free water content in snow
        self.FreeWater2 = self.FreeWater2 - Refreezing2
        MaxFreeWater1 = self.DrySnow1 * WHC1
        MaxFreeWater2 = self.DrySnow2 * WHC2
        self.FreeWater1 = self.FreeWater1 + SnowMelt1 + RainFall1
        self.FreeWater2 = self.FreeWater2 + SnowMelt2 + RainFall2
        InSoil1 = pcr.max(
            self.FreeWater1 - MaxFreeWater1, 0
        )  # abundant water in snow pack which goes into soil
        InSoil2 = pcr.max(self.FreeWater2 - MaxFreeWater2, 0)
        self.FreeWater1 = self.FreeWater1 - InSoil1
        self.FreeWater2 = self.FreeWater2 - InSoil2
        # End of Snow Module

        # CALCULATION OF WATER BALANCES
        # surface water fluxes (2.2)
        NetInSoil1 = pcr.max(0, (InSoil1 - self.InitLoss1))
        NetInSoil2 = pcr.max(0, (InSoil2 - self.InitLoss2))
        Rhof1 = (1 - fsat1) * (NetInSoil1 / (NetInSoil1 + self.PrefR1)) * NetInSoil1
        Rhof2 = (1 - fsat2) * (NetInSoil2 / (NetInSoil2 + self.PrefR2)) * NetInSoil2
        Rsof1 = fsat1 * NetInSoil1
        Rsof2 = fsat2 * NetInSoil2
        QR1 = Rhof1 + Rsof1
        QR2 = Rhof2 + Rsof2
        I1 = InSoil1 - QR1
        I2 = InSoil2 - QR2
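        # NetInSoil is the water reaching the soil after the initial loss. The saturated
        # fraction fsat generates saturation-excess runoff (Rsof); the remaining fraction
        # generates infiltration-excess runoff (Rhof), scaled by NetInSoil / (NetInSoil + PrefR);
        # the rest of InSoil infiltrates (I).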
        # SOIL WATER BALANCES (2.1 & 2.4)
        # Topsoil water balance (S0)
        self.S01 = self.S01 + I1 - Es1 - U01
        self.S02 = self.S02 + I2 - Es2 - U02
        SzFC1 = self.S0FC1
        SzFC2 = self.S0FC2
        Sz1 = self.S01
        Sz2 = self.S02
        wz1 = pcr.max(1e-2, Sz1) / SzFC1
        wz2 = pcr.max(1e-2, Sz2) / SzFC2
        self.TMP = SzFC1

        # TODO: Check if this works
        fD1 = pcr.scalar(wz1 > 1) * pcr.max(self.FdrainFC1, 1 - 1 / wz1) + pcr.scalar(
            wz1 <= 1
        ) * self.FdrainFC1 * pcr.exp(self.beta1 * pcr.scalar(wz1 - 1))
        fD2 = pcr.scalar(wz2 > 1) * pcr.max(self.FdrainFC2, 1 - 1 / wz2) + pcr.scalar(
            wz2 <= 1
        ) * self.FdrainFC2 * pcr.exp(self.beta2 * pcr.scalar(wz2 - 1))
        Dz1 = pcr.max(0, pcr.min(fD1 * Sz1, Sz1 - 1e-2))
        Dz2 = pcr.max(0, pcr.min(fD2 * Sz2, Sz2 - 1e-2))
        D01 = Dz1
        D02 = Dz2
        self.S01 = self.S01 - D01
        self.S02 = self.S02 - D02
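        # The drainage fraction fD is piecewise: above field capacity (wz > 1) it is at
        # least FdrainFC and approaches 1 - 1/wz; below field capacity it decays
        # exponentially with beta. Dz is capped so that at most Sz - 1e-2 drains per step.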
        # Shallow root zone water balance (Ss)
        self.Ss1 = self.Ss1 + D01 - Us1
        self.Ss2 = self.Ss2 + D02 - Us2
        SzFC1 = self.SsFC1
        SzFC2 = self.SsFC2
        Sz1 = self.Ss1
        Sz2 = self.Ss2
        wz1 = pcr.max(1e-2, Sz1) / SzFC1
        wz2 = pcr.max(1e-2, Sz2) / SzFC2
        fD1 = pcr.scalar(wz1 > 1) * pcr.max(self.FdrainFC1, 1 - 1 / wz1) + pcr.scalar(
            wz1 <= 1
        ) * self.FdrainFC1 * pcr.exp(self.beta1 * pcr.scalar(wz1 - 1))
        fD2 = pcr.scalar(wz2 > 1) * pcr.max(self.FdrainFC2, 1 - 1 / wz2) + pcr.scalar(
            wz2 <= 1
        ) * self.FdrainFC2 * pcr.exp(self.beta2 * pcr.scalar(wz2 - 1))
        Dz1 = pcr.max(0, pcr.min(fD1 * Sz1, Sz1 - 1e-2))
        Dz2 = pcr.max(0, pcr.min(fD2 * Sz2, Sz2 - 1e-2))
        Ds1 = Dz1
        Ds2 = Dz2
        self.Ss1 = self.Ss1 - Ds1
        self.Ss2 = self.Ss2 - Ds2
        # Deep root zone water balance (Sd) (2.6)
        self.Sd1 = self.Sd1 + Ds1 - Ud1
        self.Sd2 = self.Sd2 + Ds2 - Ud2
        SzFC1 = self.SdFC1
        SzFC2 = self.SdFC2
        Sz1 = self.Sd1
        Sz2 = self.Sd2
        wz1 = pcr.max(1e-2, Sz1) / SzFC1
        wz2 = pcr.max(1e-2, Sz2) / SzFC2
        fD1 = pcr.scalar(wz1 > 1) * pcr.max(self.FdrainFC1, 1 - 1 / wz1) + pcr.scalar(
            wz1 <= 1
        ) * self.FdrainFC1 * pcr.exp(self.beta1 * pcr.scalar(wz1 - 1))
        fD2 = pcr.scalar(wz2 > 1) * pcr.max(self.FdrainFC2, 1 - 1 / wz2) + pcr.scalar(
            wz2 <= 1
        ) * self.FdrainFC2 * pcr.exp(self.beta2 * pcr.scalar(wz2 - 1))
        Dz1 = pcr.max(0, pcr.min(fD1 * Sz1, Sz1 - 1e-2))
        Dz2 = pcr.max(0, pcr.min(fD2 * Sz2, Sz2 - 1e-2))
        Dd1 = Dz1
        Dd2 = Dz2
        self.Sd1 = self.Sd1 - Dd1
        self.Sd2 = self.Sd2 - Dd2
        Y1 = pcr.min(
            self.Fgw_conn1 * pcr.max(0, self.wdlimU1 * self.SdFC1 - self.Sd1),
            Sghru1 - Eg1,
        )
        Y2 = pcr.min(
            self.Fgw_conn2 * pcr.max(0, self.wdlimU2 * self.SdFC2 - self.Sd2),
            Sghru2 - Eg2,
        )
        # Y = Fgw_conn.*max(0,wdlimU.*SdFC-Sd); # original Matlab expression
        self.Sd1 = self.Sd1 + Y1
        self.Sd2 = self.Sd2 + Y2

        # CATCHMENT WATER BALANCE
        # Groundwater store water balance (Sg) (2.5)
        NetGf = (self.Fhru1 * (Dd1 - Eg1 - Y1)) + (self.Fhru2 * (Dd2 - Eg2 - Y2))
        self.Sg = self.Sg + NetGf
        Sgfree = pcr.max(self.Sg, 0)
        Qg = pcr.min(Sgfree, (1 - pcr.exp(-self.K_gw)) * Sgfree)
        self.Sg = self.Sg - Qg

        # Surface water store water balance (Sr) (2.7)
        self.Sr = self.Sr + (self.Fhru1 * (QR1 - Er1)) + (self.Fhru2 * (QR2 - Er2)) + Qg
        self.Qtot = pcr.min(self.Sr, (1 - pcr.exp(-self.K_rout)) * self.Sr)
        self.Sr = self.Sr - self.Qtot
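        # Both stores are drained as linear reservoirs: outflow = (1 - exp(-K)) * storage
        # per timestep, so K_gw and K_rout act as recession coefficients on the daily
        # base timestep; the min() simply caps the outflow at the available store.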

        # VEGETATION ADJUSTMENT (5)

        fveq1 = (
            (1 / pcr.max((self.E01 / Utot1) - 1, 1e-3))
            * (keps / (1 + keps))
            * (ga1 / Gsmax1)
        )
        fveq2 = (
            (1 / pcr.max((self.E02 / Utot2) - 1, 1e-3))
            * (keps / (1 + keps))
            * (ga2 / Gsmax2)
        )
        fvmax1 = 1 - pcr.exp(-self.LAImax1 / self.LAIref1)
        fvmax2 = 1 - pcr.exp(-self.LAImax2 / self.LAIref2)
        fveq1 = pcr.min(fveq1, fvmax1)
        fveq2 = pcr.min(fveq2, fvmax2)
        dMleaf1 = -pcr.ln(1 - fveq1) * self.LAIref1 / self.SLA1 - self.Mleaf1
        dMleaf2 = -pcr.ln(1 - fveq2) * self.LAIref2 / self.SLA2 - self.Mleaf2

        # Mleafnet1 = dMleaf1 * (dMleaf1/self.Tgrow1) + dMleaf1 * dMleaf1/self.Tsenc1
        # Mleafnet2 = dMleaf2 * (dMleaf1/self.Tgrow2) + dMleaf2 * dMleaf2/self.Tsenc2
        Mleafnet1 = (
            pcr.scalar(dMleaf1 > 0) * (dMleaf1 / self.Tgrow1)
            + pcr.scalar(dMleaf1 < 0) * dMleaf1 / self.Tsenc1
        )
        Mleafnet2 = (
            pcr.scalar(dMleaf2 > 0) * (dMleaf2 / self.Tgrow2)
            + pcr.scalar(dMleaf2 < 0) * dMleaf2 / self.Tsenc2
        )
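        # fveq is the equilibrium vegetation cover the current water supply could sustain,
        # capped by fvmax derived from LAImax; dMleaf is the gap between the matching leaf
        # biomass and the current Mleaf, which is closed at rate 1/Tgrow when growing and
        # 1/Tsenc when senescing.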

        self.Mleaf1 = self.Mleaf1 + Mleafnet1
        self.Mleaf2 = self.Mleaf2 + Mleafnet2
        self.LAI1 = self.SLA1 * self.Mleaf1  # (5.3)
        self.LAI2 = self.SLA2 * self.Mleaf2

        # Updating diagnostics
        self.LAI1 = self.SLA1 * self.Mleaf1  # (5.3)
        self.LAI2 = self.SLA2 * self.Mleaf2
        fveg1 = 1 - pcr.exp(-self.LAI1 / self.LAIref1)  # (5.3)
        fveg2 = 1 - pcr.exp(-self.LAI2 / self.LAIref2)
        fsoil1 = 1 - fveg1
        fsoil2 = 1 - fveg2
        w01 = self.S01 / self.S0FC1  # (2.1)
        w02 = self.S02 / self.S0FC2
        ws1 = self.Ss1 / self.SsFC1  # (2.1)
        ws2 = self.Ss2 / self.SsFC2
        wd1 = self.Sd1 / self.SdFC1  # (2.1)
        wd2 = self.Sd2 / self.SdFC2
コード例 #24
0
    def initial(self):

        """
    *Required*

    Initial part of the model, executed only once. It reads all static model
    information (parameters) and sets-up the variables used in modelling.

    This function is required. The contents is free. However, in order to
    easily connect to other models it is advised to adhere to the directory
    structure used in the other models.

    """
        #: pcraster option to calculate with units or cells. Not really an issue
        #: in this model but always good to keep in mind.
        pcr.setglobaloption("unittrue")
        pcr.setglobaloption(
            "radians"
        )  # Needed as W3RA was originally written in matlab

        # SET GLOBAL PARAMETER VALUES (not used in the original script)
        # Nhru=2
        # K_gw_scale=0.0146
        # K_gw_shape=0.0709
        # K_rout_scale=0.1943
        # K_rout_int=0.0589
        # FdrainFC_scale=0.2909
        # FdrainFC_shape=0.5154
        # Sgref_scale=3.2220
        # Sgref_shape=3.2860
        # fday=0.5000
        self.timestepsecs = int(
            configget(self.config, "model", "timestepsecs", "86400")
        )
        
        self.reinit = int(configget(self.config, "run", "reinit", "0"))
        self.OverWriteInit = int(configget(self.config, "model", "OverWriteInit", "0"))
        
        self.UseETPdata = int(
            configget(self.config, "model", "UseETPdata", "1")
        )  #  1: Use ETP data, 0: Compute ETP from meteorological variables
        self.logger.debug("use DATA: " + str(self.UseETPdata))
        self.basetimestep = 86400
        self.SaveMapDir = self.Dir + "/" + self.runId + "/outmaps"

        # Define here the W3RA mapstacks (best to read these via netcdf)

        self.TMAX_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "TMAX", "/inmaps/TMAX"
        )
        self.TMIN_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "TMIN", "/inmaps/TMIN"
        )
        self.TDAY_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "TDAY", "/inmaps/TDAY"
        )
        self.EPOT_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "EPOT", "/inmaps/EPOT"
        )
        self.PRECIP_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "PRECIP", "/inmaps/PRECIP"
        )
        self.RAD_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "RAD", "/inmaps/RAD"
        )
        # self.WINDSPEED_mapstack=self.Dir + configget(self.config,"inputmapstacks","WINDSPEED","/inmaps/ClimatologyMapFiles/WINDS/WNDSPEED")
        # self.AIRPRESS_mapstack=self.Dir + configget(self.config,"inputmapstacks","AIRPRESS","/inmaps/ClimatologyMapFiles/AIRPRESS/AIRPRESS")
        self.ALBEDO_mapstack = self.Dir + configget(
            self.config,
            "inputmapstacks",
            "ALBEDO",
            "/inmaps/ClimatologyMapFiles/ALBEDO/ALBEDO",
        )
        self.WINDSPEED_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "WINDSPEED", "/inmaps/WIND"
        )
        self.AIRPRESS_mapstack = self.Dir + configget(
            self.config, "inputmapstacks", "AIRPRESS", "/inmaps/PRES"
        )
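        # These configget calls read the map-stack locations from the [inputmapstacks]
        # section of the ini file, falling back to the defaults given here. An
        # illustrative (not actual) ini fragment could look like:
        #   [inputmapstacks]
        #   TMAX = /inmaps/TMAX
        #   PRECIP = /inmaps/PRECIP
        #   EPOT = /inmaps/EPOT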

        self.Altitude = pcr.readmap(self.Dir + "/staticmaps/wflow_dem")

        self.latitude = pcr.ycoordinate(pcr.boolean(self.Altitude))

        # Add reading of parameters here

        self.K_gw = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/k_gw.map"), 0.0, fail=True
        )
        self.K_rout = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/k_rout.map"), 0.0, fail=True
        )
        self.Sgref = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/sgref.map"), 0.0, fail=True
        )
        self.alb_dry1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/alb_dry.map"), 0.0, fail=True
        )
        self.alb_wet1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/alb_wet.map"), 0.0, fail=True
        )
        self.beta1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/beta.map"), 0.0, fail=True
        )
        self.cGsmax1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/cgsmax.map"), 0.0, fail=True
        )
        self.ER_frac_ref1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/er_frac_ref.map"), 0.0, fail=True
        )
        self.FdrainFC1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fdrainfc.map"), 0.0, fail=True
        )
        self.Fgw_conn1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fgw_conn.map"), 0.0, fail=True
        )
        self.Fhru1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fhru.map"), 0.0, fail=True
        )
        self.SLA1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/sla.map"), 0.0, fail=True
        )
        self.LAIref1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/lairef.map"), 0.0, fail=True
        )
        self.FsoilEmax1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fsoilemax.map"), 0.0, fail=True
        )
        self.fvegref_G1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fvegref_g.map"), 0.0, fail=True
        )
        self.FwaterE1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fwatere.map"), 0.0, fail=True
        )
        self.Gfrac_max1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/gfrac_max.map"), 0.0, fail=True
        )
        self.hveg1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/hveg.map"), 0.0, fail=True
        )
        self.InitLoss1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/initloss.map"), 0.0, fail=True
        )
        self.LAImax1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/laimax.map"), 0.0, fail=True
        )
        self.PrefR1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/prefr.map"), 0.0, fail=True
        )
        self.S_sls1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/s_sls.map"), 0.0, fail=True
        )
        self.S0FC1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/s0fc.map"), 0.0, fail=True
        )
        self.SsFC1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/ssfc.map"), 0.0, fail=True
        )
        self.SdFC1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/sdfc.map"), 0.0, fail=True
        )
        self.Vc1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/vc.map"), 0.0, fail=True
        )
        self.w0ref_alb1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/w0ref_alb.map"), 0.0, fail=True
        )
        self.Us01 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/us0.map"), 0.0, fail=True
        )
        self.Ud01 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/ud0.map"), 0.0, fail=True
        )
        self.wslimU1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/wslimu.map"), 0.0, fail=True
        )
        self.wdlimU1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/wdlimu.map"), 0.0, fail=True
        )
        self.w0limE1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/w0lime.map"), 0.0, fail=True
        )
        self.Tgrow1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/tgrow.map"), 0.0, fail=True
        )
        self.Tsenc1 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/tsenc.map"), 0.0, fail=True
        )

        self.alb_dry2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/alb_dry2.map"), 0.0, fail=True
        )
        self.alb_wet2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/alb_wet2.map"), 0.0, fail=True
        )
        self.beta2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/beta2.map"), 0.0, fail=True
        )
        self.cGsmax2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/cgsmax2.map"), 0.0, fail=True
        )
        self.ER_frac_ref2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/er_frac_ref2.map"), 0.0, fail=True
        )
        self.FdrainFC2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fdrainfc2.map"), 0.0, fail=True
        )
        self.Fgw_conn2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fgw_conn2.map"), 0.0, fail=True
        )
        self.Fhru2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fhru2.map"), 0.0, fail=True
        )
        self.SLA2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/sla2.map"), 0.0, fail=True
        )
        self.LAIref2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/lairef2.map"), 0.0, fail=True
        )
        self.FsoilEmax2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fsoilemax2.map"), 0.0, fail=True
        )
        self.fvegref_G2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fvegref_g2.map"), 0.0, fail=True
        )
        self.FwaterE2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/fwatere2.map"), 0.0, fail=True
        )
        self.Gfrac_max2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/gfrac_max2.map"), 0.0, fail=True
        )
        self.hveg2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/hveg2.map"), 0.0, fail=True
        )
        self.InitLoss2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/initloss2.map"), 0.0, fail=True
        )
        self.LAImax2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/laimax2.map"), 0.0, fail=True
        )
        self.PrefR2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/prefr2.map"), 0.0, fail=True
        )
        self.S_sls2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/s_sls2.map"), 0.0, fail=True
        )
        self.S0FC2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/s0fc2.map"), 0.0, fail=True
        )
        self.SsFC2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/ssfc2.map"), 0.0, fail=True
        )
        self.SdFC2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/sdfc2.map"), 0.0, fail=True
        )
        self.Vc2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/vc2.map"), 0.0, fail=True
        )
        self.w0ref_alb2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/w0ref_alb2.map"), 0.0, fail=True
        )
        self.Us02 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/us02.map"), 0.0, fail=True
        )
        self.Ud02 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/ud02.map"), 0.0, fail=True
        )
        self.wslimU2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/wslimu2.map"), 0.0, fail=True
        )
        self.wdlimU2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/wdlimu2.map"), 0.0, fail=True
        )
        self.w0limE2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/w0lime2.map"), 0.0, fail=True
        )
        self.Tgrow2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/tgrow2.map"), 0.0, fail=True
        )
        self.Tsenc2 = self.wf_readmap(
            os.path.join(self.Dir, "staticmaps/tsenc2.map"), 0.0, fail=True
        )

        self.wf_multparameters()
        # Static, for the computation of Aerodynamic conductance (3.7)
        self.fh1 = pcr.ln(813.0 / self.hveg1 - 5.45)
        self.fh2 = pcr.ln(813.0 / self.hveg2 - 5.45)
        self.ku2_1 = 0.305 / (self.fh1 * (self.fh1 + 2.3))
        self.ku2_2 = 0.305 / (self.fh2 * (self.fh2 + 2.3))
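        # fh and ku2 form the aerodynamic conductance term of eq. (3.7). Rough check
        # (not executed): for hveg = 0.5 m, fh = ln(813 / 0.5 - 5.45) ~ 7.39 and
        # ku2 = 0.305 / (7.39 * (7.39 + 2.3)) ~ 0.0043.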

        self.logger.info("Starting Dynamic run...")
コード例 #25
0
        point_map = workdir + 'point.map' 
        call(('gdal_translate','-of','GTiff','-a_srs',EPSG,'-ot','Float32',clone_map,point_tif))
        call(('gdal_rasterize','-burn','1','-l',file_att,pointshp,point_tif))
        call(('gdal_translate','-of','PCRaster','-a_srs',EPSG,'-ot','Float32',point_tif,point_map))
        points = pcr.scalar(pcr.readmap(point_map))
        if snapgaugestoriver:    
            print "Snapping points to line"
            points= wt.snaptomap(pcr.ordinal(points),pcr.boolean(lines)) 
            points= pcr.cover(pcr.scalar(points),pcr.scalar(0))     
        points = pcr.cover(points, pcr.scalar(0))
        #pcr.report(points,'points.map')
        burn = burn - (points * pcr.scalar(burnvalue)*2)
        #pcr.report(burn,'burn3.map')
    
''' create ldd '''
pcr.setglobaloption("lddout")
if lddin:
    pcr.setglobaloption("lddin")
ldd_map = workdir + 'ldd.map'
streamorder_map = workdir + 'streamorder.map'
river_map = workdir + 'river.map'
catchments_map = workdir + 'catchments.map'
catchments_tif = workdir + 'catchments.tif'
#catchments_shp = resultdir + 'catchments.shp'

generateldd = True

if skipldd:
    print('Option -S is set')
    print('ldd will be read from ' + ldd_map)
    if os.path.exists(ldd_map):
コード例 #26
0
root_dir = os.path.dirname(os.getcwd())
input_dir = os.path.join(root_dir, 'input')
ref_map_dir = os.path.join(input_dir, 'reference_maps')
bio_dir = os.path.join(input_dir, 'bio')
cost_dir = os.path.join(input_dir, 'cost')

output_dir = os.path.join(root_dir, 'output/waal_XL')
ens_dir = os.path.join(output_dir, 'measures_ensemble02')
ens_map_dir = os.path.join(ens_dir, 'maps')
ens_FM_dir = os.path.join(ens_dir, 'hydro')
ens_overview_dir = os.path.join(ens_dir, 'overview')

scratch_dir = os.path.join(root_dir, 'scratch')
clone_file = os.path.join(ref_map_dir, 'clone.map')
pcr.setclone(clone_file)
pcr.setglobaloption('unittrue')
os.chdir(scratch_dir)
#%% Initialize BIOSAFE
ndff_species = pd.read_pickle(os.path.join(bio_dir, 'ndff_sub_BS_13.pkl'))
flpl_sections = pcr.readmap(os.path.join(bio_dir, 'flpl_sections.map'))
ecotopes = measures.read_map_with_legend(os.path.join(bio_dir, 'ecotopes.map'))
legalWeights, linksLaw, linksEco = bsIO.from_csv(bio_dir)
speciesPresence = pd.DataFrame(np.random.randint(2, size=len(linksLaw)),\
                    columns=['speciesPresence'], \
                    index=linksLaw.index)
ecotopeArea = pd.DataFrame(np.ones(82) * 1e5,\
                           columns = ['area_m2'],\
                           index = linksEco.columns.values[0:-1])

bs = biosafe.biosafe(legalWeights, linksLaw, linksEco, speciesPresence,
コード例 #27
0
def main():

    ### Read input arguments #####
    logfilename = 'wtools_static_maps.log'
    parser = OptionParser()
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q',
                      '--quiet',
                      dest='verbose',
                      default=True,
                      action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i',
                      '--ini',
                      dest='inifile',
                      default=None,
                      help='ini file with settings for static_maps.exe')
    parser.add_option('-s',
                      '--source',
                      dest='source',
                      default='wflow',
                      help='Source folder containing clone (default=./wflow)')
    parser.add_option('-d',
                      '--destination',
                      dest='destination',
                      default='staticmaps',
                      help='Destination folder (default=./staticmaps)')
    parser.add_option('-r',
                      '--river',
                      dest='rivshp',
                      default=None,
                      help='river network polyline layer (ESRI Shapefile)')
    parser.add_option('-c',
                      '--catchment',
                      dest='catchshp',
                      default=None,
                      help='catchment polygon layer (ESRI Shapefile)')
    parser.add_option('-g',
                      '--gauges',
                      dest='gaugeshp',
                      default=None,
                      help='gauge point layer (ESRI Shapefile)')
    parser.add_option('-D',
                      '--dem',
                      dest='dem_in',
                      default=None,
                      help='digital elevation model (GeoTiff)')
    parser.add_option('-L',
                      '--landuse',
                      dest='landuse',
                      default=None,
                      help='land use / land cover layer (GeoTiff)')
    parser.add_option('-S',
                      '--soiltype',
                      dest='soil',
                      default=None,
                      help='soil type layer (GeoTiff)')
    parser.add_option(
        '-V',
        '--vegetation',
        dest='lai',
        default=None,
        help=
        'vegetation LAI layer location (containing 12 GeoTiffs <LAI00000.XXX.tif>)'
    )
    parser.add_option(
        '-O',
        '--other_maps',
        dest='other_maps',
        default=None,
        help=
        'bracketed [] comma-separated list of paths to other maps that should be reprojected'
    )
    parser.add_option(
        '-C',
        '--clean',
        dest='clean',
        default=False,
        action='store_true',
        help='Clean the .xml files from static maps folder when finished')
    parser.add_option(
        '-A',
        '--alltouch',
        dest='alltouch',
        default=False,
        action='store_true',
        help=
        'option to burn catchments "all touching".\nUseful when catchment-size is small compared to cellsize'
    )
    (options, args) = parser.parse_args()
    # parse other maps into an array
    options.other_maps = options.other_maps.replace(' ', '').replace(
        '[', '').replace(']', '').split(',')

    options.source = os.path.abspath(options.source)
    clone_map = os.path.join(options.source, 'mask.map')
    clone_shp = os.path.join(options.source, 'mask.shp')
    clone_prj = os.path.join(options.source, 'mask.prj')

    if None in (options.inifile, options.rivshp, options.catchshp,
                options.dem_in):
        msg = """The following files are compulsory:
        - ini file
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    if not os.path.exists(options.rivshp):
        print('path to river shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.catchshp):
        print('path to catchment shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.dem_in):
        print('path to DEM cannot be found')
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wtools_lib.setlogger(logfilename, 'WTOOLS', options.verbose)

    # create directories # TODO: check if workdir is still necessary, try to keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if np.logical_and(os.path.isdir(options.destination),
                      options.destination != options.source):
        shutil.rmtree(options.destination)
    if options.destination != options.source:
        os.makedirs(options.destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            'Clone file {:s} not found. Please run create_grid first.'.format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = gis.gdal_readmap(clone_map, 'GTiff')
        trans = wtools_lib.get_geotransform(clone_map)
        extent = wtools_lib.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wtools_lib.get_projection(clone_map)
        unit_clone = srs.GetAttrValue('UNIT').lower()
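        # unit_clone ('degree' versus 'metre') decides further down which high-resolution
        # cellsize setting is used (highres_degree or highres_metre).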

    ### READ CONFIG FILE
    # open config-file
    config = wtools_lib.OpenConf(options.inifile)

    # read settings
    snapgaugestoriver = wtools_lib.configget(config,
                                             'settings',
                                             'snapgaugestoriver',
                                             True,
                                             datatype='boolean')
    burnalltouching = wtools_lib.configget(config,
                                           'settings',
                                           'burncatchalltouching',
                                           True,
                                           datatype='boolean')
    burninorder = wtools_lib.configget(config,
                                       'settings',
                                       'burncatchalltouching',
                                       False,
                                       datatype='boolean')
    verticetollerance = wtools_lib.configget(config,
                                             'settings',
                                             'vertice_tollerance',
                                             0.0001,
                                             datatype='float')
    ''' read parameters '''
    burn_outlets = wtools_lib.configget(config,
                                        'parameters',
                                        'burn_outlets',
                                        10000,
                                        datatype='int')
    burn_rivers = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_rivers',
                                       200,
                                       datatype='int')
    burn_connections = wtools_lib.configget(config,
                                            'parameters',
                                            'burn_connections',
                                            100,
                                            datatype='int')
    burn_gauges = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_gauges',
                                       100,
                                       datatype='int')
    minorder = wtools_lib.configget(config,
                                    'parameters',
                                    'riverorder_min',
                                    3,
                                    datatype='int')
    percentiles = np.array(config.get('parameters', 'statisticmaps',
                                      '0, 100').replace(' ', '').split(','),
                           dtype='float')

    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == 'degree':
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_degree',
                                           0.0005,
                                           datatype='float')
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_metre',
                                           50,
                                           datatype='float')

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(options.destination, 'clone_highres.tif')
    # make a highres clone as well!
    wtools_lib.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)
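    # hr_trans is a GDAL-style geotransform (xmin, cellsize, 0, ymax, 0, -cellsize);
    # the +2 in rows_hr/cols_hr adds a small margin of extra cells around the clone extent.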

    # read staticmap locations
    catchment_map = wtools_lib.configget(config, 'staticmaps', 'catchment',
                                         'wflow_catchment.map')
    dem_map = wtools_lib.configget(config, 'staticmaps', 'dem',
                                   'wflow_dem.map')
    demmax_map = wtools_lib.configget(config, 'staticmaps', 'demmax',
                                      'wflow_demmax.map')
    demmin_map = wtools_lib.configget(config, 'staticmaps', 'demmin',
                                      'wflow_demmin.map')
    gauges_map = wtools_lib.configget(config, 'staticmaps', 'gauges',
                                      'wflow_gauges.map')
    landuse_map = wtools_lib.configget(config, 'staticmaps', 'landuse',
                                       'wflow_landuse.map')
    ldd_map = wtools_lib.configget(config, 'staticmaps', 'ldd',
                                   'wflow_ldd.map')
    river_map = wtools_lib.configget(config, 'staticmaps', 'river',
                                     'wflow_river.map')
    outlet_map = wtools_lib.configget(config, 'staticmaps', 'outlet',
                                      'wflow_outlet.map')
    riverlength_fact_map = wtools_lib.configget(config, 'staticmaps',
                                                'riverlength_fact',
                                                'wflow_riverlength_fact.map')
    soil_map = wtools_lib.configget(config, 'staticmaps', 'soil',
                                    'wflow_soil.map')
    streamorder_map = wtools_lib.configget(config, 'staticmaps', 'streamorder',
                                           'wflow_streamorder.map')
    subcatch_map = wtools_lib.configget(config, 'staticmaps', 'subcatch',
                                        'wflow_subcatch.map')

    # read mask location (optional)
    masklayer = wtools_lib.configget(config, 'mask', 'masklayer',
                                     options.catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123 in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(options.dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info('Resampling dem from {:s} to {:s}'.format(
        os.path.abspath(options.dem_in),
        os.path.join(options.destination, dem_map)))
    gis.gdal_warp(options.dem_in,
                  clone_map,
                  os.path.join(options.destination, dem_map),
                  format='PCRaster',
                  gdal_interp=gdalconst.GRA_Average)
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wtools_lib.get_projection(options.dem_in)
    except:
        logger.warning(
            'No projection found in DEM, assuming WGS 1984 lat long')
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    #if srs.ExportToProj4() == srs_dem.ExportToProj4():
    for percentile in percentiles:
        if percentile >= 100:
            logger.info('computing window maximum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_max.map')
        elif percentile <= 0:
            logger.info('computing window minimum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_min.map')
        else:
            logger.info('computing window {:d} percentile'.format(
                int(percentile)))
            percentile_dem = os.path.join(
                options.destination,
                'wflow_dem_{:03d}.map'.format(int(percentile)))

        stats = wtools_lib.windowstats(options.dem_in,
                                       len(yax),
                                       len(xax),
                                       trans,
                                       srs,
                                       percentile_dem,
                                       percentile,
                                       transform=clone2dem_transform,
                                       logger=logger)


#    else:
#        logger.warning('Projections of DEM and clone are different. DEM statistics for different projections is not yet implemented')
    """

    # burn in rivers
    # first convert and clip the river shapefile
    # retrieve river shape projection, if not available assume EPSG:4326
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_rivshp = lyr.GetSpatialRef()
        logger.info('Projection in river shapefile is {:s}'.format(srs_rivshp.ExportToProj4()))
    except:
        logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.rivshp))
        srs_rivshp = osr.SpatialReference()
        srs_rivshp.ImportFromEPSG(4326)
    rivprojshp = os.path.join(options.destination, 'rivshp_proj.shp')
    logger.info('Projecting and clipping {:s} to {:s}'.format(options.rivshp, rivprojshp))
    # TODO: Line below takes a very long time to process, the bigger the shapefile, the more time. How do we deal with this?
    call(('ogr2ogr','-s_srs', srs_rivshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), rivprojshp, options.rivshp))
    """

    # TODO: BURNING!!

    # project catchment layer to projection of clone
    file_att = os.path.splitext(os.path.basename(options.catchshp))[0]
    print(options.catchshp)
    ds = ogr.Open(options.catchshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_catchshp = lyr.GetSpatialRef()
        logger.info('Projection in catchment shapefile is {:s}'.format(
            srs_catchshp.ExportToProj4()))
    except:
        logger.warning(
            'No projection found in {:s}, assuming WGS 1984 lat-lon'.format(
                options.catchshp))
        srs_catchshp = osr.SpatialReference()
        srs_catchshp.ImportFromEPSG(4326)
    catchprojshp = os.path.join(options.destination, 'catchshp_proj.shp')
    logger.info('Projecting {:s} to {:s}'.format(options.catchshp,
                                                 catchprojshp))
    call(('ogr2ogr', '-s_srs', srs_catchshp.ExportToProj4(), '-t_srs',
          srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin),
          '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax),
          catchprojshp, options.catchshp))

    #
    logger.info('Calculating ldd')
    ldddem = pcr.readmap(os.path.join(options.destination, dem_map))
    ldd_select = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_select, os.path.join(options.destination, 'wflow_ldd.map'))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(ldd_select))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(options.destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(options.destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(options.destination, dem_map))
    pcr.report(streamorder, os.path.join(options.destination, streamorder_map))
    pcr.report(river, os.path.join(options.destination, river_map))

    # deal with your catchments
    if options.gaugeshp == None:
        logger.info('No gauges defined, using outlets instead')
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(ldd_select) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(options.destination, gauges_map))
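        # In a PCRaster ldd, direction code 5 marks a pit (outlet), so uniqueid() numbers
        # every outlet of the drainage network and these serve as gauges.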
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see SticMaps.py, line 492-499)

    # report river length
    # make a high resolution empty map
    dem_hr_file = os.path.join(options.destination, 'dem_highres.tif')
    burn_hr_file = os.path.join(options.destination, 'burn_highres.tif')
    demburn_hr_file = os.path.join(options.destination, 'demburn_highres.map')
    riv_hr_file = os.path.join(options.destination, 'riv_highres.map')
    gis.gdal_warp(options.dem_in, clone_hr, dem_hr_file)
    # wtools_lib.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    # open the shape layer
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    gis.ogr_burn(lyr,
                 clone_hr,
                 -100,
                 file_out=burn_hr_file,
                 format='GTiff',
                 gdal_type=gdal.GDT_Float32,
                 fill_value=0)
    # read dem and burn values and add
    xax_hr, yax_hr, burn_hr, fill = gis.gdal_readmap(burn_hr_file, 'GTiff')
    burn_hr[burn_hr == fill] = 0
    xax_hr, yax_hr, dem_hr, fill = gis.gdal_readmap(dem_hr_file, 'GTiff')
    dem_hr[dem_hr == fill] = np.nan
    demburn_hr = dem_hr + burn_hr
    demburn_hr[np.isnan(demburn_hr)] = -9999
    gis.gdal_writemap(demburn_hr_file, 'PCRaster', xax_hr, yax_hr, demburn_hr,
                      -9999.)
    pcr.setclone(demburn_hr_file)
    demburn_hr = pcr.readmap(demburn_hr_file)
    ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_hr, os.path.join(options.destination, 'ldd_hr.map'))
    pcr.setglobaloption('unitcell')
    riv_hr = pcr.scalar(
        pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
    pcr.report(riv_hr, riv_hr_file)
    pcr.setglobaloption('unittrue')
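    # riv_hr above was computed with the 'unitcell' option, so downstreamdist is expressed
    # in cell units on the high-resolution grid; windowstats with stat='fact' below
    # aggregates it into a river-length factor per low-resolution clone cell.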
    pcr.setclone(clone_map)
    logger.info('Computing river length')
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wtools_lib.windowstats(riv_hr_file,
                                         len(yax),
                                         len(xax),
                                         trans,
                                         srs,
                                         os.path.join(options.destination,
                                                      riverlength_fact_map),
                                         stat='fact',
                                         logger=logger)
    # TODO: nothing happends with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(pcr.ifthen(pcr.ordinal(ldd_select) == 5, pcr.ordinal(1)),
               os.path.join(options.destination, outlet_map))

    # report subcatchment map
    subcatchment = pcr.subcatchment(ldd_select, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(options.destination, subcatch_map))

    # Report land use map
    if options.landuse == None:
        logger.info(
            'No land use map used. Preparing {:s} with only ones.'.format(
                os.path.join(options.destination, landuse_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, landuse_map))
    else:
        logger.info('Resampling land use from {:s} to {:s}'.format(
            os.path.abspath(options.landuse),
            os.path.join(options.destination, os.path.abspath(landuse_map))))
        gis.gdal_warp(options.landuse,
                      clone_map,
                      os.path.join(options.destination, landuse_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    # report soil map
    if options.soil == None:
        logger.info('No soil map used. Preparing {:s} with only ones.'.format(
            os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        logger.info('Resampling soil from {:s} to {:s}'.format(
            os.path.abspath(options.soil),
            os.path.join(options.destination, os.path.abspath(soil_map))))
        gis.gdal_warp(options.soil,
                      clone_map,
                      os.path.join(options.destination, soil_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    if options.lai == None:
        logger.info(
            'No vegetation LAI maps used. Preparing default maps {:s} with only ones.'
            .format(os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        dest_lai = os.path.join(options.destination, 'clim')
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(options.lai,
                                  'LAI00000.{:03d}'.format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   'LAI00000.{:03d}'.format(month + 1))
            logger.info('Resampling vegetation LAI from {:s} to {:s}'.format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            gis.gdal_warp(lai_in,
                          clone_map,
                          lai_out,
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Bilinear,
                          gdal_type=gdalconst.GDT_Float32)

    # resample and report any other maps
    if options.other_maps == None:
        logger.info('No other maps used. Skipping other maps.')
    else:
        logger.info('Resampling list of other maps...')
        for map_file in options.other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info('Resampling a map from {:s} to {:s}'.format(
                os.path.abspath(map_file),
                os.path.join(options.destination, map_name)))
            gis.gdal_warp(map_file,
                          clone_map,
                          os.path.join(options.destination, map_name),
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Mode,
                          gdal_type=gdalconst.GDT_Float32)

    if options.clean:
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, 'clim', '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*highres*')),
                              logger=logger)
コード例 #28
0
def main(
        source,
        destination,
        inifile,
        dem_in,
        rivshp,
        catchshp,
        gaugeshp=None,
        landuse=None,
        soil=None,
        lai=None,
        other_maps=None,
        logfilename="wtools_static_maps.log",
        verbose=True,
        clean=True,
        alltouch=False,
        outlets=([], []),
):
    # parse other maps into an array
    if other_maps is not None:
        if isinstance(other_maps, str):
            print(other_maps)
            other_maps = (other_maps.replace(" ", "").replace("[", "").replace(
                "]", "").split(","))

    source = os.path.abspath(source)
    clone_map = os.path.join(source, "mask.map")
    clone_shp = os.path.join(source, "mask.shp")
    clone_prj = os.path.join(source, "mask.prj")

    if None in (rivshp, catchshp, dem_in):
        msg = """The following files are compulsory:
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        sys.exit(1)
    if (inifile is not None) and (not os.path.exists(inifile)):
        print("path to ini file cannot be found")
        sys.exit(1)
    if not os.path.exists(rivshp):
        print("path to river shape cannot be found")
        sys.exit(1)
    if not os.path.exists(catchshp):
        print("path to catchment shape cannot be found")
        sys.exit(1)
    if not os.path.exists(dem_in):
        print("path to DEM cannot be found")
        sys.exit(1)
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose)

    # create directories # TODO: check if workdir is still necessary, try to
    # keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if np.logical_and(os.path.isdir(destination), destination != source):
        shutil.rmtree(destination)
    if destination != source:
        os.makedirs(destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            "Clone file {:s} not found. Please run create_grid first.".format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = wt.gdal_readmap(clone_map, "GTiff")
        trans = wt.get_geotransform(clone_map)
        extent = wt.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wt.get_projection(clone_map)
        unit_clone = srs.GetAttrValue("UNIT").lower()

    # READ CONFIG FILE
    # open config-file
    if inifile is None:
        config = ConfigParser.SafeConfigParser()
        config.optionxform = str
    else:
        config = wt.OpenConf(inifile)

    # read settings
    snapgaugestoriver = wt.configget(config,
                                     "settings",
                                     "snapgaugestoriver",
                                     True,
                                     datatype="boolean")
    burnalltouching = wt.configget(config,
                                   "settings",
                                   "burncatchalltouching",
                                   True,
                                   datatype="boolean")
    burninorder = wt.configget(config,
                               "settings",
                               "burncatchalltouching",
                               False,
                               datatype="boolean")
    verticetollerance = wt.configget(config,
                                     "settings",
                                     "vertice_tollerance",
                                     0.0001,
                                     datatype="float")
    """ read parameters """
    burn_outlets = wt.configget(config,
                                "parameters",
                                "burn_outlets",
                                10000,
                                datatype="int")
    burn_rivers = wt.configget(config,
                               "parameters",
                               "burn_rivers",
                               200,
                               datatype="int")
    burn_connections = wt.configget(config,
                                    "parameters",
                                    "burn_connections",
                                    100,
                                    datatype="int")
    burn_gauges = wt.configget(config,
                               "parameters",
                               "burn_gauges",
                               100,
                               datatype="int")
    minorder = wt.configget(config,
                            "parameters",
                            "riverorder_min",
                            3,
                            datatype="int")
    try:
        percentiles = np.array(
            config.get("parameters", "statisticmaps",
                       "0, 100").replace(" ", "").split(","),
            dtype="float",
        )
    except ConfigParser.NoOptionError:
        percentiles = [0.0, 100.0]
    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == "degree":
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_degree",
                                   0.0005,
                                   datatype="float")
    elif (unit_clone == "metre") or (unit_clone == "meter"):
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_metre",
                                   50,
                                   datatype="float")

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(destination, "clone_highres.tif")
    # make a highres clone as well!
    wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")

    # read mask location (optional)
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123
    # in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info("Resampling dem from {:s} to {:s}".format(
        os.path.abspath(dem_in), os.path.join(destination, dem_map)))
    wt.gdal_warp(
        dem_in,
        clone_map,
        os.path.join(destination, dem_map),
        format="PCRaster",
        gdal_interp=gdalconst.GRA_Average,
    )
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wt.get_projection(dem_in)
    except:
        logger.warning(
            "No projection found in DEM, assuming WGS 1984 lat long")
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():

    wt.windowstats(
        dem_in,
        len(yax),
        len(xax),
        trans,
        srs,
        destination,
        percentiles,
        transform=clone2dem_transform,
        logger=logger,
    )

    ## read catchment shape-file to create catchment map
    src = rasterio.open(clone_map)
    shapefile = fiona.open(catchshp, "r")
    catchment_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(catchment_shapes,
                               out_shape=src.shape,
                               all_touched=True,
                               transform=src.transform)
    catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0)

    ## read river shape-file and create burn layer
    shapefile = fiona.open(rivshp, "r")
    river_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(river_shapes,
                               out_shape=src.shape,
                               all_touched=False,
                               transform=src.transform)
    rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0)
    riverdem = pcr.scalar(rivers) * pcr.readmap(
        os.path.join(destination, dem_map))
    pcr.setglobaloption("lddin")
    riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35)

    # burn depth scales with stream order (1000 per order above 1); river outlets (ldd pits) get an extra 1000
    riveroutlet = pcr.cover(
        pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0)
    burn_layer = pcr.cover(
        (pcr.scalar(
            pcr.ifthen(
                pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))) - 1)
        * 1000 + riveroutlet,
        0,
    )

    outlets_x, outlets_y = outlets
    n_outlets = len(outlets_x)
    logger.info("Number of outlets: {}".format(n_outlets))
    if n_outlets >= 1:
        outlets_map_numbered = tr.points_to_map(pcr.scalar(0), outlets_x,
                                                outlets_y, 0.5)
        outlets_map = pcr.boolean(outlets_map_numbered)
        # snap outlets to closest river (max 1 cell closer to river)
        outlets_map = pcr.boolean(
            pcr.cover(tr.snaptomap(pcr.ordinal(outlets_map), rivers), 0))

    ## create ldd per catchment
    logger.info("Calculating ldd")
    ldddem = pcr.scalar(clone_map)

    # per subcatchment, burn dem, then create modified dem that fits the ldd of the subcatchment
    # this ldd dem is merged over catchments, to create a global ldd that abides by the subcatchment boundaries
    for idx, shape in enumerate(catchment_shapes):
        logger.info("Computing ldd for catchment " + str(idx + 1) + "/" +
                    str(len(catchment_shapes)))
        image = features.rasterize([shape],
                                   out_shape=src.shape,
                                   all_touched=True,
                                   transform=src.transform)
        catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0)
        dem_burned_catchment = (
            pcr.readmap(os.path.join(destination, dem_map)) *
            pcr.scalar(catchment_domain) * catchment) - burn_layer
        # ldddem_catchment = pcr.lddcreatedem(
        #    dem_burned_catchment, 1e35, 1e35, 1e35, 1e35)
        ldddem = pcr.cover(ldddem, dem_burned_catchment)

    pcr.report(ldddem, os.path.join(destination, "ldddem.map"))

    wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    if n_outlets >= 1:
        # set outlets to pit
        wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd)
        wflow_ldd = pcr.lddrepair(wflow_ldd)

    pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map"))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values along the river with this value. The effect turned out to be negligible, so it is now left out.
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(destination, dem_map))
    pcr.report(streamorder, os.path.join(destination, streamorder_map))
    pcr.report(river, os.path.join(destination, river_map))

    # deal with your catchments
    if gaugeshp == None:
        logger.info("No gauges defined, using outlets instead")
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see StaticMaps.py, line 492-499)

    # since the products here (river length fraction) are not yet used
    # this is disabled for now, as it also takes a lot of computation time
    if False:
        # report river length
        # make a high resolution empty map
        dem_hr_file = os.path.join(destination, "dem_highres.tif")
        burn_hr_file = os.path.join(destination, "burn_highres.tif")
        demburn_hr_file = os.path.join(destination, "demburn_highres.map")
        riv_hr_file = os.path.join(destination, "riv_highres.map")
        wt.gdal_warp(dem_in, clone_hr, dem_hr_file)
        # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
        # open the shape layer
        ds = ogr.Open(rivshp)
        lyr = ds.GetLayer(0)
        wt.ogr_burn(
            lyr,
            clone_hr,
            -100,
            file_out=burn_hr_file,
            format="GTiff",
            gdal_type=gdal.GDT_Float32,
            fill_value=0,
        )
        # read dem and burn values and add
        xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff")
        burn_hr[burn_hr == fill] = 0
        xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff")
        dem_hr[dem_hr == fill] = np.nan
        demburn_hr = dem_hr + burn_hr
        demburn_hr[np.isnan(demburn_hr)] = -9999
        wt.gdal_writemap(demburn_hr_file, "PCRaster", xax_hr, yax_hr,
                         demburn_hr, -9999.)
        pcr.setclone(demburn_hr_file)
        demburn_hr = pcr.readmap(demburn_hr_file)

        logger.info("Calculating ldd to determine river length")
        ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
        pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map"))
        pcr.setglobaloption("unitcell")
        riv_hr = pcr.scalar(
            pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
        pcr.report(riv_hr, riv_hr_file)
        pcr.setglobaloption("unittrue")
        pcr.setclone(clone_map)
        logger.info("Computing river length")
        wt.windowstats(
            riv_hr_file,
            len(yax),
            len(xax),
            trans,
            srs,
            destination,
            stat="fact",
            transform=False,
            logger=logger,
        )
        # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(
        pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)),
        os.path.join(destination, outlet_map),
    )

    # report subcatchment map
    subcatchment = pcr.subcatchment(wflow_ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(destination, subcatch_map))

    # Report land use map
    if landuse == None:
        logger.info(
            "No land use map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, landuse_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map))
    else:
        logger.info("Resampling land use from {:s} to {:s}".format(
            os.path.abspath(landuse),
            os.path.join(destination, os.path.abspath(landuse_map)),
        ))
        wt.gdal_warp(
            landuse,
            clone_map,
            os.path.join(destination, landuse_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    # report soil map
    if soil == None:
        logger.info("No soil map used. Preparing {:s} with only ones.".format(
            os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        logger.info("Resampling soil from {:s} to {:s}".format(
            os.path.abspath(soil),
            os.path.join(destination, os.path.abspath(soil_map)),
        ))
        wt.gdal_warp(
            soil,
            clone_map,
            os.path.join(destination, soil_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    if lai == None:
        logger.info(
            "No vegetation LAI maps used. Preparing default maps {:s} with only ones."
            .format(os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        dest_lai = os.path.join(destination, "clim")
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   "LAI00000.{:03d}".format(month + 1))
            logger.info("Resampling vegetation LAI from {:s} to {:s}".format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            wt.gdal_warp(
                lai_in,
                clone_map,
                lai_out,
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Bilinear,
                gdal_type=gdalconst.GDT_Float32,
            )

    # report other maps
    if other_maps == None:
        logger.info("No other maps used. Skipping other maps.")
    else:
        logger.info("Resampling list of other maps...")
        for map_file in other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info("Resampling a map from {:s} to {:s}".format(
                os.path.abspath(map_file),
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
            ))
            wt.gdal_warp(
                map_file,
                clone_map,
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Mode,
                gdal_type=gdalconst.GDT_Float32,
            )

    if clean:
        wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "clim", "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")),
                      logger=logger)
Code example #29
0
def main():
    clone_map = "mask\mask.map"
    clone_shp = "mask\mask.shp"
    clone_prj = "mask\mask.prj"
    workdir = "work\\"
    resultdir = "staticmaps\\"
    ''' read commandline arguments '''
    argv = sys.argv
    clone_EPSG = False

    try:
        opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA')
    except getopt.error:
        print 'error'
        Usage()
        sys.exit(1)

    inifile = None
    rivshp = None
    catchshp = None
    dem_in = None
    landuse = None
    soiltype = None
    clean = False
    gaugeshp = None
    alltouching = False

    for o, a in opts:
        if o == '-i': inifile = a
        if o == '-p': clone_EPSG = 'EPSG:' + a
        if o == '-r': rivshp = a
        if o == '-c': catchshp = a
        if o == '-d': dem_in = a
        if o == '-l': landuse = a
        if o == '-s': soiltype = a
        if o == '-C': clean = True
        if o == '-g': gaugeshp = a
        if o == '-A': alltouching = True

    if inifile == None or rivshp == None or catchshp == None or dem_in == None:
        print 'the following files are compulsory:'
        print ' - ini-file'
        print ' - DEM (raster)'
        print ' - river (shape)'
        print ' - catchment (shape)'
        Usage()
        sys.exit(1)

    if landuse == None:
        print 'no raster with landuse classifications is specified. 1 class will be applied for the entire domain'

    if soiltype == None:
        print 'no raster with soil classifications is specified. 1 class will be applied for the entire domain'
    ''' read mask '''
    if not os.path.exists(clone_map):
        print 'Mask not found. Make sure the file mask\mask.map exists'
        print 'This file is usually created with the CreateGrid script'
        sys.exit(1)
    else:
        pcr.setclone(clone_map)
        ds = gdal.Open(clone_map, GA_ReadOnly)
        clone_trans = ds.GetGeoTransform()
        cellsize = clone_trans[1]
        clone_rows = ds.RasterYSize
        clone_columns = ds.RasterXSize
        extent_mask = [
            clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize,
            clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3]
        ]
        xmin, ymin, xmax, ymax = map(str, extent_mask)
        ds = None
        ones = pcr.scalar(pcr.readmap(clone_map))
        zeros = ones * 0
        empty = pcr.ifthen(ones == 0, pcr.scalar(0))
    ''' read projection from mask.shp '''
    # TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
    if not os.path.exists(clone_prj):
        print 'please add prj-file to mask.shp'
        sys.exit(1)
    if os.path.exists(clone_shp):
        ds = ogr.Open(clone_shp)
        file_att = os.path.splitext(os.path.basename(clone_shp))[0]
        lyr = ds.GetLayerByName(file_att)
        spatialref = lyr.GetSpatialRef()
        if not spatialref == None:
            srs_clone = osr.SpatialReference()
            srs_clone.ImportFromWkt(spatialref.ExportToWkt())
            srs_clone.AutoIdentifyEPSG()
            unit_clone = False
            unit_clone = srs_clone.GetAttrValue('UNIT').lower()
            #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1)
            # TODO: fix hard EPSG code below
            clone_EPSG = 'EPSG:' + '4167'
            print 'EPSG-code is read from mask.shp: ' + clone_EPSG
            spatialref = None
    if not clone_EPSG:
        print 'EPSG-code cannot be read from mask.shp'
        print 'please add prj-file to mask.shp or specify on command line'
        print 'e.g. -p EPSG:4326 (for WGS84 lat lon projection)'

    ds = None
    clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)])
    ''' open config-file '''
    config = wt.OpenConf(inifile)
    ''' read settings '''
    snapgaugestoriver = bool(
        int(wt.configget(config, "settings", "snapgaugestoriver", "1")))
    burnalltouching = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "1")))
    burninorder = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "0")))
    verticetollerance = float(
        wt.configget(config, "settings", "vertice_tollerance", "0.0001"))
    ''' read parameters '''
    burn_outlets = int(
        wt.configget(config, "parameters", "burn_outlets", 10000))
    burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200))
    burn_connections = int(
        wt.configget(config, "parameters", "burn_connections", 100))
    burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100))
    minorder = int(wt.configget(config, "parameters", "riverorder_min", 3))
    exec "percentile=tr.array(" + wt.configget(config, "parameters",
                                               "statisticmaps", [0, 100]) + ")"
    if not unit_clone:
        print 'failed to read unit (meter or degree) from mask projection'
        unit_clone = str(wt.configget(config, "settings", "unit", 'meter'))
        print 'unit read from settings: ' + unit_clone
    if unit_clone == 'degree':
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_degree", 0.0005))
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_metre", 50))

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
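    # hr_trans follows the GDAL geotransform convention:
    # (x origin, pixel width, row rotation, y origin, column rotation, negative pixel height)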
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    ''' read staticmap locations '''
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")
    ''' read mask location (optional) '''
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)
    ''' create directories '''
    if os.path.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)
    os.makedirs(resultdir)
    ''' Preparation steps '''
    zero_map = workdir + "zero.map"
    zero_tif = workdir + "zero.tif"
    pcr.report(zeros, zero_map)
    # TODO: replace gdal_translate call
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, zero_tif))
    pcr.setglobaloption("lddin")
    ''' resample DEM '''
    dem_resample = workdir + "dem_resampled.tif"
    ds = gdal.Open(dem_in, GA_ReadOnly)
    band = ds.GetRasterBand(1)
    nodata = band.GetNoDataValue()
    proj = ds.GetGeoTransform()
    cellsize_dem = proj[1]
    ''' read DEM projection '''
    spatialref = None
    spatialref = ds.GetProjection()
    if not spatialref == None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(spatialref)
        srs.AutoIdentifyEPSG()
        dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
        print 'EPSG-code is read from ' + os.path.basename(
            dem_in) + ': ' + dem_EPSG
        spatialref = None
        dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)])
        srs_DEM = osr.SpatialReference()
        srs_DEM.ImportFromEPSG(dem_EPSG_int)
        clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM)
    else:
        dem_EPSG = clone_EPSG
        print 'No projection defined for ' + os.path.basename(dem_in)
        print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    ds = None
    print 'Resampling DEM...'
    if nodata == None:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin,
              xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata',
              str(-9999), '-r', 'cubic', dem_in, dem_resample))
    else:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-srcnodata', str(nodata), '-dstnodata',
              str(nodata), '-r', 'cubic', dem_in, dem_resample))
    ''' create dem.map and statistic maps '''
    dem_resample_map = resultdir + dem_map
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', dem_resample, dem_resample_map))
    print 'Computing DEM statistics ....'
    stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans,
                           srs_clone, resultdir, percentile)
    ''' burn DEM '''
    ds = ogr.Open(rivshp)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    rivshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    # strip rivers to nodes
    xminc = str(float(xmin) + 0.5 * cellsize)
    yminc = str(float(ymin) + 0.5 * cellsize)
    xmaxc = str(float(xmax) - 0.5 * cellsize)
    ymaxc = str(float(ymax) - 0.5 * cellsize)
    if rivshp_EPSG == clone_EPSG:
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivshp))
    else:
        rivprojshp = workdir + 'rivshape_proj.shp'
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, rivprojshp, rivshp))
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivprojshp))

    rivshp = rivclipshp

    #### BURNING BELOW ####

    # TODO: check if extraction can be done within memory and return a burn layer
    shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int,
                            cellsize * verticetollerance, workdir)

    outlets = shapes[1]
    connections = shapes[2]
    outlets_att = os.path.splitext(os.path.basename(outlets))[0]
    connections_att = os.path.splitext(os.path.basename(connections))[0]
    dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0]
    connections_tif = workdir + connections_att + ".tif"
    outlets_tif = workdir + outlets_att + ".tif"
    # TODO: make the burning in memory
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, connections_tif))
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets,
          outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections,
          connections_tif))

    # convert rivers to order
    rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
    rivers_tif = workdir + rivshp_att + ".tif"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, rivers_tif))
    if burninorder:  # make river shape with an order attribute
        OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                                  cellsize * verticetollerance, workdir)
        wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
    else:
        call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp,
              rivers_tif))

    # convert 2 maps
    connections_map = workdir + connections_att + ".map"
    rivers_map = workdir + rivshp_att + ".map"
    outlets_map = workdir + outlets_att + ".map"
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', connections_tif, connections_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', rivers_tif, rivers_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', outlets_tif, outlets_map))

    # burn the layers in DEM
    outletsburn = pcr.scalar(
        pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
    connectionsburn = pcr.scalar(
        pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
    riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
    ldddem = pcr.cover(dem_resample_map,
                       pcr.ifthen(riverburn > 0, pcr.scalar(0)))
    ldddem = ldddem - outletsburn - connectionsburn - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    pcr.report(ldddem, workdir + "dem_burn.map")
    ''' create ldd for multi-catchments '''
    ldd = pcr.ldd(empty)
    # reproject catchment shape-file
    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    catchshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    if not rivshp_EPSG == clone_EPSG:
        catchprojshp = workdir + 'catchshape_proj.shp'
        call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
              catchprojshp, catchshp))
        catchshp = catchprojshp
    ds.Destroy()

    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)
    Driver = ogr.GetDriverByName("ESRI Shapefile")  # OGR driver for the temporary shapefile
    TEMP_out = Driver.CreateDataSource(workdir + "temp.shp")
    if not srs == None:
        TEMP_LYR = TEMP_out.CreateLayer("temp",
                                        srs,
                                        geom_type=ogr.wkbMultiPolygon)
    else:
        TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
    TEMP_LYR.CreateField(fieldDef)

    for i in range(lyr.GetFeatureCount()):
        orgfeature = lyr.GetFeature(i)
        geometry = orgfeature.geometry()
        feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
        feature.SetGeometry(geometry)
        feature.SetField("ID", str(i + 1))
        TEMP_LYR.CreateFeature(feature)
    TEMP_out.Destroy()
    ds.Destroy()

    # rasterize catchment map
    catchments_tif = workdir + "catchments.tif"
    catchments_map = workdir + "catchments.map"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map,
          catchments_tif))
    if alltouching:
        call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
              workdir + 'temp.shp', catchments_tif))
    else:
        call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp',
              catchments_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
          catchments_tif, catchments_map))
    catchments = pcr.readmap(catchments_map)
    riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
    rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
    #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0))
    catchments = pcr.cover(
        pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
        pcr.ifthen(
            riverburn > 0,
            pcr.ordinal(
                pcr.spreadzone(pcr.nominal(catchments),
                               pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
    rivercatch_map = workdir + "catchments_river.map"
    catchclip_map = workdir + "catchments_clip.map"
    pcr.report(rivercatch, rivercatch_map)
    pcr.report(catchments, catchclip_map)

    ds = ogr.Open(workdir + "temp.shp")
    lyr = ds.GetLayerByName("temp")

    print 'calculating ldd'
    for i in range(lyr.GetFeatureCount()):
        feature = lyr.GetFeature(i)
        catch = int(feature.GetField("ID"))
        print "calculating ldd for catchment: " + str(i + 1) + "/" + str(
            lyr.GetFeatureCount()) + "...."
        ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                              catchments)) * 0 + 1 * ldddem
        ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                                   float("1E35"), float("1E35"))
        ldd = pcr.cover(ldd, ldd_select)
    pcr.report(ldd, resultdir + ldd_map)
    ds.Destroy()
    ''' report stream order, river and dem '''
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
    dem_resample_map = pcr.cover(dem_resample_map,
                                 pcr.scalar(river) * 0 + mindem)
    pcr.report(dem_resample_map, resultdir + dem_map)
    pcr.report(streamorder, resultdir + streamorder_map)
    pcr.report(river, resultdir + river_map)
    ''' deal with your catchments '''
    if gaugeshp == None:
        print 'No gauges defined, using outlets instead'
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, resultdir + gauges_map)


#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
#
#    # reproject gauge shape if necesarry
#    if not gaugeshp_EPSG == clone_EPSG:
#        gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#        call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp))
#        gaugeshp = gaugeprojshp
#
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    gaugestif = workdir + file_att + '.tif'
#    gaugesmap = workdir + file_att + '.map'
#    call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif))
#    call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif))
#    call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap))
#    gaugelocs = pcr.readmap(gaugesmap)
#    snapgaugestoriver = True
#
#    if snapgaugestoriver:
#        print "Snapping gauges to river"
#        gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#        gauges= wt.snaptomap(pcr.ordinal(gauges),river)
#
#    gaugesmap = pcr.ifthen(gauges > 0, gauges)
    ''' report riverlengthfrac '''
    riv_hr = workdir + 'river_highres.tif'
    wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
    print 'Computing river length...'
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns,
                                 clone_trans, srs_clone, resultdir, 'frac')
    ''' report outlet map '''
    pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
               resultdir + outlet_map)
    ''' report catchment map '''
    catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
    pcr.report(catchment, resultdir + catchment_map)
    ''' report subcatchment map '''
    subcatchment = pcr.subcatchment(ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)
    ''' report landuse map '''
    if landuse == None:
        pcr.report(pcr.nominal(ones), resultdir + landuse_map)
    else:
        landuse_resample = workdir + 'landuse.tif'
        landuse_map = resultdir + landuse_map
        transform = wt.GetRasterTranform(landuse, srs_clone)
        if not transform[0]:
            call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        else:
            call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              landuse_resample, landuse_map))
        landuse_work = pcr.readmap(landuse_map)
        pcr.report(pcr.nominal(landuse_work), landuse_map)
    ''' report soil map '''
    if soiltype == None:
        pcr.report(pcr.nominal(ones), resultdir + soil_map)
    else:
        soiltype_resample = workdir + 'soiltype.tif'
        soil_map = resultdir + soil_map
        #transform = wt.GetRasterTranform(soiltype,srs_clone)
        #        if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
              clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-r', 'mode', soiltype, soiltype_resample))
        #        else:
        #        call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              soiltype_resample, soil_map))
        soiltype_work = pcr.readmap(soil_map)
        pcr.report(pcr.nominal(soiltype_work), soil_map)

    if clean:
        wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
Code example #30
0
File: wflow_prepare_step1.py Project: teije01/wflow
def main():
    """
        
    :ivar masterdem: digital elevation model
    :ivar dem: digital elevation model
    :ivar river: optional river map
    """

    # Default values
    strRiver = 8
    masterdem = "dem.map"
    step1dir = "step1"
    step2dir = "step2"
    workdir = "."
    inifile = "wflow_prepare.ini"
    recreate = False
    snapgaugestoriver = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f",['version'])
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True
        if o == "--version":
            import wflow
            print("wflow version: ", wflow.__version__)
            sys.exit(0)

    pcr.setglobaloption("unitcell")
    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    masterdem = configget(config, "files", "masterdem", "dem.map")
    pcr.setclone(masterdem)

    strRiver = int(configget(config, "settings", "riverorder", "4"))

    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    # upscalefactor = float(config.get("settings","upscalefactor"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35")
    )
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(configget(config, "settings", "lddoutflowdepth", "1E35"))

    initialscale = int(configget(config, "settings", "initialscale", "1"))
    csize = float(configget(config, "settings", "cellsize", "1"))

    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1"))
    )
    lddglobaloption = configget(config, "settings", "lddglobaloption", "lddout")
    pcr.setglobaloption(lddglobaloption)
    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    # X/Y coordinates of the gauges in the system
    X = np.fromstring(gauges_x, sep=',')
    Y = np.fromstring(gauges_y, sep=',')

    tr.Verbose = 1

    # make the directories to save results in
    if not os.path.isdir(step1dir + "/"):
        os.makedirs(step1dir)
    if not os.path.isdir(step2dir):
        os.makedirs(step2dir)

    if initialscale > 1:
        print("Initial scaling of DEM...")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + masterdem
            + " "
            + step1dir
            + "/dem_scaled.map"
        )
        print("Reading dem...")
        dem = pcr.readmap(step1dir + "/dem_scaled.map")
        ldddem = dem
    else:
        print ("Reading dem...")
        dem = pcr.readmap(masterdem)
        ldddem = dem

    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask...")
    else:
        print("clipping DEM with mask.....")
        mask = pcr.readmap(catchmask)
        ldddem = pcr.ifthen(pcr.boolean(mask), ldddem)
        dem = pcr.ifthen(pcr.boolean(mask), dem)

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
        outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
    else:
        print("river file specified.....")
        try:
            outletpointX = float(configget(config, "settings", "outflowpointX", "0.0"))
            outletpointY = float(configget(config, "settings", "outflowpointY", "0.0"))
        except:
            print(
                "Need to specify the river outletpoint (a point at the end of the river within the current map)"
            )
            exit(1)

        outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5)
        pcr.report(outletpointmap, step1dir + "/outletpoint.map")
        # rivshpattr = config.get("files","riverattr")
        pcr.report(dem * 0.0, step1dir + "/nilmap.map")
        thestr = (
            "gdal_translate -of GTiff "
            + step1dir
            + "/nilmap.map "
            + step1dir
            + "/riverburn.tif"
        )
        os.system(thestr)
        rivshpattr = os.path.splitext(os.path.basename(rivshp))[0]
        os.system(
            "gdal_rasterize -burn 1 -l "
            + rivshpattr
            + " "
            + rivshp
            + " "
            + step1dir
            + "/riverburn.tif"
        )
        thestr = (
            "gdal_translate -of PCRaster "
            + step1dir
            + "/riverburn.tif "
            + step1dir
            + "/riverburn.map"
        )
        os.system(thestr)
        riverburn = pcr.readmap(step1dir + "/riverburn.map")
        # Determine regional slope assuming that is the way the river should run
        # pcr.setglobaloption("unitcell")
        # demregional=pcr.windowaverage(dem,100)
        ldddem = pcr.ifthenelse(riverburn >= 1.0, dem - 1000, dem)

    pcr.setglobaloption("unittrue")
    upscalefactor = int(csize / pcr.celllength())

    print("Creating ldd...")
    ldd = tr.lddcreate_save(
        step1dir + "/ldd.map",
        ldddem,
        recreate,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    print("Determining streamorder...")
    stro = pcr.streamorder(ldd)
    pcr.report(stro, step1dir + "/streamorder.map")
    strdir = pcr.ifthen(stro >= strRiver, stro)
    pcr.report(strdir, step1dir + "/streamorderrive.map")
    pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)), step1dir + "/rivers.map")

    pcr.setglobaloption("unittrue")
    # outlet (and other gauges if given)
    # TODO: check is x/y set if not skip this
    print("Outlet...")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)

    if snapgaugestoriver:
        print("Snapping gauges to nearest river cells...")
        pcr.report(outlmap, step1dir + "/orggauges.map")
        outlmap = tr.snaptomap(outlmap, strdir)

    # noutletmap = tr.points_to_map(dem,XX,YY,0.5)
    # pcr.report(noutletmap,'noutlet.map')

    pcr.report(outlmap, step1dir + "/gauges.map")

    # check if there is a pre-define catchment map
    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask, finding outlet")
        # Find catchment (overall)
        outlet = tr.find_outlet(ldd)
        sub = tr.subcatch(ldd, outlet)
        pcr.report(sub, step1dir + "/catchment_overall.map")
    else:
        print("reading and converting catchment mask.....")
        os.system(
            "resample -r "
            + str(initialscale)
            + " "
            + catchmask
            + " "
            + step1dir
            + "/catchment_overall.map"
        )
        sub = pcr.readmap(step1dir + "/catchment_overall.map")

    print("Scatch...")
    sd = tr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap))
    pcr.report(sd, step1dir + "/scatch.map")

    pcr.setglobaloption("unitcell")
    print("Upscalefactor: " + str(upscalefactor))

    if upscalefactor > 1:
        gc.collect()
        print("upscale river length1 (checkerboard map)...")
        ck = tr.checkerboard(dem, upscalefactor)
        pcr.report(ck, step1dir + "/ck.map")
        pcr.report(dem, step1dir + "/demck.map")
        print("upscale river length2...")
        fact = tr.area_riverlength_factor(ldd, ck, upscalefactor)
        pcr.report(fact, step1dir + "/riverlength_fact.map")

        # print("make dem statistics...")
        dem_ = pcr.areaaverage(dem, ck)
        pcr.report(dem_, step1dir + "/demavg.map")

        print("Create DEM statistics...")
        dem_ = pcr.areaminimum(dem, ck)
        pcr.report(dem_, step1dir + "/demmin.map")
        dem_ = pcr.areamaximum(dem, ck)
        pcr.report(dem_, step1dir + "/demmax.map")
        # calculate percentiles
        order = pcr.areaorder(dem, ck)
        n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck)
        #: calculate 25 percentile
        perc = tr.area_percentile(dem, ck, n, order, 25.0)
        pcr.report(perc, step1dir + "/dem25.map")
        perc = tr.area_percentile(dem, ck, n, order, 10.0)
        pcr.report(perc, step1dir + "/dem10.map")
        perc = tr.area_percentile(dem, ck, n, order, 50.0)
        pcr.report(perc, step1dir + "/dem50.map")
        perc = tr.area_percentile(dem, ck, n, order, 33.0)
        pcr.report(perc, step1dir + "/dem33.map")
        perc = tr.area_percentile(dem, ck, n, order, 66.0)
        pcr.report(perc, step1dir + "/dem66.map")
        perc = tr.area_percentile(dem, ck, n, order, 75.0)
        pcr.report(perc, step1dir + "/dem75.map")
        perc = tr.area_percentile(dem, ck, n, order, 90.0)
        pcr.report(perc, step1dir + "/dem90.map")
    else:
        print("No fancy scaling done. Going strait to step2....")
        pcr.report(dem, step1dir + "/demavg.map")
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
        gdalstr = (
            "gdal_translate  -projwin "
            + str(Xul)
            + " "
            + str(Yul)
            + " "
            + str(Xlr)
            + " "
            + str(Ylr)
            + " -of PCRaster  "
        )
        # gdalstr = "gdal_translate  -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster  "
        print(gdalstr)
        pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map")
        # Now use gdal_translate to convert the maps
        os.system(
            gdalstr
            + step1dir
            + "/wflow_riverlength_fact.map"
            + " "
            + step2dir
            + "/wflow_riverlength_fact.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_dem.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmin.map"
        )
        os.system(
            gdalstr + step1dir + "/demavg.map" + " " + step2dir + "/wflow_demmax.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_gauges.map"
        )
        os.system(
            gdalstr + step1dir + "/rivers.map" + " " + step2dir + "/wflow_river.map"
        )
        os.system(
            gdalstr
            + step1dir
            + "/streamorder.map"
            + " "
            + step2dir
            + "/wflow_streamorder.map"
        )
        os.system(
            gdalstr + step1dir + "/gauges.map" + " " + step2dir + "/wflow_outlet.map"
        )
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_catchment.map"
        )
        os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir + "/wflow_ldd.map")
        os.system(
            gdalstr + step1dir + "/scatch.map" + " " + step2dir + "/wflow_subcatch.map"
        )

        if lu_water:
            os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map")

        if lu_paved:
            os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map")

        try:
            lumap = config.get("files", "landuse")
        except:
            print("no landuse map...creating uniform map")
            # clone=pcr.readmap(step2dir + "/wflow_dem.map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + lumap
                + " "
                + step2dir
                + "/wflow_landuse.map"
            )

        try:
            soilmap = config.get("files", "soil")
        except:
            print("no soil map..., creating uniform map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map")
        else:
            os.system(
                "resample --clone "
                + step2dir
                + "/wflow_dem.map "
                + soilmap
                + " "
                + step2dir
                + "/wflow_soil.map"
            )
Code example #31
0
def pcr_inun(dem, ids, h_bounds, ids_coastline,
                resistance=0.,  water_perc=None, zero_resistance_waterp=1.0,
                cellres=1, dist_method='eucledian', ldd=None):
    """ planar inundation routine per segment

    :param dem:             pcr dem
    :param ids:             local ids of the boundary conditions, starting at 1 (not zero!)
    :param h_bounds:        water level boundary per diva segment
    :param ids_coastline:   pcraster map with coastal segment ids
    :param resistance:      constant or pcr map in km-1 (default 0: no resistance is calculated)
    :param water_perc:      optional pcr map with the water fraction per cell, used to lower the resistance over water
    :param zero_resistance_waterp: water fraction at or above which a cell gets zero resistance (default 1.0)
    :param cellres:         cell resolution in km, varies with latitude degrees
    :param dist_method:     'eucledian' (default) for nearest-neighbour distance to the coast,
                            or 'ldd' for distance along the ldd
    :param ldd:             pcraster map with local drainage direction, required to calculate resistance along the ldd;
                            if None (default) resistance is calculated using 'plain' nearest neighbour

    :return:                tuple of pcr maps (flood depth, distance to coast, resistance-adjusted dem)
    """
    pcr.setglobaloption("unitcell")
    if resistance > 0:
        coastline = pcr.cover(pcr.ifthenelse(pcr.scalar(ids_coastline) > 0, pcr.boolean(1), 0), pcr.boolean(0))
        mask = pcr.ifthen(dem > -9999, pcr.scalar(1))
        if dist_method == 'ldd':
            # Distance to coast along ldd
            dist2coast0 = pcr.ldddist(ldd, coastline, cellres)
            # find edge of area with distances -> water divide
            dist2coast_mask = pcr.cover(pcr.ifthenelse(dist2coast0 > 0, pcr.boolean(0), pcr.boolean(1)),
                                        pcr.boolean(1))
            start = pcr.ifthenelse(
                ((pcr.window4total(pcr.scalar(dist2coast_mask)) > 0) & (dist2coast_mask == pcr.boolean(0))) |
                coastline,
                pcr.boolean(1), pcr.boolean(0))
            # continue distance beyond water divide with eucledian dist
            dist2coast1 = pcr.spread(start, dist2coast0, cellres*mask)
            dist2coast = pcr.ifthenelse(dist2coast_mask, dist2coast1, dist2coast0)

        elif dist_method == 'eucledian':
            # dist to coast using nearest neighbor
            if water_perc is None:
                dist2coast = pcr.spread(coastline, 0, cellres*mask)
            else:
                # zero resistance for cells with water_perc >= zero_resistance_waterp
                zrw = float(zero_resistance_waterp)
                water_perc = pcr.ifthenelse(water_perc >= zrw,
                                            pcr.scalar(1),
                                            water_perc / zrw)
                dist2coast = pcr.spread(coastline, 0, cellres*mask*(1 - water_perc))

        dem_adjust = dem + pcr.cover(dist2coast, 0) * pcr.scalar(resistance)   # raise the elevation using a damping factor
    else:
        dem_adjust = dem
        dist2coast = pcr.scalar(1)

    fld_depth = pcr.ifthen(dem > -9999, pcr.scalar(0))

    for i, h in zip(ids, h_bounds):
        coast_segment = pcr.ifthenelse(ids_coastline == i, pcr.boolean(1), pcr.boolean(0))

        # find area below flood_level
        fld_prone = pcr.ifthenelse(dem_adjust <= pcr.scalar(float(h)), pcr.boolean(1), pcr.boolean(0))
        # make contiguous groups of cells which are below flood level
        fld_clump = pcr.clump(fld_prone)
        # find flooded area connected to diva segment
        fld_coast = pcr.ifthenelse(pcr.areamaximum(pcr.scalar(fld_clump) * pcr.scalar(coast_segment), fld_clump) > 0,
                                   pcr.boolean(1), pcr.boolean(0))
        # get max fld depth map
        fld_depth = pcr.max(fld_depth, pcr.ifthenelse(fld_coast, pcr.scalar(pcr.scalar(float(h)) - dem_adjust), 0))

    return fld_depth, dist2coast, dem_adjust
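
A minimal usage sketch for pcr_inun follows. It is not part of the original snippet: the clone map, DEM, coastal-segment map, segment ids and water levels are illustrative assumptions, chosen only to show the call signature and the returned tuple.

import pcraster as pcr

# illustrative inputs (hypothetical file names and values)
pcr.setclone("clone.map")
dem = pcr.readmap("dem.map")                        # elevation, -9999 as missing value
ids_coastline = pcr.readmap("coast_segments.map")   # ordinal map with coastal segment ids 1..n
seg_ids = [1, 2, 3]                                 # local ids of the boundary segments
h_bounds = [1.2, 0.8, 1.5]                          # water level per segment [m]

fld_depth, dist2coast, dem_adjust = pcr_inun(
    dem, seg_ids, h_bounds, ids_coastline,
    resistance=0.05,            # per km: dampen flooding with distance from the coast
    cellres=1.0,                # cell resolution in km
    dist_method='eucledian')    # nearest-neighbour distance to the coast
pcr.report(fld_depth, "flood_depth.map")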
Code example #32
0
    def __init__(self):
        # Print model info
        print('The Spatial Processes in HYdrology (SPHY) model is')
        print(
            'developed and owned by FutureWater, Wageningen, The Netherlands')
        print('Version 3.0, released June 2019')
        print(' ')

        #-Missing value definition
        self.MV = -9999

        # Read the modules to be used
        self.GlacFLAG = config.getint('MODULES', 'GlacFLAG')
        self.SnowFLAG = config.getint('MODULES', 'SnowFLAG')
        self.RoutFLAG = config.getint('MODULES', 'RoutFLAG')
        self.ResFLAG = config.getint('MODULES', 'ResFLAG')
        self.LakeFLAG = config.getint('MODULES', 'LakeFLAG')
        self.DynVegFLAG = config.getint('MODULES', 'DynVegFLAG')
        self.GroundFLAG = config.getint('MODULES', 'GroundFLAG')
        self.SedFLAG = config.getint('MODULES', 'SedFLAG')
        self.SedTransFLAG = config.getint('MODULES', 'SedTransFLAG')

        # import the required modules
        import datetime, calendar, ET, rootzone, subzone
        import utilities.reporting as reporting
        import utilities.timecalc as timecalc
        import utilities.netcdf2PCraster as netcdf2PCraster
        from math import pi
        #-standard python modules
        self.datetime = datetime
        self.calendar = calendar
        self.pi = pi
        #-FW defined modules
        self.reporting = reporting
        self.timecalc = timecalc
        self.netcdf2PCraster = netcdf2PCraster
        self.ET = ET
        self.rootzone = rootzone
        self.subzone = subzone
        del datetime, calendar, pi, reporting, timecalc, ET, rootzone, subzone
        #-import additional modules if required
        if self.GlacFLAG == 1:
            self.SnowFLAG = 1
            self.GroundFLAG = 1

        #-read the input and output directories from the configuration file
        self.inpath = config.get('DIRS', 'inputdir')
        self.outpath = config.get('DIRS', 'outputdir')

        #-set the timing criteria
        sy = config.getint('TIMING', 'startyear')
        sm = config.getint('TIMING', 'startmonth')
        sd = config.getint('TIMING', 'startday')
        ey = config.getint('TIMING', 'endyear')
        em = config.getint('TIMING', 'endmonth')
        ed = config.getint('TIMING', 'endday')
        self.startdate = self.datetime.datetime(sy, sm, sd)
        self.enddate = self.datetime.datetime(ey, em, ed)
        self.dateAfterUpdate = self.startdate - self.datetime.timedelta(
            days=1
        )  #-only required for glacier retreat (create dummy value here to introduce the variable)

        #-set date input for reporting
        self.startYear = sy
        self.endYear = ey
        self.spinUpYears = config.getint('TIMING', 'spinupyears')
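        # reported simulation years after spin-up, e.g. 2000-2010 with 2 spin-up years gives 2010 - 2000 - 2 + 1 = 9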
        self.simYears = self.endYear - self.startYear - self.spinUpYears + 1

        #-set the 2000 julian date number
        self.julian_date_2000 = 2451545
        #-read name of reporting table
        self.RepTab = config.get('REPORTING', 'RepTab')
        #-set the option to calculate the fluxes in mm for the upstream area
        self.mm_rep_FLAG = config.getint('REPORTING', 'mm_rep_FLAG')

        #-set the option to calculate the fluxes per component in mm for the upstream area
        pars = [
            'Prec', 'ETa', 'GMelt', 'QSNOW', 'QROOTR', 'QROOTD', 'QRAIN',
            'QGLAC', 'QBASE', 'QTOT', 'Seep'
        ]
        for i in pars:
            var = i + '_mm_FLAG'
            setattr(self, var, config.getint('REPORTING', var))

        #-set the option to calculate the timeseries of the water balance
        self.wbal_TSS_FLAG = config.getint('REPORTING', 'wbal_TSS_FLAG')

        #-setting clone map
        self.clonefile = self.inpath + config.get('GENERAL', 'mask')
        pcr.setclone(self.clonefile)
        self.clone = pcr.ifthen(pcr.readmap(self.clonefile), pcr.boolean(1))

        # cellvalue returns a (value, isValid) tuple; take the value of cell 1 as the cell area (assumes a uniform cell size)
        self.cellArea = pcr.cellvalue(pcr.cellarea(), 1)[0]

        #-read general maps
        self.DEM = pcr.readmap(self.inpath + config.get('GENERAL', 'dem'))
        self.Slope = pcr.readmap(self.inpath + config.get('GENERAL', 'Slope'))
        self.Locations = pcr.readmap(self.inpath +
                                     config.get('GENERAL', 'locations'))

        #-read soil calibration fractions
        self.RootFieldFrac = config.getfloat('SOIL_CAL', 'RootFieldFrac')
        self.RootSatFrac = config.getfloat('SOIL_CAL', 'RootSatFrac')
        self.RootDryFrac = config.getfloat('SOIL_CAL', 'RootDryFrac')
        self.RootWiltFrac = config.getfloat('SOIL_CAL', 'RootWiltFrac')
        self.RootKsatFrac = config.getfloat('SOIL_CAL', 'RootKsatFrac')

        #-read soil maps
        #-check for PedotransferFLAG
        self.PedotransferFLAG = config.getint('PEDOTRANSFER',
                                              'PedotransferFLAG')
        #-if pedotransfer functions are used read the sand, clay, organic matter and bulk density maps, otherwise read the soil hydraulic properties
        if self.PedotransferFLAG == 1:
            import utilities.pedotransfer
            self.pedotransfer = utilities.pedotransfer
            del utilities.pedotransfer

            #-read init processes pedotransfer
            self.pedotransfer.init(self, pcr, config, np)
        else:
            #self.Soil = pcr.readmap(self.inpath + config.get('SOIL','Soil'))
            self.RootFieldMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootFieldMap')) * self.RootFieldFrac
            self.RootSatMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootSatMap')) * self.RootSatFrac
            self.RootDryMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootDryMap')) * self.RootDryFrac
            self.RootWiltMap = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootWiltMap')) * self.RootWiltFrac
            self.RootKsat = pcr.readmap(self.inpath + config.get(
                'SOIL', 'RootKsat')) * self.RootKsatFrac
            self.SubSatMap = pcr.readmap(self.inpath +
                                         config.get('SOIL', 'SubSatMap'))
            self.SubFieldMap = pcr.readmap(self.inpath +
                                           config.get('SOIL', 'SubFieldMap'))
            self.SubKsat = pcr.readmap(self.inpath +
                                       config.get('SOIL', 'SubKsat'))
            self.RootDrainVel = self.RootKsat * self.Slope

        #-Read and set the soil parameters
        pars = ['CapRiseMax', 'RootDepthFlat', 'SubDepthFlat']
        for i in pars:
            try:
                setattr(self, i,
                        pcr.readmap(self.inpath + config.get('SOILPARS', i)))
            except:
                setattr(self, i, config.getfloat('SOILPARS', i))
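        # Note: the try/except above lets each of these soil parameters be
        # supplied either as a PCRaster map (spatially distributed) or, if
        # reading a map fails, as a single float from the ini file
        # (spatially uniform).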

        # groundwater storage as third storage layer. This is used instead of a fixed bottomflux
        if self.GroundFLAG == 1:
            import modules.groundwater
            self.groundwater = modules.groundwater
            del modules.groundwater

            #-read init processes groundwater
            self.groundwater.init(self, pcr, config)

        else:
            # if groundwater module is not used, read seepage and gwl_base
            self.SeepStatFLAG = config.getint('SOILPARS', 'SeepStatic')
            if self.SeepStatFLAG == 0:  # set the seepage map series
                self.Seepmaps = self.inpath + config.get('SOILPARS', 'SeePage')
            else:  #-set a static map or value for seepage
                try:
                    self.SeePage = pcr.readmap(
                        self.inpath + config.get('SOILPARS', 'SeePage'))
                except:
                    self.SeePage = config.getfloat('SOILPARS', 'SeePage')
            try:
                self.GWL_base = pcr.readmap(self.inpath +
                                            config.get('SOILPARS', 'GWL_base'))
            except:
                self.GWL_base = config.getfloat('SOILPARS', 'GWL_base')

            self.SubDrainVel = self.SubKsat * self.Slope

        #-calculate soil properties
        self.RootField = self.RootFieldMap * self.RootDepthFlat
        self.RootSat = self.RootSatMap * self.RootDepthFlat
        self.RootDry = self.RootDryMap * self.RootDepthFlat
        self.RootWilt = self.RootWiltMap * self.RootDepthFlat
        self.SubSat = self.SubSatMap * self.SubDepthFlat
        self.SubField = self.SubFieldMap * self.SubDepthFlat
        self.RootTT = pcr.max((self.RootSat - self.RootField) / self.RootKsat,
                              0.0001)
        self.SubTT = pcr.max((self.SubSat - self.SubField) / self.SubKsat,
                             0.0001)
        # soil max and soil min for scaling of gwl if groundwater module is not used
        if self.GroundFLAG == 0:
            self.SoilMax = self.RootSat + self.SubSat
            self.SoilMin = self.RootDry + self.SubField

        #-read land use map
        self.LandUse = pcr.readmap(self.inpath +
                                   config.get('LANDUSE', 'LandUse'))

        #-Use the dynamic vegetation module
        if self.DynVegFLAG == 1:
            #-import dynamic vegetation module
            import modules.dynamic_veg
            self.dynamic_veg = modules.dynamic_veg
            del modules.dynamic_veg

            #-read init processes dynamic vegetation
            self.dynamic_veg.init(self, pcr, config)
        #-read the crop coefficient table if the dynamic vegetation module is not used
        else:
            self.KcStatFLAG = config.getint('LANDUSE', 'KCstatic')
            if self.KcStatFLAG == 1:
                #-read land use map and kc table
                self.kc_table = self.inpath + config.get('LANDUSE', 'CropFac')
                self.Kc = pcr.lookupscalar(self.kc_table, self.LandUse)
            else:
                #-set the kc map series
                self.Kcmaps = self.inpath + config.get('LANDUSE', 'KC')

        #-read the p factor table if the plant water stress module is used
        self.PlantWaterStressFLAG = config.getint('PWS', 'PWS_FLAG')
        if self.PlantWaterStressFLAG == 1:
            PFactor = self.inpath + config.get('PWS', 'PFactor')
            self.PMap = pcr.lookupscalar(PFactor, self.LandUse)

        #-read and set glacier maps and parameters if glacier module is used
        if self.GlacFLAG:
            #-import glacier module
            import modules.glacier
            self.glacier = modules.glacier
            del modules.glacier

            #-read init processes glacier module
            self.glacier.init(self, pcr, config, pd, np, os)

        #-read and set snow maps and parameters if the snow module is used
        if self.SnowFLAG == 1:
            #-import snow module
            import modules.snow
            self.snow = modules.snow
            del modules.snow

            #-read init processes snow module
            self.snow.init(self, pcr, config)

        #-read and set climate forcing and set up the calculation of etref

        #-read precipitation data
        #-read flag for precipitation forcing by netcdf
        self.precNetcdfFLAG = config.getint('CLIMATE', 'precNetcdfFLAG')
        if self.precNetcdfFLAG == 1:
            #-read configuration for forcing by netcdf
            self.netcdf2PCraster.getConfigNetcdf(self, config, 'Prec',
                                                 'CLIMATE')

            #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
            self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Prec')
        else:
            #-read precipitation forcing folder
            self.Prec = self.inpath + config.get('CLIMATE', 'Prec')

        #-read temperature data
        #-read flag for temperature forcing by netcdf
        self.tempNetcdfFLAG = config.getint('CLIMATE', 'tempNetcdfFLAG')
        if self.tempNetcdfFLAG == 1:
            #-read configuration for forcing by netcdf
            self.netcdf2PCraster.getConfigNetcdf(self, config, 'Temp',
                                                 'CLIMATE')

            #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
            self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Temp')
        else:
            #-read temperature forcing folder
            self.Tair = self.inpath + config.get('CLIMATE', 'Tair')
        #-read flag for etref time series input
        self.ETREF_FLAG = config.getint('ETREF', 'ETREF_FLAG')
        #-determine the use of a given etref time-series or calculate etref using Hargreaves
        if self.ETREF_FLAG == 1:
            self.ETref = self.inpath + config.get('ETREF', 'ETref')
        else:
            self.Lat = pcr.readmap(self.inpath + config.get('ETREF', 'Lat'))
            #-read flag for minimum temperature forcing by netcdf
            self.TminNetcdfFLAG = config.getint('ETREF', 'TminNetcdfFLAG')
            if self.TminNetcdfFLAG == 1:
                #-read configuration for forcing by netcdf
                self.netcdf2PCraster.getConfigNetcdf(self, config, 'Tmin',
                                                     'ETREF')

                #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
                self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Tmin')
            else:
                self.Tmin = self.inpath + config.get('ETREF', 'Tmin')
            #-read flag for maximum temperature forcing by netcdf
            self.TmaxNetcdfFLAG = config.getint('ETREF', 'TmaxNetcdfFLAG')
            if self.TmaxNetcdfFLAG == 1:
                #-read configuration for forcing by netcdf
                self.netcdf2PCraster.getConfigNetcdf(self, config, 'Tmax',
                                                     'ETREF')

                #-determine x,y-coordinates of netcdf file and model domain and indices of netcdf corresponding to model domain
                self.netcdf2PCraster.netcdf2pcrInit(self, pcr, 'Tmax')
            else:
                self.Tmax = self.inpath + config.get('ETREF', 'Tmax')
            self.Gsc = config.getfloat('ETREF', 'Gsc')
            import hargreaves
            self.Hargreaves = hargreaves
            del hargreaves
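            # For reference: Hargreaves estimates ETref from temperature only,
            # commonly as ETref = 0.0023 * Ra * (Tavg + 17.8) * sqrt(Tmax - Tmin),
            # with Ra the extraterrestrial radiation derived from Gsc, the
            # latitude map and the day of year.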

        #-read and set routing maps and parameters
        if self.RoutFLAG == 1:
            import modules.routing
            self.routing = modules.routing
            del modules.routing

            #-read init processes routing
            self.routing.init(self, pcr, config)

        #-read and set advanced routing maps and parameters (used when lakes or reservoirs are simulated)
        if self.ResFLAG == 1 or self.LakeFLAG == 1:
            #-import advanced routing module
            import modules.advanced_routing
            self.advanced_routing = modules.advanced_routing
            del modules.advanced_routing

            #-read init processes advanced routing
            self.advanced_routing.init(self, pcr, config)

        #-read lake maps and parameters if lake module is used
        if self.LakeFLAG == 1:
            #-import lakes module
            import modules.lakes
            self.lakes = modules.lakes
            del modules.lakes

            #-read init processes lakes
            self.lakes.init(self, pcr, config)

        #-read reservoir maps and parameters if reservoir module is used
        if self.ResFLAG == 1:
            #-import reservoirs module
            import modules.reservoirs
            self.reservoirs = modules.reservoirs
            del modules.reservoirs

            #-read init processes reservoirs
            self.reservoirs.init(self, pcr, config)

        #-read flag for calculation of ET in reservoirs
        self.ETOpenWaterFLAG = config.getint('OPENWATER', 'ETOpenWaterFLAG')
        if self.ETOpenWaterFLAG == 1:
            #-read kc value for open water
            self.kcOpenWater = config.getfloat('OPENWATER', 'kcOpenWater')
            #-read openwater fraction map
            self.openWaterFrac = pcr.readmap(
                self.inpath + config.get('OPENWATER', 'openWaterFrac'))
            #-determine openwater map with values of each reservoir/lake in the extent of the openwater
            self.openWater = pcr.ifthenelse(self.openWaterFrac > 0,
                                            pcr.scalar(1), pcr.scalar(0))
            self.openWaterNominal = pcr.clump(pcr.nominal(self.openWater))
            self.openWaterNominal = pcr.nominal(
                pcr.areamaximum(pcr.scalar(self.ResID), self.openWaterNominal))
        else:
            #-set all cells to 0 for openwater fraction map
            self.openWaterFrac = self.DEM * 0
            self.openWater = 0
            self.ETOpenWater = 0

        #-read maps and parameters for infiltration excess
        self.InfilFLAG = config.getfloat('INFILTRATION', 'Infil_excess')
        if self.InfilFLAG == 1:
            self.K_eff = config.getfloat('INFILTRATION', 'K_eff')
            try:
                self.Alpha = config.getfloat('INFILTRATION', 'Alpha')
            except:
                self.Alpha = pcr.readmap(self.inpath +
                                         config.get('INFILTRATION', 'Alpha'))
            try:
                self.Labda_Infil = config.getfloat('INFILTRATION',
                                                   'Labda_infil')
            except:
                self.Labda_Infil = pcr.readmap(
                    self.inpath + config.get('INFILTRATION', 'Labda_infil'))
            try:
                self.paved_table = self.inpath + config.get(
                    'INFILTRATION', 'PavedFrac')
                self.pavedFrac = pcr.lookupscalar(self.paved_table,
                                                  self.LandUse)
            except:
                self.pavedFrac = 0

        #-read maps and parameters for soil erosion
        if self.SedFLAG == 1:
            #-read soil erosion model selector (1 for MUSLE, 2 for MMF)
            self.SedModel = config.getfloat('SEDIMENT', 'SedModel')

            #-read rock fraction map
            self.RockFrac = pcr.readmap(self.inpath +
                                        config.get('SEDIMENT', 'RockFrac'))

            #-read MUSLE input parameters
            if self.SedModel == 1:
                #-import musle module
                import modules.musle
                self.musle = modules.musle
                del modules.musle

                #-read init processes musle
                self.musle.init(self, pcr, config)

            #-read MMF input parameters
            if self.SedModel == 2:
                #-import mmf module
                import modules.mmf
                self.mmf = modules.mmf
                del modules.mmf

                #-read init processes mmf
                self.mmf.init(self, pcr, config)

            #-read input parameters for sediment transport
            if self.SedTransFLAG == 1:
                #-import sediment transport module
                import modules.sediment_transport
                self.sediment_transport = modules.sediment_transport
                del modules.sediment_transport

                #-read init processes sediment transport
                self.sediment_transport.init(self, pcr, config, csv, np)

        #-set the global option for radians
        pcr.setglobaloption('radians')
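
The optional processes above (pedotransfer, groundwater, dynamic vegetation, glacier, snow, routing, lakes, reservoirs, erosion, sediment transport) are all attached with the same plug-in idiom: import the module, keep a reference on the model instance, and let the module's init() read its own maps and parameters from the config. A minimal sketch of that idiom; the modules.example package and the EXAMPLE config section are hypothetical:

import pcraster as pcr

class Model(object):
    def __init__(self, config):
        self.config = config

    def initial(self):
        #-attach the optional module only when its flag is set in the config
        if self.config.getint('EXAMPLE', 'ExampleFLAG') == 1:
            import modules.example            # hypothetical plug-in module
            self.example = modules.example
            del modules.example
            #-the plug-in reads its own maps and parameters from the config
            self.example.init(self, pcr, self.config)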
コード例 #33
0
ファイル: coupling_functions.py プロジェクト: joycezw/glofrim
def getPCRcoords(PCRmap, missing_value_pcr=-999):
    """
    Get all vertex coordinates of a PCRaster map.

    Input:
    -----
    pcraster map (preferably landmask)
    value for MV (optional, default at -999)

    Output:
    ------
    list of (x,y) corner coordinates of each cell polygon

    """
    # Get coordinates as numpy array:
    # upper left coordinates
    pcr.setglobaloption("coorul")

    xcoord_pcr_ul_map = pcr.xcoordinate(PCRmap)
    xcoord_pcr_ul_np = pcr.pcr2numpy(xcoord_pcr_ul_map, missing_value_pcr)

    ycoord_pcr_ul_map = pcr.ycoordinate(PCRmap)
    ycoord_pcr_ul_np = pcr.pcr2numpy(ycoord_pcr_ul_map, missing_value_pcr)

    # lower right coordinates
    pcr.setglobaloption("coorlr")

    xcoord_pcr_lr_map = pcr.xcoordinate(PCRmap)
    xcoord_pcr_lr_np = pcr.pcr2numpy(xcoord_pcr_lr_map, missing_value_pcr)

    ycoord_pcr_lr_map = pcr.ycoordinate(PCRmap)
    ycoord_pcr_lr_np = pcr.pcr2numpy(ycoord_pcr_lr_map, missing_value_pcr)

    # centroid coordinates
    pcr.setglobaloption("coorcentre")

    xcoord_pcr_centr_map = pcr.xcoordinate(PCRmap)
    xcoord_pcr_centr_np = pcr.pcr2numpy(xcoord_pcr_centr_map,
                                        missing_value_pcr)

    ycoord_pcr_centr_map = pcr.ycoordinate(PCRmap)
    ycoord_pcr_centr_np = pcr.pcr2numpy(ycoord_pcr_centr_map,
                                        missing_value_pcr)

    # Construct collection of polygon vertices:
    # number of arrays/elements to loop over and/or construct new arrays
    array_count_pcr = len(ycoord_pcr_lr_np)
    elements_per_array_pcr = np.size(ycoord_pcr_lr_np) / array_count_pcr
    nonmiss_val_per_array_pcr = np.sum(ycoord_pcr_lr_np != missing_value_pcr)

    # filling empty arrays while looping over data
    i, j = np.where(xcoord_pcr_lr_np != missing_value_pcr)
    xcoord_pcr_lr_np_nonmiss = xcoord_pcr_lr_np[i, j]
    xcoord_pcr_ul_np_nonmiss = xcoord_pcr_ul_np[i, j]
    xcoord_pcr_ll_np_nonmiss = xcoord_pcr_ul_np[i, j]
    xcoord_pcr_ur_np_nonmiss = xcoord_pcr_lr_np[i, j]

    ycoord_pcr_lr_np_nonmiss = ycoord_pcr_lr_np[i, j]
    ycoord_pcr_ul_np_nonmiss = ycoord_pcr_ul_np[i, j]
    ycoord_pcr_ll_np_nonmiss = ycoord_pcr_lr_np[i, j]
    ycoord_pcr_ur_np_nonmiss = ycoord_pcr_ul_np[i, j]

    xcoord_pcr_centr_np_nonmiss = xcoord_pcr_centr_np[i, j]
    ycoord_pcr_centr_np_nonmiss = ycoord_pcr_centr_np[i, j]

    # corner coordinate pairs per cell: lower-left, lower-right, upper-right, upper-left
    ll = list(zip(xcoord_pcr_ll_np_nonmiss, ycoord_pcr_ll_np_nonmiss))
    lr = list(zip(xcoord_pcr_lr_np_nonmiss, ycoord_pcr_lr_np_nonmiss))
    ur = list(zip(xcoord_pcr_ur_np_nonmiss, ycoord_pcr_ur_np_nonmiss))
    ul = list(zip(xcoord_pcr_ul_np_nonmiss, ycoord_pcr_ul_np_nonmiss))
    # wrap all cell coordinates into a list of lists (one list per cell, with multiple tuples per cell corner)
    all_cell_coords_pcr = [[ll[i], lr[i], ur[i], ul[i]]
                           for i in range(len(ll))]

    return all_cell_coords_pcr
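
A short usage sketch of getPCRcoords; the landmask.map file name is hypothetical, and the clone is set from the same map so the coordinate operations use its geometry:

import pcraster as pcr

pcr.setclone('landmask.map')                 # hypothetical model-domain landmask
landmask = pcr.readmap('landmask.map')

cell_coords = getPCRcoords(landmask)
# one entry per non-missing cell, ordered [lower-left, lower-right, upper-right, upper-left]
print(len(cell_coords), 'cells')
print(cell_coords[0])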
コード例 #34
0
ファイル: pcrGlobalGeometry.py プロジェクト: wk1984/glofrim
import os, sys
import numpy as np
import pcraster as pcr
from math import pi as m_pi
from types import NoneType

pcr.setglobaloption('radians')
deg2Rad= m_pi/180.

def getArcDistance(latA, lonA, latB, lonB, radius= 6371221.3, testVerbose= False):
  '''Computes the distance between two points, positioned by \
their geographic coordinates along the surface of a perfect sphere \
in units given by the radius used. Input variables include:\n
- latA, latB: the latitude of the points considered in decimal degrees,\n
- lonA, lonB: the longitude of the points considered in decimal degrees,\n
- radius: the radius of the sphere in metres, set by default to \
that of Earth (6371221.3 m).'''
  #-make arrays if needed
  if isinstance(latA,float):
    latA= np.array(latA)
  if isinstance(lonA,float):
    lonA= np.array(lonA)
  if isinstance(latB,float):
    latB= np.array(latB)
  if isinstance(lonB,float):
    lonB= np.array(lonB)
  #-pad latitudes, longitudes
  if latA.size != latB.size:
    latA= np.ones(latB.shape)*latA
  if lonA.size != lonB.size:
    lonA= np.ones(lonB.shape)*lonA
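
The snippet above is cut off before the actual distance calculation. For reference, the arc distance between two points on a perfect sphere is commonly computed with the haversine formula; a small NumPy sketch under that assumption (not necessarily the exact formulation used in pcrGlobalGeometry.py):

import numpy as np

def arc_distance(latA, lonA, latB, lonB, radius=6371221.3):
    '''Haversine great-circle distance in the units of radius; inputs in decimal degrees.'''
    latA, lonA, latB, lonB = (np.radians(np.asarray(v, dtype=float))
                              for v in (latA, lonA, latB, lonB))
    dlat = latB - latA
    dlon = lonB - lonA
    a = np.sin(0.5*dlat)**2 + np.cos(latA)*np.cos(latB)*np.sin(0.5*dlon)**2
    return 2.0*radius*np.arcsin(np.sqrt(a))

# e.g. Amsterdam to De Bilt, roughly 35 km
print(arc_distance(52.37, 4.90, 52.10, 5.18))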
コード例 #35
0
def main():
    """
        
    :ivar masterdem: digital elevation model
    :ivar dem: digital elevation model
    :ivar river: optional river map
    """

    # Default values
    strRiver = 8
    masterdem = "dem.map"
    step1dir = "step1"
    step2dir = "step2"
    workdir = "."
    inifile = "wflow_prepare.ini"
    recreate = False
    snapgaugestoriver = False

    try:
        opts, args = getopt.getopt(sys.argv[1:], "W:hI:f")
    except getopt.error as msg:
        usage(msg)

    for o, a in opts:
        if o == "-W":
            workdir = a
        if o == "-I":
            inifile = a
        if o == "-h":
            usage()
        if o == "-f":
            recreate = True

    pcr.setglobaloption("unitcell")
    os.chdir(workdir)

    config = OpenConf(workdir + "/" + inifile)

    masterdem = configget(config, "files", "masterdem", "dem.map")
    pcr.setclone(masterdem)

    strRiver = int(configget(config, "settings", "riverorder", "4"))

    try:
        gauges_x = config.get("settings", "gauges_x")
        gauges_y = config.get("settings", "gauges_y")
    except:
        print("gauges_x and  gauges_y are required entries in the ini file")
        sys.exit(1)

    step1dir = configget(config, "directories", "step1dir", "step1")
    step2dir = configget(config, "directories", "step2dir", "step2")
    # upscalefactor = float(config.get("settings","upscalefactor"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35"))
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(
        configget(config, "settings", "lddoutflowdepth", "1E35"))

    initialscale = int(configget(config, "settings", "initialscale", "1"))
    csize = float(configget(config, "settings", "cellsize", "1"))

    snapgaugestoriver = bool(
        int(configget(config, "settings", "snapgaugestoriver", "1")))
    lddglobaloption = configget(config, "settings", "lddglobaloption",
                                "lddout")
    pcr.setglobaloption(lddglobaloption)
    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    # X/Y coordinates of the gauges in the system
    exec("X=tr.array(" + gauges_x + ")")
    exec("Y=tr.array(" + gauges_y + ")")

    tr.Verbose = 1

    # make the directories to save results in
    mkoutputdirs(step1dir, step2dir)

    ldddem = readdem(initialscale, masterdem, step1dir)
    dem = ldddem

    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask...")
    else:
        print("clipping DEM with mask.....")
        mask = pcr.readmap(catchmask)
        ldddem = pcr.ifthen(pcr.boolean(mask), ldddem)
        dem = pcr.ifthen(pcr.boolean(mask), dem)

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        outletpointX = float(
            configget(config, "settings", "outflowpointX", "0.0"))
        outletpointY = float(
            configget(config, "settings", "outflowpointY", "0.0"))
    else:
        print("river file specified.....")
        try:
            outletpointX = float(
                configget(config, "settings", "outflowpointX", "0.0"))
            outletpointY = float(
                configget(config, "settings", "outflowpointY", "0.0"))
        except:
            print(
                "Need to specify the river outletpoint (a point at the end of the river within the current map)"
            )
            exit(1)

        outletpointmap = tr.points_to_map(dem, outletpointX, outletpointY, 0.5)
        pcr.report(outletpointmap, step1dir + "/outletpoint.map")
        rivshpattr = config.get("files", "riverattr")
        pcr.report(dem * 0.0, step1dir + "/nilmap.map")
        thestr = ("gdal_translate -of GTiff " + step1dir + "/nilmap.map " +
                  step1dir + "/riverburn.tif")
        os.system(thestr)
        os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp +
                  " " + step1dir + "/riverburn.tif")
        thestr = ("gdal_translate -of PCRaster " + step1dir +
                  "/riverburn.tif " + step1dir + "/riverburn.map")
        os.system(thestr)
        riverburn = pcr.readmap(step1dir + "/riverburn.map")
        # Determine the regional slope, assuming that is the way the river should run
        pcr.setglobaloption("unitcell")
        demregional = pcr.windowaverage(dem, 100)
        ldddem = pcr.ifthenelse(riverburn >= 1.0, demregional - 1000, dem)

    pcr.setglobaloption("unittrue")
    upscalefactor = int(csize / pcr.celllength())

    print("Creating ldd...")
    ldd = tr.lddcreate_save(
        step1dir + "/ldd.map",
        ldddem,
        recreate,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    print("Determining streamorder...")
    stro = pcr.streamorder(ldd)
    pcr.report(stro, step1dir + "/streamorder.map")
    strdir = pcr.ifthen(stro >= strRiver, stro)
    pcr.report(strdir, step1dir + "/streamorderrive.map")
    pcr.report(pcr.boolean(pcr.ifthen(stro >= strRiver, stro)),
               step1dir + "/rivers.map")

    pcr.setglobaloption("unittrue")
    # outlet (and other gauges if given)
    # TODO: check if x/y are set; if not, skip this
    print("Outlet...")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)

    if snapgaugestoriver:
        print("Snapping gauges to nearest river cells...")
        pcr.report(outlmap, step1dir + "/orggauges.map")
        outlmap = tr.snaptomap(outlmap, strdir)

    # noutletmap = tr.points_to_map(dem,XX,YY,0.5)
    # pcr.report(noutletmap,'noutlet.map')

    pcr.report(outlmap, step1dir + "/gauges.map")

    # check if there is a pre-define catchment map
    try:
        catchmask = config.get("files", "catchment_mask")
    except:
        print("No catchment mask, finding outlet")
        # Find catchment (overall)
        outlet = tr.find_outlet(ldd)
        sub = pcr.subcatch(ldd, outlet)
        pcr.report(sub, step1dir + "/catchment_overall.map")
    else:
        print("reading and converting catchment mask.....")
        os.system("resample -r " + str(initialscale) + " " + catchmask + " " +
                  step1dir + "/catchment_overall.map")
        sub = pcr.readmap(step1dir + "/catchment_overall.map")

    print("Scatch...")
    sd = pcr.subcatch(ldd, pcr.ifthen(outlmap > 0, outlmap))
    pcr.report(sd, step1dir + "/scatch.map")

    pcr.setglobaloption("unitcell")
    print("Upscalefactor: " + str(upscalefactor))

    if upscalefactor > 1:
        gc.collect()
        print("upscale river length1 (checkerboard map)...")
        ck = tr.checkerboard(dem, upscalefactor)
        pcr.report(ck, step1dir + "/ck.map")
        pcr.report(dem, step1dir + "/demck.map")
        print("upscale river length2...")
        fact = tr.area_riverlength_factor(ldd, ck, upscalefactor)
        pcr.report(fact, step1dir + "/riverlength_fact.map")

        # print("make dem statistics...")
        dem_ = pcr.areaaverage(dem, ck)
        pcr.report(dem_, step1dir + "/demavg.map")

        print("Create DEM statistics...")
        dem_ = pcr.areaminimum(dem, ck)
        pcr.report(dem_, step1dir + "/demmin.map")
        dem_ = pcr.areamaximum(dem, ck)
        pcr.report(dem_, step1dir + "/demmax.map")
        # calculate percentiles
        order = pcr.areaorder(dem, ck)
        n = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), ck)
        #: calculate 25 percentile
        perc = tr.area_percentile(dem, ck, n, order, 25.0)
        pcr.report(perc, step1dir + "/dem25.map")
        perc = tr.area_percentile(dem, ck, n, order, 10.0)
        pcr.report(perc, step1dir + "/dem10.map")
        perc = tr.area_percentile(dem, ck, n, order, 50.0)
        pcr.report(perc, step1dir + "/dem50.map")
        perc = tr.area_percentile(dem, ck, n, order, 33.0)
        pcr.report(perc, step1dir + "/dem33.map")
        perc = tr.area_percentile(dem, ck, n, order, 66.0)
        pcr.report(perc, step1dir + "/dem66.map")
        perc = tr.area_percentile(dem, ck, n, order, 75.0)
        pcr.report(perc, step1dir + "/dem75.map")
        perc = tr.area_percentile(dem, ck, n, order, 90.0)
        pcr.report(perc, step1dir + "/dem90.map")
    else:
        print("No fancy scaling done. Going strait to step2....")
        pcr.report(dem, step1dir + "/demavg.map")
        Xul = float(config.get("settings", "Xul"))
        Yul = float(config.get("settings", "Yul"))
        Xlr = float(config.get("settings", "Xlr"))
        Ylr = float(config.get("settings", "Ylr"))
        gdalstr = ("gdal_translate  -projwin " + str(Xul) + " " + str(Yul) +
                   " " + str(Xlr) + " " + str(Ylr) + " -of PCRaster  ")
        # gdalstr = "gdal_translate  -a_ullr " + str(Xul) + " " + str(Yul) + " " +str(Xlr) + " " +str(Ylr) + " -of PCRaster  "
        print(gdalstr)
        pcr.report(pcr.cover(1.0), step1dir + "/wflow_riverlength_fact.map")
        # Now use gdal_translate to convert the maps
        os.system(gdalstr + step1dir + "/wflow_riverlength_fact.map" + " " +
                  step2dir + "/wflow_riverlength_fact.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_dem.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_demmin.map")
        os.system(gdalstr + step1dir + "/demavg.map" + " " + step2dir +
                  "/wflow_demmax.map")
        os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir +
                  "/wflow_gauges.map")
        os.system(gdalstr + step1dir + "/rivers.map" + " " + step2dir +
                  "/wflow_river.map")
        os.system(gdalstr + step1dir + "/streamorder.map" + " " + step2dir +
                  "/wflow_streamorder.map")
        os.system(gdalstr + step1dir + "/gauges.map" + " " + step2dir +
                  "/wflow_outlet.map")
        os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir +
                  "/wflow_catchment.map")
        os.system(gdalstr + step1dir + "/ldd.map" + " " + step2dir +
                  "/wflow_ldd.map")
        os.system(gdalstr + step1dir + "/scatch.map" + " " + step2dir +
                  "/wflow_subcatch.map")

        if lu_water:
            os.system(gdalstr + lu_water + " " + step2dir + "/WaterFrac.map")

        if lu_paved:
            os.system(gdalstr + lu_paved + " " + step2dir + "/PathFrac.map")

        try:
            lumap = config.get("files", "landuse")
        except:
            print("no landuse map...creating uniform map")
            # clone=pcr.readmap(step2dir + "/wflow_dem.map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_landuse.map")
        else:
            os.system("resample --clone " + step2dir + "/wflow_dem.map " +
                      lumap + " " + step2dir + "/wflow_landuse.map")

        try:
            soilmap = config.get("files", "soil")
        except:
            print("no soil map..., creating uniform map")
            pcr.setclone(step2dir + "/wflow_dem.map")
            pcr.report(pcr.nominal(1), step2dir + "/wflow_soil.map")
        else:
            os.system("resample --clone " + step2dir + "/wflow_dem.map " +
                      soilmap + " " + step2dir + "/wflow_soil.map")

    ##################################
    # Step 2 starts here
    ##################################

    pcr.setclone(step2dir + "/cutout.map")

    strRiver = int(configget(config, "settings", "riverorder_step2", "4"))

    corevolume = float(configget(config, "settings", "corevolume", "1E35"))
    catchmentprecipitation = float(
        configget(config, "settings", "catchmentprecipitation", "1E35"))
    corearea = float(configget(config, "settings", "corearea", "1E35"))
    outflowdepth = float(
        configget(config, "settings", "lddoutflowdepth", "1E35"))
    lddmethod = configget(config, "settings", "lddmethod", "dem")
    lddglobaloption = configget(config, "settings", "lddglobaloption",
                                "lddout")
    pcr.setglobaloption(lddglobaloption)

    nrrow = round(abs(Yul - Ylr) / csize)
    nrcol = round(abs(Xlr - Xul) / csize)
    mapstr = ("mapattr -s -S -R " + str(nrrow) + " -C " + str(nrcol) + " -l " +
              str(csize) + " -x " + str(Xul) + " -y " + str(Yul) +
              " -P yb2t " + step2dir + "/cutout.map")

    os.system(mapstr)
    pcr.setclone(step2dir + "/cutout.map")

    lu_water = configget(config, "files", "lu_water", "")
    lu_paved = configget(config, "files", "lu_paved", "")

    if lu_water:
        os.system("resample --clone " + step2dir + "/cutout.map " + lu_water +
                  " " + step2dir + "/wflow_waterfrac.map")

    if lu_paved:
        os.system("resample --clone " + step2dir + "/cutout.map " + lu_paved +
                  " " + step2dir + "/PathFrac.map")

    #
    try:
        lumap = config.get("files", "landuse")
    except:
        print("no landuse map...creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_landuse.map")
    else:
        os.system("resample --clone " + step2dir + "/cutout.map " + lumap +
                  " " + step2dir + "/wflow_landuse.map")

    try:
        soilmap = config.get("files", "soil")
    except:
        print("no soil map..., creating uniform map")
        clone = pcr.readmap(step2dir + "/cutout.map")
        pcr.report(pcr.nominal(clone), step2dir + "/wflow_soil.map")
    else:
        os.system("resample --clone " + step2dir + "/cutout.map " + soilmap +
                  " " + step2dir + "/wflow_soil.map")

    resamplemaps(step1dir, step2dir)

    dem = pcr.readmap(step2dir + "/wflow_dem.map")
    demmin = pcr.readmap(step2dir + "/wflow_demmin.map")
    demmax = pcr.readmap(step2dir + "/wflow_demmax.map")
    catchcut = pcr.readmap(step2dir + "/catchment_cut.map")
    # now apply the area of interest (catchcut) to the DEM
    # dem=pcr.ifthen(catchcut >=1 , dem)
    #

    # See if there is a shape file of the river to burn in
    try:
        rivshp = config.get("files", "river")
    except:
        print("no river file specified")
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
    else:
        print("river file speficied.....")
        rivshpattr = config.get("files", "riverattr")
        pcr.report(dem * 0.0, step2dir + "/nilmap.map")
        thestr = ("gdal_translate -of GTiff " + step2dir + "/nilmap.map " +
                  step2dir + "/wflow_riverburnin.tif")
        os.system(thestr)
        os.system("gdal_rasterize -burn 1 -l " + rivshpattr + " " + rivshp +
                  " " + step2dir + "/wflow_riverburnin.tif")
        thestr = ("gdal_translate -of PCRaster " + step2dir +
                  "/wflow_riverburnin.tif " + step2dir +
                  "/wflow_riverburnin.map")
        os.system(thestr)
        riverburn = pcr.readmap(step2dir + "/wflow_riverburnin.map")
        # ldddem = pcr.ifthenelse(riverburn >= 1.0, dem -1000 , dem)

    # Only burn within the original catchment
    riverburn = pcr.ifthen(pcr.scalar(catchcut) >= 1, riverburn)
    # Now set up a very high wall around the catchment that is scaled based on
    # the distance to the catchment, so that it slopes away from the catchment
    if lddmethod != "river":
        print("Burning in highres-river ...")
        disttocatch = pcr.spread(pcr.nominal(catchcut), 0.0, 1.0)
        demmax = pcr.ifthenelse(
            pcr.scalar(catchcut) >= 1.0,
            demmax,
            demmax + (pcr.celllength() * 100.0) / disttocatch,
        )
        pcr.setglobaloption("unitcell")
        demregional = pcr.windowaverage(demmin, 100)
        demburn = pcr.cover(
            pcr.ifthen(pcr.boolean(riverburn), demregional - 100.0), demmax)
    else:
        print("using average dem..")
        demburn = dem

    ldd = tr.lddcreate_save(
        step2dir + "/ldd.map",
        demburn,
        True,
        outflowdepth=outflowdepth,
        corevolume=corevolume,
        catchmentprecipitation=catchmentprecipitation,
        corearea=corearea,
    )

    # Find catchment (overall)
    outlet = tr.find_outlet(ldd)
    sub = pcr.subcatch(ldd, outlet)
    pcr.report(sub, step2dir + "/wflow_catchment.map")
    pcr.report(outlet, step2dir + "/wflow_outlet.map")

    # make river map
    strorder = pcr.streamorder(ldd)
    pcr.report(strorder, step2dir + "/wflow_streamorder.map")

    river = pcr.ifthen(pcr.boolean(strorder >= strRiver), strorder)
    pcr.report(river, step2dir + "/wflow_river.map")

    # make subcatchments
    # os.system("col2map --clone " + step2dir + "/cutout.map gauges.col " + step2dir + "/wflow_gauges.map")
    exec("X=tr.array(" + gauges_x + ")")
    exec("Y=tr.array(" + gauges_y + ")")

    pcr.setglobaloption("unittrue")

    outlmap = tr.points_to_map(dem, X, Y, 0.5)
    pcr.report(outlmap, step2dir + "/wflow_gauges_.map")

    if snapgaugestoriver:
        print("Snapping gauges to river")
        pcr.report(outlmap, step2dir + "/wflow_orggauges.map")
        outlmap = tr.snaptomap(outlmap, river)

    outlmap = pcr.ifthen(outlmap > 0, outlmap)
    pcr.report(outlmap, step2dir + "/wflow_gauges.map")

    scatch = pcr.subcatch(ldd, outlmap)
    pcr.report(scatch, step2dir + "/wflow_subcatch.map")
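
The essential PCRaster chain underneath this script is DEM -> local drain direction -> stream order -> river and subcatchment maps. A condensed, hedged sketch of that chain; the file names are hypothetical and the stream-order threshold is illustrative:

import pcraster as pcr

pcr.setclone('wflow_dem.map')                       # hypothetical DEM / clone map
dem = pcr.readmap('wflow_dem.map')

# derive the local drain direction network; 1E35 effectively disables the pit-removal thresholds
ldd = pcr.lddcreate(dem, 1e35, 1e35, 1e35, 1e35)

# cells with a high enough Strahler order are treated as river cells
strorder = pcr.streamorder(ldd)
river = pcr.ifthen(strorder >= 8, pcr.boolean(1))

# subcatchments draining towards gauge cells (ordinal IDs)
gauges = pcr.ordinal(pcr.readmap('wflow_gauges.map'))   # hypothetical gauge map
subcatch = pcr.subcatch(ldd, gauges)

pcr.report(river, 'wflow_river.map')
pcr.report(subcatch, 'wflow_subcatch.map')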
コード例 #36
0
def main():
	#-initialization
	# MVs
	MV= -999.
	# minimum catchment size to process
	catchmentSizeLimit= 0.0
	# period of interest, start and end year
	startYear= 1961
	endYear= 2010
	# maps
	cloneMapFileName= '/data/hydroworld/PCRGLOBWB20/input30min/global/Global_CloneMap_30min.map'
	lddFileName= '/data/hydroworld/PCRGLOBWB20/input30min/routing/lddsound_30min.map'
	cellAreaFileName= '/data/hydroworld/PCRGLOBWB20/input30min/routing/cellarea30min.map'
	# set clone 
	pcr.setclone(cloneMapFileName)
	# output
	outputPath= '/scratch/rens/reservedrecharge'
	percentileMapFileName= os.path.join(outputPath,'q%03d_cumsec.map')
	textFileName= os.path.join(outputPath,'groundwater_environmentalflow_%d.txt')
	fractionReservedRechargeMapFileName= os.path.join(outputPath,'fraction_reserved_recharge%d.map')
	fractionMinimumReservedRechargeMapFileName= os.path.join(outputPath,'minimum_fraction_reserved_recharge%d.map')
	# input
	inputPath= '/nfsarchive/edwin-emergency-backup-DO-NOT-DELETE/rapid/edwin/05min_runs_results/2015_04_27/non_natural_2015_04_27/global/netcdf/'
	# define data to be read from netCDF files
	ncData= {}
	variableName= 'totalRunoff'
	ncData[variableName]= {}
	ncData[variableName]['fileName']= os.path.join(inputPath,'totalRunoff_monthTot_output.nc')
	ncData[variableName]['fileRoot']= os.path.join(outputPath,'qloc')
	ncData[variableName]['annualAverage']= pcr.scalar(0)	
	variableName= 'gwRecharge'
	ncData[variableName]= {}
	ncData[variableName]['fileName']= os.path.join(inputPath,'gwRecharge_monthTot_output.nc')
	ncData[variableName]['fileRoot']= os.path.join(outputPath,'gwrec')
	ncData[variableName]['annualAverage']= pcr.scalar(0)
	variableName= 'discharge'
	ncData[variableName]= {}
	ncData[variableName]['fileName']= os.path.join(inputPath,'totalRunoff_monthTot_output.nc')
	ncData[variableName]['fileRoot']= os.path.join(outputPath,'qc')
	ncData[variableName]['annualAverage']= pcr.scalar(0)
	ncData[variableName]['mapStack']= np.array([])
	# percents and environmental flow condition set as percentile
	percents= range(10,110,10)
	environmentalFlowPercent= 10
	if environmentalFlowPercent not in percents:
		percents.append(environmentalFlowPercent)
		percents.sort()

	#-start
	# obtain attributes
	pcr.setclone(cloneMapFileName)
	cloneSpatialAttributes= spatialAttributes(cloneMapFileName)
	years= range(startYear,endYear+1)
	# output path
	if not os.path.isdir(outputPath):
		os.makedirs(outputPath)
	os.chdir(outputPath)
	# compute catchments
	ldd= pcr.readmap(lddFileName)
	cellArea= pcr.readmap(cellAreaFileName)
	catchments= pcr.catchment(ldd,pcr.pit(ldd))
	fractionWater= pcr.scalar(0.0) # temporary!
	lakeMask= pcr.boolean(0) # temporary!
	pcr.report(catchments,os.path.join(outputPath,'catchments.map'))
	maximumCatchmentID= int(pcr.cellvalue(pcr.mapmaximum(pcr.scalar(catchments)),1)[0])
	# iterate over years
	weight= float(len(years))**-1
	for year in years:
		#-echo year
		print ' - processing year %d' % year
		#-process data
		startDate= datetime.datetime(year,1,1)
		endDate= datetime.datetime(year,12,31)
		timeSteps= endDate.toordinal()-startDate.toordinal()+1
		dynamicIncrement= 1
		for variableName in ncData.keys():
			print '   extracting %s' % variableName,
			ncFileIn= ncData[variableName]['fileName']
			#-process data
			pcrDataSet= pcrObject(variableName, ncData[variableName]['fileRoot'],\
				ncFileIn,cloneSpatialAttributes, pcrVALUESCALE= pcr.Scalar, resamplingAllowed= True,\
				dynamic= True, dynamicStart= startDate, dynamicEnd= endDate, dynamicIncrement= dynamicIncrement, ncDynamicDimension= 'time')
			pcrDataSet.initializeFileInfo()
			pcrDataSet.processFileInfo()
			for fileInfo in pcrDataSet.fileProcessInfo.values()[0]:
				tempFileName= fileInfo[1]
				variableField= pcr.readmap(tempFileName)
				variableField= pcr.ifthen(pcr.defined(ldd),pcr.cover(variableField,0))
				if variableName == 'discharge':
					dayNumber= int(os.path.splitext(tempFileName)[1].strip('.'))
					date= datetime.date(year,1,1)+datetime.timedelta(dayNumber-1)
					numberDays= calendar.monthrange(year,date.month)[1]
					variableField= pcr.max(0,pcr.catchmenttotal(variableField*cellArea,ldd)/(numberDays*24*3600))
				ncData[variableName]['annualAverage']+= weight*variableField
				if 'mapStack' in ncData[variableName].keys():
					tempArray= pcr2numpy(variableField,MV)
					mask= tempArray != MV
					if ncData[variableName]['mapStack'].size != 0:
						ncData[variableName]['mapStack']= np.vstack((ncData[variableName]['mapStack'],tempArray[mask]))
					else:
						ncData[variableName]['mapStack']= tempArray[mask]
						coordinates= np.zeros((ncData[variableName]['mapStack'].size,2))
						pcr.setglobaloption('unitcell')
						tempArray= pcr2numpy(pcr.ycoordinate(pcr.boolean(1))+0.5,MV)
						coordinates[:,0]= tempArray[mask]
						tempArray= pcr2numpy(pcr.xcoordinate(pcr.boolean(1))+0.5,MV)
						coordinates[:,1]= tempArray[mask]      
				os.remove(tempFileName)				
			# delete object
			pcrDataSet= None
			del pcrDataSet
			# close line on screen
			print
	# report annual averages
	key= 'annualAverage'
	ncData['discharge'][key]/= 12
	for variableName in ncData.keys():
		ncData[variableName][key]= pcr.max(0,ncData[variableName][key])
		pcr.report(ncData[variableName][key],\
			os.path.join(outputPath,'%s_%s.map' % (variableName,key)))
	# remove aux.xml
	for tempFileName in os.listdir(outputPath):
		if 'aux.xml' in tempFileName:
			os.remove(tempFileName)
	# sort data
	print 'sorting discharge data'
	variableName= 'discharge'
	key= 'mapStack'
	indices= np.zeros((ncData[variableName][key].shape),np.uint)
	for iCnt in xrange(ncData[variableName][key].shape[1]):
		indices[:,iCnt]= ncData[variableName][key][:,iCnt].argsort(kind= 'mergesort')
		ncData[variableName][key][:,iCnt]= ncData[variableName][key][:,iCnt][indices[:,iCnt]]
	# extract values for percentiles
	print 'returning maps'
	for percent in percents:
		percentile= 0.01*percent
		index0= min(ncData[variableName][key].shape[0]-1,int(percentile*ncData[variableName][key].shape[0]))
		index1= min(ncData[variableName][key].shape[0]-1,int(percentile*ncData[variableName][key].shape[0])+1)
		x0= float(index0)/ncData[variableName][key].shape[0]
		x1= float(index1)/ncData[variableName][key].shape[0]
		if x0 <> x1:
			y= ncData[variableName][key][index0,:]+(percentile-x0)*\
				 (ncData[variableName][key][index1,:]-ncData[variableName][key][index0,:])/(x1-x0)
		else:
			y= ncData[variableName][key][index0,:]
		# convert a slice of the stack into an array
		tempArray= np.ones((cloneSpatialAttributes.numberRows,cloneSpatialAttributes.numberCols))*MV
		for iCnt in xrange(coordinates.shape[0]):
			row= coordinates[iCnt,0]-1
			col= coordinates[iCnt,1]-1
			tempArray[row,col]= y[iCnt]
		variableField= numpy2pcr(pcr.Scalar,tempArray,MV)
		pcr.report(variableField,percentileMapFileName % percent)
		if percent == environmentalFlowPercent:
			ncData[variableName]['environmentalFlow']= variableField
		tempArray= None; variableField= None
		del tempArray, variableField
	# process environmental flow
	# initialize map of reserved recharge fraction
	fractionReservedRechargeMap= pcr.ifthen(ncData[variableName]['environmentalFlow'] < 0,pcr.scalar(0))
	fractionMinimumReservedRechargeMap= pcr.ifthen(ncData[variableName]['environmentalFlow'] < 0,pcr.scalar(0))
	textFile= open(textFileName % environmentalFlowPercent,'w')
	hStr= 'Environmental flow analysis per basin, resulting in a map of renewable, exploitable recharge, for the %d%s quantile of discharge\n' % (environmentalFlowPercent,'%')
	hStr+= 'Returns Q_%d/R, the fraction of reserved recharge needed to sustain fully the environmental flow requirement defined as the %d percentile,\n' % (environmentalFlowPercent, environmentalFlowPercent)
	hStr+= 'and Q*_%d/R, a reduced fraction that takes the availability of surface water into account\n' % environmentalFlowPercent
	textFile.write(hStr)
	print hStr
	# create header to display on screen and write to file
	# reported are: 1: ID, 2: Area, 3: average discharge, 4: environmental flow, 5: average recharge,
	# 6: Q_%d/Q, 7: Q_%d/R_Avg, 8: R_Avg/Q_Avg, 9: Q*_%d/R_Avg
	hStr= '%6s,%15s,%15s,%15s,%15s,%15s,%15s,%15s,%15s\n' % \
		('ID','Area [km2]','Q_Avg [m3]','Q_%d [m3]' % environmentalFlowPercent ,'R_Avg [m3]','Q_%d/Q_Avg [-]' % environmentalFlowPercent,\
			'Q_%d/Q_Avg [-]' % environmentalFlowPercent,'R_Avg/Q_Avg [-]','Q*_%d/Q_Avg [-]' % environmentalFlowPercent)
	textFile.write(hStr)
	print hStr
	for catchment in xrange(1,maximumCatchmentID+1):
		# create catchment mask and check whether it does not coincide with a lake
		catchmentMask= catchments == catchment
		catchmentSize= pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask,cellArea*1.e-6)),1)[0]
		#~ ##~ if pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask,pcr.scalar(lakeMask))),1) <> \
				#~ ##~ pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask,pcr.scalar(catchmentMask))),1)[0] and \
				#~ ##~ catchmentSize > catchmentSizeLimit:
		key= 'annualAverage'
		variableName= 'discharge'			
		if bool(pcr.cellvalue(pcr.maptotal(pcr.ifthen((ldd == 5) & catchmentMask,\
				pcr.scalar(ncData[variableName][key] > 0))),1)[0]) and catchmentSize >= catchmentSizeLimit:
			# valid catchment, process
			# all volumes are in m3 per year
			key= 'annualAverage'
			catchmentAverageDischarge= pcr.cellvalue(pcr.mapmaximum(pcr.ifthen(catchmentMask & (ldd == 5),\
				ncData[variableName][key])),1)[0]*365.25*3600*24
			variableName= 'gwRecharge'
			catchmentRecharge= pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask,ncData[variableName][key]*\
				(1.-fractionWater)*cellArea)),1)[0]
			variableName= 'totalRunoff'
			catchmentRunoff= pcr.cellvalue(pcr.maptotal(pcr.ifthen(catchmentMask,ncData[variableName][key]*\
				cellArea)),1)[0]
			key= 'environmentalFlow'
			variableName= 'discharge'			
			catchmentEnvironmentalFlow= pcr.cellvalue(pcr.mapmaximum(pcr.ifthen(catchmentMask & (ldd == 5),\
				ncData[variableName][key])),1)[0]*365.25*3600*24
			catchmentRunoff= max(catchmentRunoff,catchmentEnvironmentalFlow)
			if catchmentAverageDischarge > 0.:
				fractionEnvironmentalFlow= catchmentEnvironmentalFlow/catchmentAverageDischarge
				fractionGroundWaterContribution= catchmentRecharge/catchmentAverageDischarge
			else:
				fractionEnvironmentalFlow= 0.
				fractionGroundWaterContribution= 0.
			if catchmentRecharge > 0:
				fractionReservedRecharge= min(1,catchmentEnvironmentalFlow/catchmentRecharge)
			else:
				fractionReservedRecharge= 1.0
			fractionMinimumReservedRecharge= (fractionReservedRecharge+fractionGroundWaterContribution-\
				fractionReservedRecharge*fractionGroundWaterContribution)*fractionReservedRecharge
			#~ # echo to screen, and write to file and map
			wStr= '%6s,%15.1f,%15.6g,%15.6g,%15.6g,%15.6f,%15.6f,%15.6f,%15.6f\n' % \
				(catchment,catchmentSize,catchmentAverageDischarge,catchmentEnvironmentalFlow,catchmentRecharge,\
					fractionEnvironmentalFlow,fractionReservedRecharge,fractionGroundWaterContribution,fractionMinimumReservedRecharge)
			print wStr
			textFile.write(wStr)
			# update maps
			fractionReservedRechargeMap= pcr.ifthenelse(catchmentMask,\
				pcr.scalar(fractionReservedRecharge),fractionReservedRechargeMap)
			fractionMinimumReservedRechargeMap= pcr.ifthenelse(catchmentMask,\
				pcr.scalar(fractionMinimumReservedRecharge),fractionMinimumReservedRechargeMap)
	#-report map and close text file
	pcr.report(fractionReservedRechargeMap,fractionReservedRechargeMapFileName % environmentalFlowPercent)
	pcr.report(fractionMinimumReservedRechargeMap,fractionMinimumReservedRechargeMapFileName % environmentalFlowPercent)
	# close text file
	textFile.close()
	# finished
	print 'all done!'
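
For reference, the sort-and-interpolate step above is essentially a per-cell linear-interpolation percentile taken over the time axis of the discharge stack; a small NumPy sketch of the equivalent operation, where the random stack is a hypothetical stand-in for mapStack (rows are monthly values, columns are non-missing cells):

import numpy as np

rng = np.random.default_rng(42)
map_stack = rng.random((600, 8))                 # hypothetical: 600 months x 8 cells

# roughly equivalent to the manual index0/index1 interpolation above
q10 = np.percentile(map_stack, 10, axis=0)       # 10th-percentile value per cell
q50 = np.percentile(map_stack, 50, axis=0)
print(q10)
print(q50)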