Example #1
def snaptomap(points, mmap):
    """
    Snap the points in _points_ to the nearest non-missing
    values in _mmap_. Can be used to move gauge locations
    to the nearest rivers.

    Input:
        - points - map with points to move
        - mmap - map with points to move to

    Return:
        - map with shifted points
    """
    points = pcr.cover(points, 0)
    # Create unique id map of mmap cells
    unq = pcr.nominal(pcr.cover(pcr.uniqueid(pcr.defined(mmap)), pcr.scalar(0.0)))
    # Now fill holes in the mmap map with values indicating the closest mmap cell.
    dist_cellid = pcr.scalar(pcr.spreadzone(unq, 0, 1))
    # Get map with values at locations in points with the closest mmap cell
    dist_cellid = pcr.ifthenelse(points > 0, dist_cellid, 0)
    # Spread this out
    dist_fill = pcr.spreadzone(pcr.nominal(dist_cellid), 0, 1)
    # Find the new (moved) locations
    npt = pcr.uniqueid(pcr.boolean(pcr.ifthen(dist_fill == unq, unq)))
    # Now recreate the original value in the points maps
    ptcover = pcr.spreadzone(pcr.cover(points, 0), 0, 1)
    # Now get the original point value in the pt map
    nptorg = pcr.ifthen(npt > 0, ptcover)

    return nptorg
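
A minimal usage sketch (not from the source; the clone and map names are hypothetical): snap a map of gauge points onto a river network and write the result.

import pcraster as pcr

pcr.setclone("wflow_dem.map")            # hypothetical clone map
gauges = pcr.readmap("gauges.map")       # points to move
river = pcr.readmap("wflow_river.map")   # defined only on river cells
moved = snaptomap(pcr.ordinal(gauges), river)
pcr.report(moved, "gauges_snapped.map")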
Example #2
def subcatch_order_a(ldd, oorder):
    """
    Determines subcatchments using the catchment order

    This version uses the last cell BELOW order to derive the
    catchments. In general you want the _b version.

    Input:
        - ldd
        - oorder - stream order to use

    Output:
        - map with catchment for the given streamorder
    """
    outl = find_outlet(ldd)
    large = pcr.subcatchment(ldd, pcr.boolean(outl))
    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    dif = pcr.upstream(
        ldd,
        pcr.cover(
            pcr.ifthen(
                large,
                pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts))),
            ),
            0,
        ),
    )
    dif = pcr.cover(pcr.scalar(outl), dif)  # Add catchment outlet
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(dif)))
    sc = pcr.subcatchment(ldd, dif)

    return sc, dif, stt
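
A hedged usage sketch (assumes the find_outlet helper from the same module is available; the map name is hypothetical):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")            # hypothetical clone map
ldd = pcr.readmap("wflow_ldd.map")
# split the basin at the last cells below stream order 4
sc, dif, stt = subcatch_order_a(ldd, 4)
pcr.report(sc, "subcatch_order4.map")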
Example #3
def subcatch_stream(ldd, stream, threshold):
    """
    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        stream -- pcraster object ordinal, streamorder
        threshold -- integer, strahler threshold, subcatchments ge threshold are
                 derived
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    # stream = pcr.streamorder(ldd)
    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(
        pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge, pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5, pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) >
                pcr.scalar(stream_ge), pcr.boolean(1), pcr.boolean(0))))

    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))
    return stream_ge, subcatch
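
A short usage sketch (map name hypothetical): derive the Strahler order on the fly and cut subcatchments at order 4.

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
stream = pcr.streamorder(ldd)
stream_ge, subcatch = subcatch_stream(ldd, stream, 4)
pcr.report(subcatch, "subcatch_strahler4.map")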
Example #4
def spatialInterpolation2PCR(fieldArray, pcrType, MV):
    #-interpolates the field array to the full extent
    field = pcr.numpy2pcr(pcrType, fieldArray, MV)
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    if pcrType == pcr.Scalar:
        field = pcr.areaaverage(field, zoneID)
    else:
        field = pcr.areamajority(field, zoneID)
    return field
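
A hedged usage sketch: fill a mostly-missing numpy field so every cell takes the value of its nearest station (Thiessen-style, via spreadzone). The clone dimensions and missing value are hypothetical and must match the array shape.

import numpy as np
import pcraster as pcr

pcr.setclone(10, 10, 1.0, 0.0, 10.0)     # hypothetical 10 x 10 clone
field = np.full((10, 10), -999.9)        # -999.9 marks missing values
field[2, 3] = 1.5
field[7, 8] = 3.0
full = spatialInterpolation2PCR(field, pcr.Scalar, -999.9)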
Example #5
def interpolategauges(inputmap, method):
    """"
    Interpolate time series gauge data onto a grid using different methods
    inputmap: map with points data for a single timestep
    method: string indicating the method
        inv
        pol
        
    input: inputmap, method
    returns: interpolated map
    """

    if method == "inv":
        result = pcr.inversedistance(1, inputmap, 3, 0, 0)
    elif method == "pol":
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)
    else:
        Unq = pcr.uniqueid(pcr.boolean(inputmap + 1))
        result = pcr.spreadzone(pcr.ordinal(pcr.cover(Unq, 0)), 0, 1)
        result = pcr.areaaverage(inputmap, result)

    return result
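
A short usage sketch (map name hypothetical): interpolate one timestep of gauge rainfall with both methods.

import pcraster as pcr

pcr.setclone("clone.map")                    # hypothetical clone map
gauges = pcr.readmap("rain_gauges.map")      # point rainfall, one timestep
rain_idw = interpolategauges(gauges, "inv")  # inverse distance
rain_pol = interpolategauges(gauges, "pol")  # Thiessen polygons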
Example #6
def pt_flow_in_river(ldd, river):
    """
    Returns all points (True) that flow into the main river (boolean map with river set to True)

    :param ldd: Drainage network
    :param river: Map of river (True River, False non-river)
    :return ifmap: map with inflow points into the river (True)
    :return catch: catchment of each of the inflow points
    """

    dspts = pcr.downstream(ldd, pcr.cover(river, 0))
    dspts = pcr.ifthenelse(pcr.cover(river, 0) == 1, 0, dspts)

    catch = pcr.subcatchment(ldd, pcr.nominal(pcr.uniqueid(dspts)))

    return dspts, catch
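
A hedged usage sketch (map names hypothetical); the river mask is passed as scalar so the cover/compare steps behave as written above.

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
river = pcr.readmap("wflow_river.map")       # boolean river mask
inflow, catch = pt_flow_in_river(ldd, pcr.scalar(river))
pcr.report(catch, "inflow_catchments.map")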
Example #7
    def premcloop(self):
        self.clone = pcr.boolean(cfg.cloneString)
        self.dem = pcr.scalar(cfg.dem)
        self.createInstancesPremcloop()

        # required for reporting as numpy
        self.locations = pcr.cover(pcr.nominal(cfg.locations), 0)
        pcr.report(self.locations, 'locats')

        self.forestNoForest = pcr.boolean(cfg.forestNoForest)
        idMap = pcr.uniqueid(self.clone)
        oneLocationPerArea = pcr.areamaximum(idMap,
                                             self.forestNoForest) == idMap
        self.locationsForParameters = pcr.cover(
            pcr.nominal(
                pcr.scalar(pcr.ifthen(oneLocationPerArea, self.forestNoForest))
                + 1), 0)
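
The areamaximum(idMap, areas) == idMap comparison used above is a common PCRaster idiom for selecting exactly one cell per area: only the cell that carries the largest unique id within its class compares equal. A standalone sketch (map names hypothetical):

import pcraster as pcr

pcr.setclone("clone.map")                      # hypothetical clone map
classes = pcr.readmap("forestNoForest.map")    # boolean classes
idMap = pcr.uniqueid(pcr.defined(classes))
# True only at the single cell per class holding the largest id
onePerArea = pcr.areamaximum(idMap, classes) == idMap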
Example #8
def subcatch_order_b(
    ldd, oorder, sizelimit=0, fill=False, fillcomplete=False, stoporder=0
):
    """
    Determines subcatchments using the catchment order

    This version tries to keep the number of upstream/downstream catchments
    small by first deriving the catchments connected to the major river (of
    the order given), and filling up from there.

    Input:
        - ldd
        - oorder - order to use
        - sizelimit - smallest catchments to include, default is all (sizelimit=0) in number of cells
        - if fill is set to True the higher order catchments are also filled
        - if fillcomplete is set to True the whole ldd is filled with catchments.


    :returns: sc, dif, nldd -- subcatchment map, outlet points, ldd with pits at the outlets
    """
    # outl = find_outlet(ldd)
    # large = pcr.subcatchment(ldd,pcr.boolean(outl))

    if stoporder == 0:
        stoporder = oorder

    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    maxorder = pcraster.framework.getCellValue(pcr.mapmaximum(stt), 1, 1)
    dif = pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts)))

    if fill:
        for order in range(oorder, maxorder):
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(order)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(order), m_pts))
            )
            dif = pcr.uniqueid(pcr.boolean(pcr.cover(m_dif, dif)))

        for myorder in range(oorder - 1, stoporder, -1):
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(myorder - 1), m_pts))
            )
            dif = pcr.uniqueid(
                pcr.boolean(pcr.cover(pcr.ifthen(pcr.scalar(sc) == 0, m_dif), dif))
            )

        if fillcomplete:
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            cs, m_dif, stt = subcatch_order_a(ldd, stoporder)
            dif = pcr.uniqueid(
                pcr.boolean(
                    pcr.cover(
                        pcr.ifthen(pcr.scalar(sc) == 0, pcr.ordinal(m_dif)),
                        pcr.ordinal(dif),
                    )
                )
            )

    scsize = pcr.catchmenttotal(1, ldd)
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(pcr.ifthen(scsize >= sizelimit, dif))))
    sc = pcr.subcatchment(ldd, dif)

    # Make pit ldd
    nldd = pcr.lddrepair(pcr.ifthenelse(pcr.cover(dif, 0) > 0, 5, ldd))

    return sc, dif, nldd
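
A hedged usage sketch (map name hypothetical; requires subcatch_order_a and pcraster.framework from the same environment):

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
# order-4 subcatchments of at least 100 cells, filled to cover the whole ldd
sc, dif, nldd = subcatch_order_b(ldd, 4, sizelimit=100, fill=True, fillcomplete=True)
pcr.report(sc, "subcatch_b.map")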
Example #9
        linescover = pcr.ifthen(lines == 1, pcr.scalar(0))
        pointscover = pcr.ifthen(pcr.scalar(points) == 1, pcr.scalar(0))
        # pcr.report(linescover, 'lines.map')
        # pcr.report(pointscover, 'points.map')
        dem = pcr.cover(dem, linescover, pointscover)
        # pcr.report(dem, 'dem1.map')
        dem = dem + burn
        # pcr.report(dem, 'dem2.map')
        ldd = pcr.lddcreate(dem, float("1E35"), float("1E35"), float("1E35"), float("1E35"))
    else:
        ldd = pcr.lddcreate(dem, burnvalue / 2, float("1E35"), float("1E35"), float("1E35"))

streamorder = pcr.ordinal(pcr.streamorder(ldd))
maxorder = np.max(pcr.pcr2numpy(streamorder, -9999))
river = pcr.boolean(pcr.ifthen(streamorder >= int(min(maxorder, minorder)), streamorder))
outlets = pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.boolean(1))
outlets = pcr.nominal(pcr.uniqueid(outlets))
catchments = pcr.nominal(pcr.catchment(ldd, outlets))

if not keepall:
    catchments = pcr.nominal(
        pcr.ifthen(
            pcr.mapmaximum(
                pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments)))
            == pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments)),
            catchments,
        )
    )
    
pcr.report(ldd,ldd_map)
pcr.report(streamorder,streamorder_map)
pcr.report(river,river_map)
pcr.report(catchments,catchments_map)
if EPSG is not None:
    call(('gdal_translate', '-of', 'GTiff', '-stats', '-a_srs', EPSG, '-ot', 'Float32', catchments_map, catchments_tif))
else:
    call(('gdal_translate', '-of', 'GTiff', '-stats', '-ot', 'Float32', catchments_map, catchments_tif))
wt.Raster2Pol(catchments_tif,catchshp,srs)

riversid_map = workdir + 'riverid.map'
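
The keepall branch above keeps only the largest catchment: areatotal over a map of ones counts the cells per catchment, and only the catchment whose count equals the map-wide maximum survives. The same idiom in isolation (assumes catchments from the script above):

import pcraster as pcr

cellcount = pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments))
largest = pcr.nominal(pcr.ifthen(cellcount == pcr.mapmaximum(cellcount), catchments))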
Example #10
ldd = fillMVBoundingBox(ldd, 1, 58, 62, 51, 60)
ldd = fillMVBoundingBox(ldd, 1, -108, -101, 31, 40)
ldd = pcr.lddrepair(pcr.ldd(ldd))
pits = pcr.pit(ldd)

pcr.setglobaloption('unitcell')

continentMasks = pcr.cover(mask48, pcr.windowmajority(mask48,5))

#- adjust area masks with 5 min ldd
continentMasksNew = pcr.ifthen(pcr.boolean(pcr.readmap(cloneMap)) == 0, pcr.scalar(1))
for i in areas[0:]:
    print('area mask =', i)
    mask = pcr.ifthen(continentMasks == i, pcr.boolean(1))
    pitsContinent = pcr.pcrand(mask, pcr.boolean(pits))
    pitsContinent = pcr.nominal(pcr.uniqueid(pcr.ifthen(pitsContinent == 1, pcr.boolean(1))))
    catchments    = pcr.catchment(ldd, pitsContinent)
    newMask       = pcr.ifthen(pcr.boolean(catchments) == 1, pcr.scalar(i))
    continentMasksNew = pcr.cover(continentMasksNew, newMask)
updateCatchments = selectCatchments(ldd, pits, pitsDict, areas, cloneMap)
outMask = pcr.cover(updateCatchments, continentMasksNew)
pcr.report(pcr.nominal(outMask), 'mask5min.map')
pcr.report(ldd, 'ldd5min.map')

#- create clone, ldd, and mask map for each area.
outMask  = pcr.readmap('mask5min.map')
ldd      = pcr.readmap('ldd5min.map')
ldd      = pcr.lddrepair(pcr.ldd(pcr.ifthen(pcr.boolean(outMask) == 1, pcr.scalar(ldd))))

clone    = pcr.cover(pcr.boolean(outMask), pcr.boolean(1))
pcr.report(clone, 'globalClone5min.map')
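
The per-area loop above rebuilds each continent mask from the 5-minute ldd: take the pits that fall inside the old mask, label them, and grow their catchments. The core pattern for a single (hypothetical) area id, assuming ldd and continentMasks are in memory:

import pcraster as pcr

pits = pcr.pit(ldd)                                      # internally draining cells
mask = pcr.ifthen(continentMasks == 3, pcr.boolean(1))   # hypothetical area id 3
pits_in_mask = pcr.pcrand(mask, pcr.boolean(pits))
labels = pcr.nominal(pcr.uniqueid(pcr.ifthen(pits_in_mask, pcr.boolean(1))))
new_mask = pcr.boolean(pcr.catchment(ldd, labels))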
Example #11
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.0))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.5))
basin_map = pcr.ifthen(landmask, basin_map)
#~ pcr.aguila(basin_map)

msg = "Redefining the basin map (so that it is consistent with the ldd map used in PCR-GLOBWB):"
logger.info(msg)
# - calculate the upstream area of every pixel:
upstream_area = pcr.catchmenttotal(cell_area, ldd)
# - calculate the catchment area of every basin:
upstream_area_maximum = pcr.areamaximum(upstream_area, basin_map)
# - identify the outlet of every basin (in order to rederive the basin so that it is consistent with the ldd)
outlet = pcr.nominal(
    pcr.uniqueid(
        pcr.ifthen(upstream_area == upstream_area_maximum, pcr.boolean(1.0))))
# - ignoring outlets with small upstream areas
threshold = 50. * 1000. * 1000.  # unit: m2
outlet = pcr.ifthen(upstream_area_maximum > threshold, outlet)
#~ pcr.aguila(outlet)
outlet = pcr.cover(outlet, pcr.nominal(0.0))
# - recalculate the basin
basin_map = pcr.nominal(pcr.subcatchment(ldd, outlet))
basin_map = pcr.clump(basin_map)
basin_map = pcr.ifthen(landmask, basin_map)
pcr.report(basin_map, "basin_map.map")
#~ pcr.aguila(basin_map)
# - calculate the basin area
basin_area = pcr.areatotal(cell_area, basin_map)
pcr.report(basin_area, "basin_area.map")
#~ pcr.aguila(basin_area)
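
The outlet detection above hinges on one comparison: a cell is a basin outlet where its accumulated upstream area equals the basin-wide maximum. Condensed into a sketch (ldd, cell_area and basin_map as in the script):

import pcraster as pcr

upstream_area = pcr.catchmenttotal(cell_area, ldd)   # m2, accumulated downstream
is_outlet = upstream_area == pcr.areamaximum(upstream_area, basin_map)
outlet = pcr.nominal(pcr.uniqueid(pcr.ifthen(is_outlet, pcr.boolean(1))))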
Example #12
def main(
        source,
        destination,
        inifile,
        dem_in,
        rivshp,
        catchshp,
        gaugeshp=None,
        landuse=None,
        soil=None,
        lai=None,
        other_maps=None,
        logfilename="wtools_static_maps.log",
        verbose=True,
        clean=True,
        alltouch=False,
        outlets=([], []),
):
    # parse other maps into an array
    if other_maps is not None:
        if isinstance(other_maps, str):
            print(other_maps)
            other_maps = (other_maps.replace(" ", "").replace("[", "").replace(
                "]", "").split(","))

    source = os.path.abspath(source)
    clone_map = os.path.join(source, "mask.map")
    clone_shp = os.path.join(source, "mask.shp")
    clone_prj = os.path.join(source, "mask.prj")

    if None in (rivshp, catchshp, dem_in):
        msg = """The following files are compulsory:
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if (inifile is not None) and (not os.path.exists(inifile)):
        print("path to ini file cannot be found")
        sys.exit(1)
    if not os.path.exists(rivshp):
        print("path to river shape cannot be found")
        sys.exit(1)
    if not os.path.exists(catchshp):
        print("path to catchment shape cannot be found")
        sys.exit(1)
    if not os.path.exists(dem_in):
        print("path to DEM cannot be found")
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose)

    # create directories # TODO: check if workdir is still necessary, try to
    # keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if os.path.isdir(destination) and destination != source:
        shutil.rmtree(destination)
    if destination != source:
        os.makedirs(destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            "Clone file {:s} not found. Please run create_grid first.".format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = wt.gdal_readmap(clone_map, "GTiff")
        trans = wt.get_geotransform(clone_map)
        extent = wt.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wt.get_projection(clone_map)
        unit_clone = srs.GetAttrValue("UNIT").lower()

    # READ CONFIG FILE
    # open config-file
    if inifile is None:
        config = ConfigParser.SafeConfigParser()
        config.optionxform = str
    else:
        config = wt.OpenConf(inifile)

    # read settings
    snapgaugestoriver = wt.configget(config,
                                     "settings",
                                     "snapgaugestoriver",
                                     True,
                                     datatype="boolean")
    burnalltouching = wt.configget(config,
                                   "settings",
                                   "burncatchalltouching",
                                   True,
                                   datatype="boolean")
    burninorder = wt.configget(config,
                               "settings",
                               "burncatchalltouching",
                               False,
                               datatype="boolean")
    verticetollerance = wt.configget(config,
                                     "settings",
                                     "vertice_tollerance",
                                     0.0001,
                                     datatype="float")
    """ read parameters """
    burn_outlets = wt.configget(config,
                                "parameters",
                                "burn_outlets",
                                10000,
                                datatype="int")
    burn_rivers = wt.configget(config,
                               "parameters",
                               "burn_rivers",
                               200,
                               datatype="int")
    burn_connections = wt.configget(config,
                                    "parameters",
                                    "burn_connections",
                                    100,
                                    datatype="int")
    burn_gauges = wt.configget(config,
                               "parameters",
                               "burn_gauges",
                               100,
                               datatype="int")
    minorder = wt.configget(config,
                            "parameters",
                            "riverorder_min",
                            3,
                            datatype="int")
    try:
        percentiles = np.array(
            config.get("parameters", "statisticmaps",
                       "0, 100").replace(" ", "").split(","),
            dtype="float",
        )
    except ConfigParser.NoOptionError:
        percentiles = [0.0, 100.0]
    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == "degree":
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_degree",
                                   0.0005,
                                   datatype="float")
    elif (unit_clone == "metre") or (unit_clone == "meter"):
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_metre",
                                   50,
                                   datatype="float")

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(destination, "clone_highres.tif")
    # make a highres clone as well!
    wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")

    # read mask location (optional)
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123
    # in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info("Resampling dem from {:s} to {:s}".format(
        os.path.abspath(dem_in), os.path.join(destination, dem_map)))
    wt.gdal_warp(
        dem_in,
        clone_map,
        os.path.join(destination, dem_map),
        format="PCRaster",
        gdal_interp=gdalconst.GRA_Average,
    )
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wt.get_projection(dem_in)
    except:
        logger.warning(
            "No projection found in DEM, assuming WGS 1984 lat long")
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():

    wt.windowstats(
        dem_in,
        len(yax),
        len(xax),
        trans,
        srs,
        destination,
        percentiles,
        transform=clone2dem_transform,
        logger=logger,
    )

    ## read catchment shape-file to create catchment map
    src = rasterio.open(clone_map)
    shapefile = fiona.open(catchshp, "r")
    catchment_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(catchment_shapes,
                               out_shape=src.shape,
                               all_touched=True,
                               transform=src.transform)
    catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0)

    ## read river shape-file and create burn layer
    shapefile = fiona.open(rivshp, "r")
    river_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(river_shapes,
                               out_shape=src.shape,
                               all_touched=False,
                               transform=src.transform)
    rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0)
    riverdem = pcr.scalar(rivers) * pcr.readmap(
        os.path.join(destination, dem_map))
    pcr.setglobaloption("lddin")
    riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35)

    riveroutlet = pcr.cover(
        pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0)
    burn_layer = pcr.cover(
        (pcr.scalar(
            pcr.ifthen(
                pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))) - 1)
        * 1000 + riveroutlet,
        0,
    )

    outlets_x, outlets_y = outlets
    n_outlets = len(outlets_x)
    logger.info("Number of outlets: {}".format(n_outlets))
    if n_outlets >= 1:
        outlets_map_numbered = tr.points_to_map(pcr.scalar(0), outlets_x,
                                                outlets_y, 0.5)
        outlets_map = pcr.boolean(outlets_map_numbered)
        # snap outlets to closest river (max 1 cell closer to river)
        outlets_map = pcr.boolean(
            pcr.cover(tr.snaptomap(pcr.ordinal(outlets_map), rivers), 0))

    ## create ldd per catchment
    logger.info("Calculating ldd")
    ldddem = pcr.scalar(clone_map)

    # per subcatchment, burn dem, then create modified dem that fits the ldd of the subcatchment
    # this ldd dem is merged over catchments, to create a global ldd that abides to the subcatchment boundaries
    for idx, shape in enumerate(catchment_shapes):
        logger.info("Computing ldd for catchment " + str(idx + 1) + "/" +
                    str(len(catchment_shapes)))
        image = features.rasterize([shape],
                                   out_shape=src.shape,
                                   all_touched=True,
                                   transform=src.transform)
        catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0)
        dem_burned_catchment = (
            pcr.readmap(os.path.join(destination, dem_map)) *
            pcr.scalar(catchment_domain) * catchment) - burn_layer
        # ldddem_catchment = pcr.lddcreatedem(
        #    dem_burned_catchment, 1e35, 1e35, 1e35, 1e35)
        ldddem = pcr.cover(ldddem, dem_burned_catchment)

    pcr.report(ldddem, os.path.join(destination, "ldddem.map"))

    wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    if n_outlets >= 1:
        # set outlets to pit
        wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd)
        wflow_ldd = pcr.lddrepair(wflow_ldd)

    pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map"))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(destination, dem_map))
    pcr.report(streamorder, os.path.join(destination, streamorder_map))
    pcr.report(river, os.path.join(destination, river_map))

    # deal with your catchments
    if gaugeshp is None:
        logger.info("No gauges defined, using outlets instead")
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see SticMaps.py, line 492-499)

    # since the products here (river length fraction) are not yet used
    # this is disabled for now, as it also takes a lot of computation time
    if False:
        # report river length
        # make a high resolution empty map
        dem_hr_file = os.path.join(destination, "dem_highres.tif")
        burn_hr_file = os.path.join(destination, "burn_highres.tif")
        demburn_hr_file = os.path.join(destination, "demburn_highres.map")
        riv_hr_file = os.path.join(destination, "riv_highres.map")
        wt.gdal_warp(dem_in, clone_hr, dem_hr_file)
        # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
        # open the shape layer
        ds = ogr.Open(rivshp)
        lyr = ds.GetLayer(0)
        wt.ogr_burn(
            lyr,
            clone_hr,
            -100,
            file_out=burn_hr_file,
            format="GTiff",
            gdal_type=gdal.GDT_Float32,
            fill_value=0,
        )
        # read dem and burn values and add
        xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff")
        burn_hr[burn_hr == fill] = 0
        xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff")
        dem_hr[dem_hr == fill] = np.nan
        demburn_hr = dem_hr + burn_hr
        demburn_hr[np.isnan(demburn_hr)] = -9999
        wt.gdal_writemap(demburn_hr_file, "PCRaster", xax_hr, yax_hr,
                         demburn_hr, -9999.)
        pcr.setclone(demburn_hr_file)
        demburn_hr = pcr.readmap(demburn_hr_file)

        logger.info("Calculating ldd to determine river length")
        ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
        pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map"))
        pcr.setglobaloption("unitcell")
        riv_hr = pcr.scalar(
            pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
        pcr.report(riv_hr, riv_hr_file)
        pcr.setglobaloption("unittrue")
        pcr.setclone(clone_map)
        logger.info("Computing river length")
        wt.windowstats(
            riv_hr_file,
            len(yax),
            len(xax),
            trans,
            srs,
            destination,
            stat="fact",
            transform=False,
            logger=logger,
        )
        # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(
        pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)),
        os.path.join(destination, outlet_map),
    )

    # report subcatchment map
    subcatchment = pcr.subcatchment(wflow_ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(destination, subcatch_map))

    # Report land use map
    if landuse is None:
        logger.info(
            "No land use map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, landuse_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map))
    else:
        logger.info("Resampling land use from {:s} to {:s}".format(
            os.path.abspath(landuse),
            os.path.join(destination, os.path.abspath(landuse_map)),
        ))
        wt.gdal_warp(
            landuse,
            clone_map,
            os.path.join(destination, landuse_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    # report soil map
    if soil is None:
        logger.info("No soil map used. Preparing {:s} with only ones.".format(
            os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        logger.info("Resampling soil from {:s} to {:s}".format(
            os.path.abspath(soil),
            os.path.join(destination, os.path.abspath(soil_map)),
        ))
        wt.gdal_warp(
            soil,
            clone_map,
            os.path.join(destination, soil_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    if lai is None:
        logger.info(
            "No vegetation LAI maps used. Preparing default maps {:s} with only ones."
            .format(os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        dest_lai = os.path.join(destination, "clim")
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   "LAI00000.{:03d}".format(month + 1))
            logger.info("Resampling vegetation LAI from {:s} to {:s}".format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            wt.gdal_warp(
                lai_in,
                clone_map,
                lai_out,
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Bilinear,
                gdal_type=gdalconst.GDT_Float32,
            )

    # report soil map
    if other_maps is None:
        logger.info("No other maps used. Skipping other maps.")
    else:
        logger.info("Resampling list of other maps...")
        for map_file in other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info("Resampling a map from {:s} to {:s}".format(
                os.path.abspath(map_file),
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
            ))
            wt.gdal_warp(
                map_file,
                clone_map,
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Mode,
                gdal_type=gdalconst.GDT_Float32,
            )

    if clean:
        wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "clim", "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")),
                      logger=logger)
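
A hypothetical invocation of main(); all paths and coordinates are placeholders.

main(
    source="wflow",
    destination="staticmaps",
    inifile="staticmaps.ini",
    dem_in="dem.tif",
    rivshp="rivers.shp",
    catchshp="catchments.shp",
    outlets=([5.91], [50.80]),   # one outlet, given as ([x], [y])
)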
Example #13
def subcatch_stream(ldd, threshold, stream=None, min_strahler=-999, max_strahler=999, assign_edge=False, assign_existing=False, up_area=None, basin=None):
    """
    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        threshold -- integer, strahler threshold, subcatchments ge threshold
            are derived
        stream=None -- pcraster object ordinal, stream order map (made with pcr.streamorder), if provided, stream order
            map is not generated on the fly but used from this map. Useful when a subdomain within a catchment is
            provided, which would cause edge effects in the stream order map
        min_strahler=-999 -- integer, minimum strahler threshold of river catchments
            to return
        max_strahler=999 -- integer, maximum strahler threshold of river catchments
            to return
        assign_edge=False -- if set to True, unassigned connected areas at
            the edges of the domain are assigned a unique id as well. If set
            to False, edges are not assigned
        assign_existing=False -- if set to True, unassigned edges are assigned
            to existing basins with an upstream weighting. If set to False,
            edges are assigned to unique IDs, or not assigned
        up_area=None -- pcraster object scalar, optional precomputed upstream
            area (e.g. pcr.accuflux(ldd, 1)), only used with assign_existing
        basin=None -- pcraster object boolean, optional basin mask; cells
            outside it are masked out of the result
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    if stream is None:
        stream = pcr.streamorder(ldd)

    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge, pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5, pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) >
                pcr.scalar(stream_ge), pcr.boolean(1), pcr.boolean(0))))
    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))
    # mask out areas outside basin
    if basin is not None:
        subcatch = pcr.ifthen(basin, subcatch)

    if assign_edge:
        # fill unclassified areas (in pcraster equal to zero) with a unique id, above the maximum id assigned so far
        unique_edge = pcr.clump(pcr.ifthen(subcatch == 0, pcr.ordinal(0)))
        subcatch = pcr.ifthenelse(
            subcatch == 0,
            pcr.nominal(pcr.mapmaximum(pcr.scalar(subcatch)) + pcr.scalar(unique_edge)),
            pcr.nominal(subcatch))
    elif assign_existing:
        # unaccounted areas are added to largest nearest draining basin
        if up_area is None:
            up_area = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), pcr.accuflux(ldd, 1))
        riverid = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), subcatch)

        friction = 1. / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0))  # *(pcr.scalar(ldd)*0+1)
        delta = pcr.ifthen(
            pcr.scalar(ldd) >= 0,
            pcr.ifthen(pcr.cover(subcatch, 0) == 0,
                       pcr.spreadzone(pcr.cover(riverid, 0), 0, friction)))
        subcatch = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)),
                                      subcatch,
                                      delta)

    # finally, only keep basins with minimum and maximum river order flowing through them
    strahler_subcatch = pcr.areamaximum(stream, subcatch)
    subcatch = pcr.ifthen(pcr.ordinal(strahler_subcatch) >= min_strahler,
                          pcr.ifthen(pcr.ordinal(strahler_subcatch) <= max_strahler, subcatch))

    return stream_ge, pcr.ordinal(subcatch)
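
A hedged usage sketch of this extended variant (map name hypothetical): derive order >= 6 subcatchments and merge unassigned edge areas into existing basins.

import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd = pcr.readmap("wflow_ldd.map")
stream_ge, subcatch = subcatch_stream(ldd, 6, assign_existing=True)
pcr.report(subcatch, "subcatch6.map")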
Example #14
    def __init__(self, input_netcdf,\
                       output_netcdf,\
                       modelTime,\
                       tmpDir = "/dev/shm/"):
        DynamicModel.__init__(self) 

        self.input_netcdf = input_netcdf
        self.output_netcdf = output_netcdf 
        self.tmpDir = tmpDir
        self.modelTime = modelTime

        # a dictionary contains input clone properties (based on the input netcdf file)
        #~ self.input_clone = vos.netcdfCloneAttributes(self.input_netcdf['file_name'],\
                                                     #~ np.round(self.input_netcdf['cell_resolution']*60.,1),\
                                                     #~ True)
        pcr.setclone(self.input_netcdf['clone_file'])
        self.input_clone = {}
        self.input_clone['cellsize'] = pcr.clone().cellSize()
        self.input_clone['rows']     = int(pcr.clone().nrRows()) 
        self.input_clone['cols']     = int(pcr.clone().nrCols())
        self.input_clone['xUL']      = round(pcr.clone().west(), 2)
        self.input_clone['yUL']      = round(pcr.clone().north(), 2)

        # resampling factor: ratio between output and input resolutions
        self.resample_factor = self.output_netcdf["cell_resolution"]/\
                                self.input_netcdf['cell_resolution']
        
        # clone map 
        if self.resample_factor > 1.0: # upscaling

            # the resample factor must be a rounded value without decimal
            self.resample_factor = round(self.resample_factor)
            
            # output clone properties
            self.output_netcdf['rows'    ] = int(round(float(self.input_clone['rows'])/float(self.resample_factor))) 
            self.output_netcdf['cols'    ] = int(round(float(self.input_clone['cols'])/float(self.resample_factor)))
            self.output_netcdf['cellsize'] = self.output_netcdf["cell_resolution"]
            self.output_netcdf['xUL'     ] = self.input_clone['xUL']
            self.output_netcdf['yUL'     ] = self.input_clone['yUL']

            # get the unique ids for the output resolution
            # - use the clone for the output resolution (only for a temporary purpose)
            pcr.setclone(self.output_netcdf['rows'    ],
                         self.output_netcdf['cols'    ],
                         self.output_netcdf['cellsize'],
                         self.output_netcdf['xUL'     ],
                         self.output_netcdf['yUL'     ])
            # - unique_ids in a numpy object
            cell_unique_ids = pcr.pcr2numpy(pcr.scalar(pcr.uniqueid(pcr.boolean(1.))),vos.MV)

            # the remaining pcraster calculations are performed at the input resolution
            pcr.setclone(self.input_clone['rows'    ],
                         self.input_clone['cols'    ],
                         self.input_clone['cellsize'],
                         self.input_clone['xUL'     ],
                         self.input_clone['yUL'     ])
            
            # clone map file 
            self.clone_map_file = self.input_netcdf['clone_file']
            
            # cell unique ids in a pcraster object
            self.unique_ids = pcr.nominal(pcr.numpy2pcr(pcr.Scalar,
                              vos.regridData2FinerGrid(self.resample_factor,cell_unique_ids, vos.MV), vos.MV))

            # cell area (m2)
            self.cell_area = vos.readPCRmapClone(\
                             self.input_netcdf["cell_area"],\
                             self.clone_map_file,\
                             self.tmpDir)
            
        else: # downscaling / resampling to smaller cell length

            # all pcraster calculations are performed at the output resolution
            pcr.setclone(self.output_netcdf['rows'    ],
                         self.output_netcdf['cols'    ],
                         self.output_netcdf['cellsize'],
                         self.output_netcdf['xUL'     ],
                         self.output_netcdf['yUL'     ])

            # clone map file
            self.clone_map_file = self.output_netcdf['clone_file']
        
        # an object for netcdf reporting
        self.output = OutputNetcdf(mapattr_dict = self.output_netcdf,\
                                   cloneMapFileName = None,\
                                   netcdf_format = self.output_netcdf['format'],\
                                   netcdf_zlib = self.output_netcdf['zlib'],\
                                   netcdf_attribute_dict = self.output_netcdf['netcdf_attribute'],\
                                   netcdf_attribute_description = None)
        
        # preparing the netcdf file at coarse resolution:
        self.output.createNetCDF(self.output_netcdf['file_name'],\
                                 self.output_netcdf['variable_name'],\
                                 self.output_netcdf['variable_unit'])
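
The upscaling branch above labels every coarse cell with a unique id and regrids those ids to the fine grid, so aggregation reduces to an area operation per id. A minimal sketch of the idea with a factor of 2, using np.kron as a stand-in for vos.regridData2FinerGrid (clone numbers are hypothetical):

import numpy as np
import pcraster as pcr

pcr.setclone(2, 2, 1.0, 0.0, 2.0)                 # coarse clone: 2 x 2 cells
coarse_ids = pcr.pcr2numpy(pcr.scalar(pcr.uniqueid(pcr.boolean(1.0))), -999.0)

pcr.setclone(4, 4, 0.5, 0.0, 2.0)                 # fine clone: same extent, half the cell size
fine_ids = np.kron(coarse_ids, np.ones((2, 2)))   # replicate each id over a 2 x 2 block
zones = pcr.nominal(pcr.numpy2pcr(pcr.Scalar, fine_ids, -999.0))
# any fine-resolution field can now be upscaled with pcr.areaaverage(field, zones)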
Example #15
def main():

    ### Read input arguments #####
    logfilename = 'wtools_static_maps.log'
    parser = OptionParser()
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q',
                      '--quiet',
                      dest='verbose',
                      default=True,
                      action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i',
                      '--ini',
                      dest='inifile',
                      default=None,
                      help='ini file with settings for static_maps.exe')
    parser.add_option('-s',
                      '--source',
                      dest='source',
                      default='wflow',
                      help='Source folder containing clone (default=./wflow)')
    parser.add_option('-d',
                      '--destination',
                      dest='destination',
                      default='staticmaps',
                      help='Destination folder (default=./staticmaps)')
    parser.add_option('-r',
                      '--river',
                      dest='rivshp',
                      default=None,
                      help='river network polyline layer (ESRI Shapefile)')
    parser.add_option('-c',
                      '--catchment',
                      dest='catchshp',
                      default=None,
                      help='catchment polygon layer (ESRI Shapefile)')
    parser.add_option('-g',
                      '--gauges',
                      dest='gaugeshp',
                      default=None,
                      help='gauge point layer (ESRI Shapefile)')
    parser.add_option('-D',
                      '--dem',
                      dest='dem_in',
                      default=None,
                      help='digital elevation model (GeoTiff)')
    parser.add_option('-L',
                      '--landuse',
                      dest='landuse',
                      default=None,
                      help='land use / land cover layer (GeoTiff)')
    parser.add_option('-S',
                      '--soiltype',
                      dest='soil',
                      default=None,
                      help='soil type layer (GeoTiff)')
    parser.add_option(
        '-V',
        '--vegetation',
        dest='lai',
        default=None,
        help=
        'vegetation LAI layer location (containing 12 GeoTiffs <LAI00000.XXX.tif>)'
    )
    parser.add_option(
        '-O',
        '--other_maps',
        dest='other_maps',
        default=None,
        help=
        'bracketed [] comma-separated list of paths to other maps that should be reprojected'
    )
    parser.add_option(
        '-C',
        '--clean',
        dest='clean',
        default=False,
        action='store_true',
        help='Clean the .xml files from static maps folder when finished')
    parser.add_option(
        '-A',
        '--alltouch',
        dest='alltouch',
        default=False,
        action='store_true',
        help=
        'option to burn catchments "all touching".\nUseful when catchment-size is small compared to cellsize'
    )
    (options, args) = parser.parse_args()
    # parse other maps into an array
    if options.other_maps is not None:
        options.other_maps = options.other_maps.replace(' ', '').replace(
            '[', '').replace(']', '').split(',')

    options.source = os.path.abspath(options.source)
    clone_map = os.path.join(options.source, 'mask.map')
    clone_shp = os.path.join(options.source, 'mask.shp')
    clone_prj = os.path.join(options.source, 'mask.prj')

    if None in (options.inifile, options.rivshp, options.catchshp,
                options.dem_in):
        msg = """The following files are compulsory:
        - ini file
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    if not os.path.exists(options.rivshp):
        print('path to river shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.catchshp):
        print('path to catchment shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.dem_in):
        print('path to DEM cannot be found')
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wtools_lib.setlogger(logfilename, 'WTOOLS', options.verbose)

    # create directories # TODO: check if workdir is still necessary, try to keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if os.path.isdir(options.destination) and options.destination != options.source:
        shutil.rmtree(options.destination)
    if options.destination != options.source:
        os.makedirs(options.destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            'Clone file {:s} not found. Please run create_grid first.'.format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = gis.gdal_readmap(clone_map, 'GTiff')
        trans = wtools_lib.get_geotransform(clone_map)
        extent = wtools_lib.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wtools_lib.get_projection(clone_map)
        unit_clone = srs.GetAttrValue('UNIT').lower()

    ### READ CONFIG FILE
    # open config-file
    config = wtools_lib.OpenConf(options.inifile)

    # read settings
    snapgaugestoriver = wtools_lib.configget(config,
                                             'settings',
                                             'snapgaugestoriver',
                                             True,
                                             datatype='boolean')
    burnalltouching = wtools_lib.configget(config,
                                           'settings',
                                           'burncatchalltouching',
                                           True,
                                           datatype='boolean')
    burninorder = wtools_lib.configget(config,
                                       'settings',
                                       'burncatchalltouching',
                                       False,
                                       datatype='boolean')
    verticetollerance = wtools_lib.configget(config,
                                             'settings',
                                             'vertice_tollerance',
                                             0.0001,
                                             datatype='float')
    ''' read parameters '''
    burn_outlets = wtools_lib.configget(config,
                                        'parameters',
                                        'burn_outlets',
                                        10000,
                                        datatype='int')
    burn_rivers = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_rivers',
                                       200,
                                       datatype='int')
    burn_connections = wtools_lib.configget(config,
                                            'parameters',
                                            'burn_connections',
                                            100,
                                            datatype='int')
    burn_gauges = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_gauges',
                                       100,
                                       datatype='int')
    minorder = wtools_lib.configget(config,
                                    'parameters',
                                    'riverorder_min',
                                    3,
                                    datatype='int')
    percentiles = np.array(config.get('parameters', 'statisticmaps',
                                      '0, 100').replace(' ', '').split(','),
                           dtype='float')

    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == 'degree':
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_degree',
                                           0.0005,
                                           datatype='float')
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_metre',
                                           50,
                                           datatype='float')

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
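    # GDAL-style geotransform for the temporary high-resolution grid:
    # (x_min, pixel_width, row_rotation, y_max, column_rotation, -pixel_height)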
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(options.destination, 'clone_highres.tif')
    # make a highres clone as well!
    wtools_lib.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wtools_lib.configget(config, 'staticmaps', 'catchment',
                                         'wflow_catchment.map')
    dem_map = wtools_lib.configget(config, 'staticmaps', 'dem',
                                   'wflow_dem.map')
    demmax_map = wtools_lib.configget(config, 'staticmaps', 'demmax',
                                      'wflow_demmax.map')
    demmin_map = wtools_lib.configget(config, 'staticmaps', 'demmin',
                                      'wflow_demmin.map')
    gauges_map = wtools_lib.configget(config, 'staticmaps', 'gauges',
                                      'wflow_gauges.map')
    landuse_map = wtools_lib.configget(config, 'staticmaps', 'landuse',
                                       'wflow_landuse.map')
    ldd_map = wtools_lib.configget(config, 'staticmaps', 'ldd',
                                   'wflow_ldd.map')
    river_map = wtools_lib.configget(config, 'staticmaps', 'river',
                                     'wflow_river.map')
    outlet_map = wtools_lib.configget(config, 'staticmaps', 'outlet',
                                      'wflow_outlet.map')
    riverlength_fact_map = wtools_lib.configget(config, 'staticmaps',
                                                'riverlength_fact',
                                                'wflow_riverlength_fact.map')
    soil_map = wtools_lib.configget(config, 'staticmaps', 'soil',
                                    'wflow_soil.map')
    streamorder_map = wtools_lib.configget(config, 'staticmaps', 'streamorder',
                                           'wflow_streamorder.map')
    subcatch_map = wtools_lib.configget(config, 'staticmaps', 'subcatch',
                                        'wflow_subcatch.map')

    # read mask location (optional)
    masklayer = wtools_lib.configget(config, 'mask', 'masklayer',
                                     options.catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123 in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(options.dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info('Resampling dem from {:s} to {:s}'.format(
        os.path.abspath(options.dem_in),
        os.path.join(options.destination, dem_map)))
    gis.gdal_warp(options.dem_in,
                  clone_map,
                  os.path.join(options.destination, dem_map),
                  format='PCRaster',
                  gdal_interp=gdalconst.GRA_Average)
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wtools_lib.get_projection(options.dem_in)
    except Exception:
        logger.warning(
            'No projection found in DEM, assuming WGS 1984 lat long')
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    #if srs.ExportToProj4() == srs_dem.ExportToProj4():
    for percentile in percentiles:
        if percentile >= 100:
            logger.info('computing window maximum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_max.map')
        elif percentile <= 0:
            logger.info('computing window minimum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_min.map')
        else:
            logger.info('computing window {:d} percentile'.format(
                int(percentile)))
            percentile_dem = os.path.join(
                options.destination,
                'wflow_dem_{:03d}.map'.format(int(percentile)))

        stats = wtools_lib.windowstats(options.dem_in,
                                       len(yax),
                                       len(xax),
                                       trans,
                                       srs,
                                       percentile_dem,
                                       percentile,
                                       transform=clone2dem_transform,
                                       logger=logger)


#    else:
#        logger.warning('Projections of DEM and clone are different. DEM statistics for different projections is not yet implemented')
    """

    # burn in rivers
    # first convert and clip the river shapefile
    # retrieve river shape projection, if not available assume EPSG:4326
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_rivshp = lyr.GetSpatialRef()
        logger.info('Projection in river shapefile is {:s}'.format(srs_rivshp.ExportToProj4()))
    except Exception:
        logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.rivshp))
        srs_rivshp = osr.SpatialReference()
        srs_rivshp.ImportFromEPSG(4326)
    rivprojshp = os.path.join(options.destination, 'rivshp_proj.shp')
    logger.info('Projecting and clipping {:s} to {:s}'.format(options.rivshp, rivprojshp))
    # TODO: Line below takes a very long time to process, the bigger the shapefile, the more time. How do we deal with this?
    call(('ogr2ogr','-s_srs', srs_rivshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), rivprojshp, options.rivshp))
    """

    # TODO: BURNING!!

    # project catchment layer to projection of clone
    file_att = os.path.splitext(os.path.basename(options.catchshp))[0]
    ds = ogr.Open(options.catchshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_catchshp = lyr.GetSpatialRef()
        logger.info('Projection in catchment shapefile is {:s}'.format(
            srs_catchshp.ExportToProj4()))
    except Exception:
        logger.warning(
            'No projection found in {:s}, assuming WGS 1984 lat-lon'.format(
                options.catchshp))
        srs_catchshp = osr.SpatialReference()
        srs_catchshp.ImportFromEPSG(4326)
    catchprojshp = os.path.join(options.destination, 'catchshp_proj.shp')
    logger.info('Projecting {:s} to {:s}'.format(options.catchshp,
                                                 catchprojshp))
    call(('ogr2ogr', '-s_srs', srs_catchshp.ExportToProj4(), '-t_srs',
          srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin),
          '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax),
          catchprojshp, options.catchshp))

    #
    logger.info('Calculating ldd')
    ldddem = pcr.readmap(os.path.join(options.destination, dem_map))
    ldd_select = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
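    # the four 1e35 arguments set the pit-removal thresholds so high that all
    # pits are removed, yielding a fully connected ldd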
    pcr.report(ldd_select, os.path.join(options.destination, 'wflow_ldd.map'))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(ldd_select))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(options.destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(options.destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(options.destination, dem_map))
    pcr.report(streamorder, os.path.join(options.destination, streamorder_map))
    pcr.report(river, os.path.join(options.destination, river_map))

    # deal with your catchments
    if options.gaugeshp is None:
        logger.info('No gauges defined, using outlets instead')
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(ldd_select) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(options.destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see StaticMaps.py, line 492-499)

    # report river length
    # make a high resolution empty map
    dem_hr_file = os.path.join(options.destination, 'dem_highres.tif')
    burn_hr_file = os.path.join(options.destination, 'burn_highres.tif')
    demburn_hr_file = os.path.join(options.destination, 'demburn_highres.map')
    riv_hr_file = os.path.join(options.destination, 'riv_highres.map')
    gis.gdal_warp(options.dem_in, clone_hr, dem_hr_file)
    # wtools_lib.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    # open the shape layer
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    gis.ogr_burn(lyr,
                 clone_hr,
                 -100,
                 file_out=burn_hr_file,
                 format='GTiff',
                 gdal_type=gdal.GDT_Float32,
                 fill_value=0)
    # read dem and burn values and add
    xax_hr, yax_hr, burn_hr, fill = gis.gdal_readmap(burn_hr_file, 'GTiff')
    burn_hr[burn_hr == fill] = 0
    xax_hr, yax_hr, dem_hr, fill = gis.gdal_readmap(dem_hr_file, 'GTiff')
    dem_hr[dem_hr == fill] = np.nan
    demburn_hr = dem_hr + burn_hr
    demburn_hr[np.isnan(demburn_hr)] = -9999
    gis.gdal_writemap(demburn_hr_file, 'PCRaster', xax_hr, yax_hr, demburn_hr,
                      -9999.)
    pcr.setclone(demburn_hr_file)
    demburn_hr = pcr.readmap(demburn_hr_file)
    ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_hr, os.path.join(options.destination, 'ldd_hr.map'))
    pcr.setglobaloption('unitcell')
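    # with 'unitcell' active, downstreamdist() works in cell units, so riv_hr
    # below holds per-cell river length (in cells) along streams of order >= minorder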
    riv_hr = pcr.scalar(
        pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
    pcr.report(riv_hr, riv_hr_file)
    pcr.setglobaloption('unittrue')
    pcr.setclone(clone_map)
    logger.info('Computing river length')
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wtools_lib.windowstats(riv_hr_file,
                                         len(yax),
                                         len(xax),
                                         trans,
                                         srs,
                                         os.path.join(options.destination,
                                                      riverlength_fact_map),
                                         stat='fact',
                                         logger=logger)
    # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(pcr.ifthen(pcr.ordinal(ldd_select) == 5, pcr.ordinal(1)),
               os.path.join(options.destination, outlet_map))

    # report subcatchment map
    subcatchment = pcr.subcatchment(ldd_select, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(options.destination, subcatch_map))

    # Report land use map
    if options.landuse is None:
        logger.info(
            'No land use map used. Preparing {:s} with only ones.'.format(
                os.path.join(options.destination, landuse_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, landuse_map))
    else:
        logger.info('Resampling land use from {:s} to {:s}'.format(
            os.path.abspath(options.landuse),
            os.path.join(options.destination, os.path.abspath(landuse_map))))
        gis.gdal_warp(options.landuse,
                      clone_map,
                      os.path.join(options.destination, landuse_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    # report soil map
    if options.soil is None:
        logger.info('No soil map used. Preparing {:s} with only ones.'.format(
            os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        logger.info('Resampling soil from {:s} to {:s}'.format(
            os.path.abspath(options.soil),
            os.path.join(options.destination, os.path.abspath(soil_map))))
        gis.gdal_warp(options.soil,
                      clone_map,
                      os.path.join(options.destination, soil_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    if options.lai is None:
        logger.info(
            'No vegetation LAI maps used. Preparing {:s} with only ones.'
            .format(os.path.join(options.destination, soil_map)))
        # NOTE: this default branch writes to soil_map, apparently a copy-paste
        # from the soil branch above; no separate default LAI map is defined
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        dest_lai = os.path.join(options.destination, 'clim')
        if not os.path.isdir(dest_lai):
            os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(options.lai,
                                  'LAI00000.{:03d}'.format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   'LAI00000.{:03d}'.format(month + 1))
            logger.info('Resampling vegetation LAI from {:s} to {:s}'.format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            gis.gdal_warp(lai_in,
                          clone_map,
                          lai_out,
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Bilinear,
                          gdal_type=gdalconst.GDT_Float32)

    # resample other maps
    if options.other_maps is None:
        logger.info('No other maps used. Skipping other maps.')
    else:
        logger.info('Resampling list of other maps...')
        for map_file in options.other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info('Resampling a map from {:s} to {:s}'.format(
                os.path.abspath(map_file),
                os.path.join(options.destination, map_name)))
            gis.gdal_warp(map_file,
                          clone_map,
                          os.path.join(options.destination, map_name),
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Mode,
                          gdal_type=gdalconst.GDT_Float32)

    if options.clean:
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, 'clim', '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*highres*')),
                              logger=logger)
Exemplo n.º 21
0
num_of_rows_30sec = str(int(vos.getMapAttributesALL(output_file)["rows"] / 10.))
num_of_cols_30sec = str(int(vos.getMapAttributesALL(output_file)["cols"] / 10.))
x_coordinate = str(vos.getMapAttributesALL(output_file)["xUL"])
y_coordinate = str(vos.getMapAttributesALL(output_file)["yUL"])
cellsize_30sec = "0.00833333333333333333333333333333333333333333333333333333333333333333333333333333"
output_file = output_folder + "/" + tile_code + ".30sec.clo.map"
cmd = "mapattr -s -R " + num_of_rows_30sec + " -C " + num_of_cols_30sec + " -B -P yb2t -x " + x_coordinate + " -y " + y_coordinate + " -l " + cellsize_30sec + " " + output_file
print(cmd)
os.system(cmd)

# give the ids for every 30 arc sec cell (e.g. pcrcalc dem_tif_n60w180_30sec.ids.map  = "nominal(uniqueid(dem_tif_n60w180_30sec.clo.map))")
input_file = output_file
output_file = output_folder + "/" + tile_code + ".30sec.ids.map"
pcr.setclone(input_file)
print("Making the clone map at 30 arcsec resolution.")
unique_ids_30sec = pcr.nominal(pcr.uniqueid(input_file))
pcr.report(unique_ids_30sec, output_file)
# - Note that this map has 30 arc sec resolution.

# resample the ids map to 3 arc sec resolution (e.g. gdalwarp -tr 0.00083333333333333333333333333333333333333 0.00083333333333333333333333333333333333333 dem_tif_n60w180_30sec.ids.map dem_tif_n60w180_30sec.ids.3sec.tif)
input_file = output_file
output_file = output_folder + "/" + tile_code + ".30sec.ids.3sec.tif"
cellsize_3sec = "0.000833333333333333333333333333333333333333333333333333333333333333333333333333333"
cmd = "gdalwarp -tr " + cellsize_3sec + " " + cellsize_3sec + " " + input_file + " " + output_file
print(cmd)
os.system(cmd)
# - This is still a tif file.

# convert the tif file to PCRaster map
input_file = output_file
output_file = output_folder + "/" + tile_code + ".30sec.ids.3sec.map"
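# A plausible continuation sketch (not part of the original snippet): the usual
# conversion calls gdal_translate with the PCRaster driver; the Int32 output
# type and nominal valuescale creation option below are assumptions for an ids map.
cmd = "gdal_translate -of PCRaster -ot Int32 -co PCRASTER_VALUESCALE=VS_NOMINAL " + input_file + " " + output_file
print(cmd)
os.system(cmd)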
Exemplo n.º 22
0
def subcatch_order_b(
    ldd, oorder, sizelimit=0, fill=False, fillcomplete=False, stoporder=0
):
    """
    Determines subcatchments using the catchment order

    This version tries to keep the number of upstream/downstream catchments
    small by first deriving the catchments connected to the major river (the
    given order), and filling up from there.

    Input:
        - ldd
        - oorder - order to use
        - sizelimit - smallest catchments to include, default is all (sizelimit=0) in number of cells
        - fill - if set to True the higher order catchments are filled in as well
        - fillcomplete - if set to True the whole ldd is filled with catchments
        - stoporder - lowest order to fill down to (defaults to oorder)


    :returns sc, dif, nldd; Subcatchment, Points, subcatchldd
    """
    # outl = find_outlet(ldd)
    # large = pcr.subcatchment(ldd,pcr.boolean(outl))

    if stoporder == 0:
        stoporder = oorder

    stt = pcr.streamorder(ldd)
    sttd = pcr.downstream(ldd, stt)
    pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
    maxorder = pcraster.framework.getCellValue(pcr.mapmaximum(stt), 1, 1)
    dif = pcr.uniqueid(pcr.boolean(pcr.ifthen(stt == pcr.ordinal(oorder), pts)))

    if fill:
        for order in range(oorder, maxorder):
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(order)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(order), m_pts))
            )
            dif = pcr.uniqueid(pcr.boolean(pcr.cover(m_dif, dif)))

        for myorder in range(oorder - 1, stoporder, -1):
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            m_pts = pcr.ifthen((pcr.scalar(sttd) - pcr.scalar(stt)) > 0.0, sttd)
            m_dif = pcr.uniqueid(
                pcr.boolean(pcr.ifthen(stt == pcr.ordinal(myorder - 1), m_pts))
            )
            dif = pcr.uniqueid(
                pcr.boolean(pcr.cover(pcr.ifthen(pcr.scalar(sc) == 0, m_dif), dif))
            )

        if fillcomplete:
            sc = pcr.subcatchment(ldd, pcr.nominal(dif))
            cs, m_dif, stt = subcatch_order_a(ldd, stoporder)
            dif = pcr.uniqueid(
                pcr.boolean(
                    pcr.cover(
                        pcr.ifthen(pcr.scalar(sc) == 0, pcr.ordinal(m_dif)),
                        pcr.ordinal(dif),
                    )
                )
            )

    scsize = pcr.catchmenttotal(1, ldd)
    dif = pcr.ordinal(pcr.uniqueid(pcr.boolean(pcr.ifthen(scsize >= sizelimit, dif))))
    sc = pcr.subcatchment(ldd, dif)

    # Make pit ldd
    nldd = pcr.lddrepair(pcr.ifthenelse(pcr.cover(dif, 0) > 0, 5, ldd))

    return sc, dif, nldd
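
# A minimal usage sketch (not part of the original listing): it assumes an
# existing PCRaster ldd map "wflow_ldd.map"; the order and size limit are
# arbitrary illustration values.
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd_example = pcr.readmap("wflow_ldd.map")
sc, pts, nldd = subcatch_order_b(ldd_example, oorder=4, sizelimit=100, fill=True)
pcr.report(sc, "wflow_subcatch_order4.map")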
Exemplo n.º 23
0
def main():
    clone_map = "mask\mask.map"
    clone_shp = "mask\mask.shp"
    clone_prj = "mask\mask.prj"
    workdir = "work\\"
    resultdir = "staticmaps\\"
    ''' read commandline arguments '''
    argv = sys.argv
    clone_EPSG = False
    unit_clone = False

    try:
        opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA')
    except getopt.error:
        print 'error'
        Usage()
        sys.exit(1)

    inifile = None
    rivshp = None
    catchshp = None
    dem_in = None
    landuse = None
    soiltype = None
    clean = False
    gaugeshp = None
    alltouching = False

    for o, a in opts:
        if o == '-i': inifile = a
        if o == '-p': clone_EPSG = 'EPSG:' + a
        if o == '-r': rivshp = a
        if o == '-c': catchshp = a
        if o == '-d': dem_in = a
        if o == '-l': landuse = a
        if o == '-s': soiltype = a
        if o == '-C': clean = True
        if o == '-g': gaugeshp = a
        if o == '-A': alltouching = True

    if inifile is None or rivshp is None or catchshp is None or dem_in is None:
        print 'the following files are compulsory:'
        print ' - ini-file'
        print ' - DEM (raster)'
        print ' - river (shape)'
        print ' - catchment (shape)'
        Usage()
        sys.exit(1)

    if landuse is None:
        print 'no raster with landuse classifications is specified. 1 class will be applied for the entire domain'

    if soiltype is None:
        print 'no raster with soil classifications is specified. 1 class will be applied for the entire domain'
    ''' read mask '''
    if not os.path.exists(clone_map):
        print 'Mask not found. Make sure the file mask\mask.map exists'
        print 'This file is usually created with the CreateGrid script'
        sys.exit(1)
    else:
        pcr.setclone(clone_map)
        ds = gdal.Open(clone_map, GA_ReadOnly)
        clone_trans = ds.GetGeoTransform()
        cellsize = clone_trans[1]
        clone_rows = ds.RasterYSize
        clone_columns = ds.RasterXSize
        extent_mask = [
            clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize,
            clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3]
        ]
        xmin, ymin, xmax, ymax = map(str, extent_mask)
        ds = None
        ones = pcr.scalar(pcr.readmap(clone_map))
        zeros = ones * 0
        empty = pcr.ifthen(ones == 0, pcr.scalar(0))
    ''' read projection from mask.shp '''
    # TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
    if not os.path.exists(clone_prj):
        print 'please add prj-file to mask.shp'
        sys.exit(1)
    if os.path.exists(clone_shp):
        ds = ogr.Open(clone_shp)
        file_att = os.path.splitext(os.path.basename(clone_shp))[0]
        lyr = ds.GetLayerByName(file_att)
        spatialref = lyr.GetSpatialRef()
        if spatialref is not None:
            srs_clone = osr.SpatialReference()
            srs_clone.ImportFromWkt(spatialref.ExportToWkt())
            srs_clone.AutoIdentifyEPSG()
            unit_clone = srs_clone.GetAttrValue('UNIT').lower()
            #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1)
            # TODO: fix hard EPSG code below
            clone_EPSG = 'EPSG:' + '4167'
            print 'EPSG-code is read from mask.shp: ' + clone_EPSG
            spatialref = None
    if not clone_EPSG:
        print 'EPSG-code cannot be read from mask.shp'
        print 'please add prj-file to mask.shp or specify on command line'
        print 'e.g. -p EPSG:4326 (for WGS84 lat lon projection)'

    ds = None
    clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)])
    ''' open config-file '''
    config = wt.OpenConf(inifile)
    ''' read settings '''
    snapgaugestoriver = bool(
        int(wt.configget(config, "settings", "snapgaugestoriver", "1")))
    burnalltouching = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "1")))
    burninorder = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "0")))
    verticetollerance = float(
        wt.configget(config, "settings", "vertice_tollerance", "0.0001"))
    ''' read parameters '''
    burn_outlets = int(
        wt.configget(config, "parameters", "burn_outlets", 10000))
    burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200))
    burn_connections = int(
        wt.configget(config, "parameters", "burn_connections", 100))
    burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100))
    minorder = int(wt.configget(config, "parameters", "riverorder_min", 3))
    exec "percentile=tr.array(" + wt.configget(config, "parameters",
                                               "statisticmaps", [0, 100]) + ")"
    if not unit_clone:
        print 'failed to read unit (meter or degree) from mask projection'
        unit_clone = str(wt.configget(config, "settings", "unit", 'meter'))
        print 'unit read from settings: ' + unit_clone
    if unit_clone == 'degree':
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_degree", 0.0005))
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_metre", 50))

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    ''' read staticmap locations '''
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")
    ''' read mask location (optional) '''
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)
    ''' create directories '''
    if os.path.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)
    os.makedirs(resultdir)
    ''' Preparation steps '''
    zero_map = workdir + "zero.map"
    zero_tif = workdir + "zero.tif"
    pcr.report(zeros, zero_map)
    # TODO: replace gdal_translate call
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, zero_tif))
    pcr.setglobaloption("lddin")
    ''' resample DEM '''
    dem_resample = workdir + "dem_resampled.tif"
    ds = gdal.Open(dem_in, GA_ReadOnly)
    band = ds.GetRasterBand(1)
    nodata = band.GetNoDataValue()
    proj = ds.GetGeoTransform()
    cellsize_dem = proj[1]
    ''' read DEM projection '''
    spatialref = ds.GetProjection()
    if spatialref:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(spatialref)
        srs.AutoIdentifyEPSG()
        dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
        print 'EPSG-code is read from ' + os.path.basename(
            dem_in) + ': ' + dem_EPSG
        spatialref = None
        dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)])
        srs_DEM = osr.SpatialReference()
        srs_DEM.ImportFromEPSG(dem_EPSG_int)
        clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM)
    else:
        dem_EPSG = clone_EPSG
        print 'No projection defined for ' + os.path.basename(dem_in)
        print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    ds = None
    print 'Resampling DEM...'
    if nodata is None:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin,
              xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata',
              str(-9999), '-r', 'cubic', dem_in, dem_resample))
    else:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-srcnodata', str(nodata), '-dstnodata',
              str(nodata), '-r', 'cubic', dem_in, dem_resample))
    ''' create dem.map and statistic maps '''
    dem_resample_map = resultdir + dem_map
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', dem_resample, dem_resample_map))
    print 'Computing DEM statistics ....'
    stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans,
                           srs_clone, resultdir, percentile)
    ''' burn DEM '''
    ds = ogr.Open(rivshp)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    rivshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    # strip rivers to nodes
    xminc = str(float(xmin) + 0.5 * cellsize)
    yminc = str(float(ymin) + 0.5 * cellsize)
    xmaxc = str(float(xmax) - 0.5 * cellsize)
    ymaxc = str(float(ymax) - 0.5 * cellsize)
    if rivshp_EPSG == clone_EPSG:
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivshp))
    else:
        rivprojshp = workdir + 'rivshape_proj.shp'
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, rivprojshp, rivshp))
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivprojshp))

    rivshp = rivclipshp

    #### BURNING BELOW ####

    # TODO: check if extraction can be done within memory and return a burn layer
    shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int,
                            cellsize * verticetollerance, workdir)

    outlets = shapes[1]
    connections = shapes[2]
    outlets_att = os.path.splitext(os.path.basename(outlets))[0]
    connections_att = os.path.splitext(os.path.basename(connections))[0]
    dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0]
    connections_tif = workdir + connections_att + ".tif"
    outlets_tif = workdir + outlets_att + ".tif"
    # TODO: make the burning in memory
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, connections_tif))
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets,
          outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections,
          connections_tif))

    # convert rivers to order
    rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
    rivers_tif = workdir + rivshp_att + ".tif"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, rivers_tif))
    if burninorder:  # make river shape with an order attribute
        OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                                  cellsize * verticetollerance, workdir)
        wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
    else:
        call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp,
              rivers_tif))

    # convert the rasters to PCRaster maps
    connections_map = workdir + connections_att + ".map"
    rivers_map = workdir + rivshp_att + ".map"
    outlets_map = workdir + outlets_att + ".map"
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', connections_tif, connections_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', rivers_tif, rivers_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', outlets_tif, outlets_map))

    # burn the layers in DEM
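    # lowering the DEM by the configured burn_* depths along the rasterized
    # outlets, connections and rivers forces the subsequent lddcreate to drain
    # along the mapped network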
    outletsburn = pcr.scalar(
        pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
    connectionsburn = pcr.scalar(
        pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
    riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
    ldddem = pcr.cover(dem_resample_map,
                       pcr.ifthen(riverburn > 0, pcr.scalar(0)))
    ldddem = ldddem - outletsburn - connectionsburn - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    pcr.report(ldddem, workdir + "dem_burn.map")
    ''' create ldd for multi-catchments '''
    ldd = pcr.ldd(empty)
    # reproject catchment shape-file
    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    catchshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    if catchshp_EPSG != clone_EPSG:
        catchprojshp = workdir + 'catchshape_proj.shp'
        call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
              catchprojshp, catchshp))
        catchshp = catchprojshp
    ds.Destroy()

    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)
    # 'Driver' is not defined earlier in this snippet; the ESRI Shapefile OGR
    # driver is assumed here
    Driver = ogr.GetDriverByName("ESRI Shapefile")
    TEMP_out = Driver.CreateDataSource(workdir + "temp.shp")
    if srs is not None:
        TEMP_LYR = TEMP_out.CreateLayer("temp",
                                        srs,
                                        geom_type=ogr.wkbMultiPolygon)
    else:
        TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
    TEMP_LYR.CreateField(fieldDef)

    for i in range(lyr.GetFeatureCount()):
        orgfeature = lyr.GetFeature(i)
        geometry = orgfeature.geometry()
        feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
        feature.SetGeometry(geometry)
        feature.SetField("ID", str(i + 1))
        TEMP_LYR.CreateFeature(feature)
    TEMP_out.Destroy()
    ds.Destroy()

    # rasterize catchment map
    catchments_tif = workdir + "catchments.tif"
    catchments_map = workdir + "catchments.map"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map,
          catchments_tif))
    if alltouching:
        call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
              workdir + 'temp.shp', catchments_tif))
    else:
        call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp',
              catchments_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
          catchments_tif, catchments_map))
    catchments = pcr.readmap(catchments_map)
    riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
    rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
    #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0))
    catchments = pcr.cover(
        pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
        pcr.ifthen(
            riverburn > 0,
            pcr.ordinal(
                pcr.spreadzone(pcr.nominal(catchments),
                               pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
    rivercatch_map = workdir + "catchments_river.map"
    catchclip_map = workdir + "catchments_clip.map"
    pcr.report(rivercatch, rivercatch_map)
    pcr.report(catchments, catchclip_map)

    ds = ogr.Open(workdir + "temp.shp")
    lyr = ds.GetLayerByName("temp")

    print 'calculating ldd'
    for i in range(lyr.GetFeatureCount()):
        feature = lyr.GetFeature(i)
        catch = int(feature.GetField("ID"))
        print "calculating ldd for catchment: " + str(i + 1) + "/" + str(
            lyr.GetFeatureCount()) + "...."
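        # ifthen(catchments == catch, ...) * 0 + 1 * ldddem masks the burned
        # DEM to the current catchment (cells outside stay missing)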
        ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                              catchments)) * 0 + 1 * ldddem
        ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                                   float("1E35"), float("1E35"))
        ldd = pcr.cover(ldd, ldd_select)
    pcr.report(ldd, resultdir + ldd_map)
    ds.Destroy()
    ''' report stream order, river and dem '''
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
    dem_resample_map = pcr.cover(dem_resample_map,
                                 pcr.scalar(river) * 0 + mindem)
    pcr.report(dem_resample_map, resultdir + dem_map)
    pcr.report(streamorder, resultdir + streamorder_map)
    pcr.report(river, resultdir + river_map)
    ''' deal with your catchments '''
    if gaugeshp is None:
        print 'No gauges defined, using outlets instead'
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, resultdir + gauges_map)


#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
#
#    # reproject gauge shape if necessary
#    if not gaugeshp_EPSG == clone_EPSG:
#        gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#        call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp))
#        gaugeshp = gaugeprojshp
#
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    gaugestif = workdir + file_att + '.tif'
#    gaugesmap = workdir + file_att + '.map'
#    call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif))
#    call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif))
#    call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap))
#    gaugelocs = pcr.readmap(gaugesmap)
#    snapgaugestoriver = True
#
#    if snapgaugestoriver:
#        print "Snapping gauges to river"
#        gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#        gauges= wt.snaptomap(pcr.ordinal(gauges),river)
#
#    gaugesmap = pcr.ifthen(gauges > 0, gauges)
    ''' report riverlengthfrac '''
    riv_hr = workdir + 'river_highres.tif'
    wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
    print 'Computing river length...'
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns,
                                 clone_trans, srs_clone, resultdir, 'frac')
    ''' report outlet map '''
    pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
               resultdir + outlet_map)
    ''' report  map '''
    catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
    pcr.report(catchment, resultdir + catchment_map)
    ''' report subcatchment map '''
    subcatchment = pcr.subcatchment(ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)
    ''' report landuse map '''
    if landuse is None:
        pcr.report(pcr.nominal(ones), resultdir + landuse_map)
    else:
        landuse_resample = workdir + 'landuse.tif'
        landuse_map = resultdir + landuse_map
        transform = wt.GetRasterTranform(landuse, srs_clone)
        if not transform[0]:
            call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        else:
            call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              landuse_resample, landuse_map))
        landuse_work = pcr.readmap(landuse_map)
        pcr.report(pcr.nominal(landuse_work), landuse_map)
    ''' report soil map '''
    if soiltype is None:
        pcr.report(pcr.nominal(ones), resultdir + soil_map)
    else:
        soiltype_resample = workdir + 'soiltype.tif'
        soil_map = resultdir + soil_map
        #transform = wt.GetRasterTranform(soiltype,srs_clone)
        #        if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
              clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-r', 'mode', soiltype, soiltype_resample))
        #        else:
        #        call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              soiltype_resample, soil_map))
        soiltype_work = pcr.readmap(soil_map)
        pcr.report(pcr.nominal(soiltype_work), soil_map)

    if clean:
        wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
Exemplo n.º 24
0
                                   vos.netcdf2PCRobjClone(inputDirectory+inputFiles,\
                                                          inputVarNames,
                                                          fulldate,
                                                          None,
                                                          cloneMapFileName)) * 0.001   # unit: bcm/year
        
        abstraction = pcr.cover(abstraction_volume_30min, 0.0)
        
        # use window maximum to be on the conservative side:
        window_size = 1.0
        abstraction = pcr.windowmaximum(abstraction, window_size)
        
        # covering the map with zero
        pcrValue = pcr.cover(abstraction, 0.0)  # unit: m/day                       

        # the value should be higher than the previous year value
        if iYear > staYear: pcrValue = pcr.max(preValue, pcrValue)
        preValue = pcrValue
        
        region_ids = pcr.uniqueid(pcr.boolean(1.0))
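        # uniqueid over a uniformly-true map gives every cell its own region id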
        region_ids_masked = pcr.ifthen(landmask, region_ids)
        
        regional_pumping_limit = pcrValue
        regional_pumping_limit_masked = pcr.ifthen(landmask, regional_pumping_limit)

        for iVar in range(len(varNames)):
            var = varNames[iVar]
            pcrRead = vars()[str(var)]
            varField = pcr.pcr2numpy(pcrRead, vos.MV)
            tssNetCDF.writePCR2NetCDF(ncFileName,var,varField,timeStamp,posCnt = index - 1)
Exemplo n.º 25
0
def subcatch_stream(
    ldd,
    threshold,
    min_strahler=-999,
    max_strahler=999,
    assign_edge=False,
    assign_existing=False,
    up_area=None,
):
    """
    (From Deltares Hydrotools)

    Derive catchments based upon strahler threshold
    Input:
        ldd -- pcraster object direction, local drain directions
        threshold -- integer, strahler threshold, subcatchments ge threshold
            are derived
        min_strahler -- integer, minimum strahler threshold of river catchments
            to return
        max_strahler -- integer, maximum strahler threshold of river catchments
            to return
        assign_edge=False -- if set to True, unassigned connected areas at
            the edges of the domain are assigned a unique id as well. If set
            to False, edges are not assigned
        assign_existing=False -- if set to True, unassigned edges are assigned
            to existing basins with an upstream weighting. If set to False,
            edges are assigned to unique IDs, or not assigned
        up_area=None -- optional upstream-area map used when assign_existing
            is True (derived from accuflux if not given)
    output:
        stream_ge -- pcraster object, streams of strahler order ge threshold
        subcatch -- pcraster object, subcatchments of strahler order ge threshold

    """
    # derive stream order

    stream = pcr.streamorder(ldd)
    stream_ge = pcr.ifthen(stream >= threshold, stream)
    stream_up_sum = pcr.ordinal(pcr.upstream(ldd, pcr.cover(pcr.scalar(stream_ge), 0)))
    # detect any transfer of strahler order, to a higher strahler order.
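    # a cell marks a transition when (1) its downstream cell has a different
    # order, (2) it is a pit (ldd == 5), or (3) the accumulated upstream order
    # just downstream exceeds the local order (a confluence raising the order)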
    transition_strahler = pcr.ifthenelse(
        pcr.downstream(ldd, stream_ge) != stream_ge,
        pcr.boolean(1),
        pcr.ifthenelse(
            pcr.nominal(ldd) == 5,
            pcr.boolean(1),
            pcr.ifthenelse(
                pcr.downstream(ldd, pcr.scalar(stream_up_sum)) > pcr.scalar(stream_ge),
                pcr.boolean(1),
                pcr.boolean(0),
            ),
        ),
    )
    # make unique ids (write to file)
    transition_unique = pcr.ordinal(pcr.uniqueid(transition_strahler))

    # derive upstream catchment areas (write to file)
    subcatch = pcr.nominal(pcr.subcatchment(ldd, transition_unique))

    if assign_edge:
        # fill unclassified areas (in pcraster equal to zero) with a unique id, above the maximum id assigned so far
        unique_edge = pcr.clump(pcr.ifthen(subcatch == 0, pcr.ordinal(0)))
        subcatch = pcr.ifthenelse(
            subcatch == 0,
            pcr.nominal(pcr.mapmaximum(pcr.scalar(subcatch)) + pcr.scalar(unique_edge)),
            pcr.nominal(subcatch),
        )
    elif assign_existing:
        # unaccounted areas are added to largest nearest draining basin
        if up_area is None:
            up_area = pcr.ifthen(
                pcr.boolean(pcr.cover(stream_ge, 0)), pcr.accuflux(ldd, 1)
            )
        riverid = pcr.ifthen(pcr.boolean(pcr.cover(stream_ge, 0)), subcatch)

        friction = 1.0 / pcr.scalar(
            pcr.spreadzone(pcr.cover(pcr.ordinal(up_area), 0), 0, 0)
        )  # *(pcr.scalar(ldd)*0+1)
        delta = pcr.ifthen(
            pcr.scalar(ldd) >= 0,
            pcr.ifthen(
                pcr.cover(subcatch, 0) == 0,
                pcr.spreadzone(pcr.cover(riverid, 0), 0, friction),
            ),
        )
        subcatch = pcr.ifthenelse(pcr.boolean(pcr.cover(subcatch, 0)), subcatch, delta)

    # finally, only keep basins with minimum and maximum river order flowing through them
    strahler_subcatch = pcr.areamaximum(stream, subcatch)
    subcatch = pcr.ifthen(
        pcr.ordinal(strahler_subcatch) >= min_strahler,
        pcr.ifthen(pcr.ordinal(strahler_subcatch) <= max_strahler, subcatch),
    )

    return stream_ge, pcr.ordinal(subcatch)
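
# A minimal usage sketch (not part of the original listing): it assumes an
# existing PCRaster ldd map "wflow_ldd.map"; the Strahler threshold is an
# arbitrary illustration value.
import pcraster as pcr

pcr.setclone("wflow_ldd.map")
ldd_example = pcr.readmap("wflow_ldd.map")
stream_ge, subcatch = subcatch_stream(ldd_example, threshold=6, assign_edge=True)
pcr.report(subcatch, "subcatch_strahler6.map")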
Exemplo n.º 26
0
    def initial(self):
        """ initial part of the routing module
        """
        maskinfo = MaskInfo.instance()
        self.var.avgdis = maskinfo.in_zero()
        self.var.Beta = loadmap('beta')
        self.var.InvBeta = 1 / self.var.Beta
        # Inverse of beta for kinematic wave
        self.var.ChanLength = loadmap('ChanLength').astype(float)
        self.var.InvChanLength = 1 / self.var.ChanLength
        # Inverse of channel length [1/m]

        self.var.NoRoutSteps = int(
            np.maximum(1, round(self.var.DtSec / self.var.DtSecChannel, 0)))
        # Number of sub-steps based on value of DtSecChannel,
        # or 1 if DtSec is smaller than DtSecChannel
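        # e.g. DtSec = 86400 s with DtSecChannel = 14400 s gives NoRoutSteps = 6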
        settings = LisSettings.instance()
        option = settings.options
        if option['InitLisflood']:
            self.var.NoRoutSteps = 1
            # InitLisflood is used!
            # so channel routing step is the same as the general time step
        self.var.DtRouting = self.var.DtSec / self.var.NoRoutSteps
        # Corresponding sub-timestep (seconds)
        self.var.InvDtRouting = 1 / self.var.DtRouting
        self.var.InvNoRoutSteps = 1 / float(self.var.NoRoutSteps)
        # inverse for faster calculation inside the dynamic section

        # -------------------------- LDD

        self.var.Ldd = lddmask(loadmap('Ldd', pcr=True, lddflag=True),
                               self.var.MaskMap)
        # Cut ldd to size of MaskMap (NEW, 29/9/2004)
        # Prevents 'unsound' ldd if MaskMap covers sub-area of ldd

        # Count (inverse of) upstream area for each pixel
        # Needed if we want to calculate average values of variables
        # upstream of gauge locations

        self.var.UpArea = accuflux(self.var.Ldd, self.var.PixelAreaPcr)
        # Upstream contributing area for each pixel
        # Note that you might expect that values of UpArea would be identical to
        # those of variable CatchArea (see below) at the outflow points.
        # This is NOT actually the case, because outflow points are shifted 1
        # cell in upstream direction in the calculation of CatchArea!
        self.var.InvUpArea = 1 / self.var.UpArea
        # Calculate inverse, so we can multiply in dynamic (faster than divide)

        self.var.IsChannelPcr = boolean(loadmap('Channels', pcr=True))
        self.var.IsChannel = np.bool8(compressArray(self.var.IsChannelPcr))
        # Identify channel pixels
        self.var.IsChannelKinematic = self.var.IsChannel.copy()
        # Identify kinematic wave channel pixels
        # (identical to IsChannel, unless dynamic wave is used, see below)
        #self.var.IsStructureKinematic = pcraster.boolean(0)
        self.var.IsStructureKinematic = np.bool8(maskinfo.in_zero())

        # Map that identifies special inflow/outflow structures (reservoirs, lakes) within the
        # kinematic wave channel routing. Set to a (dummy) value of zero; modified in the
        # reservoir and lake routines (if those are used)
        LddChan = lddmask(self.var.Ldd, self.var.IsChannelPcr)
        # ldd for Channel network
        self.var.MaskMap = boolean(self.var.Ldd)
        # Use boolean version of Ldd as calculation mask
        # (important for correct mass balance check
        # any water generated outside of Ldd won't reach
        # channel anyway)
        self.var.LddToChan = lddrepair(
            ifthenelse(self.var.IsChannelPcr, 5, self.var.Ldd))
        # Routing of runoff (incl. groundwater)
        AtOutflow = boolean(pit(self.var.Ldd))
        # find outlet points...

        if option['dynamicWave']:
            IsChannelDynamic = boolean(loadmap('ChannelsDynamic', pcr=True))
            # Identify channel pixels where dynamic wave is used
            self.var.IsChannelKinematic = (self.var.IsChannelPcr
                                           == 1) & (IsChannelDynamic == 0)
            # Identify (update) channel pixels where kinematic wave is used
            self.var.LddKinematic = lddmask(self.var.Ldd,
                                            self.var.IsChannelKinematic)
            # Ldd for kinematic wave: ends (pit) just before dynamic stretch
            LddDynamic = lddmask(self.var.Ldd, IsChannelDynamic)
            # Ldd for dynamic wave

            # Following statements produce an ldd network that connects the pits in
            # LddKinematic to the nearest downstream dynamic wave pixel
            LddToDyn = lddrepair(ifthenelse(IsChannelDynamic, 5, self.var.Ldd))
            # Temporary ldd: flow paths end in dynamic pixels
            PitsKinematic = cover(boolean(pit(self.var.LddKinematic)), 0)
            # Define start of each flow path at pit on LddKinematic
            PathKinToDyn = path(LddToDyn, PitsKinematic)
            # Identify paths that connect pits in LddKinematic to dynamic wave
            # pixels
            LddKinToDyn = lddmask(LddToDyn, PathKinToDyn)
            # Create ldd
            DynWaveBoundaryCondition = boolean(pit(LddDynamic))
            # NEW 12-7-2005 (experimental)
            # Location of boundary condition dynamic wave

            self.var.AtLastPoint = (downstream(
                self.var.Ldd,
                AtOutflow) == 1) & (AtOutflow != 1) & self.var.IsChannelPcr

            # NEW 23-6-2005
            # Dynamic wave routine gives no outflow out of pits, so we calculate this
            # one cell upstream (WvD)
            # (implies that most downstream cell is not taken into account in mass balance
            # calculations, even if dyn wave is not used)
            # Only include points that are on a channel (otherwise some small 'micro-catchments'
            # are included, for which the mass balance cannot be calculated
            # properly)

        else:
            self.var.LddKinematic = LddChan
            # No dynamic wave, so kinematic ldd equals channel ldd
            self.var.AtLastPoint = AtOutflow
            self.var.AtLastPointC = np.bool8(
                compressArray(self.var.AtLastPoint))
            # assign unique identifier to each of them
        maskinfo = MaskInfo.instance()
        lddC = compressArray(self.var.LddKinematic)
        inAr = decompress(np.arange(maskinfo.info.mapC[0], dtype="int32"))
        # giving a number to each non missing pixel as id
        self.var.downstruct = (compressArray(
            downstream(self.var.LddKinematic, inAr))).astype("int32")
        # each upstream pixel gets the id of the downstream pixel
        self.var.downstruct[lddC == 5] = maskinfo.info.mapC[0]
        # all pits get a high number
        #d3=np.bincount(self.var.down, weights=loadmap('AvgDis'))[:-1]
        # upstream function in numpy

        OutflowPoints = nominal(uniqueid(self.var.AtLastPoint))
        # and assign unique identifier to each of them
        self.var.Catchments = (compressArray(
            catchment(self.var.Ldd, OutflowPoints))).astype(np.int32)
        # define the catchment draining to each outflow point
        CatchArea = np.bincount(
            self.var.Catchments,
            weights=self.var.PixelArea)[self.var.Catchments]
        # Compute the area of each catchment [m2] and map it back onto its pixels
        # Note: in earlier versions this was calculated with the "areaarea" function;
        # it was changed to an areatotal-style summation (here np.bincount over
        # PixelArea) to handle grids with spatially variable cell areas (e.g. lat/lon grids)
        self.var.InvCatchArea = 1 / CatchArea
        # inverse of catchment area [1/m2]

        # ************************************************************
        # ***** CHANNEL GEOMETRY  ************************************
        # ************************************************************

        self.var.ChanGrad = np.maximum(loadmap('ChanGrad'),
                                       loadmap('ChanGradMin'))
        # avoid calculation of Alpha using ChanGrad=0: this creates MV!
        self.var.CalChanMan = loadmap('CalChanMan')
        self.var.ChanMan = self.var.CalChanMan * loadmap('ChanMan')
        # Manning's n is multiplied by the calibration factor CalChanMan,
        # which enables calibration of peak timing
        self.var.ChanBottomWidth = loadmap('ChanBottomWidth')
        ChanDepthThreshold = loadmap('ChanDepthThreshold')
        ChanSdXdY = loadmap('ChanSdXdY')
        self.var.ChanUpperWidth = self.var.ChanBottomWidth + 2 * ChanSdXdY * ChanDepthThreshold
        # Channel upper width [m]
        self.var.TotalCrossSectionAreaBankFull = 0.5 * \
            ChanDepthThreshold * (self.var.ChanUpperWidth + self.var.ChanBottomWidth)
        # Cross-sectional area of bankfull discharge [m2]
        # (trapezoid area equation)
        TotalCrossSectionAreaHalfBankFull = 0.5 * self.var.TotalCrossSectionAreaBankFull
        # Cross-sectional area at half bankfull [m2]
        # This can be used to initialise channel flow (see below)
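        # For illustration: with ChanBottomWidth = 10 m, ChanSdXdY = 1 and
        # ChanDepthThreshold = 2 m, the upper width is 10 + 2*1*2 = 14 m and the
        # bankfull area 0.5 * 2 * (14 + 10) = 24 m2 (so 12 m2 at half bankfull).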

        TotalCrossSectionAreaInitValue = loadmap(
            'TotalCrossSectionAreaInitValue')
        self.var.TotalCrossSectionArea = np.where(
            TotalCrossSectionAreaInitValue == -9999,
            TotalCrossSectionAreaHalfBankFull, TotalCrossSectionAreaInitValue)
        # Total cross-sectional area [m2]: if initial value in binding equals -9999 the value at half bankfull is used,
        # otherwise TotalCrossSectionAreaInitValue (typically end map from previous simulation)

        if option['SplitRouting']:
            CrossSection2AreaInitValue = loadmap('CrossSection2AreaInitValue')
            self.var.CrossSection2Area = np.where(
                CrossSection2AreaInitValue == -9999, maskinfo.in_zero(),
                CrossSection2AreaInitValue)
            # cross-sectional area [m2] for 2nd line of routing: if initial value in binding equals -9999 the value is set to 0
            # otherwise CrossSection2AreaInitValue (typically end map from previous simulation)

            PrevSideflowInitValue = loadmap('PrevSideflowInitValue')

            self.var.Sideflow1Chan = np.where(PrevSideflowInitValue == -9999,
                                              maskinfo.in_zero(),
                                              PrevSideflowInitValue)
            # sideflow from previous run for 1st line of routing: if initial value in binding equals -9999 the value is set to 0
            # otherwise PrevSideflowInitValue (typically end map from previous simulation)

        # ************************************************************
        # ***** CHANNEL ALPHA (KIN. WAVE)*****************************
        # ************************************************************
        # The following calculations are needed to compute the Alpha parameter of the
        # kinematic wave. Alpha is currently evaluated at half the bankfull depth
        # (this may change in future versions!)

        ChanWaterDepthAlpha = np.where(self.var.IsChannel,
                                       0.5 * ChanDepthThreshold, 0.0)
        # Reference water depth for calculation of Alpha: half of bankfull
        self.var.ChanWettedPerimeterAlpha = self.var.ChanBottomWidth + 2 * \
            np.sqrt(np.square(ChanWaterDepthAlpha) + np.square(ChanWaterDepthAlpha * ChanSdXdY))
        # Channel wetted perimeter [m] (Pythagoras)
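        # (each sloping bank has horizontal projection d * ChanSdXdY for water
        # depth d, hence bank length sqrt(d^2 + (d * ChanSdXdY)^2) per side)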
        AlpTermChan = (self.var.ChanMan /
                       (np.sqrt(self.var.ChanGrad)))**self.var.Beta
        self.var.AlpPow = 2.0 / 3.0 * self.var.Beta

        self.var.ChannelAlpha = (
            AlpTermChan *
            (self.var.ChanWettedPerimeterAlpha**self.var.AlpPow)).astype(float)
        self.var.InvChannelAlpha = 1 / self.var.ChannelAlpha
        # ChannelAlpha for kinematic wave
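        # Background (standard kinematic wave): Manning gives
        #   Q = (1/n) * sqrt(S) * A^(5/3) / P^(2/3)
        # and solving A = Alpha * Q^Beta (Beta = 0.6) for Alpha yields
        #   Alpha = (n / sqrt(S))^Beta * P^((2/3) * Beta)
        # i.e. exactly AlpTermChan * ChanWettedPerimeterAlpha^AlpPow as above.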

        # ************************************************************
        # ***** CHANNEL INITIAL DISCHARGE ****************************
        # ************************************************************

        self.var.ChanM3 = self.var.TotalCrossSectionArea * self.var.ChanLength
        # channel water volume [m3]
        self.var.ChanIniM3 = self.var.ChanM3.copy()
        self.var.ChanM3Kin = self.var.ChanIniM3.copy().astype(float)
        # Initialise water volume in kinematic wave channels [m3]
        self.var.ChanQKin = np.where(self.var.ChannelAlpha > 0,
                                     (self.var.TotalCrossSectionArea /
                                      self.var.ChannelAlpha)**self.var.InvBeta,
                                     0).astype(float)

        # Initialise discharge at kinematic wave pixels (note that InvBeta is
        # simply 1/Beta, precomputed for computational efficiency)

        self.var.CumQ = maskinfo.in_zero()
        # initialise the sum of discharge, used to compute the average

        # ************************************************************
        # ***** CHANNEL INITIAL DYNAMIC WAVE *************************
        # ************************************************************
        if option['dynamicWave']:
            pass
            # TODO: dynamic wave initialisation is not implemented yet

    #     lookchan = lookupstate(TabCrossSections, ChanCrossSections, ChanBottomLevel, self.var.ChanLength,
    #                            DynWaveConstantHeadBoundary + ChanBottomLevel)
    #     ChanIniM3 = ifthenelse(AtOutflow, lookchan, ChanIniM3)
    # Correct ChanIniM3 for constant head boundary in pit (only if
    # dynamic wave is used)
    #     ChanM3Dyn = ChanIniM3
    # Set volume of water in dynamic wave channel to initial value
    # (note that the initial condition is expressed as a state in [m3] for the
    # dynamic wave, and as a rate [m3/s] for the kinematic wave, which is a bit confusing)

    # Estimate the number of iterations needed in the first time step (based on the Courant criterion)
    # TODO !!!!!!!!!!!!!!!!!!!!
    #    Potential = lookuppotential(
    #        TabCrossSections, ChanCrossSections, ChanBottomLevel, self.var.ChanLength, ChanM3Dyn)
    # Potential
    #    WaterLevelDyn = Potential - ChanBottomLevel
    # Water level [m above bottom level]
    #    WaveCelerityDyn = pcraster.sqrt(9.81 * WaterLevelDyn)
    # Dynamic wave celerity [m/s]
    #    CourantDynamic = self.var.DtSec * \
    #        (WaveCelerityDyn + 2) / self.var.ChanLength
    # Courant number for dynamic wave
    # We don't know the water velocity at this time so
    # we just guess it's 2 m/s (Odra tests show that flow velocity
    # is typically much lower than wave celerity, and 2 m/s is quite
    # high already so this gives a pretty conservative/safe estimate
    # for DynWaveIterations)
    #    DynWaveIterationsTemp = max(
    #        1, roundup(CourantDynamic / CourantDynamicCrit))
    #    DynWaveIterations = ordinal(mapmaximum(DynWaveIterationsTemp))
    # Number of sub-steps needed for required numerical
    # accuracy. Always greater than or equal to 1
    # (otherwise division by zero!)

    # TEST
    # If the polder option is used, we need an estimate of the initial channel discharge,
    # but we don't know this for the dynamic wave pixels (since only the initial state is
    # known)! Try whether this works (dynamic wave flux based on zero inflow, 1 iteration).
    # Note that resulting ChanQ is ONLY used in the polder routine!!!
    # Since we need instantaneous estimate at start of time step, a
    # ChanQM3Dyn is calculated for one single one-second time step!!!

    #    ChanQDyn = dynwaveflux(TabCrossSections,
    #                           ChanCrossSections,
    #                           LddDynamic,
    #                           ChanIniM3,
    #                           0.0,
    #                           ChanBottomLevel,
    #                           self.var.ChanMan,
    #                           self.var.ChanLength,
    #                           1,
    #                           1,
    #                           DynWaveBoundaryCondition)
    # Compute volume and discharge in channel after dynamic wave
    # ChanM3Dyn in [cu m]
    # ChanQDyn in [cu m / s]
    #    self.var.ChanQ = ifthenelse(
    #        IsChannelDynamic, ChanQDyn, self.var.ChanQKin)
    # Channel discharge: combine results of kinematic and dynamic wave
        else:

            # ***** NO DYNAMIC WAVE *************************
            # Dummy code if dynamic wave is not used, in which case ChanQ equals ChanQKin
            # (needed only for polder routine)

            PrevDischarge = loadmap('PrevDischarge')
            self.var.ChanQ = np.where(PrevDischarge == -9999,
                                      self.var.ChanQKin, PrevDischarge)
            # initialise channel discharge: cold start: equal to ChanQKin
            # [m3/s]

        # Initialising cumulative output variables
        # These are all needed to compute the cumulative mass balance error

        self.var.DischargeM3Out = maskinfo.in_zero()
        # cumulative discharge at outlet [m3]
        self.var.TotalQInM3 = maskinfo.in_zero()
        # cumulative inflow from inflow hydrographs [m3]
        self.var.sumDis = maskinfo.in_zero()
        self.var.sumIn = maskinfo.in_zero()
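
# A minimal, hypothetical sketch of the "-9999 sentinel" cold-start pattern used
# repeatedly above: take the state from a previous run's end map unless the
# binding holds the sentinel, in which case fall back to a cold-start default
# (names below are illustrative, not from the model code):
import numpy as np

def init_from_binding(init_value, cold_start_default, sentinel=-9999):
    """Return cold_start_default wherever init_value equals the sentinel."""
    init_value = np.asarray(init_value, dtype=float)
    return np.where(init_value == sentinel, cold_start_default, init_value)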
Exemplo n.º 27
0
ldd = pcr.cover(ldd, ldd_select)
pcr.report(ldd, resultdir + ldd_map)
ds.Destroy()
''' report stream order, river and dem '''
streamorder = pcr.ordinal(pcr.streamorder(ldd))
river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
dem_resample_map = pcr.cover(dem_resample_map, pcr.scalar(river) * 0 + mindem)
pcr.report(dem_resample_map, resultdir + dem_map)
pcr.report(streamorder, resultdir + streamorder_map)
pcr.report(river, resultdir + river_map)
''' deal with your catchments '''
if gaugeshp is None:
    print("No gauges defined, using outlets instead")
    gauges = pcr.ordinal(
        pcr.uniqueid(
            pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
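    # ldd code 5 marks a pit (outlet) in the PCRaster flow-direction encoding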
    pcr.report(gauges, resultdir + gauges_map)
#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
landmask = pcr.defined(pcr.readmap(clone_map))

# class map file name:
#~ class_map_file_name = "/home/sutan101/data/aqueduct_gis_layers/aqueduct_shp_from_marta/Aqueduct_States.map"
#~ class_map_file_name = "/home/sutan101/data/aqueduct_gis_layers/aqueduct_shp_from_marta/Aqueduct_GDBD.map"
#~ class_map_file_name = "/home/sutan101/data/processing_whymap/version_19september2014/major_aquifer_30min.extended.map"
#~ class_map_file_name = "/home/sutan101/data/processing_whymap/version_19september2014/major_aquifer_30min.map"
class_map_file_name = str(sys.argv[2])
class_map_default_folder = "/home/sutan101/data/aqueduct_gis_layers/aqueduct_shp_from_marta/" 
if class_map_file_name == "state": class_map_file_name = class_map_default_folder + "/Aqueduct_States.map"
if class_map_file_name == "drainage_unit": class_map_file_name = class_map_default_folder + "/Aqueduct_GDBD.map"
if class_map_file_name == "aquifer": class_map_file_name = class_map_default_folder + "/why_wgs1984_BUENO.map"
if class_map_file_name == "country": class_map_file_name = "/home/sutan101/data/country_shp_from_tianyi/World_Polys_High.map"

# reading the class map
class_map    = pcr.nominal(pcr.uniqueid(landmask))
if class_map_file_name != "pixel":
    class_map = vos.readPCRmapClone(class_map_file_name, clone_map, tmp_directory, None, False, None, True, False)
    class_map = pcr.ifthen(pcr.scalar(class_map) > 0.0, pcr.nominal(class_map))

# time selection
start_year = int(sys.argv[3])
end_year   = int(sys.argv[4])
 
# cell_area (unit: m2)
cell_area = pcr.readmap("/data/hydroworld/PCRGLOBWB20/input5min/routing/cellsize05min.correct.map") 
segment_cell_area = pcr.areatotal(cell_area, class_map)

# extent of aquifer/sedimentary basins:
sedimentary_basin = pcr.cover(pcr.scalar(pcr.readmap("/home/sutan101/data/sed_extent/sed_extent.map")), 0.0)
cell_area = sedimentary_basin * cell_area
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 0.5))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.0))
basin_map = pcr.cover(basin_map, pcr.windowmajority(basin_map, 1.5))
basin_map = pcr.ifthen(landmask, basin_map)
#~ pcr.aguila(basin_map)

msg = "Redefining the basin map (so that it is consistent with the ldd map used in PCR-GLOBWB):"
logger.info(msg)
# - calculate the upstream area of every pixel:
upstream_area = pcr.catchmenttotal(cell_area, ldd)
# - calculate the catchment area of every basin:
upstream_area_maximum = pcr.areamaximum(upstream_area, basin_map)
# - identify the outlet of every basin (in order to rederive the basin so that it is consistent with the ldd)
outlet = pcr.nominal(pcr.uniqueid(pcr.ifthen(upstream_area == upstream_area_maximum, pcr.boolean(1.0))))
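# (the cell whose upstream area equals the basin maximum is the basin's most
# downstream cell; pcr.uniqueid then gives each such outlet its own id)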
# - ignoring outlets with small upstream areas
threshold = 50. * 1000. * 1000.                                                 # unit: m2
outlet    = pcr.ifthen(upstream_area_maximum > threshold, outlet)
#~ pcr.aguila(outlet)
outlet = pcr.cover(outlet, pcr.nominal(0.0))
# - recalculate the basin
basin_map  = pcr.nominal(pcr.subcatchment(ldd, outlet))
basin_map  = pcr.clump(basin_map)
basin_map  = pcr.ifthen(landmask, basin_map)
pcr.report(basin_map , "basin_map.map")
#~ pcr.aguila(basin_map)
# - calculate the basin area
basin_area = pcr.areatotal(cell_area, basin_map)
pcr.report(basin_area, "basin_area.map")
#~ pcr.aguila(basin_area)
def main():

    # output folder (and tmp folder)
    clean_out_folder = True
    if os.path.exists(out_folder):
        if clean_out_folder:
            shutil.rmtree(out_folder)
            os.makedirs(out_folder)
    else:
        os.makedirs(out_folder)
    os.chdir(out_folder)
    os.system("pwd")

    # set the clone map
    print("set the clone")
    pcr.setclone(global_ldd_30min_inp_file)

    # define the landmask
    print("define the landmask")
    # - based on the 30min input
    landmask_30min = define_landmask(input_file = global_landmask_30min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_30min_only.map")
    # - based on the 05min input
    landmask_05min = define_landmask(input_file = global_landmask_05min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_05min_only.map")
    # - based on the 06min input
    landmask_06min = define_landmask(input_file = global_landmask_06min_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_06min_only.map")
    # - based on the 30sec input
    landmask_30sec = define_landmask(input_file = global_landmask_30sec_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_30sec_only.map")
    # - based on the 03sec input
    landmask_03sec = define_landmask(input_file = global_landmask_03sec_file,\
                                      clone_map_file = global_ldd_30min_inp_file,\
                                      output_map_file = "landmask_03sec_only.map")
    #
    # - merge all landmasks
    landmask = pcr.cover(landmask_30min, landmask_05min, landmask_06min,
                         landmask_30sec, landmask_03sec)
    pcr.report(landmask, "global_landmask_extended_30min.map")
    # ~ pcr.aguila(landmask)

    # extend ldd
    print("extend/define the ldd")
    ldd_map = pcr.readmap(global_ldd_30min_inp_file)
    ldd_map = pcr.ifthen(landmask, pcr.cover(ldd_map, pcr.ldd(5)))
    pcr.report(ldd_map, "global_ldd_extended_30min.map")
    # ~ pcr.aguila(ldd_map)

    # catchment map and size
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)
    # ~ pcr.aguila(catchment_size)

    # identify small islands
    print("identify small islands")
    # - maps of islands smaller than 15000 cells (at half arc degree resolution)
    island_map = pcr.ifthen(landmask, pcr.clump(pcr.defined(ldd_map)))
    island_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), island_map)
    island_map = pcr.ifthen(island_size < 15000., island_map)
    # ~ # - use catchments (instead of islands)
    # ~ island_map  = catchment_map
    # ~ island_size = catchment_size
    # ~ island_map  = pcr.ifthen(island_size < 10000., island_map)
    # - sort from the largest island
    # -- take one cell per island as a representative
    island_map_rep_size = pcr.ifthen(
        pcr.areaorder(island_size, island_map) == 1.0, island_size)
    # -- sort from the largest island
    island_map_rep_ids = pcr.areaorder(
        island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(island_map_rep_size), pcr.nominal(1.0)))
    # -- map of smaller islands, sorted from the largest one
    island_map = pcr.areamajority(pcr.nominal(island_map_rep_ids), island_map)

    # identify the biggest island for every group of small islands within a 15-arcdeg window
    print("the biggest island for every group of small islands")
    large_island_map = pcr.ifthen(
        pcr.scalar(island_map) == pcr.windowminimum(pcr.scalar(island_map),
                                                    15.), island_map)
    # ~ pcr.aguila(large_island_map)

    # identify big catchments
    print("identify large catchments")
    catchment_map = pcr.catchment(ldd_map, pcr.pit(ldd_map))
    catchment_size = pcr.areatotal(pcr.spatial(pcr.scalar(1.0)), catchment_map)

    # ~ # - identify all large catchments with size >= 50 cells (at the resolution of 30 arcmin) = 50 x (50^2) km2 = 125000 km2
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 50, catchment_map)
    # ~ # - identify all large catchments with size >= 10 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 10, catchment_map)
    # ~ # - identify all large catchments with size >= 5 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 5, catchment_map)
    # ~ # - identify all large catchments with size >= 20 cells (at the resolution of 30 arcmin)
    # ~ large_catchment_map = pcr.ifthen(catchment_size >= 20, catchment_map)

    # - identify all large catchments with size >= 25 cells (at the resolution of 30 arcmin)
    large_catchment_map = pcr.ifthen(catchment_size >= 25, catchment_map)

    # - shift the codes so that they differ from the island codes
    large_catchment_map = pcr.nominal(
        pcr.scalar(large_catchment_map) +
        10. * vos.getMinMaxMean(pcr.scalar(large_island_map))[1])

    # merge biggest islands and big catchments
    print("merge large catchments and islands")
    large_catchment_and_island_map = pcr.cover(large_catchment_map,
                                               large_island_map)
    # ~ large_catchment_and_island_map = pcr.cover(large_island_map, large_catchment_map)
    large_catchment_and_island_map_size = pcr.areatotal(
        pcr.spatial(pcr.scalar(1.0)), large_catchment_and_island_map)

    # - sort from the largest one
    # -- take one cell per island as a representative
    large_catchment_and_island_map_rep_size = pcr.ifthen(
        pcr.areaorder(large_catchment_and_island_map_size,
                      large_catchment_and_island_map) == 1.0,
        large_catchment_and_island_map_size)
    # -- sort from the largest
    large_catchment_and_island_map_rep_ids = pcr.areaorder(
        large_catchment_and_island_map_rep_size * -1.00,
        pcr.ifthen(pcr.defined(large_catchment_and_island_map_rep_size),
                   pcr.nominal(1.0)))
    # -- map of largest catchments and islands, sorted from the largest one
    large_catchment_and_island_map = pcr.areamajority(
        pcr.nominal(large_catchment_and_island_map_rep_ids),
        large_catchment_and_island_map)
    # ~ pcr.report(large_catchment_and_island_map, "large_catchments_and_islands.map")

    # ~ # perform cdo fillmiss2 in order to merge the small catchments to the nearest large catchments
    # ~ print("spatial interpolation/extrapolation using cdo fillmiss2 to get initial subdomains")
    # ~ cmd = "gdal_translate -of NETCDF large_catchments_and_islands.map large_catchments_and_islands.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "cdo fillmiss2 large_catchments_and_islands.nc large_catchments_and_islands_filled.nc"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "gdal_translate -of PCRaster large_catchments_and_islands_filled.nc large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ cmd = "mapattr -c " + global_ldd_30min_inp_file + " " + "large_catchments_and_islands_filled.map"
    # ~ print(cmd); os.system(cmd)
    # ~ # - initial subdomains
    # ~ subdomains_initial = pcr.nominal(pcr.readmap("large_catchments_and_islands_filled.map"))
    # ~ subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    # ~ pcr.aguila(subdomains_initial)

    # spatial interpolation/extrapolation in order to merge the small catchments to the nearest large catchments
    print("spatial interpolation/extrapolation to get initial subdomains")
    field = large_catchment_and_island_map
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
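    # How this works: spreadzone grows each single-cell id outward, so zoneID
    # labels every pixel with the id of its nearest defined cell; areamajority
    # then copies that cell's class, i.e. a nearest-neighbour extrapolation.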
    subdomains_initial = field
    subdomains_initial = pcr.areamajority(subdomains_initial, catchment_map)
    pcr.aguila(subdomains_initial)

    pcr.report(subdomains_initial, "global_subdomains_30min_initial.map")

    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[0])))
    print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])))

    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[0])))
    # ~ print(str(int(vos.getMinMaxMean(pcr.scalar(subdomains_initial_clump))[1])))

    print("Checking all subdomains, avoid too large subdomains")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_initial))[1])

    # clone code that will be assigned
    assigned_number = 0

    subdomains_final = pcr.ifthen(
        pcr.scalar(subdomains_initial) < -7777, pcr.nominal(0))

    for nr in range(1, num_of_masks + 1, 1):

        msg = "Processing the landmask %s" % (str(nr))
        print(msg)

        mask_selected_boolean = pcr.ifthen(subdomains_initial == nr,
                                           pcr.boolean(1.0))

        # ~ if nr == 1: pcr.aguila(mask_selected_boolean)

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        # ~ print(str(area_in_degree2))

        # check whether the size of bounding box is ok
        # - initial check value
        check_ok = True

        reference_area_in_degree2 = 2500.
        if area_in_degree2 > 1.50 * reference_area_in_degree2: check_ok = False
        if (xmax - xmin) > 10 * (ymax - ymin): check_ok = False

        if check_ok:

            msg = "Clump is not needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # assign the clone code
            assigned_number = assigned_number + 1

            # update global landmask for river and land
            mask_selected_nominal = pcr.ifthen(mask_selected_boolean,
                                               pcr.nominal(assigned_number))
            subdomains_final = pcr.cover(subdomains_final,
                                         mask_selected_nominal)

        if not check_ok:

            msg = "Clump is needed."
            msg = "\n\n" + str(msg) + "\n\n"
            print(msg)

            # make clump
            clump_ids = pcr.nominal(pcr.clump(mask_selected_boolean))

            # merge clumps that are close together
            clump_ids_window_majority = pcr.windowmajority(clump_ids, 10.0)
            clump_ids = pcr.areamajority(clump_ids_window_majority, clump_ids)
            # ~ pcr.aguila(clump_ids)

            # minimum and maximum clump ids
            min_clump_id = int(
                pcr.cellvalue(pcr.mapminimum(pcr.scalar(clump_ids)), 1)[0])
            max_clump_id = int(
                pcr.cellvalue(pcr.mapmaximum(pcr.scalar(clump_ids)), 1)[0])

            for clump_id in range(min_clump_id, max_clump_id + 1, 1):

                msg = "Processing the clump %s of %s from the landmask %s" % (
                    str(clump_id), str(max_clump_id), str(nr))
                msg = "\n\n" + str(msg) + "\n\n"
                print(msg)

                # identify mask based on the clump
                mask_selected_boolean_from_clump = pcr.ifthen(
                    clump_ids == pcr.nominal(clump_id), mask_selected_boolean)
                mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean_from_clump,
                    mask_selected_boolean_from_clump)

                # check whether the clump is empty
                check_mask_selected_boolean_from_clump = pcr.ifthen(
                    mask_selected_boolean, mask_selected_boolean_from_clump)
                check_if_empty = float(
                    pcr.cellvalue(
                        pcr.mapmaximum(
                            pcr.scalar(
                                pcr.defined(
                                    check_mask_selected_boolean_from_clump))),
                        1)[0])

                if check_if_empty == 0.0:

                    msg = "Map is empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                else:

                    msg = "Map is NOT empty !"
                    msg = "\n\n" + str(msg) + "\n\n"
                    print(msg)

                    # assign the clone code
                    assigned_number = assigned_number + 1

                    # update global landmask for river and land
                    mask_selected_nominal = pcr.ifthen(
                        mask_selected_boolean_from_clump,
                        pcr.nominal(assigned_number))
                    subdomains_final = pcr.cover(subdomains_final,
                                                 mask_selected_nominal)

    # ~ # kill all aguila processes if exist
    # ~ os.system('killall aguila')

    pcr.aguila(subdomains_final)

    print("")
    print("")
    print("")

    print("The subdomain map is READY.")

    pcr.report(subdomains_final, "global_subdomains_30min_final.map")

    num_of_masks = int(vos.getMinMaxMean(pcr.scalar(subdomains_final))[1])
    print(num_of_masks)

    print("")
    print("")
    print("")

    for nr in range(1, num_of_masks + 1, 1):

        mask_selected_boolean = pcr.ifthen(subdomains_final == nr,
                                           pcr.boolean(1.0))

        xmin, ymin, xmax, ymax = boundingBox(mask_selected_boolean)
        area_in_degree2 = (xmax - xmin) * (ymax - ymin)

        print(
            str(nr) + " ; " + str(area_in_degree2) + " ; " +
            str((xmax - xmin)) + " ; " + str((ymax - ymin)))

    print("")
    print("")
    print("")

    print("Number of subdomains: " + str(num_of_masks))

    print("")
    print("")
    print("")

    # spatial extrapolation in order to cover the entire map
    print("spatial interpolation/extrapolation to cover the entire map")
    field = subdomains_final
    cellID = pcr.nominal(pcr.uniqueid(pcr.defined(field)))
    zoneID = pcr.spreadzone(cellID, 0, 1)
    field = pcr.areamajority(field, zoneID)
    subdomains_final_filled = field
    pcr.aguila(subdomains_final_filled)

    pcr.report(subdomains_final_filled,
               "global_subdomains_30min_final_filled.map")