Example 1
def lddcreate_save(
    lddname,
    dem,
    force,
    corevolume=1e35,
    catchmentprecipitation=1e35,
    corearea=1e35,
    outflowdepth=1e35,
):
    """
    Creates an ldd if the file does not exist or if the force flag is used

    input:
        - lddname (name of the ldd to create)
        - dem (actual dem)
        - force (boolean to force recreation of the ldd)
        - outflowdepth (set to 10.0E35 normally but smaller if needed)

    Output:
        - the LDD

    """
    if os.path.exists(lddname) and not force:
        if Verbose:
            print(("Returning existing ldd", lddname))
        return pcr.readmap(lddname)
    else:
        if Verbose:
            print(("Creating ldd", lddname))
        LDD = pcr.lddcreate(dem, 10.0e35, outflowdepth, 10.0e35, 10.0e35)
        pcr.report(LDD, lddname)
        return LDD
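
# Hypothetical usage sketch (not part of the original example); it assumes it is
# run in the same module, where os, pcraster (as pcr) and a global Verbose flag
# are already defined, and the map names are placeholders.
pcr.setclone("dem.map")                                  # placeholder clone map
dem = pcr.readmap("dem.map")                             # placeholder DEM
ldd = lddcreate_save("wflow_ldd.map", dem, force=False)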
Example 3
def pcr_preprocess(dem_in, x, y, itile, tempdir, ldd_in=None,
                    test=False, create_ldd=True):
    """
    Set the PCRaster clone and translate the DEM and ldd numpy 2d arrays to pcr maps.

    :param dem_in:      masked numpy 2d array with elevation data
    :param x:           numpy 1d array with x coordinates of the elevation grid
    :param y:           numpy 1d array with y coordinates of the elevation grid
    :param itile:       tile index, used in the file name of the temporary clone map
    :param tempdir:     directory in which the temporary clone pcrmap is saved
    :param ldd_in:      numpy 2d array with the ldd grid; make sure it uses the pcrmap definition of ldd
    :param test:        if True, do not remove the temporary clone maps
    :param create_ldd:  if True, create (or repair) the ldd; if False, return None for the ldd

    :return:            pcr maps for dem and ldd
    """
    # create clone in temp_dir
    fn_clone = os.path.join(tempdir, '_{:03d}_dem.map'.format(itile))
    cl.makeDir(tempdir)  # make dir if not exist
    # DEM
    gdal_writemap(fn_clone, 'PCRaster', x, y, dem_in, -9999)  # note: missing value needs conversion in python item
    pcr.setclone(fn_clone)
    pcr.setglobaloption("unitcell")
    dem = pcr.readmap(fn_clone)
    # cleanup
    if not test:
        os.unlink(fn_clone)  # cleanup clone file
        os.unlink(fn_clone + '.aux.xml')  # also remove the GDAL .aux.xml sidecar
    # LDD
    if create_ldd:
        if ldd_in is None:
            print('Calculating LDD')
            ldd = pcr.lddcreate(dem, 1E31, 1E31, 1E31, 1E31)
        else:
            # TODO note that np.nan is default NoDataValue for ldd file. check this when reading data
            # TODO: x and y axis got mixed up when translating with numpy2pcr. check if it works here!
            # in process_tile function in coastal_inun.py
            ldd = pcr.lddrepair(pcr.ldd(pcr.numpy2pcr(pcr.Ldd, ldd_in, np.nan)))
    else:
        ldd = None
    return dem, ldd
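
# Hypothetical usage sketch (not part of the original example); the coordinate
# arrays, tile index and temporary directory are made up, and numpy (np),
# gdal_writemap and cl.makeDir are assumed to be available as in the module above.
x = np.linspace(4.0, 5.0, 200)                       # cell-centre x coordinates
y = np.linspace(53.0, 52.0, 200)                     # cell-centre y coordinates (descending)
dem_tile = np.ma.masked_less(np.random.rand(200, 200) * 50.0, 0.0)
dem, ldd = pcr_preprocess(dem_tile, x, y, itile=1, tempdir='tmp_tiles')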
Example 4
    else:
        print 'file ' + ldd_map + ' does not exist'
        print 'new ldd will be generated'

if generateldd:
    print 'Generating ldd...'
    if burndem:
        linescover = pcr.ifthen(lines==1,pcr.scalar(0))
        pointscover = pcr.ifthen(pcr.scalar(points)==1,pcr.scalar(0))
        #pcr.report(linescover,'lines.map')
        #pcr.report(pointscover,'points.map')
        dem = pcr.cover(dem,linescover,pointscover)
        #pcr.report(dem,'dem1.map')
        dem = dem + burn
        #pcr.report(dem,'dem2.map')
        ldd = pcr.lddcreate(dem,float("1E35"),float("1E35"),float("1E35"),float("1E35"))
    else:
        ldd = pcr.lddcreate(dem,burnvalue/2,float("1E35"),float("1E35"),float("1E35"))

streamorder = pcr.ordinal(pcr.streamorder(ldd))
river = pcr.boolean(pcr.ifthen(streamorder >= int(min(np.max(pcr.pcr2numpy(streamorder,-9999)),minorder)), streamorder))
outlets = pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.boolean(1))
outlets = pcr.nominal(pcr.uniqueid(outlets))
catchments = pcr.nominal(pcr.catchment(ldd, outlets))

if not keepall:
    catchment_cells = pcr.areatotal(pcr.scalar(catchments) * 0 + 1, pcr.nominal(catchments))
    catchments = pcr.nominal(
        pcr.ifthen(pcr.mapmaximum(catchment_cells) == catchment_cells, catchments))
    
pcr.report(ldd,ldd_map)
pcr.report(streamorder,streamorder_map)
pcr.report(river,river_map)
def generate_hydro_datasets(path, output_dir, step):
    print(path)

    file_name = os.path.splitext(os.path.basename(path))[0]
    map_path = output_dir + '/' + file_name + '.map'
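    # derive a shared prefix for all output maps by dropping the last 14
    # characters of the map path (assumes a fixed-length file name suffix)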
    path_prefix = map_path[:-14]

    if step == 'ldd':
        cmd = u'gdal_translate -a_nodata -9999 -of PCRaster -ot Float32 ' + path + ' ' + map_path
        print(cmd)
        subprocess.call(cmd, shell=True)

    # slope = pcr.slope(dem)
    # pcr.report(slope, path_prefix + '_slope.map')

    # pcr.setglobaloption("lddin")

    if step == 'ldd':
        dem = pcr.readmap(map_path)

        print("Computing LDD ...")
        # enable pit filling
        ldd = pcr.lddcreate(dem, 9999999, 9999999, 9999999, 9999999)
        pcr.report(ldd, path_prefix + '_ldd.map')

        return
    elif step == 'ldddem':
        dem = pcr.readmap(map_path)

        print("Computing LDD DEM ...")
        dem_pitfilled = pcr.lddcreatedem(dem, 9999999, 9999999, 9999999,
                                         9999999)
        dem_diff = dem_pitfilled - dem
        pcr.report(dem_diff, path_prefix + '_dem_pits_diff.map')

        return

    # print("Computing LDD without pit filling ...")
    # ldd_pits = pcr.lddcreate(dem, 0, 0, 0, 0)
    # pcr.report(ldd_pits, path_prefix + '_ldd_with_pits.map')

    # print("Computing pits ...")
    # pits = pcr.pit(ldd_pits)

    # pcr.report(pits, path_prefix + '_pits.map')

    if step == 'fa':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Computing flow accumulation ...")
        fa = pcr.accuflux(ldd, 1)
        pcr.report(fa, path_prefix + '_fa.map')

        return

    if step == 'catchments':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Delineating catchments ...")
        catchments = pcr.catchment(ldd, pcr.pit(ldd))
        pcr.report(catchments, path_prefix + '_catchments.map')

        return

    if step == 'stream_order':
        ldd = pcr.readmap(path_prefix + '_ldd.map')

        print("Computing stream order ...")
        stream_order = pcr.streamorder(ldd)
        pcr.report(stream_order, path_prefix + '_streamorder.map')

        return

    if step == 'stream':
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        accuThreshold = 100
        print("Computing stream ...")
        stream = pcr.ifthenelse(
            pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1),
            pcr.boolean(0))
        pcr.report(stream, path_prefix + '_stream.map')
        return

    if step == 'height_river':
        print("Computing heigh_river ...")

        stream = pcr.readmap(path_prefix + '_stream.map')
        dem = pcr.readmap(map_path)
        height_river = pcr.ifthenelse(stream, pcr.ordinal(dem), 0)
        pcr.report(height_river, path_prefix + '_height_river.map')
        return

    if step == 'up_elevation':
        print("Computing up_elevation ...")

        height_river = pcr.readmap(path_prefix + '_height_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
        pcr.report(up_elevation, path_prefix + '_up_elevation.map')
        return

    if step == 'hand':
        print("Computing HAND ...")
        dem = pcr.readmap(map_path)
        up_elevation = pcr.readmap(path_prefix + '_up_elevation.map')
        hand = pcr.max(dem - up_elevation, 0)
        pcr.report(hand, path_prefix + '_hand.map')
        return

    if step == 'dand':
        print("Computing DAND ...")
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        dist = pcr.ldddist(ldd, stream, 1)
        pcr.report(dist, path_prefix + '_dist.map')
        return

    if step == 'fa_river':
        print("Computing FA river ...")
        fa = pcr.readmap(path_prefix + '_fa.map')
        stream = pcr.readmap(path_prefix + '_stream.map')
        fa_river = pcr.ifthenelse(stream, pcr.ordinal(fa), 0)
        pcr.report(fa_river, path_prefix + '_fa_river.map')
        return

    if step == 'faand':
        print("Computing FAAND ...")
        fa_river = pcr.readmap(path_prefix + '_fa_river.map')
        ldd = pcr.readmap(path_prefix + '_ldd.map')
        up_fa = pcr.scalar(pcr.subcatchment(ldd, fa_river))
        pcr.report(up_fa, path_prefix + '_faand.map')
        return
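
# Hypothetical driver sketch (not part of the original example): each step reads
# the maps reported by the earlier steps, so they are meant to run in roughly
# this order; the input path and output directory are placeholders.
steps = ['ldd', 'fa', 'catchments', 'stream_order', 'stream', 'height_river',
         'up_elevation', 'hand', 'dand', 'fa_river', 'faand']
for step in steps:
    generate_hydro_datasets('input/dem_tile_0001.tif', 'output', step)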
Example 6
from rasterio.plot import show_hist

from rasterio.mask import mask

import pycrs

dem = 'D:/Chinmay/Hydro_modeling_erin_fall_2018/sp_model/dem.map'
output_path = 'D:/Chinmay/Hydro_modeling_erin_fall_2018/sp_model/'

pcr.setclone(dem)

slope = pcr.slope(dem)
pcr.report(slope, output_path + "gradient.map")

ldd = pcr.lddcreate(dem, 1e31, 1e31, 1e31, 1e31)
pcr.report(ldd, output_path + "ldd.map")
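
# Possible continuation sketch (not in the original script): accumulate flow
# over the ldd that was just written and report it next to the other outputs.
accuflux = pcr.accuflux(ldd, 1)
pcr.report(accuflux, output_path + "accuflux.map")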

#############################################################################

#def downloadDEM(url):
#
#    """The function downloads the DEM from the given url
#    (In this case National Elevation Dataset, USGS.) """
#    # Translate url into a filename
#    filename = url.rsplit('/', 1)[-1]
#    if not os.path.exists(filename):
#        outfile = urllib.URLopener()
#        print "..............downloading dem.............."
#        dem = outfile.retrieve(url, filename)
#        print "..............done downloading.............."
Example 7
catchclip_map = workdir + "catchments_clip.map"
pcr.report(rivercatch, rivercatch_map)
pcr.report(catchments, catchclip_map)

ds = ogr.Open(workdir + "temp.shp")
lyr = ds.GetLayerByName("temp")

print 'calculating ldd'
for i in range(lyr.GetFeatureCount()):
    feature = lyr.GetFeature(i)
    catch = int(feature.GetField("ID"))
    print "calculating ldd for catchment: " + str(i + 1) + "/" + str(
        lyr.GetFeatureCount()) + "...."
    ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                          catchments)) * 0 + 1 * ldddem
    ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                               float("1E35"), float("1E35"))
    ldd = pcr.cover(ldd, ldd_select)
pcr.report(ldd, resultdir + ldd_map)
ds.Destroy()
''' report stream order, river and dem '''
streamorder = pcr.ordinal(pcr.streamorder(ldd))
river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
dem_resample_map = pcr.cover(dem_resample_map, pcr.scalar(river) * 0 + mindem)
pcr.report(dem_resample_map, resultdir + dem_map)
pcr.report(streamorder, resultdir + streamorder_map)
pcr.report(river, resultdir + river_map)
''' deal with your catchments '''
if gaugeshp == None:
    print 'No gauges defined, using outlets instead'
    gauges = pcr.ordinal(
Example 8
def main():
    clone_map = "mask\mask.map"
    clone_shp = "mask\mask.shp"
    clone_prj = "mask\mask.prj"
    workdir = "work\\"
    resultdir = "staticmaps\\"
    ''' read commandline arguments '''
    argv = sys.argv
    clone_EPSG = False

    try:
        opts, args = getopt.getopt(argv[1:], 'i:g:p:r:c:d:l:s:CA')
    except getopt.error:
        print 'error parsing command-line arguments'
        Usage()
        sys.exit(1)

    inifile = None
    rivshp = None
    catchshp = None
    dem_in = None
    landuse = None
    soiltype = None
    clean = False
    gaugeshp = None
    alltouching = False

    for o, a in opts:
        if o == '-i': inifile = a
        if o == '-p': clone_EPSG = 'EPSG:' + a
        if o == '-r': rivshp = a
        if o == '-c': catchshp = a
        if o == '-d': dem_in = a
        if o == '-l': landuse = a
        if o == '-s': soiltype = a
        if o == '-C': clean = True
        if o == '-g': gaugeshp = a
        if o == '-A': alltouching = True

    if inifile == None or rivshp == None or catchshp == None or dem_in == None:
        print 'the following files are compulsory:'
        print ' - ini-file'
        print ' - DEM (raster)'
        print ' - river (shape)'
        print ' - catchment (shape)'
        Usage()
        sys.exit(1)

    if landuse == None:
        print 'no raster with landuse classifications is specified. 1 class will be applied for the entire domain'

    if soiltype == None:
        print 'no raster with soil classifications is specified. 1 class will be applied for the entire domain'
    ''' read mask '''
    if not os.path.exists(clone_map):
        print 'Mask not found. Make sure the file mask\mask.map exists'
        print 'This file is usually created with the CreateGrid script'
        sys.exit(1)
    else:
        pcr.setclone(clone_map)
        ds = gdal.Open(clone_map, GA_ReadOnly)
        clone_trans = ds.GetGeoTransform()
        cellsize = clone_trans[1]
        clone_rows = ds.RasterYSize
        clone_columns = ds.RasterXSize
        extent_mask = [
            clone_trans[0], clone_trans[3] - ds.RasterYSize * cellsize,
            clone_trans[0] + ds.RasterXSize * cellsize, clone_trans[3]
        ]
        xmin, ymin, xmax, ymax = map(str, extent_mask)
        ds = None
        ones = pcr.scalar(pcr.readmap(clone_map))
        zeros = ones * 0
        empty = pcr.ifthen(ones == 0, pcr.scalar(0))
    ''' read projection from mask.shp '''
    # TODO: check how to deal with projections (add .prj to mask.shp in creategrid)
    if not os.path.exists(clone_prj):
        print 'please add prj-file to mask.shp'
        sys.exit(1)
    if os.path.exists(clone_shp):
        ds = ogr.Open(clone_shp)
        file_att = os.path.splitext(os.path.basename(clone_shp))[0]
        lyr = ds.GetLayerByName(file_att)
        spatialref = lyr.GetSpatialRef()
        if not spatialref == None:
            srs_clone = osr.SpatialReference()
            srs_clone.ImportFromWkt(spatialref.ExportToWkt())
            srs_clone.AutoIdentifyEPSG()
            unit_clone = False
            unit_clone = srs_clone.GetAttrValue('UNIT').lower()
            #clone_EPSG = 'EPSG:'+srs_clone.GetAttrValue("AUTHORITY",1)
            # TODO: fix hard EPSG code below
            clone_EPSG = 'EPSG:' + '4167'
            print 'EPSG-code is read from mask.shp: ' + clone_EPSG
            spatialref = None
    if not clone_EPSG:
        print 'EPSG-code cannot be read from mask.shp'
        print 'please add prj-file to mask.shp or specify on command line'
        print 'e.g. -p EPSG:4326 (for WGS84 lat lon projection)'

    ds = None
    clone_EPSG_int = int(clone_EPSG[5:len(clone_EPSG)])
    ''' open config-file '''
    config = wt.OpenConf(inifile)
    ''' read settings '''
    snapgaugestoriver = bool(
        int(wt.configget(config, "settings", "snapgaugestoriver", "1")))
    burnalltouching = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "1")))
    burninorder = bool(
        int(wt.configget(config, "settings", "burncatchalltouching", "0")))
    verticetollerance = float(
        wt.configget(config, "settings", "vertice_tollerance", "0.0001"))
    ''' read parameters '''
    burn_outlets = int(
        wt.configget(config, "parameters", "burn_outlets", 10000))
    burn_rivers = int(wt.configget(config, "parameters", "burn_rivers", 200))
    burn_connections = int(
        wt.configget(config, "parameters", "burn_connections", 100))
    burn_gauges = int(wt.configget(config, "parameters", "burn_gauges", 100))
    minorder = int(wt.configget(config, "parameters", "riverorder_min", 3))
    exec "percentile=tr.array(" + wt.configget(config, "parameters",
                                               "statisticmaps", [0, 100]) + ")"
    if not unit_clone:
        print 'failed to read unit (meter or degree) from mask projection'
        unit_clone = str(wt.configget(config, "settings", "unit", 'meter'))
        print 'unit read from settings: ' + unit_clone
    if unit_clone == 'degree':
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_degree", 0.0005))
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = float(
            wt.configget(config, "parameters", "highres_metre", 50))

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    ''' read staticmap locations '''
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")
    ''' read mask location (optional) '''
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)
    ''' create directories '''
    if os.path.isdir(workdir):
        shutil.rmtree(workdir)
    os.makedirs(workdir)

    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)
    os.makedirs(resultdir)
    ''' Preparation steps '''
    zero_map = workdir + "zero.map"
    zero_tif = workdir + "zero.tif"
    pcr.report(zeros, zero_map)
    # TODO: replace gdal_translate call
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, zero_tif))
    pcr.setglobaloption("lddin")
    ''' resample DEM '''
    dem_resample = workdir + "dem_resampled.tif"
    ds = gdal.Open(dem_in, GA_ReadOnly)
    band = ds.GetRasterBand(1)
    nodata = band.GetNoDataValue()
    proj = ds.GetGeoTransform()
    cellsize_dem = proj[1]
    ''' read DEM projection '''
    spatialref = None
    spatialref = ds.GetProjection()
    if not spatialref == None:
        srs = osr.SpatialReference()
        srs.ImportFromWkt(spatialref)
        srs.AutoIdentifyEPSG()
        dem_EPSG = 'EPSG:' + srs.GetAttrValue("AUTHORITY", 1)
        print 'EPSG-code is read from ' + os.path.basename(
            dem_in) + ': ' + dem_EPSG
        spatialref = None
        dem_EPSG_int = int(dem_EPSG[5:len(dem_EPSG)])
        srs_DEM = osr.SpatialReference()
        srs_DEM.ImportFromEPSG(dem_EPSG_int)
        clone2dem_transform = osr.CoordinateTransformation(srs_clone, srs_DEM)
    else:
        dem_EPSG = clone_EPSG
        print 'No projection defined for ' + os.path.basename(dem_in)
        print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    ds = None
    print 'Resampling DEM...'
    if nodata == None:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj, '-te', xmin, ymin,
              xmax, ymax, '-tr', str(cellsize), str(-cellsize), '-dstnodata',
              str(-9999), '-r', 'cubic', dem_in, dem_resample))
    else:
        call(('gdalwarp', '-overwrite', '-t_srs', clone_prj,
              '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-srcnodata', str(nodata), '-dstnodata',
              str(nodata), '-r', 'cubic', dem_in, dem_resample))
    ''' create dem.map and statistic maps '''
    dem_resample_map = resultdir + dem_map
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', dem_resample, dem_resample_map))
    print 'Computing DEM statistics ....'
    stats = wt.windowstats(dem_in, clone_rows, clone_columns, clone_trans,
                           srs_clone, resultdir, percentile)
    ''' burn DEM '''
    ds = ogr.Open(rivshp)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        rivshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    rivshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    # strip rivers to nodes
    xminc = str(float(xmin) + 0.5 * cellsize)
    yminc = str(float(ymin) + 0.5 * cellsize)
    xmaxc = str(float(xmax) - 0.5 * cellsize)
    ymaxc = str(float(ymax) - 0.5 * cellsize)
    if rivshp_EPSG == clone_EPSG:
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivshp))
    else:
        rivprojshp = workdir + 'rivshape_proj.shp'
        rivclipshp = workdir + 'rivshape_clip.shp'
        call(('ogr2ogr', '-s_srs', rivshp_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, rivprojshp, rivshp))
        call(('ogr2ogr', '-s_srs', clone_EPSG, '-t_srs', clone_EPSG, '-spat',
              xmin, ymin, xmax, ymax, '-clipsrc', xminc, yminc, xmaxc, ymaxc,
              rivclipshp, rivprojshp))

    rivshp = rivclipshp

    #### BURNING BELOW ####

    # TODO: check if extraction can be done within memory and return a burn layer
    shapes = wt.Reach2Nodes(rivclipshp, clone_EPSG_int,
                            cellsize * verticetollerance, workdir)

    outlets = shapes[1]
    connections = shapes[2]
    outlets_att = os.path.splitext(os.path.basename(outlets))[0]
    connections_att = os.path.splitext(os.path.basename(connections))[0]
    dem_resample_att = os.path.splitext(os.path.basename(dem_resample))[0]
    connections_tif = workdir + connections_att + ".tif"
    outlets_tif = workdir + outlets_att + ".tif"
    # TODO: make the burning in memory
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, connections_tif))
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', outlets_att, outlets,
          outlets_tif))
    call(('gdal_rasterize', '-burn', '1', '-l', connections_att, connections,
          connections_tif))

    # convert rivers to order
    rivshp_att = os.path.splitext(os.path.basename(rivshp))[0]
    rivers_tif = workdir + rivshp_att + ".tif"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, '-ot',
          'Float32', zero_map, rivers_tif))
    if burninorder:  # make river shape with an order attribute
        OrderSHPs = wt.ReachOrder(rivshp, clone_EPSG_int,
                                  cellsize * verticetollerance, workdir)
        wt.Burn2Tif(OrderSHPs, 'order', rivers_tif)
    else:
        call(('gdal_rasterize', '-burn', '1', '-l', rivshp_att, rivshp,
              rivers_tif))

    # convert the rasters to PCRaster maps
    connections_map = workdir + connections_att + ".map"
    rivers_map = workdir + rivshp_att + ".map"
    outlets_map = workdir + outlets_att + ".map"
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', connections_tif, connections_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', rivers_tif, rivers_map))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG, '-ot',
          'Float32', outlets_tif, outlets_map))

    # burn the layers in DEM
    outletsburn = pcr.scalar(
        pcr.readmap(outlets_map)) * pcr.scalar(burn_outlets)
    connectionsburn = pcr.scalar(
        pcr.readmap(connections_map)) * pcr.scalar(burn_connections)
    riverburn = pcr.scalar(pcr.readmap(rivers_map)) * pcr.scalar(burn_rivers)
    ldddem = pcr.cover(dem_resample_map,
                       pcr.ifthen(riverburn > 0, pcr.scalar(0)))
    ldddem = ldddem - outletsburn - connectionsburn - riverburn
    ldddem = pcr.cover(ldddem, pcr.scalar(0))
    pcr.report(ldddem, workdir + "dem_burn.map")
    ''' create ldd for multi-catchments '''
    ldd = pcr.ldd(empty)
    # reproject catchment shape-file
    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)
    spatialref = lyr.GetSpatialRef()
    #    if not spatialref == None:
    #        srs = osr.SpatialReference()
    #        srs.ImportFromWkt(spatialref.ExportToWkt())
    #        srs.AutoIdentifyEPSG()
    #        catchshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
    #        spatialref == None
    #    else:
    catchshp_EPSG = clone_EPSG
    print 'No projection defined for ' + file_att + '.shp'
    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'

    if not rivshp_EPSG == clone_EPSG:
        catchprojshp = workdir + 'catchshape_proj.shp'
        call(('ogr2ogr', '-s_srs', catchshp_EPSG, '-t_srs', clone_EPSG,
              catchprojshp, catchshp))
        catchshp = catchprojshp
    ds.Destroy()

    ds = ogr.Open(catchshp)
    file_att = os.path.splitext(os.path.basename(catchshp))[0]
    lyr = ds.GetLayerByName(file_att)

    fieldDef = ogr.FieldDefn("ID", ogr.OFTString)
    fieldDef.SetWidth(12)
    TEMP_out = Driver.CreateDataSource(workdir + "temp.shp")
    if not srs == None:
        TEMP_LYR = TEMP_out.CreateLayer("temp",
                                        srs,
                                        geom_type=ogr.wkbMultiPolygon)
    else:
        TEMP_LYR = TEMP_out.CreateLayer("temp", geom_type=ogr.wkbMultiPolygon)
    TEMP_LYR.CreateField(fieldDef)

    for i in range(lyr.GetFeatureCount()):
        orgfeature = lyr.GetFeature(i)
        geometry = orgfeature.geometry()
        feature = ogr.Feature(TEMP_LYR.GetLayerDefn())
        feature.SetGeometry(geometry)
        feature.SetField("ID", str(i + 1))
        TEMP_LYR.CreateFeature(feature)
    TEMP_out.Destroy()
    ds.Destroy()

    # rasterize catchment map
    catchments_tif = workdir + "catchments.tif"
    catchments_map = workdir + "catchments.map"
    call(('gdal_translate', '-of', 'GTiff', '-a_srs', clone_EPSG, zero_map,
          catchments_tif))
    if alltouching:
        call(('gdal_rasterize', '-at', '-a', 'ID', '-l', "temp",
              workdir + 'temp.shp', catchments_tif))
    else:
        call(('gdal_rasterize', '-a', 'ID', '-l', "temp", workdir + 'temp.shp',
              catchments_tif))
    call(('gdal_translate', '-of', 'PCRaster', '-a_srs', clone_EPSG,
          catchments_tif, catchments_map))
    catchments = pcr.readmap(catchments_map)
    riverunique = pcr.clump(pcr.nominal(pcr.ifthen(riverburn > 0, riverburn)))
    rivercatch = pcr.areamajority(pcr.ordinal(catchments), riverunique)
    #catchments = pcr.cover(pcr.ordinal(rivercatch),pcr.ordinal(pcr.ifthen(catchments > 0, catchments)),pcr.ordinal(0))
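    # extend catchment labels onto burned river cells that fall outside any
    # catchment polygon, taking the nearest catchment via spreadzone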
    catchments = pcr.cover(
        pcr.ifthen(catchments > 0, pcr.ordinal(catchments)),
        pcr.ifthen(
            riverburn > 0,
            pcr.ordinal(
                pcr.spreadzone(pcr.nominal(catchments),
                               pcr.ifthen(riverburn > 0, pcr.scalar(1)), 1))))
    rivercatch_map = workdir + "catchments_river.map"
    catchclip_map = workdir + "catchments_clip.map"
    pcr.report(rivercatch, rivercatch_map)
    pcr.report(catchments, catchclip_map)

    ds = ogr.Open(workdir + "temp.shp")
    lyr = ds.GetLayerByName("temp")

    print 'calculating ldd'
    for i in range(lyr.GetFeatureCount()):
        feature = lyr.GetFeature(i)
        catch = int(feature.GetField("ID"))
        print "calculating ldd for catchment: " + str(i + 1) + "/" + str(
            lyr.GetFeatureCount()) + "...."
        ldddem_select = pcr.scalar(pcr.ifthen(catchments == catch,
                                              catchments)) * 0 + 1 * ldddem
        ldd_select = pcr.lddcreate(ldddem_select, float("1E35"), float("1E35"),
                                   float("1E35"), float("1E35"))
        ldd = pcr.cover(ldd, ldd_select)
    pcr.report(ldd, resultdir + ldd_map)
    ds.Destroy()
    ''' report stream order, river and dem '''
    streamorder = pcr.ordinal(pcr.streamorder(ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(dem_resample_map), 9999999)))
    dem_resample_map = pcr.cover(dem_resample_map,
                                 pcr.scalar(river) * 0 + mindem)
    pcr.report(dem_resample_map, resultdir + dem_map)
    pcr.report(streamorder, resultdir + streamorder_map)
    pcr.report(river, resultdir + river_map)
    ''' deal with your catchments '''
    if gaugeshp == None:
        print 'No gauges defined, using outlets instead'
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, resultdir + gauges_map)


#    ds = ogr.Open(gaugeshp)
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    lyr = ds.GetLayerByName(file_att)
#    spatialref = lyr.GetSpatialRef()
##    if not spatialref == None:
##        srs = osr.SpatialReference()
##        srs.ImportFromWkt(spatialref.ExportToWkt())
##        srs.AutoIdentifyEPSG()
##        gaugeshp_EPSG = 'EPSG:'+srs.GetAttrValue("AUTHORITY",1)
##        spatialref == None
#    #else:
#    gaugeshp_EPSG = clone_EPSG
#    print 'No projection defined for ' + file_att + '.shp'
#    print 'Assumed to be the same as model projection (' + clone_EPSG + ')'
#
#    # reproject gauge shape if necesarry
#    if not gaugeshp_EPSG == clone_EPSG:
#        gaugeprojshp = workdir + 'gaugeshape_proj.shp'
#        call(('ogr2ogr','-s_srs',rivshp_EPSG,'-t_srs',clone_ESPG,gaugeprojshp,gaugeshp))
#        gaugeshp = gaugeprojshp
#
#    file_att = os.path.splitext(os.path.basename(gaugeshp))[0]
#    gaugestif = workdir + file_att + '.tif'
#    gaugesmap = workdir + file_att + '.map'
#    call(('gdal_translate','-of','GTiff','-a_srs',clone_EPSG,zero_map,gaugestif))
#    call(('gdal_rasterize','-burn','1','-l',file_att,gaugeshp,gaugestif))
#    call(('gdal_translate','-of','PCRaster','-a_srs',clone_EPSG,gaugestif,gaugesmap))
#    gaugelocs = pcr.readmap(gaugesmap)
#    snapgaugestoriver = True
#
#    if snapgaugestoriver:
#        print "Snapping gauges to river"
#        gauges = pcr.uniqueid(pcr.boolean(gaugelocs))
#        gauges= wt.snaptomap(pcr.ordinal(gauges),river)
#
#    gaugesmap = pcr.ifthen(gauges > 0, gauges)
    ''' report riverlengthfrac '''
    riv_hr = workdir + 'river_highres.tif'
    wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs_clone, 0)
    file_att = os.path.splitext(os.path.basename(rivshp))[0]
    call(('gdal_rasterize', '-burn', '1', '-l', file_att, rivshp, riv_hr))
    print 'Computing river length...'
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wt.windowstats(riv_hr, clone_rows, clone_columns,
                                 clone_trans, srs_clone, resultdir, 'frac')
    ''' report outlet map '''
    pcr.report(pcr.ifthen(pcr.ordinal(ldd) == 5, pcr.ordinal(1)),
               resultdir + outlet_map)
    ''' report catchment map '''
    catchment = pcr.ifthen(catchments > 0, pcr.ordinal(1))
    pcr.report(catchment, resultdir + catchment_map)
    ''' report subcatchment map '''
    subcatchment = pcr.subcatchment(ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), resultdir + subcatch_map)
    ''' report landuse map '''
    if landuse == None:
        pcr.report(pcr.nominal(ones), resultdir + landuse_map)
    else:
        landuse_resample = workdir + 'landuse.tif'
        landuse_map = resultdir + landuse_map
        transform = wt.GetRasterTranform(landuse, srs_clone)
        if not transform[0]:
            call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        else:
            call(('gdalwarp', '-overwrite', '-s_srs', transform[1], '-t_srs',
                  clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr',
                  str(cellsize), str(-cellsize), '-r', 'mode', landuse,
                  landuse_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              landuse_resample, landuse_map))
        landuse_work = pcr.readmap(landuse_map)
        pcr.report(pcr.nominal(landuse_work), landuse_map)
    ''' report soil map '''
    if soiltype == None:
        pcr.report(pcr.nominal(ones), resultdir + soil_map)
    else:
        soiltype_resample = workdir + 'soiltype.tif'
        soil_map = resultdir + soil_map
        #transform = wt.GetRasterTranform(soiltype,srs_clone)
        #        if not transform[0]:
        call(('gdalwarp', '-overwrite', '-s_srs', clone_EPSG, '-t_srs',
              clone_EPSG, '-te', xmin, ymin, xmax, ymax, '-tr', str(cellsize),
              str(-cellsize), '-r', 'mode', soiltype, soiltype_resample))
        #        else:
        #        call(('gdalwarp','-overwrite','-s_srs',transform[1],'-t_srs',clone_EPSG,'-te', xmin, ymin, xmax, ymax,'-tr',str(cellsize),str(-cellsize),'-r','mode',soiltype, soiltype_resample))
        call(('gdal_translate', '-of', 'PCRaster', '-ot', 'Float32',
              soiltype_resample, soil_map))
        soiltype_work = pcr.readmap(soil_map)
        pcr.report(pcr.nominal(soiltype_work), soil_map)

    if clean:
        wt.DeleteList(glob.glob(os.getcwd() + '\\' + resultdir + '/*.xml'))
def main(
        source,
        destination,
        inifile,
        dem_in,
        rivshp,
        catchshp,
        gaugeshp=None,
        landuse=None,
        soil=None,
        lai=None,
        other_maps=None,
        logfilename="wtools_static_maps.log",
        verbose=True,
        clean=True,
        alltouch=False,
        outlets=([], []),
):
    # parse other maps into an array
    if not other_maps == None:
        if type(other_maps) == str:
            print other_maps
            other_maps = (other_maps.replace(" ", "").replace("[", "").replace(
                "]", "").split(","))

    source = os.path.abspath(source)
    clone_map = os.path.join(source, "mask.map")
    clone_shp = os.path.join(source, "mask.shp")
    clone_prj = os.path.join(source, "mask.prj")

    if None in (rivshp, catchshp, dem_in):
        msg = """The following files are compulsory:
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if (inifile is not None) and (not os.path.exists(inifile)):
        print "path to ini file cannot be found"
        sys.exit(1)
    if not os.path.exists(rivshp):
        print "path to river shape cannot be found"
        sys.exit(1)
    if not os.path.exists(catchshp):
        print "path to catchment shape cannot be found"
        sys.exit(1)
    if not os.path.exists(dem_in):
        print "path to DEM cannot be found"
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose)

    # create directories # TODO: check if workdir is still necessary, try to
    # keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if np.logical_and(os.path.isdir(destination), destination is not source):
        shutil.rmtree(destination)
    if destination is not source:
        os.makedirs(destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            "Clone file {:s} not found. Please run create_grid first.".format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = wt.gdal_readmap(clone_map, "GTiff")
        trans = wt.get_geotransform(clone_map)
        extent = wt.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wt.get_projection(clone_map)
        unit_clone = srs.GetAttrValue("UNIT").lower()

    # READ CONFIG FILE
    # open config-file
    if inifile is None:
        config = ConfigParser.SafeConfigParser()
        config.optionxform = str
    else:
        config = wt.OpenConf(inifile)

    # read settings
    snapgaugestoriver = wt.configget(config,
                                     "settings",
                                     "snapgaugestoriver",
                                     True,
                                     datatype="boolean")
    burnalltouching = wt.configget(config,
                                   "settings",
                                   "burncatchalltouching",
                                   True,
                                   datatype="boolean")
    burninorder = wt.configget(config,
                               "settings",
                               "burncatchalltouching",
                               False,
                               datatype="boolean")
    verticetollerance = wt.configget(config,
                                     "settings",
                                     "vertice_tollerance",
                                     0.0001,
                                     datatype="float")
    """ read parameters """
    burn_outlets = wt.configget(config,
                                "parameters",
                                "burn_outlets",
                                10000,
                                datatype="int")
    burn_rivers = wt.configget(config,
                               "parameters",
                               "burn_rivers",
                               200,
                               datatype="int")
    burn_connections = wt.configget(config,
                                    "parameters",
                                    "burn_connections",
                                    100,
                                    datatype="int")
    burn_gauges = wt.configget(config,
                               "parameters",
                               "burn_gauges",
                               100,
                               datatype="int")
    minorder = wt.configget(config,
                            "parameters",
                            "riverorder_min",
                            3,
                            datatype="int")
    try:
        percentiles = np.array(
            config.get("parameters", "statisticmaps",
                       "0, 100").replace(" ", "").split(","),
            dtype="float",
        )
    except ConfigParser.NoOptionError:
        percentiles = [0.0, 100.0]
    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == "degree":
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_degree",
                                   0.0005,
                                   datatype="float")
    elif (unit_clone == "metre") or (unit_clone == "meter"):
        cellsize_hr = wt.configget(config,
                                   "parameters",
                                   "highres_metre",
                                   50,
                                   datatype="float")

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(destination, "clone_highres.tif")
    # make a highres clone as well!
    wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    catchment_map = wt.configget(config, "staticmaps", "catchment",
                                 "wflow_catchment.map")
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    demmax_map = wt.configget(config, "staticmaps", "demmax",
                              "wflow_demmax.map")
    demmin_map = wt.configget(config, "staticmaps", "demmin",
                              "wflow_demmin.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges",
                              "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse",
                               "wflow_landuse.map")
    ldd_map = wt.configget(config, "staticmaps", "ldd", "wflow_ldd.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet",
                              "wflow_outlet.map")
    riverlength_fact_map = wt.configget(config, "staticmaps",
                                        "riverlength_fact",
                                        "wflow_riverlength_fact.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(config, "staticmaps", "streamorder",
                                   "wflow_streamorder.map")
    subcatch_map = wt.configget(config, "staticmaps", "subcatch",
                                "wflow_subcatch.map")

    # read mask location (optional)
    masklayer = wt.configget(config, "mask", "masklayer", catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123
    # in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info("Resampling dem from {:s} to {:s}".format(
        os.path.abspath(dem_in), os.path.join(destination, dem_map)))
    wt.gdal_warp(
        dem_in,
        clone_map,
        os.path.join(destination, dem_map),
        format="PCRaster",
        gdal_interp=gdalconst.GRA_Average,
    )
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wt.get_projection(dem_in)
    except:
        logger.warning(
            "No projection found in DEM, assuming WGS 1984 lat long")
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():

    wt.windowstats(
        dem_in,
        len(yax),
        len(xax),
        trans,
        srs,
        destination,
        percentiles,
        transform=clone2dem_transform,
        logger=logger,
    )

    ## read catchment shape-file to create catchment map
    src = rasterio.open(clone_map)
    shapefile = fiona.open(catchshp, "r")
    catchment_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(catchment_shapes,
                               out_shape=src.shape,
                               all_touched=True,
                               transform=src.transform)
    catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0)

    ## read river shape-file and create burn layer
    shapefile = fiona.open(rivshp, "r")
    river_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(river_shapes,
                               out_shape=src.shape,
                               all_touched=False,
                               transform=src.transform)
    rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0)
    riverdem = pcr.scalar(rivers) * pcr.readmap(
        os.path.join(destination, dem_map))
    pcr.setglobaloption("lddin")
    riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35)

    riveroutlet = pcr.cover(
        pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0)
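    # burn streams in proportion to their order ((order - 1) * 1000), with an
    # extra 1000 at the river outlets (the pits of the river-only ldd)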
    burn_layer = pcr.cover(
        (pcr.scalar(
            pcr.ifthen(
                pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))) - 1)
        * 1000 + riveroutlet,
        0,
    )

    outlets_x, outlets_y = outlets
    n_outlets = len(outlets_x)
    logger.info("Number of outlets: {}".format(n_outlets))
    if n_outlets >= 1:
        outlets_map_numbered = tr.points_to_map(pcr.scalar(0), outlets_x,
                                                outlets_y, 0.5)
        outlets_map = pcr.boolean(outlets_map_numbered)
        # snap outlets to closest river (max 1 cell closer to river)
        outlets_map = pcr.boolean(
            pcr.cover(tr.snaptomap(pcr.ordinal(outlets_map), rivers), 0))

    ## create ldd per catchment
    logger.info("Calculating ldd")
    ldddem = pcr.scalar(clone_map)

    # per subcatchment, burn dem, then create modified dem that fits the ldd of the subcatchment
    # this ldd dem is merged over catchments, to create a global ldd that abides to the subcatchment boundaries
    for idx, shape in enumerate(catchment_shapes):
        logger.info("Computing ldd for catchment " + str(idx + 1) + "/" +
                    str(len(catchment_shapes)))
        image = features.rasterize([shape],
                                   out_shape=src.shape,
                                   all_touched=True,
                                   transform=src.transform)
        catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0)
        dem_burned_catchment = (
            pcr.readmap(os.path.join(destination, dem_map)) *
            pcr.scalar(catchment_domain) * catchment) - burn_layer
        # ldddem_catchment = pcr.lddcreatedem(
        #    dem_burned_catchment, 1e35, 1e35, 1e35, 1e35)
        ldddem = pcr.cover(ldddem, dem_burned_catchment)

    pcr.report(ldddem, os.path.join(destination, "ldddem.map"))

    wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    if n_outlets >= 1:
        # set outlets to pit
        wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd)
        wflow_ldd = pcr.lddrepair(wflow_ldd)

    pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map"))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(destination, dem_map))
    pcr.report(streamorder, os.path.join(destination, streamorder_map))
    pcr.report(river, os.path.join(destination, river_map))

    # deal with your catchments
    if gaugeshp == None:
        logger.info("No gauges defined, using outlets instead")
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see StaticMaps.py, line 492-499)

    # since the products here (river length fraction) are not yet used
    # this is disabled for now, as it also takes a lot of computation time
    if False:
        # report river length
        # make a high resolution empty map
        dem_hr_file = os.path.join(destination, "dem_highres.tif")
        burn_hr_file = os.path.join(destination, "burn_highres.tif")
        demburn_hr_file = os.path.join(destination, "demburn_highres.map")
        riv_hr_file = os.path.join(destination, "riv_highres.map")
        wt.gdal_warp(dem_in, clone_hr, dem_hr_file)
        # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
        # open the shape layer
        ds = ogr.Open(rivshp)
        lyr = ds.GetLayer(0)
        wt.ogr_burn(
            lyr,
            clone_hr,
            -100,
            file_out=burn_hr_file,
            format="GTiff",
            gdal_type=gdal.GDT_Float32,
            fill_value=0,
        )
        # read dem and burn values and add
        xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff")
        burn_hr[burn_hr == fill] = 0
        xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff")
        dem_hr[dem_hr == fill] = np.nan
        demburn_hr = dem_hr + burn_hr
        demburn_hr[np.isnan(demburn_hr)] = -9999
        wt.gdal_writemap(demburn_hr_file, "PCRaster", xax_hr, yax_hr,
                         demburn_hr, -9999.)
        pcr.setclone(demburn_hr_file)
        demburn_hr = pcr.readmap(demburn_hr_file)

        logger.info("Calculating ldd to determine river length")
        ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
        pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map"))
        pcr.setglobaloption("unitcell")
        riv_hr = pcr.scalar(
            pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
        pcr.report(riv_hr, riv_hr_file)
        pcr.setglobaloption("unittrue")
        pcr.setclone(clone_map)
        logger.info("Computing river length")
        wt.windowstats(
            riv_hr_file,
            len(yax),
            len(xax),
            trans,
            srs,
            destination,
            stat="fact",
            transform=False,
            logger=logger,
        )
        # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(
        pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)),
        os.path.join(destination, outlet_map),
    )

    # report subcatchment map
    subcatchment = pcr.subcatchment(wflow_ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(destination, subcatch_map))

    # Report land use map
    if landuse == None:
        logger.info(
            "No land use map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, landuse_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map))
    else:
        logger.info("Resampling land use from {:s} to {:s}".format(
            os.path.abspath(landuse),
            os.path.join(destination, os.path.abspath(landuse_map)),
        ))
        wt.gdal_warp(
            landuse,
            clone_map,
            os.path.join(destination, landuse_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    # report soil map
    if soil == None:
        logger.info("No soil map used. Preparing {:s} with only ones.".format(
            os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        logger.info("Resampling soil from {:s} to {:s}".format(
            os.path.abspath(soil),
            os.path.join(destination, os.path.abspath(soil_map)),
        ))
        wt.gdal_warp(
            soil,
            clone_map,
            os.path.join(destination, soil_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    if lai == None:
        logger.info(
            "No vegetation LAI maps used. Preparing default maps {:s} with only ones."
            .format(os.path.join(destination, soil_map)))
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        dest_lai = os.path.join(destination, "clim")
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   "LAI00000.{:03d}".format(month + 1))
            logger.info("Resampling vegetation LAI from {:s} to {:s}".format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            wt.gdal_warp(
                lai_in,
                clone_map,
                lai_out,
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Bilinear,
                gdal_type=gdalconst.GDT_Float32,
            )

    # report other maps
    if other_maps == None:
        logger.info("No other maps used. Skipping other maps.")
    else:
        logger.info("Resampling list of other maps...")
        for map_file in other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info("Resampling a map from {:s} to {:s}".format(
                os.path.abspath(map_file),
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
            ))
            wt.gdal_warp(
                map_file,
                clone_map,
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Mode,
                gdal_type=gdalconst.GDT_Float32,
            )

    if clean:
        wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "clim", "*.xml")),
                      logger=logger)
        wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")),
                      logger=logger)
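
# Hypothetical invocation sketch (not part of the original example); the paths
# are placeholders and the wtools/pcraster helper modules imported by this
# script are assumed to be available.
main(
    source="wflow",
    destination="staticmaps",
    inifile=None,
    dem_in="data/dem.tif",
    rivshp="data/rivers.shp",
    catchshp="data/catchments.shp",
)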
Example 10
def main():

    ### Read input arguments #####
    logfilename = 'wtools_static_maps.log'
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-q',
                      '--quiet',
                      dest='verbose',
                      default=True,
                      action='store_false',
                      help='do not print status messages to stdout')
    parser.add_option('-i',
                      '--ini',
                      dest='inifile',
                      default=None,
                      help='ini file with settings for static_maps.exe')
    parser.add_option('-s',
                      '--source',
                      dest='source',
                      default='wflow',
                      help='Source folder containing clone (default=./wflow)')
    parser.add_option('-d',
                      '--destination',
                      dest='destination',
                      default='staticmaps',
                      help='Destination folder (default=./staticmaps)')
    parser.add_option('-r',
                      '--river',
                      dest='rivshp',
                      default=None,
                      help='river network polyline layer (ESRI Shapefile)')
    parser.add_option('-c',
                      '--catchment',
                      dest='catchshp',
                      default=None,
                      help='catchment polygon layer (ESRI Shapefile)')
    parser.add_option('-g',
                      '--gauges',
                      dest='gaugeshp',
                      default=None,
                      help='gauge point layer (ESRI Shapefile)')
    parser.add_option('-D',
                      '--dem',
                      dest='dem_in',
                      default=None,
                      help='digital elevation model (GeoTiff)')
    parser.add_option('-L',
                      '--landuse',
                      dest='landuse',
                      default=None,
                      help='land use / land cover layer (GeoTiff)')
    parser.add_option('-S',
                      '--soiltype',
                      dest='soil',
                      default=None,
                      help='soil type layer (GeoTiff)')
    parser.add_option(
        '-V',
        '--vegetation',
        dest='lai',
        default=None,
        help=
        'vegetation LAI layer location (containing 12 GeoTiffs <LAI00000.XXX.tif>)'
    )
    parser.add_option(
        '-O',
        '--other_maps',
        dest='other_maps',
        default=None,
        help=
        'bracketed [] comma-separated list of paths to other maps that should be reprojected'
    )
    parser.add_option(
        '-C',
        '--clean',
        dest='clean',
        default=False,
        action='store_true',
        help='Clean the .xml files from static maps folder when finished')
    parser.add_option(
        '-A',
        '--alltouch',
        dest='alltouch',
        default=False,
        action='store_true',
        help=
        'option to burn catchments "all touching".\nUseful when catchment-size is small compared to cellsize'
    )
    (options, args) = parser.parse_args()
    # parse other maps into an array (only when the option is provided, otherwise keep None)
    if options.other_maps is not None:
        options.other_maps = options.other_maps.replace(' ', '').replace(
            '[', '').replace(']', '').split(',')
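    # e.g. (illustrative paths) -O "[./data/slope.tif, ./data/aspect.tif]"
    # is parsed into ['./data/slope.tif', './data/aspect.tif']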

    options.source = os.path.abspath(options.source)
    clone_map = os.path.join(options.source, 'mask.map')
    clone_shp = os.path.join(options.source, 'mask.shp')
    clone_prj = os.path.join(options.source, 'mask.prj')

    if None in (options.inifile, options.rivshp, options.catchshp,
                options.dem_in):
        msg = """The following files are compulsory:
        - ini file
        - DEM (raster)
        - river (shape)
        - catchment (shape)
        """
        print(msg)
        parser.print_help()
        sys.exit(1)
    if not os.path.exists(options.inifile):
        print('path to ini file cannot be found')
        sys.exit(1)
    if not os.path.exists(options.rivshp):
        print('path to river shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.catchshp):
        print('path to catchment shape cannot be found')
        sys.exit(1)
    if not os.path.exists(options.dem_in):
        print('path to DEM cannot be found')
        sys.exit(1)

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wtools_lib.setlogger(logfilename, 'WTOOLS', options.verbose)

    # create directories # TODO: check if workdir is still necessary, try to keep in memory as much as possible

    # delete old files (when the source and destination folder are different)
    if os.path.isdir(options.destination) and options.destination != options.source:
        shutil.rmtree(options.destination)
    if options.destination != options.source:
        os.makedirs(options.destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            'Clone file {:s} not found. Please run create_grid first.'.format(
                clone_map))
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = gis.gdal_readmap(clone_map, 'GTiff')
        trans = wtools_lib.get_geotransform(clone_map)
        extent = wtools_lib.get_extent(clone_map)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wtools_lib.get_projection(clone_map)
        unit_clone = srs.GetAttrValue('UNIT').lower()
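        # unit_clone ('degree' vs 'metre'/'meter') determines which high-resolution
        # cell size is read from the config further below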

    ### READ CONFIG FILE
    # open config-file
    config = wtools_lib.OpenConf(options.inifile)

    # read settings
    snapgaugestoriver = wtools_lib.configget(config,
                                             'settings',
                                             'snapgaugestoriver',
                                             True,
                                             datatype='boolean')
    burnalltouching = wtools_lib.configget(config,
                                           'settings',
                                           'burncatchalltouching',
                                           True,
                                           datatype='boolean')
    burninorder = wtools_lib.configget(config,
                                       'settings',
                                       'burncatchalltouching',
                                       False,
                                       datatype='boolean')
    verticetollerance = wtools_lib.configget(config,
                                             'settings',
                                             'vertice_tollerance',
                                             0.0001,
                                             datatype='float')
    ''' read parameters '''
    burn_outlets = wtools_lib.configget(config,
                                        'parameters',
                                        'burn_outlets',
                                        10000,
                                        datatype='int')
    burn_rivers = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_rivers',
                                       200,
                                       datatype='int')
    burn_connections = wtools_lib.configget(config,
                                            'parameters',
                                            'burn_connections',
                                            100,
                                            datatype='int')
    burn_gauges = wtools_lib.configget(config,
                                       'parameters',
                                       'burn_gauges',
                                       100,
                                       datatype='int')
    minorder = wtools_lib.configget(config,
                                    'parameters',
                                    'riverorder_min',
                                    3,
                                    datatype='int')
    percentiles = np.array(config.get('parameters', 'statisticmaps',
                                      '0, 100').replace(' ', '').split(','),
                           dtype='float')
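    # percentile values of 0 and 100 (the defaults) are treated further below as
    # window minimum and maximum rather than true percentiles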

    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == 'degree':
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_degree',
                                           0.0005,
                                           datatype='float')
    elif (unit_clone == 'metre') or (unit_clone == 'meter'):
        cellsize_hr = wtools_lib.configget(config,
                                           'parameters',
                                           'highres_metre',
                                           50,
                                           datatype='float')

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0,
                -cellsize_hr)
    clone_hr = os.path.join(options.destination, 'clone_highres.tif')
    # make a highres clone as well!
    wtools_lib.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)
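    # this high-resolution clone is used later to resample the DEM and to burn
    # the river network at high resolution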

    # read staticmap locations
    catchment_map = wtools_lib.configget(config, 'staticmaps', 'catchment',
                                         'wflow_catchment.map')
    dem_map = wtools_lib.configget(config, 'staticmaps', 'dem',
                                   'wflow_dem.map')
    demmax_map = wtools_lib.configget(config, 'staticmaps', 'demmax',
                                      'wflow_demmax.map')
    demmin_map = wtools_lib.configget(config, 'staticmaps', 'demmin',
                                      'wflow_demmin.map')
    gauges_map = wtools_lib.configget(config, 'staticmaps', 'gauges',
                                      'wflow_gauges.map')
    landuse_map = wtools_lib.configget(config, 'staticmaps', 'landuse',
                                       'wflow_landuse.map')
    ldd_map = wtools_lib.configget(config, 'staticmaps', 'ldd',
                                   'wflow_ldd.map')
    river_map = wtools_lib.configget(config, 'staticmaps', 'river',
                                     'wflow_river.map')
    outlet_map = wtools_lib.configget(config, 'staticmaps', 'outlet',
                                      'wflow_outlet.map')
    riverlength_fact_map = wtools_lib.configget(config, 'staticmaps',
                                                'riverlength_fact',
                                                'wflow_riverlength_fact.map')
    soil_map = wtools_lib.configget(config, 'staticmaps', 'soil',
                                    'wflow_soil.map')
    streamorder_map = wtools_lib.configget(config, 'staticmaps', 'streamorder',
                                           'wflow_streamorder.map')
    subcatch_map = wtools_lib.configget(config, 'staticmaps', 'subcatch',
                                        'wflow_subcatch.map')

    # read mask location (optional)
    masklayer = wtools_lib.configget(config, 'mask', 'masklayer',
                                     options.catchshp)

    # ???? empty = pcr.ifthen(ones == 0, pcr.scalar(0))

    # TODO: check if extents are correct this way
    # TODO: check what the role of missing values is in zeros and ones (l. 123 in old code)

    # first add a missing value to dem_in
    ds = gdal.Open(options.dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info('Resampling dem from {:s} to {:s}'.format(
        os.path.abspath(options.dem_in),
        os.path.join(options.destination, dem_map)))
    gis.gdal_warp(options.dem_in,
                  clone_map,
                  os.path.join(options.destination, dem_map),
                  format='PCRaster',
                  gdal_interp=gdalconst.GRA_Average)
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wtools_lib.get_projection(options.dem_in)
    except:
        logger.warning(
            'No projection found in DEM, assuming WGS 1984 lat long')
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    #if srs.ExportToProj4() == srs_dem.ExportToProj4():
    for percentile in percentiles:
        if percentile >= 100:
            logger.info('computing window maximum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_max.map')
        elif percentile <= 0:
            logger.info('computing window minimum')
            percentile_dem = os.path.join(options.destination,
                                          'wflow_dem_min.map')
        else:
            logger.info('computing window {:d} percentile'.format(
                int(percentile)))
            percentile_dem = os.path.join(
                options.destination,
                'wflow_dem_{:03d}.map'.format(int(percentile)))

        stats = wtools_lib.windowstats(options.dem_in,
                                       len(yax),
                                       len(xax),
                                       trans,
                                       srs,
                                       percentile_dem,
                                       percentile,
                                       transform=clone2dem_transform,
                                       logger=logger)


#    else:
#        logger.warning('Projections of DEM and clone are different. DEM statistics for different projections is not yet implemented')
    """

    # burn in rivers
    # first convert and clip the river shapefile
    # retrieve river shape projection, if not available assume EPSG:4326
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_rivshp = lyr.GetSpatialRef()
        logger.info('Projection in river shapefile is {:s}'.format(srs_rivshp.ExportToProj4()))
    except:
        logger.warning('No projection found in {:s}, assuming WGS 1984 lat-lon'.format(options.rivshp))
        srs_rivshp = osr.SpatialReference()
        srs_rivshp.ImportFromEPSG(4326)
    rivprojshp = os.path.join(options.destination, 'rivshp_proj.shp')
    logger.info('Projecting and clipping {:s} to {:s}'.format(options.rivshp, rivprojshp))
    # TODO: Line below takes a very long time to process, the bigger the shapefile, the more time. How do we deal with this?
    call(('ogr2ogr','-s_srs', srs_rivshp.ExportToProj4(),'-t_srs', srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin), '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax), rivprojshp, options.rivshp))
    """

    # TODO: BURNING!!

    # project catchment layer to projection of clone
    file_att = os.path.splitext(os.path.basename(options.catchshp))[0]
    print(options.catchshp)
    ds = ogr.Open(options.catchshp)
    lyr = ds.GetLayerByName(file_att)
    extent = lyr.GetExtent()
    extent_in = [extent[0], extent[2], extent[1], extent[3]]
    try:
        # get spatial reference from shapefile
        srs_catchshp = lyr.GetSpatialRef()
        logger.info('Projection in catchment shapefile is {:s}'.format(
            srs_catchshp.ExportToProj4()))
    except:
        logger.warning(
            'No projection found in {:s}, assuming WGS 1984 lat-lon'.format(
                options.catchshp))
        srs_catchshp = osr.SpatialReference()
        srs_catchshp.ImportFromEPSG(4326)
    catchprojshp = os.path.join(options.destination, 'catchshp_proj.shp')
    logger.info('Projecting {:s} to {:s}'.format(options.catchshp,
                                                 catchprojshp))
    call(('ogr2ogr', '-s_srs', srs_catchshp.ExportToProj4(), '-t_srs',
          srs.ExportToProj4(), '-clipsrc', '{:f}'.format(xmin),
          '{:f}'.format(ymin), '{:f}'.format(xmax), '{:f}'.format(ymax),
          catchprojshp, options.catchshp))

    #
    logger.info('Calculating ldd')
    ldddem = pcr.readmap(os.path.join(options.destination, dem_map))
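    # thresholds of 1e35 let lddcreate remove (fill) effectively all pits in the DEM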
    ldd_select = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_select, os.path.join(options.destination, 'wflow_ldd.map'))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(ldd_select))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(options.destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(options.destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(options.destination, dem_map))
    pcr.report(streamorder, os.path.join(options.destination, streamorder_map))
    pcr.report(river, os.path.join(options.destination, river_map))

    # deal with your catchments
    if options.gaugeshp is None:
        logger.info('No gauges defined, using outlets instead')
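        # an ldd value of 5 marks a pit (outlet) cell; each outlet gets a unique gauge id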
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(
                    pcr.ifthen(pcr.scalar(ldd_select) == 5, pcr.boolean(1)))))
        pcr.report(gauges, os.path.join(options.destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see SticMaps.py, line 492-499)

    # report river length
    # make a high resolution empty map
    dem_hr_file = os.path.join(options.destination, 'dem_highres.tif')
    burn_hr_file = os.path.join(options.destination, 'burn_highres.tif')
    demburn_hr_file = os.path.join(options.destination, 'demburn_highres.map')
    riv_hr_file = os.path.join(options.destination, 'riv_highres.map')
    gis.gdal_warp(options.dem_in, clone_hr, dem_hr_file)
    # wtools_lib.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
    file_att = os.path.splitext(os.path.basename(options.rivshp))[0]
    # open the shape layer
    ds = ogr.Open(options.rivshp)
    lyr = ds.GetLayerByName(file_att)
    gis.ogr_burn(lyr,
                 clone_hr,
                 -100,
                 file_out=burn_hr_file,
                 format='GTiff',
                 gdal_type=gdal.GDT_Float32,
                 fill_value=0)
    # read dem and burn values and add
    xax_hr, yax_hr, burn_hr, fill = gis.gdal_readmap(burn_hr_file, 'GTiff')
    burn_hr[burn_hr == fill] = 0
    xax_hr, yax_hr, dem_hr, fill = gis.gdal_readmap(dem_hr_file, 'GTiff')
    dem_hr[dem_hr == fill] = np.nan
    demburn_hr = dem_hr + burn_hr
    demburn_hr[np.isnan(demburn_hr)] = -9999
    gis.gdal_writemap(demburn_hr_file, 'PCRaster', xax_hr, yax_hr, demburn_hr,
                      -9999.)
    pcr.setclone(demburn_hr_file)
    demburn_hr = pcr.readmap(demburn_hr_file)
    ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
    pcr.report(ldd_hr, os.path.join(options.destination, 'ldd_hr.map'))
    pcr.setglobaloption('unitcell')
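    # with 'unitcell' active, downstreamdist is expressed in cell lengths, so riv_hr
    # holds the per-cell river length (in cell units) for cells on the stream network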
    riv_hr = pcr.scalar(
        pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(ldd_hr)
    pcr.report(riv_hr, riv_hr_file)
    pcr.setglobaloption('unittrue')
    pcr.setclone(clone_map)
    logger.info('Computing river length')
    #riverlength = wt.windowstats(riv_hr,clone_rows,clone_columns,clone_trans,srs_clone,resultdir,'frac',clone2dem_transform)
    riverlength = wtools_lib.windowstats(riv_hr_file,
                                         len(yax),
                                         len(xax),
                                         trans,
                                         srs,
                                         os.path.join(options.destination,
                                                      riverlength_fact_map),
                                         stat='fact',
                                         logger=logger)
    # TODO: nothing happends with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(pcr.ifthen(pcr.ordinal(ldd_select) == 5, pcr.ordinal(1)),
               os.path.join(options.destination, outlet_map))

    # report subcatchment map
    subcatchment = pcr.subcatchment(ldd_select, gauges)
    pcr.report(pcr.ordinal(subcatchment),
               os.path.join(options.destination, subcatch_map))

    # Report land use map
    if options.landuse is None:
        logger.info(
            'No land use map used. Preparing {:s} with only ones.'.format(
                os.path.join(options.destination, landuse_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, landuse_map))
    else:
        logger.info('Resampling land use from {:s} to {:s}'.format(
            os.path.abspath(options.landuse),
            os.path.join(options.destination, os.path.abspath(landuse_map))))
        gis.gdal_warp(options.landuse,
                      clone_map,
                      os.path.join(options.destination, landuse_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    # report soil map
    if options.soil is None:
        logger.info('No soil map used. Preparing {:s} with only ones.'.format(
            os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        logger.info('Resampling soil from {:s} to {:s}'.format(
            os.path.abspath(options.soil),
            os.path.join(options.destination, os.path.abspath(soil_map))))
        gis.gdal_warp(options.soil,
                      clone_map,
                      os.path.join(options.destination, soil_map),
                      format='PCRaster',
                      gdal_interp=gdalconst.GRA_Mode,
                      gdal_type=gdalconst.GDT_Int32)

    if options.lai is None:
        logger.info(
            'No vegetation LAI maps used. Preparing default maps {:s} with only ones.'
            .format(os.path.join(options.destination, soil_map)))
        pcr.report(pcr.nominal(ones),
                   os.path.join(options.destination, soil_map))
    else:
        dest_lai = os.path.join(options.destination, 'clim')
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(options.lai,
                                  'LAI00000.{:03d}'.format(month + 1))
            lai_out = os.path.join(dest_lai,
                                   'LAI00000.{:03d}'.format(month + 1))
            logger.info('Resampling vegetation LAI from {:s} to {:s}'.format(
                os.path.abspath(lai_in), os.path.abspath(lai_out)))
            gis.gdal_warp(lai_in,
                          clone_map,
                          lai_out,
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Bilinear,
                          gdal_type=gdalconst.GDT_Float32)

    # report soil map
    if options.other_maps is None:
        logger.info('No other maps used. Skipping other maps.')
    else:
        logger.info('Resampling list of other maps...')
        for map_file in options.other_maps:
            map_name = os.path.split(map_file)[1]
            logger.info('Resampling a map from {:s} to {:s}'.format(
                os.path.abspath(map_file),
                os.path.join(options.destination, map_name)))
            gis.gdal_warp(map_file,
                          clone_map,
                          os.path.join(options.destination, map_name),
                          format='PCRaster',
                          gdal_interp=gdalconst.GRA_Mode,
                          gdal_type=gdalconst.GDT_Float32)

    if options.clean:
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, 'clim', '*.xml')),
                              logger=logger)
        wtools_lib.DeleteList(glob.glob(
            os.path.join(options.destination, '*highres*')),
                              logger=logger)
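

# Example invocation of main() from the command line (illustrative file names;
# the actual script/entry-point name may differ), using the options defined above:
#   python wtools_static_maps.py -i settings.ini -s ./wflow -d ./staticmaps \
#       -r rivers.shp -c catchments.shp -D dem.tif -L landuse.tif -S soil.tif -C
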
def generate_hydro_datasets(path, output_dir, step):
    print(path)

    file_name = os.path.splitext(os.path.basename(path))[0]
    map_path = output_dir + "/" + file_name + ".map"
    path_prefix = map_path[:-14]

    if step == "ldd":
        cmd = u"gdal_translate -a_nodata -9999 -of PCRaster -ot Float32 " + path + " " + map_path
        print(cmd)
        subprocess.call(cmd, shell=True)

    # slope = pcr.slope(dem)
    # pcr.report(slope, path_prefix + '_slope.map')

    # pcr.setglobaloption("lddin")

    if step == "ldd":
        dem = pcr.readmap(map_path)

        print("Computing LDD ...")
        # enable pit filling
        ldd = pcr.lddcreate(dem, 9999999, 9999999, 9999999, 9999999)
        pcr.report(ldd, path_prefix + "_ldd.map")

        return
    elif step == "ldddem":
        dem = pcr.readmap(map_path)

        print("Computing LDD DEM ...")
        dem_pitfilled = pcr.lddcreatedem(dem, 9999999, 9999999, 9999999, 9999999)
        dem_diff = dem_pitfilled - dem
        pcr.report(dem_diff, path_prefix + "_dem_pits_diff.map")

        return

    # print("Computing LDD without pit filling ...")
    # ldd_pits = pcr.lddcreate(dem, 0, 0, 0, 0)
    # pcr.report(ldd_pits, path_prefix + '_ldd_with_pits.map')

    # print("Computing pits ...")
    # pits = pcr.pit(ldd_pits)

    # pcr.report(pits, path_prefix + '_pits.map')

    if step == "fa":
        ldd = pcr.readmap(path_prefix + "_ldd.map")

        print("Computing flow accumulation ...")
        fa = pcr.accuflux(ldd, 1)
        pcr.report(fa, path_prefix + "_fa.map")

        return

    if step == "catchments":
        ldd = pcr.readmap(path_prefix + "_ldd.map")

        print("Delineating catchments ...")
        catchments = pcr.catchment(ldd, pcr.pit(ldd))
        pcr.report(catchments, path_prefix + "_catchments.map")

        return

    if step == "stream_order":
        ldd = pcr.readmap(path_prefix + "_ldd.map")

        print("Computing stream order ...")
        stream_order = pcr.streamorder(ldd)
        pcr.report(stream_order, path_prefix + "_streamorder.map")

        return

    if step == "stream":
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        accuThreshold = 100
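        # cells whose accumulated upstream flow reaches accuThreshold cells are flagged as stream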
        print("Computing stream ...")
        stream = pcr.ifthenelse(pcr.accuflux(ldd, 1) >= accuThreshold, pcr.boolean(1), pcr.boolean(0))
        pcr.report(stream, path_prefix + "_stream.map")
        return

    if step == "height_river":
        print("Computing heigh_river ...")

        stream = pcr.readmap(path_prefix + "_stream.map")
        dem = pcr.readmap(map_path)
        height_river = pcr.ifthenelse(stream, pcr.ordinal(dem), 0)
        pcr.report(height_river, path_prefix + "_height_river.map")
        return

    if step == "up_elevation":
        print("Computing up_elevation ...")

        height_river = pcr.readmap(path_prefix + "_height_river.map")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        up_elevation = pcr.scalar(pcr.subcatchment(ldd, height_river))
        pcr.report(up_elevation, path_prefix + "_up_elevation.map")
        return

    if step == "hand":
        print("Computing HAND ...")
        dem = pcr.readmap(map_path)
        up_elevation = pcr.readmap(path_prefix + "_up_elevation.map")
        hand = pcr.max(dem - up_elevation, 0)
        pcr.report(hand, path_prefix + "_hand.map")
        return

    if step == "dand":
        print("Computing DAND ...")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        stream = pcr.readmap(path_prefix + "_stream.map")
        dist = pcr.ldddist(ldd, stream, 1)
        pcr.report(dist, path_prefix + "_dist.map")
        return

    if step == "fa_river":
        print("Computing FA river ...")
        fa = pcr.readmap(path_prefix + "_fa.map")
        stream = pcr.readmap(path_prefix + "_stream.map")
        fa_river = pcr.ifthenelse(stream, pcr.ordinal(fa), 0)
        pcr.report(fa_river, path_prefix + "_fa_river.map")
        return

    if step == "faand":
        print("Computing FAAND ...")
        fa_river = pcr.readmap(path_prefix + "_fa_river.map")
        ldd = pcr.readmap(path_prefix + "_ldd.map")
        up_fa = pcr.scalar(pcr.subcatchment(ldd, fa_river))
        pcr.report(up_fa, path_prefix + "_faand.map")
        return
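

# Typical processing order for one DEM tile (step names match the branches above):
#   "ldd" -> "fa" -> "stream" -> "height_river" -> "up_elevation" -> "hand",
# with "ldddem", "stream_order", "catchments", "dand", "fa_river" and "faand"
# as additional, independently reported products.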