Example #1
def run_simulation(reccsv, dirtif, widthtif, bedtif, runoffcsv, date1, date2, lisfloodfp, outfolder):

    # Determine the length of the simulation in days
    t = (pd.to_datetime(date2, format='%Y-%m-%d') -
         pd.to_datetime(date1, format='%Y-%m-%d')).days + 1

    # Create a synthetic 1D DEM
    demtif = outfolder + 'dem1d.tif'
    wdt = gu.get_data(widthtif)
    geo = gu.get_geo(widthtif)
    dem = np.where(wdt > 0, 10000, 0)
    gu.write_raster(dem, demtif, geo, 'Int16', 0)

    # Convert input files to ASCII
    widthasc = outfolder + 'width.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          widthtif, widthasc])

    demasc = outfolder + 'dem.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          demtif, demasc])

    bedasc = outfolder + 'bed.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          bedtif, bedasc])

    # Write LISFLOOD-FP files
    bcilfp = outfolder + 'lfp.bci'
    write_bci(bcilfp, runoffcsv)

    bdylfp = outfolder + 'lfp.bdy'
    write_bdy(bdylfp, runoffcsv, t)

    evaplfp = outfolder + 'lfp.evap'
    write_evap(evaplfp, t)

    gaugelfp = outfolder + 'lfp.gauge'
    stagelfp = outfolder + 'lfp.stage'
    write_gauge_stage_all_cells(reccsv, dirtif, widthtif, gaugelfp, stagelfp)

    parlfp = outfolder + 'lfp.par'
    write_par(parlfp=parlfp,
              bcilfp=bcilfp,
              bdylfp=bdylfp,
              evaplfp=evaplfp,
              gaugelfp=gaugelfp,
              stagelfp=stagelfp,
              dembnktif=demasc,
              wdttif=widthasc,
              bedtif=bedasc,
              t=t)

    # Run simulation
    call([lisfloodfp, '-v', 'lfp.par'], cwd=outfolder)
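
A minimal usage sketch (all paths are hypothetical). Note that outfolder must end with a slash, since output paths are built by plain string concatenation, and that gdal_translate plus the LISFLOOD-FP binary must be available on the system:

run_simulation(reccsv='177_rec.csv',
               dirtif='177_dir.tif',
               widthtif='177_wth.tif',
               bedtif='177_bed.tif',
               runoffcsv='177_runoff.csv',
               date1='1990-01-01',
               date2='1990-12-31',
               lisfloodfp='/opt/lisflood/lisfloodfp',  # hypothetical binary path
               outfolder='./sim/')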
Example #2
def create_dir_d4(dirtaud4, dirtaud8, dirtau_maskd4):

    # Read the D8 directions, the D4-masked directions and the D8 geometry
    dat1 = gdalutils.get_data(dirtaud8)
    dat2 = gdalutils.get_data(dirtau_maskd4)
    geo = gdalutils.get_geo(dirtaud8)

    # Overwrite D8 directions with D4 directions wherever the mask has data
    A = np.where(dat2 > 0)
    dat1[A] = dat2[A]

    gdalutils.write_raster(dat1, dirtaud4, geo, "Int16", -32768)
Example #3
def burn_banks_dem(dembnktif, demtif, fixbnktif):

    print("     burning banks in dem...")

    nodata = -9999
    fout = dembnktif
    base = gdalutils.get_data(demtif)
    basegeo = gdalutils.get_geo(demtif)
    new = gdalutils.get_data(fixbnktif)
    out = np.where(new > 0, new, base)
    gdalutils.write_raster(out, fout, basegeo, "Float32", nodata)
Example #4
def step_04():

    A = gu.get_data('delta_surf_interp.tif')
    B = gu.get_data(fill_demf)
    C = gu.get_data(void_demf)
    geo = gu.get_geo(void_demf)

    # Add the interpolated delta surface onto the filled DEM, then keep the
    # original values wherever the void DEM already has data
    mysum = A + B
    final = np.where(C == nodata, mysum, C)

    # Mask out physically implausible elevations
    final[(final >= 8000) | (final <= -8000)] = nodata
    gu.write_raster(final, 'dem.tif', geo, 'Float64', nodata)
Example #5
def calculate_area(filename, output):

    geo = gdalutils.get_geo(filename)
    nx = np.int32(geo[4])
    ny = np.int32(geo[5])
    resx = np.float32(geo[6])
    resy = np.float32(geo[7])
    x = np.float32(geo[8])
    y = np.float32(geo[9])
    dat = calc_area(nx, ny, resx, resy, x, y)
    gdalutils.write_raster(np.array(dat), output, geo, "Float32", -9999)
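
As used here and across the other examples, the layout of the geo list returned by get_geo is only inferred from usage, not documented in this listing:

# Inferred geo indices (an assumption based on how geo is used in these examples):
#   geo[4] -> nx (columns)         geo[5] -> ny (rows)
#   geo[6] -> x resolution         geo[7] -> y resolution
#   geo[8] -> x coordinate array   geo[9] -> y coordinate array
#   geo[10] -> projection (Example #15), geo[11] -> nodata value (Example #14)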
Example #6
def d82d4(filedir, filediro, fileneto):
    """
    Returns direction and river network maps in D4
    """

    nodata = -32768.
    dirdata = gdalutils.get_data(filedir)
    dirgeo = gdalutils.get_geo(filedir)
    data, net = cy_d82d4(np.int16(dirdata), np.int16(nodata))
    gdalutils.write_raster(np.int16(data), filediro, dirgeo, "Int16", nodata)
    gdalutils.write_raster(np.int16(net), fileneto, dirgeo, "Int16", nodata)
Example #7
def rastermask(file, mask, fmt, outp):
    """
    Mask the input array according to a binary (1/0) mask
    """

    nodata = -32768
    filedata = gdalutils.get_data(file)
    maskdata = gdalutils.get_data(mask)
    filegeo = gdalutils.get_geo(file)
    data = cy_rastermask(np.float64(filedata), np.int16(maskdata))
    gdalutils.write_raster(np.float64(data), outp, filegeo, fmt, nodata)
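
A usage sketch with hypothetical filenames; the mask raster is expected to hold 1 where values should be kept and 0 elsewhere:

rastermask('width.tif', 'chanmask.tif', 'Float32', 'width_masked.tif')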
Example #8
def rasterthreshold(file, thres, fmt, outp):
    """
    Output a raster keeping values greater than or equal to a threshold
    """

    nodata = -1
    filedata = gdalutils.get_data(file)
    filegeo = gdalutils.get_geo(file)
    data = cy_rasterthreshold(np.float64(filedata), np.float64(thres),
                              np.float64(nodata))
    gdalutils.write_raster(np.float64(data), outp, filegeo, fmt, nodata)
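
For instance, to flag cells whose flow accumulation is at least 1000 (hypothetical filenames and threshold):

rasterthreshold('acc.tif', 1000, 'Float32', 'net.tif')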
Example #9
def step_01():

    geo        = gu.get_geo(void_demf)
    void_dem   = gu.get_data(void_demf)
    fill_dem   = gu.get_data(fill_demf)
    delta_surf = void_dem - fill_dem

    delta_surf[(delta_surf >= 8000) | (delta_surf <= -8000)] = nodata
    delta_surf[delta_surf == 0] = nodata

    gu.write_raster(delta_surf, 'delta_surf_wt_voids.tif', geo, 'Float64', nodata)
Example #10
def burn_banks_dem_1D(dembnktif, demtif, fixbnktif):

    print("     burning banks in dem 1D...")

    nodata = -9999
    fout = dembnktif
    base = gdalutils.get_data(demtif)
    basegeo = gdalutils.get_geo(demtif)
    # Add the synthetic 1D DEM offset (10000) to valid bank values only
    new = (np.ma.masked_values(gdalutils.get_data(fixbnktif), nodata) +
           10000).filled(nodata)
    out = np.where(new > 0, new, base)
    gdalutils.write_raster(out, fout, basegeo, "Float32", nodata)
Example #11
def directions_esri(inputrast, outputrast):
    """
    Function to change convetion from a DIR file
    This script changes these numbers from TauDEM convention, 1,2,3...
	to ESRI convention 128,64,32,.. 
    """

    nodata = -32768
    data = gdalutils.get_data(inputrast)
    datageo = gdalutils.get_geo(inputrast)
    data_esri = cy_directions_esri(np.int16(data), np.int16(nodata))
    gdalutils.write_raster(np.int16(data_esri), outputrast, datageo, "Int16",
                           nodata)
Example #12
def directions_tau(inputrast, outputrast):
    """
    Function to use in Shell to change convetion from a DIR file
    HydroSHEDS uses ESRI convention 128,64,32,.. this script
    changes these numbers to TauDEM convention, 1,2,3...
    """

    nodata = -32768
    data = gdalutils.get_data(inputrast)
    datageo = gdalutils.get_geo(inputrast)
    datatau = cy_directions_tau(np.int16(data), np.int16(nodata))
    gdalutils.write_raster(np.int16(datatau), outputrast, datageo, "Int16",
                           nodata)
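
For reference, the two D8 numbering schemes being translated between are the standard ones (not taken from this code):

# ESRI D8:   E=1, SE=2, S=4, SW=8, W=16, NW=32, N=64, NE=128 (powers of two, clockwise)
# TauDEM D8: E=1, NE=2, N=3, NW=4, W=5, SW=6, S=7, SE=8 (counter-clockwise from east)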
Example #13
def rasterresample(method, demf, netf, output, outlier, hrnodata, thresh,
                   nproc):

    print("    running rasterresample.py...")

    fname1 = demf
    fname2 = output

    # Coordinates for bank elevations are based on the river network mask
    net = gdalutils.get_data(netf)
    geo = gdalutils.get_geo(netf)

    # Consider all pixels in net30, including river network pixels
    iy, ix = np.where(net > -1)
    x = geo[8][ix]
    y = geo[9][iy]

    # Split x and y into nproc parts
    split_x = np.array_split(x, nproc)
    split_y = np.array_split(y, nproc)

    # Define a queue
    queue = mp.Queue()

    # Set up the list of processes that we want to run
    processes = [
        mp.Process(target=calc_resampling_mp,
                   args=(i, queue, fname1, hrnodata, split_x[i], split_y[i],
                         thresh, outlier, method)) for i in range(len(split_x))
    ]

    # Run processes
    for p in processes:
        p.start()

    # Get process results from the queue
    results = [queue.get() for p in processes]

    # Sort by worker index so chunks come back in their original order
    results.sort()
    results = [r[1] for r in results]

    # Stack results horizontally
    elev = np.hstack(results).reshape(net.shape)

    # Replace NaNs with hrnodata
    elev[np.isnan(elev)] = hrnodata

    # elev = calc_resampling(fname1,hrnodata,x,y,ix,iy,thresh,outlier,method)
    gdalutils.write_raster(elev, fname2, geo, "Float32", hrnodata)
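
The results.sort() step above implies a contract for the workers: each must put an (index, chunk) tuple on the queue so the chunks can be reassembled in their original order. A minimal sketch of such a worker, assuming the real calc_resampling_mp (not shown here) follows this shape:

def calc_resampling_mp(i, queue, fname, hrnodata, xs, ys, thresh, outlier, method):
    # ... resample an elevation value for each (x, y) point ...
    elev_chunk = np.full(len(xs), hrnodata)  # placeholder for the real computation
    queue.put((i, elev_chunk))  # index first, so results.sort() restores chunk order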
Example #14
def multiply_rasters(rast1, rast2, out):

    dat1 = gdalutils.get_data(rast1)
    dat2 = gdalutils.get_data(rast2)
    geo1 = gdalutils.get_geo(rast1)
    geo2 = gdalutils.get_geo(rast2)

    dat_masked1 = np.ma.masked_where(dat1 == geo1[11], dat1)
    dat_masked2 = np.ma.masked_where(dat2 == geo2[11], dat2)

    res = dat_masked1 * dat_masked2
    res.set_fill_value(-9999)

    gdalutils.write_raster(res.filled(), out, geo1, "Float32", -9999)
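
A usage sketch with hypothetical filenames; both rasters must share the same grid, since the arrays are multiplied element-wise and written out with the first raster's geometry:

multiply_rasters('area.tif', 'chanmask.tif', 'area_masked.tif')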
Example #15
def extract_from_zip(zipresults, date, date1, date2, var, proj4, outfile):
    """
    Extract a variable for a given date, then convert it to GTiff
    
    Example
    -------
    import lfptools.utils as lfp
    proj4 = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
    lfp.extract_from_zip('./176.zip','2002-08-20','1990-01-01','2014-12-31','wd',proj4,'./tmp.tif')

    """

    tmpdir = os.path.dirname(outfile) + '/tmp/'
    try:
        os.mkdir(tmpdir)
    except FileExistsError:
        pass

    # Open zip
    myzip = zipfile.ZipFile(zipresults)

    # Get list of files in zip
    mylist = sorted(myzip.namelist())
    myvar = [i for i in mylist if i.endswith('.' + var)]

    # Simulation times
    dates = pd.date_range(date1, date2)

    # Retrieve filenames based on dates
    ix = np.where(dates == date)[0][0]

    # Extract ASCII file
    myzip.extract(myvar[ix], tmpdir)

    # Get info from ASCII
    fname = tmpdir + myvar[ix]
    dat = gdalutils.get_data(fname)
    geo = gdalutils.get_geo(fname)
    geo[10] = _return_projection(proj4)
    gdalutils.write_raster(dat, outfile, geo, 'Float64', geo[-1])

    # Remove temp folder
    shutil.rmtree(tmpdir)
Example #16
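    # Reduce the row count by nwindow (geo[5] appears to be ny; cf. Example #5)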
    geo[5] = int(np.ceil(geo[5] / nwindow))
    # modify resolution: multiply by nwindow
    geo[6] = geo[6] * nwindow
    geo[7] = geo[7] * nwindow

    #########################################################################################
    # Downsample dem array
    if not os.path.exists(dem_downsample):
        data = gdalutils.get_data(demtif)
        print('inshape', data.shape)
        downsample_dem = block_reduce(data,
                                      block_size=(nwindow, nwindow),
                                      func=np.mean,
                                      cval=-9999)
        print('downsampled dem', downsample_dem.shape)
        gdalutils.write_raster(downsample_dem, dem_downsample, geo, 'Float32',
                               -9999)

    #########################################################################################
    # Downsample ord and acc arrays (for calculation of directions)
    #
    if not os.path.exists(ord_downsample) or not os.path.exists(
            acc_downsample):
        data = gdalutils.get_data(ordtif)
        downsample = block_reduce(data,
                                  block_size=(nwindow, nwindow),
                                  func=np.max,
                                  cval=-32767)
        downsample_count = block_reduce(data,
                                        block_size=(nwindow, nwindow),
                                        func=count,
                                        cval=-32767)
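
block_reduce comes from scikit-image (skimage.measure) and aggregates non-overlapping nwindow x nwindow blocks with the given function, padding the edges with cval. A self-contained illustration:

import numpy as np
from skimage.measure import block_reduce

a = np.arange(16, dtype=float).reshape(4, 4)
means = block_reduce(a, block_size=(2, 2), func=np.mean)  # 2x2 grid of block means
maxes = block_reduce(a, block_size=(2, 2), func=np.max)   # per-block maxima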
Example #17
def basinsplit(ncatch, outdir, cattif, demtif, acctif, nettif, wthtif, dirtif,
               aretif, ordtif, tretxt, cootxt):

    # Get extent for every catchment and its area
    catarr = gdalutils.get_data(cattif)

    try:
        dat = catarr == ncatch
    except Exception:
        sys.exit('ERROR invalid basin number')

    # Use gdal to mask out basin in network and direction tifs
    nettmp = 'net_tmp.tif'
    dirtmp = 'dir_tmp.tif'
    acctmp = 'acc_tmp.tif'
    ordtmp = 'ord_tmp.tif'
    cmd = [
        'gdal_calc.py', '--calc', 'where(B==' + str(ncatch) + ',A,0)',
        '--format', 'GTiff', '--type', 'Int16', '--NoDataValue', '-9999', '-B',
        cattif, '--B_band', '1', '-A', nettif, '--A_band', '1', '--co',
        'COMPRESS=DEFLATE', '--outfile', nettmp
    ]
    subprocess.call(cmd)
    cmd = [
        'gdal_calc.py', '--calc', 'where(B==' + str(ncatch) + ',A,0)',
        '--format', 'GTiff', '--type', 'Int16', '--NoDataValue', '-9999', '-B',
        cattif, '--B_band', '1', '-A', dirtif, '--A_band', '1', '--co',
        'COMPRESS=DEFLATE', '--outfile', dirtmp
    ]
    subprocess.call(cmd)
    cmd = [
        'gdal_calc.py', '--calc', 'where(B==' + str(ncatch) + ',A,0)',
        '--format', 'GTiff', '--type', 'Float32', '--NoDataValue', '-9999',
        '-B', cattif, '--B_band', '1', '-A', acctif, '--A_band', '1', '--co',
        'COMPRESS=DEFLATE', '--outfile', acctmp
    ]
    subprocess.call(cmd)
    cmd = [
        'gdal_calc.py', '--calc', 'where(B==' + str(ncatch) + ',A,0)',
        '--format', 'GTiff', '--type', 'Int16', '--NoDataValue', '-9999', '-B',
        cattif, '--B_band', '1', '-A', ordtif, '--A_band', '1', '--co',
        'COMPRESS=DEFLATE', '--outfile', ordtmp
    ]
    subprocess.call(cmd)
    print('separated basin for nettif, dirtif, acctif, ordtif')

    catgeo = gdalutils.get_geo(cattif)
    area = gdalutils.get_data(aretif)
    #outlet = gdalutils.get_data(otltif)
    #direc = gdalutils.get_data(dirtif)
    row, col = np.where(dat)
    _sum = np.sum(dat * area)
    # clean up
    del (catarr, dat, area)

    if _sum >= 100:  # make sure the basin is larger than 100 km2

        xmin = catgeo[8][min(col)]
        xmax = catgeo[8][max(col)]
        ymin = catgeo[9][max(row)]
        ymax = catgeo[9][min(row)]
        # Clean up
        del (row, col)

        # Clip input rasters
        netarr_tmp, netgeo_tmp = gdalutils.clip_raster(nettmp, xmin, ymin,
                                                       xmax, ymax)
        net_size = (netarr_tmp > 0).sum()
        print('loaded net array')

        if net_size >= 35:  # make sure the river network is long enough

            # Load tree and coord files
            tree = misc_utils.read_tree_taudem(tretxt)
            lfp_coor = misc_utils.read_coord_taudem(cootxt)
            lfp_coor.index.name = 'index'

            # Get list of x,y points in river network in basin
            iy, ix = np.where(netarr_tmp > 0)
            Xrav = netgeo_tmp[8][ix]
            Yrav = netgeo_tmp[9][iy]
            # Clean up memory
            del (netarr_tmp)

            # Clipping tree file based on segments within basin
            print('Clipping tree file')
            lfp_tree = pd.DataFrame()
            for i in tree.index:
                sta = tree.loc[i, 'start_pnt']
                end = tree.loc[i, 'end_pnt']
                lon1 = lfp_coor.loc[sta, 'lon']
                lat1 = lfp_coor.loc[sta, 'lat']
                lon2 = lfp_coor.loc[end, 'lon']
                lat2 = lfp_coor.loc[end, 'lat']
                #                dis1, ind1 = misc_utils.near_euc(
                #                    lfp_coor['lon'].values, lfp_coor['lat'].values, (lon1, lat1))
                #                dis2, ind2 = misc_utils.near_euc(
                #                    lfp_coor['lon'].values, lfp_coor['lat'].values, (lon2, lat2))
                dis1, ind1 = misc_utils.near_euc(Xrav, Yrav, (lon1, lat1))
                dis2, ind2 = misc_utils.near_euc(Xrav, Yrav, (lon2, lat2))
                # The default tolerance of 0.01 failed to find link number 3504; it was increased to 0.012 to catch the missing link
                if (dis1 <= 0.012) & (dis2 <= 0.012):
                    lfp_tree = lfp_tree.append(tree.loc[i, :])
            lfp_tree = lfp_tree[[
                'link_no', 'start_pnt', 'end_pnt', 'frst_ds', 'frst_us',
                'scnd_us', 'strahler', 'mon_pnt', 'shreve'
            ]]
            lfp_tree.index.name = 'index'

            # Creating folder per basin
            ncatchstr = "%03d" % ncatch
            folder = outdir + "/" + ncatchstr
            create_out_folder(folder)

            # Writing clipped coord and tree files
            print('Writing text files')
            fnametre = folder + "/" + ncatchstr + "_tre.csv"
            fnamecoo = folder + "/" + ncatchstr + "_coo.csv"
            lfp_coor.to_csv(fnamecoo)
            lfp_tree.to_csv(fnametre, float_format='%i')
            # clean up memory
            del (lfp_coor, lfp_tree)

            # Creating rec dataframe
            rec = connections(fnametre, fnamecoo)

            #  Writing XXX_rec.csv file
            fnamerec = folder + "/" + ncatchstr + "_rec.csv"
            rec.to_csv(fnamerec)

            # Get extent from rec dataframe
            xmin = rec['lon'].min()
            xmax = rec['lon'].max()
            ymin = rec['lat'].min()
            ymax = rec['lat'].max()
            # Clean up memory
            del (rec)

            # Get fixed extent
            # _dir    = getdir(rec,dirtif)
            # _dirlet = getdirletter(_dir)
            # xmin,ymin,xmax,ymax = get_extent_outlet(_dirlet,0.1,xmin,ymin,xmax,ymax)

            # Clipping rasters
            print('Loading and clipping rasters')
            nodata = -9999
            # Creating output names
            fnamedem = folder + "/" + ncatchstr + "_dem.tif"
            fnameacc = folder + "/" + ncatchstr + "_acc.tif"
            fnamenet = folder + "/" + ncatchstr + "_net.tif"
            fnamewth = folder + "/" + ncatchstr + "_wth.tif"
            fnamedir = folder + "/" + ncatchstr + "_dir.tif"
            fnameord = folder + "/" + ncatchstr + "_ord.tif"

            # Load and write each array before removing it from memory
            demarrcli, demgeocli = gdalutils.clip_raster(
                demtif, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(demarrcli, fnamedem, demgeocli, "Float32",
                                   nodata)
            del (demarrcli, demgeocli)

            accarrcli, accgeocli = gdalutils.clip_raster(
                acctmp, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(accarrcli, fnameacc, accgeocli, "Float32",
                                   nodata)
            del (accarrcli, accgeocli)

            wtharrcli, wthgeocli = gdalutils.clip_raster(
                wthtif, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(wtharrcli, fnamewth, wthgeocli, "Float32",
                                   nodata)
            del (wtharrcli, wthgeocli)

            dirarrcli, dirgeocli = gdalutils.clip_raster(
                dirtmp, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(dirarrcli, fnamedir, dirgeocli, "Int16",
                                   nodata)
            del (dirarrcli, dirgeocli)

            netarrcli, netgeocli = gdalutils.clip_raster(
                nettmp, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(netarrcli, fnamenet, netgeocli, "Int16",
                                   nodata)
            del (netarrcli, netgeocli)

            ordarrcli, ordgeocli = gdalutils.clip_raster(
                ordtmp, xmin, ymin, xmax, ymax)
            gdalutils.write_raster(ordarrcli, fnameord, ordgeocli, "Int16",
                                   nodata)
            del (ordarrcli, ordgeocli)

            # Finally delete the nettmp and dirtmp files
            os.remove(nettmp)
            os.remove(dirtmp)
            os.remove(ordtmp)
            os.remove(acctmp)

        else:
            print("NOT PROCESSED: number of river pixels fewer than 35: " +
                  str(net_size) + " pixels in basin number " + str(ncatch))
    else:
        print("NOT PROCESSED: basin area smaller than 100 km2: " + str(_sum) +
              " km2 in basin number " + str(ncatch))
Example #18
def calc_banks(banktif, bedtif, fname_disch, fname_stage, reccsv, return_per, layer, outfolder):

    # Load stage and discharge files
    # A try statement is needed since some discharge and stage files are empty; exit in that case
    try:
        stage = lfp.read_stage(fname_stage)
        df_locs = lfp.read_stage_locs(fname_stage)
        df_locs.index = range(len(stage.columns))
        discharge = lfp.read_discharge(fname_disch)
        stage.columns = range(len(discharge.columns))
        discharge.columns = range(len(stage.columns))
    except ValueError:
        sys.exit('ERROR: Probably stage or discharge file is empty')

    # Load the return period database (e.g. FLOPROS)
    gdf_defenses = gpd.read_file(return_per)

    # Get the protection level from the return period dataset at every cell
    # River points are buffered to allow for disagreement between geolocations
    # Buffered points may pick up more than one value; the maximum flood protection is selected
    mygeom = [Point(x, y) for x, y in zip(df_locs['x'], df_locs['y'])]
    gdf_locs = gpd.GeoDataFrame(crs={'init': 'epsg:4326'}, geometry=mygeom)
    gdf_locs_buf = gpd.GeoDataFrame(
        crs={'init': 'epsg:4326'}, geometry=gdf_locs.buffer(0.1))
    gdf_locs_ret = gpd.sjoin(gdf_locs_buf, gdf_defenses, op='intersects')
    gdf_locs_ret['index'] = gdf_locs_ret.index
    gdf_locs_ret = gdf_locs_ret.sort_values(
        layer, ascending=False).drop_duplicates('index').sort_values('index')

    # Estimating error in discharge fitting
    dis_err = []
    for i in range(discharge.shape[1]):
        try:
            dis_err.append(get_discharge_error(discharge[i]))
        except (KeyError,np.core._internal.AxisError):
            dis_err.append(0)

    # Estimating a defenses-related discharge
    dis_df = []
    for i in range(discharge.shape[1]):
        ret_pe = gdf_locs_ret['MerL_Riv'][i]
        try:
            dis_df.append(get_discharge_returnperiod(discharge[i], ret_pe))
        except (KeyError,np.core._internal.AxisError):
            dis_df.append(np.nan)

    # Estimating error in stage fitting
    stg_err = []
    for i in range(discharge.shape[1]):
        try:
            stg_err.append(get_stage_error(discharge[i], stage[i]))
        except (RuntimeError, TypeError):
            stg_err.append(0)

    # Estimating a defenses-related stage
    stg_df = []
    for i in range(discharge.shape[1]):
        try:
            stg_df.append(get_stage_discharge(
                discharge[i], stage[i], dis_df[i]))
        except (RuntimeError, TypeError):
            stg_df.append(np.nan)

    # Preparing a summary with the variables retrieved
    df_locs['dis_df'] = dis_df
    df_locs['stg_df'] = stg_df
    df_locs['dis_err'] = dis_err
    df_locs['stg_err'] = stg_err

    # Read REC file
    rec = pd.read_csv(reccsv)

    # Convert dataframe to geodataframe, join with rec
    gdf_sum = gpd.GeoDataFrame(df_locs, crs={'init': 'epsg:4326'}, geometry=[
        Point(x, y) for x, y in zip(df_locs['x'], df_locs['y'])])
    gdf_rec = gpd.GeoDataFrame(rec, crs={'init': 'epsg:4326'}, geometry=[
        Point(x, y) for x, y in zip(rec['lon'], rec['lat'])])
    gdf_rec_buf = gpd.GeoDataFrame(
        rec, crs={'init': 'epsg:4326'}, geometry=gdf_rec.buffer(0.001))
    gdf_sum_rec = gpd.sjoin(gdf_sum, gdf_rec_buf,
                            how='inner', op='intersects')
    gdf_sum_rec.sort_values('index_right', inplace=True)

    # Save errors in a GeoJSON file
    try:
        gdf_sum_rec.to_file(outfolder + 'bnk_err.geojson', driver='GeoJSON')
    except Exception:
        os.remove(outfolder + 'bnk_err.geojson')
        gdf_sum_rec.to_file(outfolder + 'bnk_err.geojson', driver='GeoJSON')

    # Scores must be greater than 0.85 for both discharge and stage to be accepted; otherwise NaN
    gdf_err = gdf_sum_rec['stg_df'].where(
        (gdf_sum_rec['dis_err'] > 0.85) & (gdf_sum_rec['stg_err'] > 0.85))

    # Fill stg_df with NaN where that condition is not met
    gdf_sum_rec['stg_df'] = gdf_err

    # NaNs are filled by repeating the last/first value per link
    gdf_sum_rec_fillna = gdf_sum_rec.groupby('link').fillna(
        method='bfill').fillna(method='ffill')
    gdf_sum_rec_fillna['link'] = gdf_sum_rec['link']

    # Read data and geo for bedtif
    bed = gu.get_data(bedtif)
    geo = gu.get_geo(bedtif)

    # Convert dataframes to arrays
    df_locs_stgdf = gdf_sum_rec_fillna[['x', 'y', 'stg_df']]
    df_locs_stgdf.columns = ['x', 'y', 'z']
    arr_stgdf = gu.pandas_to_array(df_locs_stgdf, geo, 0)

    # Add the defenses-related stage to the bed to obtain bank elevations
    arr_bnkdf = (bed + arr_stgdf)

    # Write burned banks in ASC and TIF files
    gu.write_raster(arr_bnkdf, banktif, geo, 'Float64', 0)
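
The per-link gap filling above backfills and then forward-fills NaNs grouped by link. An equivalent, self-contained illustration using transform (a sketch of the idea, not the exact call used above):

import numpy as np
import pandas as pd

df = pd.DataFrame({'link': [1, 1, 1, 2, 2],
                   'stg_df': [np.nan, 5.0, np.nan, np.nan, 2.0]})
# Fill gaps within each link only: backfill first, then forward-fill
df['stg_df'] = df.groupby('link')['stg_df'].transform(lambda s: s.bfill().ffill())
# -> link 1: [5.0, 5.0, 5.0]; link 2: [2.0, 2.0]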
Example #19
def basinsplit(ncatch, outdir, cattif, demtif, acctif, nettif, wthtif, dirtif, aretif, otltif, tretxt, cootxt):

    # Get extent for every catchment and its area
    catarr = gdalutils.get_data(cattif)

    try:
        dat = catarr == ncatch
    except Exception:
        sys.exit('ERROR invalid basin number')

    catgeo = gdalutils.get_geo(cattif)
    area = gdalutils.get_data(aretif)
    outlet = gdalutils.get_data(otltif)
    direc = gdalutils.get_data(dirtif)
    row, col = np.where(dat)
    _sum = np.sum(dat*area)

    if _sum >= 100:  # make sure the basin is larger than 100 km2

        xmin = catgeo[8][min(col)]
        xmax = catgeo[8][max(col)]
        ymin = catgeo[9][max(row)]
        ymax = catgeo[9][min(row)]

        # Clip input rasters
        netarr_tmp, netgeo_tmp = gdalutils.clip_raster(
            nettif, xmin, ymin, xmax, ymax)
        catarr_tmp, catgeo_tmp = gdalutils.clip_raster(
            cattif, xmin, ymin, xmax, ymax)

        # Mask only the catchment and fill with zeros
        netarr_tmp = np.where(catarr_tmp == ncatch, netarr_tmp, 0)

        if netarr_tmp.sum() >= 35:  # make sure the river network is long enough

            # Clip tree and coord files based on coordinates where nettif > 0
            tree = misc_utils.read_tree_taudem(tretxt)
            coor = misc_utils.read_coord_taudem(cootxt)
            iy, ix = np.where(netarr_tmp > 0)
            Xrav = netgeo_tmp[8][ix]
            Yrav = netgeo_tmp[9][iy]

            # Clipping the coord file (could be improved; this calculation takes some time)
            lfp_coor = pd.DataFrame()
            for i in range(len(Xrav)):
                dis, ind = misc_utils.near_euc(
                    coor['lon'].values, coor['lat'].values, (Xrav[i], Yrav[i]))
                if dis <= 0.01:
                    lfp_coor = lfp_coor.append(coor.loc[ind, :])
            lfp_coor = lfp_coor[['lon', 'lat',
                                 'distance', 'elev', 'contr_area']]
            lfp_coor.index.name = 'index'
            lfp_coor.sort_index(inplace=True)
            # Remove duplicates just in case
            lfp_coor.drop_duplicates(inplace=True)

            # Clipping tree file
            lfp_tree = pd.DataFrame()
            for i in tree.index:
                sta = tree.loc[i, 'start_pnt']
                end = tree.loc[i, 'end_pnt']
                lon1 = coor.loc[sta, 'lon']
                lat1 = coor.loc[sta, 'lat']
                lon2 = coor.loc[end, 'lon']
                lat2 = coor.loc[end, 'lat']
                dis1, ind1 = misc_utils.near_euc(
                    lfp_coor['lon'].values, lfp_coor['lat'].values, (lon1, lat1))
                dis2, ind2 = misc_utils.near_euc(
                    lfp_coor['lon'].values, lfp_coor['lat'].values, (lon2, lat2))
                # The default tolerance of 0.01 failed to find link number 3504; it was increased to 0.012 to catch the missing link
                if (dis1 <= 0.012) & (dis2 <= 0.012):
                    lfp_tree = lfp_tree.append(tree.loc[i, :])
            lfp_tree = lfp_tree[['link_no', 'start_pnt', 'end_pnt', 'frst_ds',
                                 'frst_us', 'scnd_us', 'strahler', 'mon_pnt', 'shreve']]
            lfp_tree.index.name = 'index'

            # Creating folder per basin
            ncatchstr = "%03d" % ncatch
            folder = outdir + "/" + ncatchstr
            create_out_folder(folder)

            # Writing clipped coord and tree files
            fnametre = folder + "/" + ncatchstr + "_tre.csv"
            fnamecoo = folder + "/" + ncatchstr + "_coo.csv"
            lfp_coor.to_csv(fnamecoo)
            lfp_tree.to_csv(fnametre, float_format='%i')

            # Creating rec dataframe
            rec = connections(fnametre, fnamecoo)

            #  Writing XXX_rec.csv file
            fnamerec = folder + "/" + ncatchstr + "_rec.csv"
            rec.to_csv(fnamerec)

            # Get extent from rec dataframe
            xmin = rec['lon'].min()
            xmax = rec['lon'].max()
            ymin = rec['lat'].min()
            ymax = rec['lat'].max()

            # Get fixed extent
            # _dir    = getdir(rec,dirtif)
            # _dirlet = getdirletter(_dir)
            # xmin,ymin,xmax,ymax = get_extent_outlet(_dirlet,0.1,xmin,ymin,xmax,ymax)

            # Clipping rasters
            demarrcli, demgeocli = gdalutils.clip_raster(
                demtif, xmin, ymin, xmax, ymax)
            accarrcli, accgeocli = gdalutils.clip_raster(
                acctif, xmin, ymin, xmax, ymax)
            wtharrcli, wthgeocli = gdalutils.clip_raster(
                wthtif, xmin, ymin, xmax, ymax)
            dirarrcli, dirgeocli = gdalutils.clip_raster(
                dirtif, xmin, ymin, xmax, ymax)
            netarrcli, netgeocli = gdalutils.clip_raster(
                nettif, xmin, ymin, xmax, ymax)
            catarrcli, catgeocli = gdalutils.clip_raster(
                cattif, xmin, ymin, xmax, ymax)

            # Mask only the catchment and fill with zeros
            netarrcli = np.where(catarrcli == ncatch, netarrcli, 0)
            dirarrcli = np.where(catarrcli == ncatch, dirarrcli, 0)

            # Creating output names
            fnamedem = folder + "/" + ncatchstr + "_dem.tif"
            fnameacc = folder + "/" + ncatchstr + "_acc.tif"
            fnamenet = folder + "/" + ncatchstr + "_net.tif"
            fnamewth = folder + "/" + ncatchstr + "_wth.tif"
            fnamedir = folder + "/" + ncatchstr + "_dir.tif"

            # Writing clipped arrays
            nodata = -9999
            gdalutils.write_raster(demarrcli, fnamedem,
                                   demgeocli, "Float32", nodata)
            gdalutils.write_raster(accarrcli, fnameacc,
                                   accgeocli, "Float32", nodata)
            gdalutils.write_raster(netarrcli, fnamenet,
                                   netgeocli, "Float32", nodata)
            gdalutils.write_raster(wtharrcli, fnamewth,
                                   wthgeocli, "Float32", nodata)
            gdalutils.write_raster(dirarrcli, fnamedir,
                                   dirgeocli, "Float32", nodata)

        else:
            print("NOT PROCESSED: number of river pixels fewer than 35: " +
                  str(netarr_tmp.sum()) + " pixels in basin number " + str(ncatch))
    else:
        print("NOT PROCESSED: basin area smaller than 100 km2: " +
              str(_sum) + " km2 in basin number " + str(ncatch))
Example #20
def beds(widthtif, bnkfixtif, runoffcsv, date1, date2, bedtif, lisfloodfp):

    # Create a temporary work folder
    outfolder = os.path.dirname(bedtif) + '/beds-temp/'
    try:
        os.makedirs(outfolder + 'lfp/nc/')
    except FileExistsError:
        pass

    # Determine the length of the simulation in days
    t = (pd.to_datetime(date2, format='%Y-%m-%d') -
         pd.to_datetime(date1, format='%Y-%m-%d')).days + 1

    # Create a synthetic 1D DEM
    demtif = outfolder + 'dem1d.tif'
    wdt = gu.get_data(widthtif)
    geo = gu.get_geo(widthtif)
    dem = np.where(wdt > 0, 10000, 0)
    gu.write_raster(dem, demtif, geo, 'Int16', 0)

    # Convert input files to ASCII
    widthasc = outfolder + 'width.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          widthtif, widthasc])

    demasc = outfolder + 'dem.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          demtif, demasc])

    bnkfixasc = outfolder + 'bnkfix.asc'
    call(['gdal_translate',
          '-of', 'AAIGRID',
          bnkfixtif, bnkfixasc])

    # Write LISFLOOD-FP files
    bcilfp = outfolder + 'lfp.bci'
    write_bci(bcilfp, runoffcsv)

    bdylfp = outfolder + 'lfp.bdy'
    write_bdy(bdylfp, runoffcsv, t)

    evaplfp = outfolder + 'lfp.evap'
    write_evap(evaplfp, t)

    parlfp = outfolder + 'lfp.par'
    write_par(parlfp=parlfp,
              bcilfp=bcilfp,
              bdylfp=bdylfp,
              evaplfp=evaplfp,
              gaugelfp='./none',
              stagelfp='./none',
              dembnktif=demasc,
              wdttif=widthasc,
              bedtif=bnkfixasc,
              t=t)

    # Run simulation
    call([lisfloodfp, '-v', 'lfp.par'], cwd=outfolder)

    # Write netCDFs for WATER DEPTHS
    myfiles = sorted(glob(outfolder + '/lfp/*.wd'))
    for myfile in myfiles:
        fname = outfolder + 'lfp/nc/' + os.path.basename(myfile) + '.nc'
        xr.open_rasterio(myfile).to_dataset(name='myvar').to_netcdf(fname)

    # Read netCDFs
    ds = xr.open_mfdataset(outfolder + 'lfp/nc/*.nc',
                           concat_dim='band',
                           autoclose=True,
                           parallel=False,
                           chunks={'band': 10})

    # Calculate the per-pixel mean water depth across time steps
    method = ds.where(ds > 0, 0).myvar.mean('band')

    # Saving result in netCDF
    method.to_netcdf(outfolder + 'mean.nc')

    # Reading banks
    bnkfix = gu.get_data(bnkfixtif)
    bnkfix = np.where(bnkfix > 0, bnkfix, 0)

    # Calculating bed
    bed = bnkfix - method.compute().data

    # Write final raster
    gu.write_raster(bed, bedtif, geo, 'Float64', 0)
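
The xarray chain above computes, per pixel, the mean water depth across all time steps, with non-positive depths treated as zero. A plain-numpy sketch of the same reduction on synthetic values:

import numpy as np

wd = np.array([[[0.0, 1.0], [2.0, 0.0]],   # hypothetical (time, rows, cols) stack
               [[0.5, 1.5], [2.5, 0.0]]])
mean_wd = np.where(wd > 0, wd, 0).mean(axis=0)  # same as ds.where(ds > 0, 0).mean('band')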
Example #21
        geo[5] = int(np.ceil(geo[5] / nwindow))
        # modify resolution: multiply by nwindow
        geo[6] = geo[6] * nwindow
        geo[7] = geo[7] * nwindow

        #########################################################################################
        # Downsample chanmask arrays
        if not os.path.exists(maskraw_downsample):
            data = gdalutils.get_data(maskrawtif)
            downsample_count = block_reduce(data,
                                            block_size=(nwindow, nwindow),
                                            func=count,
                                            cval=-32767)
            data_mask = downsample_count >= count_thresh
            print('downsampled mask', data_mask.shape)
            gdalutils.write_raster(data_mask, maskraw_downsample, geo, 'Int16',
                                   -9999)
        else:
            data_mask = gdalutils.get_data(maskraw_downsample)
        #########################################################################################
        # Clean chanmask - remove values away from stream network
        data_net = gdalutils.get_data(net_downsample)
        data_maskclean = clean_mask(data_mask, data_net, 900)
        gdalutils.write_raster(data_maskclean, maskclean_downsample, geo,
                               'Int16', -9999)

    else:
        if not os.path.exists(maskcleantif):
            data_mask = gdalutils.get_data(maskrawtif)

            #########################################################################################
            # Clean chanmask - remove values away from stream network