Example #1
def write_outlets(outshp, dirtif_mask):

    proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'

    dat = gdalutils.get_data(dirtif_mask)
    geo = gdalutils.get_geo(dirtif_mask)
    rows, cols = np.where(dat > 0)

    x = []
    y = []
    for row, col in zip(rows, cols):
        A = find_neighbours(dat, row, col)
        if np.any(A < 0):
            x.append(geo[8][col])
            y.append(geo[9][row])

    # Initiate shapefile
    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('id')

    # Write coordinate points in shapefile
    for i in range(len(x)):
        w.point(x[i], y[i])
        w.record(x[i], y[i], i)
    w.save(outshp)
    fname = os.path.dirname(outshp)+'/' + \
        os.path.basename(outshp).split('.')[0] + '.prj'
    prj = open(fname, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    typ = "Byte"
    fmt = "GTiff"
    nodata = 0
    name1 = os.path.dirname(outshp)+'/' + \
        os.path.basename(outshp).split('.')[0] + '.shp'
    name2 = os.path.dirname(outshp)+'/' + \
        os.path.basename(outshp).split('.')[0] + '.tif'
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-ot", typ, "-of", fmt, "-tr",
        str(geo[6]),
        str(geo[7]), "-burn", "1", "-a_srs", proj, "-te",
        str(geo[0]),
        str(geo[1]),
        str(geo[2]),
        str(geo[3]), name1, name2
    ])
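
The helper find_neighbours() called above is not shown in this example. A minimal sketch of what it might do, assuming it simply returns the 3x3 window of values around a cell (so that np.any(A < 0) flags mask cells that touch negative nodata/ocean cells):

import numpy as np

def find_neighbours(dat, row, col):
    # Assumed behaviour (not from the source): return the 3x3 window of
    # values centred on (row, col), clipped at the array edges.
    # write_outlets() then treats any cell whose window contains a
    # negative value as an outlet.
    r0, r1 = max(row - 1, 0), min(row + 2, dat.shape[0])
    c0, c1 = max(col - 1, 0), min(col + 2, dat.shape[1])
    return dat[r0:r1, c0:c1]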
Example #2
def getdepths(proj, netf, method, output, **kwargs):

    print("    runnning getdepths.py...")

    fname = output

    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('depth')

    if method == "depth_raster":
        depth_raster(w, netf, **kwargs)
    elif method == "depth_geometry":
        depth_geometry(w, **kwargs)
    elif method == "depth_manning":
        depth_manning(w, **kwargs)
    else:
        sys.exit("ERROR method not recognised")

    # write final value in a shapefile
    w.save("%s.shp" % fname)

    # write .prj file
    prj = open("%s.prj" % fname, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    nodata = -9999
    fmt = "GTiff"
    name1 = output + ".shp"
    name2 = output + ".tif"
    mygeo = gdalutils.get_geo(netf)
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-of", fmt, "-tr",
        str(mygeo[6]),
        str(mygeo[7]), "-a", "depth", "-a_srs", proj, "-te",
        str(mygeo[0]),
        str(mygeo[1]),
        str(mygeo[2]),
        str(mygeo[3]), name1, name2
    ])
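
A hypothetical invocation (the file names below are placeholders; the extra keyword arguments are simply forwarded to the selected depth_* helper):

method_kwargs = {}  # fill with whatever depth_geometry() expects; forwarded via **kwargs
getdepths(proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
          netf='basin_net.tif',
          method='depth_geometry',
          output='basin_dep',
          **method_kwargs)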
Example #3
def fixelevs(source, output, netf, recf, proj, method):

    print("    running fixelevs.py...")

    # Reading XXX_net.tif file
    geo = gdalutils.get_geo(netf)

    # Reading XXX_rec.csv file
    rec = pd.read_csv(recf)

    # Reading XXX_bnk.shp file
    bnk_gdf = gpd.read_file(source)

    # Initiate output shapefile
    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('elevadj')

    # Retrieving bank elevations from XXX_bnk.shp file
    # Values are stored in rec['bnk']
    rec['bnk'] = bnk_gdf['elev'].astype(float)

    # Adjusting bank values, resulting values
    # are stored in rec['bnk_adj']
    # coordinates are grouped by REACH number
    rec['bnk_adj'] = 0
    recgrp = rec.groupby('reach')
    for reach, df in recgrp:
        ids = df.index
        dem = df['bnk']

        # calc bank elevation
        if method == 'yamazaki':
            adjusted_dem = bank4flood(dem)
        elif method == 'lowless':
            adjusted_dem = lowless(dem)
        else:
            sys.exit('Method not recognised')
        rec.loc[ids, 'bnk_adj'] = adjusted_dem

    # Writing .shp resulting file
    for i in rec.index:
        w.point(rec['lon'][i], rec['lat'][i])
        w.record(rec['lon'][i], rec['lat'][i], rec['bnk_adj'][i])
    w.save("%s.shp" % output)

    # write .prj file
    prj = open("%s.prj" % output, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    nodata = -9999
    fmt = "GTiff"
    name1 = output + ".shp"
    name2 = output + ".tif"
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-of", fmt, "-co", "COMPRESS=DEFLATE", "-tr",
        str(geo[6]),
        str(geo[7]), "-a", "elevadj", "-a_srs", proj, "-te",
        str(geo[0]),
        str(geo[1]),
        str(geo[2]),
        str(geo[3]), name1, name2
    ])
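
A hypothetical call (the file names are placeholders); 'yamazaki' and 'lowless' select the two bank-smoothing helpers referenced above:

fixelevs(source='basin_bnk.shp',
         output='basin_bnkfix',
         netf='basin_net.tif',
         recf='basin_rec.csv',
         proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
         method='yamazaki')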
Example #4
def getwidths(recf, netf, proj, fwidth, output, thresh):

    print("    running getwidths.py...")

    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('width')

    # Reading XXX_rec.csv file
    rec = pd.read_csv(recf)

    # Get nearest width from datasource
    # Uses Euclidean distance to find nearest point in source
    # `try` is included since the width database may not contain data for
    # the basin; if that is the case, all values are assigned a 30 m width
    width = []
    for x, y in zip(rec['lon'], rec['lat']):

        xmin = x - thresh
        ymin = y - thresh
        xmax = x + thresh
        ymax = y + thresh

        dat, geo = gdalutils.clip_raster(fwidth, xmin, ymin, xmax, ymax)
        iy, ix = np.where(dat > 30)
        xdat = geo[8][ix]
        ydat = geo[9][iy]

        try:
            dis, ind = misc_utils.near_euc(xdat, ydat, (x, y))
            val = dat[iy[ind], ix[ind]]
            width.append(val)
        except ValueError:
            width.append(np.nan)

    rec['width'] = width

    # Group river network per link
    # If there are more NaNs than real values, all values in the link are set to 30
    # Otherwise, interpolate the real values to fill the NaNs
    def check_width(a):
        b = a.copy()
        c = b.isnull()
        falses = c.sum()
        trues = c.count() - falses
        if trues >= falses:
            return a.interpolate(limit_direction='both')
        else:
            b.loc[:] = 30
            return b
    rec.loc[:, 'width'] = rec.groupby('link').width.apply(check_width)

    # Writing .shp resulting file
    for x, y, width in zip(rec['lon'], rec['lat'], rec['width']):
        w.point(x, y)
        w.record(x, y, width)
    w.save("%s.shp" % output)

    # write .prj file
    prj = open("%s.prj" % output, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    geo = gdalutils.get_geo(netf)

    fmt = "GTiff"
    nodata = -9999
    name1 = output+".shp"
    name2 = output+".tif"
    subprocess.call(["gdal_rasterize", "-a_nodata", str(nodata), "-of", fmt, "-tr", str(geo[6]), str(geo[7]),
                     "-a", "width", "-a_srs", proj, "-te", str(geo[0]), str(geo[1]), str(geo[2]), str(geo[3]), name1, name2])
Example #5
def getbankelevs(output, recf, netf, hrdemf, proj, method, hrnodata, thresh,
                 outlier):

    print("    running getbankelevs.py...")

    fname = output

    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('elev')

    # Coordinates for bank elevations are based on the Rec file
    rec = pd.read_csv(recf)

    for x, y in zip(rec['lon'], rec['lat']):

        xmin = x - thresh
        ymin = y - thresh
        xmax = x + thresh
        ymax = y + thresh

        dem, dem_geo = gdalutils.clip_raster(hrdemf, xmin, ymin, xmax, ymax)
        ddem = np.ma.masked_where(dem == hrnodata, dem)

        if method == 'near':
            nodata = dem_geo[11]
            dfdem = gdalutils.array_to_pandas(dem, dem_geo, nodata, 'gt')
            arr = haversine.haversine_array(
                np.array(dfdem['y'].values, dtype='float32'),
                np.float32(dfdem['x'].values), np.float32(y), np.float32(x))
            dfdem['dis'] = np.array(arr)
            dfdem.sort_values(by='dis', inplace=True)
            elev = dfdem.iloc[0, 2]

        elif method == 'meanmin':
            if outlier == "yes":
                ddem = check_outlier(dem, ddem, hrnodata, 3.5)
            elev = np.mean([ddem.mean(), ddem.min()])

        elif method == 'mean':
            if outlier == "yes":
                ddem = check_outlier(dem, ddem, hrnodata, 3.5)
            elev = ddem.mean()

        elif method == 'min':
            if outlier == "yes":
                ddem = check_outlier(dem, ddem, hrnodata, 3.5)
            elev = ddem.min()

        # Write final file in a shapefile

        if np.isfinite(elev):
            w.point(x, y)
            w.record(x, y, elev)

    w.save("%s.shp" % fname)

    # Write .prj file
    prj = open("%s.prj" % fname, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    geo = gdalutils.get_geo(netf)

    fmt = "GTiff"
    nodata = -9999
    bnkname1 = output + ".shp"
    bnkname2 = output + ".tif"
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-of", fmt, "-co", "COMPRESS=DEFLATE", "-tr",
        str(geo[6]),
        str(geo[7]), "-a", "elev", "-a_srs", proj, "-te",
        str(geo[0]),
        str(geo[1]),
        str(geo[2]),
        str(geo[3]), bnkname1, bnkname2
    ])
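
A tiny worked example of the 'meanmin' rule above, mirroring the masking used in the function (the values are made up):

import numpy as np

dem = np.array([102., 101., -9999., 100.])
ddem = np.ma.masked_where(dem == -9999., dem)   # hide the nodata cell
elev = np.mean([ddem.mean(), ddem.min()])       # (101.0 + 100.0) / 2
print(elev)                                     # 100.5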
Example #6
def getwidths_varthresh(recf, netf, proj, fwidth, output, fbankfullq):

    # Reading XXX_net.tif file
    geo1 = gdalutils.get_geo(netf)

    bankfullq = gpd.read_file(fbankfullq)
    # the bankfull discharge column in this file is named 'bankfullq'

    # Reading XXX_rec.csv file
    rec = pd.read_csv(recf)
    print('loaded data')

    # x and y resolution (degrees)
    xres = geo1[6]
    yres = geo1[7]
    print('data res', xres, yres)

    # 30 m is the default width value
    width = np.ones([len(bankfullq)], dtype=np.float32) * 30.
    for row in bankfullq.itertuples():
        #print(row[0],row[1],row[2],row[3],row[4])
        i = row[0]
        x = float(row[1])
        y = float(row[2])
        bfq = max(float(row[3]), 1.)
        # Choose some threshold based on bankfull q (bfq)
        thresh = np.log(bfq) / 1000. + bfq / 1000000. + 2 * abs(
            xres) + 2 * abs(yres)

        # Come up with a minimum width to search for, based on bankfullq.
        # This is designed to prevent assigning width values from the
        # tributaries to the major river channels
        minwidth = bfq / 100. + 30

        # Get nearest width from datasource
        # Uses Euclidean distance to find nearest point in source
        # `try` is included since the width database may not contain data for
        # the basin; if that is the case, all values are assigned a 30 m width

        xmin = x - thresh
        ymin = y - thresh
        xmax = x + thresh
        ymax = y + thresh

        dat, geo = gdalutils.clip_raster(fwidth, xmin, ymin, xmax, ymax)
        try:
            iy, ix = np.where(dat > 30)
        except Exception:
            print('Error: point', i, x, y)
            print('Vals:', bfq, thresh, dat)
            continue
        xdat = geo[8][ix]
        ydat = geo[9][iy]

        try:
            dis, ind = misc_utils.near_euc(xdat, ydat, (x, y))
            val = dat[iy[ind], ix[ind]]
            #width.append(val)
            width[i] = val
        except ValueError:
            #width.append(30.)
            continue

    # Add widths to dataframe, then copy to new dataframe
    # bankfullq['width'] = width
    # widths = bankfullq[['x', 'y', 'geometry', 'width']]

    rec['width'] = width
    #################################################################
    # Group river network per link
    rec.loc[:, 'width'] = rec.groupby('link').width.apply(check_width)

    # Write out files
    print('Writing out data')
    name1 = output + '.shp'
    #widths.to_file(name1)

    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('width')
    # Writing .shp resulting file
    for x, y, width in zip(rec['lon'], rec['lat'], rec['width']):
        w.point(x, y)
        w.record(x, y, width)
    w.save("%s.shp" % output)

    # write .prj file
    prj = open("%s.prj" % output, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    nodata = -9999
    fmt = "GTiff"
    #    name1 = output
    #    name2 = os.path.dirname(output) + '/' + \
    #        os.path.basename(output).split('.')[0] + '.tif'
    name2 = output + '.tif'
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-of", fmt, "-ot", "Float32", "-co", "COMPRESS=DEFLATE",
        "-tr",
        str(geo1[6]),
        str(geo1[7]), "-a", "width", "-a_srs", proj, "-te",
        str(geo1[0]),
        str(geo1[1]),
        str(geo1[2]),
        str(geo1[3]), name1, name2
    ])
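
A worked example of the variable search threshold used above (the numbers are illustrative only; the result is in degrees):

import numpy as np

bfq, xres, yres = 1000.0, 0.001, -0.001   # bankfull discharge (m3/s), grid resolution
thresh = np.log(bfq) / 1000. + bfq / 1000000. + 2 * abs(xres) + 2 * abs(yres)
print(round(thresh, 4))                   # ~0.0119 deg: larger rivers get a wider search window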
Example #7
def getslopes(source, output, netf, recf, proj, step):

    print("    runnning getslopes.py...")

    # Reading XXX_rec.csv file
    rec = pd.read_csv(recf)

    # Reading XXX_net.tif file
    geo = gdalutils.get_geo(netf)

    # Reading bank file (adjusted bank)
    elev = np.array(shapefile.Reader(source).records(), dtype='float64')

    # Initiate output shapefile
    w = shapefile.Writer(shapefile.POINT)
    w.field('x')
    w.field('y')
    w.field('slope')

    # Retrieving adjusted bank elevations from XXX_bnkfix.shp file
    # Values are stored in rec['bnk']
    bnkadj = []
    for i in rec.index:
        dis, ind = misc_utils.near_euc(elev[:, 0], elev[:, 1],
                                       (rec['lon'][i], rec['lat'][i]))
        bnkadj.append(elev[ind, 2])
    rec['bnkadj'] = bnkadj

    # Calculating slopes
    # coordinates are grouped by REACH number
    rec['slopes'] = 0
    recgrp = rec.groupby('reach')
    for reach, df in recgrp:
        ids = df.index
        dem = df['bnkadj']
        # calc slopes
        slopes_vals = calc_slope_step(dem, df['lon'].values, df['lat'].values,
                                      step)
        rec.loc[ids, 'slopes'] = slopes_vals

    # Writing .shp resulting file
    for i in rec.index:
        w.point(rec['lon'][i], rec['lat'][i])
        w.record(rec['lon'][i], rec['lat'][i], rec['slopes'][i])
    w.save("%s.shp" % output)

    # write .prj file
    prj = open("%s.prj" % output, "w")
    srs = osr.SpatialReference()
    srs.ImportFromProj4(proj)
    prj.write(srs.ExportToWkt())
    prj.close()

    # Writing .tif file
    nodata = -9999
    fmt = "GTiff"
    name1 = output + ".shp"
    name2 = output + ".tif"
    subprocess.call([
        "gdal_rasterize", "-a_nodata",
        str(nodata), "-of", fmt, "-tr",
        str(geo[6]),
        str(geo[7]), "-a", "slope", "-a_srs", proj, "-te",
        str(geo[0]),
        str(geo[1]),
        str(geo[2]),
        str(geo[3]), name1, name2
    ])
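
A hypothetical call (the file names are placeholders); source is the adjusted-bank shapefile produced earlier, and step is passed through to calc_slope_step():

getslopes(source='basin_bnkfix.shp',
          output='basin_slp',
          netf='basin_net.tif',
          recf='basin_rec.csv',
          proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
          step=5)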