Example #1
def RdCompare():
  parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description="""RichDEM Dataset Comparison

Parameters:
rda1     -- A dataset
rda2     -- A dataset
""")
  parser.add_argument('rda1',              type=str,            help='Elevation model')
  parser.add_argument('rda2',              type=str,            help='Elevation model')
  args = parser.parse_args()

  ds1 = rd.LoadGDAL(args.rda1)
  ds2 = rd.LoadGDAL(args.rda2)

  if ds1.no_data!=ds2.no_data:
    print("NoData differs")

  if ds1.geotransform!=ds2.geotransform:
    print("Geotransform differs")

  if ds1.projection!=ds2.projection:
    print("Projection differs")    

  if np.any(np.isnan(ds1)):
    print("NaN in '{0}'".format(filename))

  if np.any(np.isnan(ds2)):
    print("NaN in '{0}'".format(filename))    

  diff = np.array(ds1-ds2)

  print("Absolute Max difference: {0:10.6}".format(np.nanmax(np.abs(diff))))
  print("Absolute Min difference: {0:10.6}".format(np.nanmin(np.abs(diff))))
  print("Absolute Avg difference: {0:10.6}".format(np.mean  (np.abs(diff))))
  print("RMS difference:          {0:10.6}".format(np.sqrt(np.mean(np.square(diff)))))
Example #2
def FlowAccumulation():
  parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description="""RichDEM Flow Accumulation

A variety of methods are available.

Method            Note                           Reference
Tarboton          Alias for Dinf.
Dinf              Alias for Tarboton.
Quinn             Holmgren with exponent=1.
Holmgren(E)       Generalization of Quinn.
Freeman(E)        TODO
FairfieldLeymarie Alias for Rho8.
Rho8              Alias for FairfieldLeymarie.
OCallaghan        Alias for D8.                  10.1016/S0734-189X(84)80011-0
D8                Alias for OCallaghan.          10.1016/S0734-189X(84)80011-0

Methods marked (E) require the exponent argument.
""")
  parser.add_argument('dem',              type=str,                help='Elevation model')
  parser.add_argument('outname',          type=str,                help='Name of output file')
  parser.add_argument('-m', '--method',   type=str, required=True, help='Flow accumulation method to use')
  parser.add_argument('-e', '--exponent', type=float,              help='Some methods require an exponent')
  parser.add_argument('-v', '--version',  action='version', version=rd._RichDEMVersion())
  args = parser.parse_args()

  dem = rd.LoadGDAL(args.dem)
  rd._AddAnalysis(dem, ' '.join(sys.argv))
  accum = rd.FlowAccumulation(dem, method=args.method, exponent=args.exponent)
  rd.SaveGDAL(args.outname, accum)
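
The same accumulation can be requested directly from the Python API; a minimal sketch with a placeholder input path. Holmgren (and Freeman) take the exponent argument mentioned in the table above.

import richdem as rd

dem = rd.LoadGDAL("dem.tif")                     # placeholder path
accum_d8 = rd.FlowAccumulation(dem, method='D8')
accum_holmgren = rd.FlowAccumulation(dem, method='Holmgren', exponent=5.0)
rd.SaveGDAL("accum_d8.tif", accum_d8)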
Example #3
def RichFlow(bassin, zone, root, flowdir, overwrite):
    """
    Calcule le plan de drainage en utilisant RichDEM :
    FillDepressions + ResolveFlats
    """

    raster_template = os.path.join(root, bassin, zone, 'DEM5M.tif')
    flow_raster = os.path.join(root, bassin, zone, flowdir)

    if os.path.exists(flow_raster) and not overwrite:
        click.secho('Output already exists : %s' % flow_raster, fg=WARNING)
        return

    dem = rd.LoadGDAL(raster_template)
    filled = rd.FillDepressions(dem)
    rd.ResolveFlats(filled, True)
    flow = ta.flowdir(filled, filled.no_data)

    ds = rio.open(raster_template)
    profile = ds.profile.copy()
    profile.update(dtype=np.int16, nodata=-1, compress='deflate')

    with rio.open(flow_raster, 'w', **profile) as dst:
        dst.write(flow, 1)

    click.secho('Saved to %s' % flow_raster, fg=SUCCESS)
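
A minimal sketch of the RichDEM part of this pipeline in isolation (the rasterio writing and the external ta.flowdir call are specific to the project above); the input path is a placeholder.

import richdem as rd

dem = rd.LoadGDAL("DEM5M.tif")           # placeholder path
filled = rd.FillDepressions(dem)         # returns a new, pit-free rdarray
rd.ResolveFlats(filled, in_place=True)   # drain the remaining flat areas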
Example #4
def GaussianFilter(in_dem, sigma=1, out_file=None):
    """Applies a Gaussian filter to an image.

    Args:
        in_dem (str): File path to the input image.
        sigma (float, optional): The standard deviation of the Gaussian kernel. Defaults to 1.
        out_file (str, optional): File path to the output image. Defaults to None.

    Returns:
        np.array: The numpy array containing the filtered image.
    """
    print("Gaussian filtering ...")
    start_time = time.time()
    dem = rd.LoadGDAL(in_dem)
    no_data = dem.no_data
    projection = dem.projection
    geotransform = dem.geotransform

    gau = ndimage.gaussian_filter(dem, sigma=sigma)
    gau = np2rdarray(gau, no_data, projection, geotransform)
    print("Run time: {:.4f} seconds".format(time.time() - start_time))

    if out_file is not None:
        print("Saving dem ...")
        rd.SaveGDAL(out_file, gau)
        return out_file

    return gau
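
np2rdarray is a helper from the surrounding project and is not shown here; a plausible minimal equivalent, which simply re-attaches the georeferencing that SciPy filtering strips off, might look like the following sketch.

import richdem as rd

def np2rdarray(in_array, no_data, projection, geotransform):
    # Wrap a plain NumPy array back into a RichDEM rdarray and restore
    # the metadata needed by rd.SaveGDAL (assumed behaviour of the helper).
    out = rd.rdarray(in_array, no_data=no_data)
    out.projection = projection
    out.geotransform = geotransform
    return out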
Example #5
def DelineateMounts(in_dem,
                    min_size,
                    min_height,
                    interval,
                    out_dir,
                    bool_shp=False):

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    print("Loading data ...")
    dem = rd.LoadGDAL(in_dem)
    projection = dem.projection
    geotransform = dem.geotransform
    cell_size = np.round(geotransform[1], decimals=3)

    out_dem = os.path.join(out_dir, "dem_flip.tif")
    in_dem = FlipDEM(dem, delta=100, out_file=out_dem)

    min_elev, max_elev, no_data = get_min_max_nodata(dem)
    print("min = {:.2f}, max = {:.2f}, no_data = {}, cell_size = {}".format(
        min_elev, max_elev, no_data, cell_size))

    sink_path = ExtractSinks(in_dem, min_size, out_dir)
    dep_id_path, dep_level_path = DelineateDepressions(sink_path, min_size,
                                                       min_height, interval,
                                                       out_dir, bool_shp)

    return dep_id_path, dep_level_path
Example #6
def MeanFilter(in_dem, kernel_size=3, out_file=None):
    """Applies a mean filter to an image.

    Args:
        in_dem (str): File path to the input image.
        kernel_size (int, optional): The size of the moving window. Defaults to 3.
        out_file (str, optional): File path to the output image. Defaults to None.

    Returns:
        np.array: The numpy array containing the filtered image.
    """
    print("Mean filtering ...")
    start_time = time.time()
    dem = rd.LoadGDAL(in_dem)
    no_data = dem.no_data
    projection = dem.projection
    geotransform = dem.geotransform

    weights = np.full((kernel_size, kernel_size),
                      1.0 / (kernel_size * kernel_size))
    mean = ndimage.convolve(dem, weights)
    mean = np2rdarray(mean, no_data, projection, geotransform)
    print("Run time: {:.4f} seconds".format(time.time() - start_time))

    if out_file is not None:
        print("Saving dem ...")
        rd.SaveGDAL(out_file, mean)
        return out_file

    return mean
Example #7
def calc_topo(dem_path):
    """
    Calculates slope and aspect from given DEM and saves output.
    The function checks to see whether a slope/aspect file has already been created so as to avoid needless processing.
    
    Parameters:
    dem_path (pathlib.PosixPath): The relative or absolute path to an input DEM file.

    Dependencies: 
    richdem module
    GDAL binaries
    pathlib module
    """
    slope_path = Path(
        str(dem_path).replace("dem", "slope"))
    aspect_path = Path(
        str(dem_path).replace("dem", "aspect"))

    if ((not slope_path.is_file()) or 
            (not aspect_path.is_file())):
        
        dem = rd.LoadGDAL(str(dem_path))

    if not slope_path.is_file():
        slope = rd.TerrainAttribute(
            dem, attrib='slope_riserun')
        rd.SaveGDAL(str(slope_path), slope)
    
    if not aspect_path.is_file():
        aspect = rd.TerrainAttribute(dem, attrib='aspect')
        rd.SaveGDAL(str(aspect_path), aspect)
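
Hypothetical usage, assuming the input file name contains "dem" as the replacement logic above requires:

from pathlib import Path

calc_topo(Path("data/site_dem.tif"))
# writes data/site_slope.tif and data/site_aspect.tif unless they already exist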
Example #8
def TerrainAttribute():
  parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description="""RichDEM Terrain Attribute

A variety of methods are available.

Parameters:
dem      -- An elevation model
attrib   -- Terrain attribute to calculate. (See below.)
zscale   -- How much to scale the z-axis by prior to calculation

Method:
slope_riserun
slope_percentage
slope_degrees
slope_radians
aspect
curvature
planform_curvature
profile_curvature
""")
  parser.add_argument('dem',              type=str,                help='Elevation model')
  parser.add_argument('outname',          type=str,                help='Name of output file')
  parser.add_argument('-a', '--attrib',   type=str, required=True, help='Terrain attribute to calculate')
  parser.add_argument('-z', '--zscale',   type=float, default=1.0, help='Scale elevations by this factor prior to calculation')
  parser.add_argument('-v', '--version',  action='version', version=rd._RichDEMVersion())
  args = parser.parse_args()

  dem = rd.LoadGDAL(args.dem)
  rd._AddAnalysis(dem, ' '.join(sys.argv))
  tattrib = rd.TerrainAttribute(dem, attrib=args.attrib, zscale=args.zscale)
  rd.SaveGDAL(args.outname, tattrib)
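
The equivalent direct call for one of the attributes listed above; a sketch with a placeholder path.

import richdem as rd

dem = rd.LoadGDAL("dem.tif")    # placeholder path
slope = rd.TerrainAttribute(dem, attrib='slope_degrees', zscale=1.0)
rd.SaveGDAL("slope_degrees.tif", slope)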
Example #9
def processDEM(fileloc):
    """
    computes hydrologically sound DEM by filling pits 
    and also computes flow accumulation 
    """
    pitremovedDEM = rd.FillDepressions(rd.LoadGDAL(fileloc))
    accumDEM = rd.FlowAccumulation(pitremovedDEM, method='Dinf')

    return pitremovedDEM, accumDEM
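
Hypothetical usage of processDEM, with placeholder file names:

import richdem as rd

filled, accum = processDEM("dem.tif")
rd.SaveGDAL("dem_filled.tif", filled)
rd.SaveGDAL("dem_accum_dinf.tif", accum)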
Example #10
def mosaic_slope(url, Projection, Geotransform, Height, Width, Extent,
                 Resolution):

    tile_id = url.split('/')[7]
    name = f'{tile_id}.tif'
    urllib.request.urlretrieve(
        url, '/Users/jackson/Desktop/Execute/' + tile_id + '.tif')

    # We must reproject our DEM tile
    gdal.Warp(name, name, dstSRS=Projection)

    # Now we compute the slope over our reprojected DEM tile
    Tile_GeoTiff = gdal.Open(name, 0)
    T = Tile_GeoTiff.GetGeoTransform()
    P = Tile_GeoTiff.GetProjection()
    Tile_Array = rd.LoadGDAL(name)

    Slopes = rd.TerrainAttribute(Tile_Array, attrib='slope_riserun')

    filename = name
    driver = gdal.GetDriverByName('GTiff')
    dataset = driver.Create(filename, Slopes.shape[1], Slopes.shape[0], 1,
                            gdal.GDT_Float32)
    dataset.GetRasterBand(1).WriteArray(Slopes)
    dataset.SetGeoTransform(T)
    dataset.SetProjection(P)
    dataset.FlushCache()
    dataset = None

    # Preparing to add our new tile to the mosaic
    src_files_to_mosaic = []
    src = rasterio.open('Topo_Slope.tif')
    src_files_to_mosaic.append(src)
    src = rasterio.open(name)
    src_files_to_mosaic.append(src)

    os.remove('/Users/jackson/Desktop/Execute/' + tile_id + '.tif')

    # Adding to Mosaic
    mosaic, out = merge(src_files_to_mosaic,
                        method='first',
                        bounds=Extent,
                        res=Resolution,
                        nodata=-9999)
    mosaic = np.reshape(mosaic, (Height, Width))

    # Using GDAL to write the output raster
    filename = 'Topo_Slope.tif'  # Tiff holding the mosaicked slope
    driver = gdal.GetDriverByName('GTiff')  # Driver for writing geo-tiffs
    dataset = driver.Create(filename, Width, Height, 1,
                            gdal.GDT_Float32)  # Creating our tiff file
    dataset.GetRasterBand(1).WriteArray(mosaic)  # Writing values to our tiff
    dataset.SetGeoTransform(Geotransform)  # Setting geo-transform
    dataset.SetProjection(Projection)  # Setting the projection
    dataset.FlushCache()  # Saving tiff to disk
    dataset = None
Example #11
def curvature(input_file):
    """Create a curvature layer (combine profile and planform curvature) using richdem."""
    arr = rd.LoadGDAL(input_file)  #rd.rdarray(input_file,no_data=-9999)
    curvature = rd.TerrainAttribute(arr, attrib='curvature')
    output_file = input_file[:-9] + '_curvature_temp.tif'
    head, tail = os.path.split(output_file)
    rd.SaveGDAL(output_file, curvature)
    createMetadata(
        sys.argv,
        head + '/')  #just remove the filename so you are left with the path
Example #12
    def elevation_grid(self):
        """
        Property storing the grid containing the elevation data.
        """
        if not hasattr(self, '_elevation_grid'):
            dem_path = os.path.join(os.getcwd(), 'DEM.tif')
            elevation.clip(bounds=self.bounds, output=dem_path)
            self._elevation_grid = np.array(rd.LoadGDAL(dem_path))
            os.remove(dem_path)
        return self._elevation_grid
Example #13
def create_slope_aspect(input_file):
    arr = rd.LoadGDAL(input_file,
                      no_data=-9999)  #rd.rdarray(input_file,no_data=-9999)
    aspect = rd.TerrainAttribute(arr, attrib='aspect')
    slope = rd.TerrainAttribute(arr, attrib='slope_radians')
    aspect_output = input_file[:-4] + '_aspect.tif'
    slope_output = input_file[:-4] + '_slope.tif'
    rd.SaveGDAL(aspect_output, aspect)
    rd.SaveGDAL(slope_output, slope)
    return aspect_output, slope_output
Example #14
def BreachDepressions():
  parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description="""RichDEM Depression Breaching""")

  parser.add_argument('dem',                    type=str,                help='Elevation model')
  parser.add_argument('outname',                type=str,                help='Name of output file')
  parser.add_argument('-v', '--version',  action='version', version=rd._RichDEMVersion())
  args = parser.parse_args()

  dem = rd.LoadGDAL(args.dem)
  rd._AddAnalysis(dem, ' '.join(sys.argv))
  rd.BreachDepressions(dem)
  rd.SaveGDAL(args.outname, dem)
Example #15
def DepressionFilling():
  parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter, description='RichDEM Depression Filling')

  parser.add_argument('dem',     type=str,                     help='Elevation model')
  parser.add_argument('outname', type=str,                     help='Name of output file')
  parser.add_argument('-g', '--gradient', action='store_true', help='Ensure that all cells are at least an epsilon above their downstream cell. This ensures that each cell has a defined flow direction.')
  parser.add_argument('-v', '--version',  action='version', version=rd._RichDEMVersion())
  args = parser.parse_args()

  dem = rd.LoadGDAL(args.dem)
  rd._AddAnalysis(dem, ' '.join(sys.argv))
  rd.FillDepressions(dem, epsilon=args.gradient, in_place=True)
  rd.SaveGDAL(args.outname, dem)
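
The -g/--gradient flag maps onto the epsilon argument of rd.FillDepressions. A minimal sketch of the two variants, with a placeholder path:

import richdem as rd

dem = rd.LoadGDAL("dem.tif")                      # placeholder path
flat_fill = rd.FillDepressions(dem, epsilon=False)  # depressions become flat
eps_fill = rd.FillDepressions(dem, epsilon=True)    # every cell gets a defined flow direction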
Example #16
def RouteFlow_rd(dempath, flow, method='Dinf'):
    """
    Use richdem to route flow
    Args:
        dempath: path to DEM (.tif)
        flow: amount of flow to route
        method: richdem routing method (default: 'Dinf')

    Returns:
        routed flow

    """
    rdprop = rd.FlowProportions(rd.LoadGDAL(dempath), method=method)
Example #17
    def overlap_contour_dem(self, contour_path, dem_path):
        ct = self.read_contour(contour_path) * 255
        dem_data = rd.LoadGDAL(dem_path)
        m = np.max(dem_data)
        n = np.min(dem_data)
        dem_data = (dem_data - n) / float(m) * 255

        size = list(dem_data.shape)
        size.append(3)
        img = np.zeros(size, np.uint8)
        img[:, :, 0] = ct
        img[:, :, 1] = dem_data
        return img
Example #18
def BreachDepressions():
    parser = argparse.ArgumentParser(
        formatter_class=RawTextHelpFormatter,
        description="""RichDEM Depression Breaching

Modes:
Complete:    Breach everything.
            Ignore max_path_len, max_path_depth.
            There will be no depressions.
            There will be no mercy.
Selective:   Only breach those depressions that can be breached using the
            above criteria.
Constrained: Dig as long a path as necessary, but don't dig it deeper than
            max_path_depth.
""")

    parser.add_argument('dem', type=str, help='Elevation model')
    parser.add_argument('outname', type=str, help='Name of output file')
    parser.add_argument('-m',
                        '--mode',
                        required=True,
                        type=str,
                        help='Breaching mode to use')
    parser.add_argument(
        '-f',
        '--fill',
        action='store_true',
        help="If depressions can't be breached, should they be filled?")
    parser.add_argument('-l',
                        '--max_path_len',
                        type=int,
                        help="Maximum length of breaching path in cells")
    parser.add_argument('-d',
                        '--max_path_depth',
                        type=float,
                        help="Maximum depth of breaching path in z-units")
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=rd._RichDEMVersion())
    args = parser.parse_args()

    dem = rd.LoadGDAL(args.dem)
    rd._AddAnalysis(dem, ' '.join(sys.argv))
    rd.BreachDepressions(dem,
                         mode=args.mode,
                         fill=args.fill,
                         max_path_len=args.max_path_len,
                         max_path_depth=args.max_path_depth,
                         in_place=True)
    rd.SaveGDAL(args.outname, dem)
Example #19
def drain_area(dem, drain_area_out):
    """
    Creates a raster where each pixel represents the contributing
    upstream drainage area in km2. DEM should be in a desired projected coordinate system.
    PARAMS
    :dem: string - path to dem raster file
    :drain_area_out: string - path to output drainage area raster file
    """

    dem_in = rd.LoadGDAL(dem)
    rd.FillDepressions(dem_in, epsilon=True, in_place=True)
    accum_d8 = rd.FlowAccumulation(dem_in, method='D8')
    da = accum_d8 * (accum_d8.geotransform[1]**2 / 1000000)
    rd.SaveGDAL(drain_area_out, da)

    return
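
The conversion factor above assumes the DEM cell size is in metres: geotransform[1] is the pixel width, so cell_size**2 / 1,000,000 is the cell area in km2. A worked example for a hypothetical 10 m DEM:

cell_size = 10.0                          # metres, i.e. accum_d8.geotransform[1]
cell_area_km2 = cell_size ** 2 / 1000000  # 0.0001 km2 per cell
print(2500 * cell_area_km2)               # 2500 upstream cells -> 0.25 km2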
Example #20
def pendiente(srcFolder="./", dstFolder="./"):
    for archivo in os.listdir(srcFolder):
        if archivo.endswith(".tif"):
            if srcFolder.endswith("/"):
                ruta = srcFolder + archivo
            else:
                ruta = srcFolder + "/" + archivo
            dem = richdem.LoadGDAL(ruta)
            slope = richdem.TerrainAttribute(dem, attrib='slope_radians')
            archivo = "pendiente_" + archivo
            if not os.path.exists(dstFolder):
                os.mkdir(dstFolder)
            if srcFolder.endswith("/"):
                dstRuta = dstFolder + archivo
            else:
                dstRuta = dstFolder + "/" + archivo
            richdem.SaveGDAL(dstRuta, slope)
Example #21
def RdInfo():
    parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter,
                                     description="""RichDEM Dataset Information

Parameters:
rda      -- A dataset
""")
    parser.add_argument('rda', type=str, help='Elevation model')
    parser.add_argument('-s',
                        '--show',
                        action='store_true',
                        help='Show the model')
    parser.add_argument('--cmap',
                        type=str,
                        default='jet',
                        help='Colormap (Default: jet)')
    args = parser.parse_args()

    rda = rd.LoadGDAL(args.rda)

    print('File      = {0}    '.format(args.rda))
    print('Data type = {0}    '.format(rda.dtype))
    print('Width     = {0}    '.format(rda.shape[1]))
    print('Height    = {0}    '.format(rda.shape[0]))
    print('Shape     = {0}x{1}'.format(rda.shape[1], rda.shape[0]))
    print('Metadata:')

    for k, v in rda.metadata.items():
        if k == 'PROCESSING_HISTORY':
            continue
        print('\t{0} = {1}'.format(k, v))

    print('Processing History:')
    print('-------------------')
    if 'PROCESSING_HISTORY' in rda.metadata:
        for ph in rda.metadata['PROCESSING_HISTORY'].split('\n'):
            ph = ph.strip()
            if len(ph) == 0:
                continue
            print(ph)
    print('-------------------')

    if args.show:
        rd.rdShow(rda, cmap=args.cmap)
Example #22
def MedianFilter(in_dem, kernel_size=3, out_file=None):

    print("Median filtering ...")
    start_time = time.time()
    dem = rd.LoadGDAL(in_dem)
    no_data = dem.no_data
    projection = dem.projection
    geotransform = dem.geotransform

    med = ndimage.median_filter(dem, size=kernel_size)
    med = np2rdarray(med, no_data, projection, geotransform)
    print("Run time: {:.4f} seconds".format(time.time() - start_time))

    if out_file is not None:
        print("Saving dem ...")
        rd.SaveGDAL(out_file, med)
        return out_file

    return med
Example #23
def GaussianFilter(in_dem, sigma=1, out_file=None):

    print("Gaussian filtering ...")
    start_time = time.time()
    dem = rd.LoadGDAL(in_dem)
    no_data = dem.no_data
    projection = dem.projection
    geotransform = dem.geotransform

    gau = ndimage.gaussian_filter(dem, sigma=sigma)
    gau = np2rdarray(gau, no_data, projection, geotransform)
    print("Run time: {:.4f} seconds".format(time.time() - start_time))

    if out_file is not None:
        print("Saving dem ...")
        rd.SaveGDAL(out_file, gau)
        return out_file

    return gau
Example #24
def DelineateMounts(in_dem,
                    min_size,
                    min_height,
                    interval,
                    out_dir,
                    bool_shp=False):
    """Delineates the nested hierarchy of elevated features (i.e., mounts).

    Args:
        in_dem (str): File path to the input DEM.
        min_size (int): The minimum number of pixels to be considered as an object.
        min_height (float): The minimum height of the feature to be considered as an object.
        interval (float): The slicing interval.
        out_dir (str): The output directory.
        bool_shp (bool, optional): Whether to generate shapefiles. Defaults to False.

    Returns:
        tuple: File paths to the depression ID and level.
    """
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    print("Loading data ...")
    dem = rd.LoadGDAL(in_dem)
    # projection = dem.projection
    geotransform = dem.geotransform
    cell_size = np.round(geotransform[1], decimals=3)

    out_dem = os.path.join(out_dir, "dem_flip.tif")
    in_dem = FlipDEM(dem, delta=100, out_file=out_dem)

    min_elev, max_elev, no_data = get_min_max_nodata(dem)
    print("min = {:.2f}, max = {:.2f}, no_data = {}, cell_size = {}".format(
        min_elev, max_elev, no_data, cell_size))

    sink_path = ExtractSinks(in_dem, min_size, out_dir)
    dep_id_path, dep_level_path = DelineateDepressions(sink_path, min_size,
                                                       min_height, interval,
                                                       out_dir, bool_shp)

    return dep_id_path, dep_level_path
Example #25
def MeanFilter(in_dem, kernel_size=3, out_file=None):

    print("Mean filtering ...")
    start_time = time.time()
    dem = rd.LoadGDAL(in_dem)
    no_data = dem.no_data
    projection = dem.projection
    geotransform = dem.geotransform

    weights = np.full((kernel_size, kernel_size),
                      1.0 / (kernel_size * kernel_size))
    mean = ndimage.convolve(dem, weights)
    mean = np2rdarray(mean, no_data, projection, geotransform)
    print("Run time: {:.4f} seconds".format(time.time() - start_time))

    if out_file is not None:
        print("Saving dem ...")
        rd.SaveGDAL(out_file, mean)
        return out_file

    return mean
Example #26
def add_slope_aspect_curvature(df, file, indexes):
    for attr in ['slope_percentage', 'aspect', 'profile_curvature']:
        table = None
        try:
            table = rd.TerrainAttribute(rd.LoadGDAL(file, no_data=-9999),
                                        attrib=attr)
            rd.SaveGDAL("./temp.tif", table)
            table = None
            table = gr.from_file("./temp.tif")
            for index in indexes:
                try:
                    row = df.loc[index]
                    val = table.map_pixel(row['lon'], row['lat'])
                    df.loc[index, attr] = float(val)
                except Exception:
                    df.loc[index, attr] = np.nan
            os.remove("./temp.tif")
        except Exception:
            for index in indexes:
                df.loc[index, attr] = np.nan
    return df
Example #27
def runoff():
    #    return 788
    dem_path = os.path.join(os.getcwd(), 'Konambe_dem_clipped.tif')
    village_dem = rd.LoadGDAL(dem_path, no_data=-9999)
    village_dem = rd.FillDepressions(village_dem, epsilon=False, in_place=False)
    arr = rd.TerrainAttribute(village_dem,
                              attrib='slope_percentage',
                              zscale=1 / 111120)
    np.save('out.npy', arr)
    demnp = np.load('out.npy')
    dem = copy.copy(arr)
    dem[np.where((arr > 0) & (arr < 5))] = 1
    dem[np.where((arr >= 5) & (arr < 20))] = 2
    dem[np.where((arr >= 20))] = 3

    c1 = np.count_nonzero(dem == 1)
    c2 = np.count_nonzero(dem == 2)
    c3 = np.count_nonzero(dem == 3)

    area_m2_1 = c1 * 900
    area_m2_2 = c2 * 900
    area_m2_3 = c3 * 900
    area_ha1 = area_m2_1 * 0.0001
    area_ha2 = area_m2_2 * 0.0001
    area_ha3 = area_m2_3 * 0.0001
    #print('area',area_ha1+area_ha2)
    worthy_area = area_ha1 + area_ha2
    #coeff for rainfall 775mm
    runoff1 = area_ha1 * 1.0791
    runoff2 = area_ha2 * 1.6186
    runoff3 = area_ha3 * 2.1583
    #coeff for rainfall 725mm
    #runoff1=area_ha1*1.0791
    #runoff2=area_ha2*1.3878
    #runoff3=area_ha3*1.8496
    tot_runoff = runoff1 + runoff2 + runoff3
    return tot_runoff, worthy_area


#r=runoff()
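
The 900 m2 per-cell factor in runoff() implies a 30 m DEM (30 x 30 = 900); for any other resolution the factor, and hence the areas and runoff volumes, would scale accordingly. A small check, assuming 30 m cells:

cell_size = 30.0                      # metres, assumed from the 900 m2 factor above
cell_area_m2 = cell_size ** 2         # 900 m2 per cell
cell_area_ha = cell_area_m2 * 0.0001  # 0.09 ha per cell
print(cell_area_ha)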
Example #28
def DelineateDepressions(in_sink, min_size, min_depth, interval, out_dir, bool_level_shp=False):

    # The following parameters can be used by default
    interval = interval * (-1)  # convert slicing interval to negative value

    out_img_dir = os.path.join(out_dir, "img-level")
    out_shp_dir = os.path.join(out_dir, "shp-level")
    out_obj_file = os.path.join(out_dir, "depression_id.tif")
    out_level_file = os.path.join(out_dir, "depression_level.tif")
    out_vec_file = os.path.join(out_dir, "depressions.shp")
    out_csv_file = os.path.join(out_dir, "depressions_info.csv")

    init_time = time.time()

    # delete contents in output folder if existing
    if not os.path.exists(out_dir):
        os.mkdir(out_dir)
    if os.path.exists(out_img_dir):
        shutil.rmtree(out_img_dir)
    os.mkdir(out_img_dir)
    if os.path.exists(out_shp_dir):
        shutil.rmtree(out_shp_dir)
    os.mkdir(out_shp_dir)

    print("Reading data ...")
    read_time = time.time()

    image = rd.LoadGDAL(in_sink)
    no_data_raw, projection, geotransform, resolution = getMetadata(image)
    rows_cols = image.shape
    print("rows, cols: " + str(rows_cols))
    print("Pixel resolution: " + str(resolution))
    print("Read data time: {:.4f} seconds".format(time.time() - read_time))

    min_elev, max_elev, no_data = get_min_max_nodata(image)  # set nodata value to a large value, e.g., 9999
    # initialize output image
    obj_image = np.zeros(image.shape)  # output depression image with unique id for each nested depression
    level_image = np.zeros(image.shape)  # output depression level image

    # nb_labels is the total number of objects. 0 represents background object.
    label_objects, nb_labels = regionGroup(image, min_size, no_data)
    # regions = measure.regionprops(label_objects, image, coordinates='xy')
    regions = measure.regionprops(label_objects, image)
    del image  # delete the original image to save memory
    prep_time = time.time()
    print("Data preparation time: {:.4f} seconds".format(prep_time - init_time))
    print("Total number of regions: {}".format(nb_labels))

    identify_time = time.time()

    obj_uid = 0
    global_dep_list = []

    # loop through regions and identify nested depressions in each region using level-set method
    for region in regions:  # iterate through each depression region
        region_id = region.label
        img = region.intensity_image  # dem subset for each region
        bbox = region.bbox

        # save all input parameters needed for level set methods as a dict
        image_paras = set_image_paras(no_data, min_size, min_depth, interval, resolution)

        # execute level set methods
        out_obj, dep_list = levelSet(img, region_id, obj_uid, image_paras)

        for dep in dep_list:
            global_dep_list.append(dep)

        obj_uid += len(dep_list)

        level_obj = obj_to_level(out_obj, global_dep_list)
        obj_image = writeObject(obj_image, out_obj, bbox)       # write region to whole image
        level_image = writeObject(level_image, level_obj, bbox)

        del out_obj, level_obj, region

    del regions, label_objects

    print("=========== Run time statistics =========== ")
    print("(rows, cols):\t\t\t {0}".format(str(rows_cols)))
    print("Pixel resolution:\t\t {0} m".format(str(resolution)))
    print("Number of regions:\t\t {0}".format(str(nb_labels)))
    print("Data preparation time:\t\t {:.4f} s".format(prep_time - init_time))
    print("Identify level time:\t\t {:.4f} s".format(time.time() - identify_time))

    write_time = time.time()
    # writeRaster(obj_image, out_obj_file, in_sink)
    # writeRaster(level_image, out_level_file, in_sink)
    # SaveGDAL function can only save data as floating point
    level_image = np2rdarray(np.int32(level_image), no_data_raw, projection, geotransform)
    rd.SaveGDAL(out_level_file, level_image)
    obj_image = np2rdarray(np.int32(obj_image), no_data_raw, projection, geotransform)
    rd.SaveGDAL(out_obj_file, obj_image)
    print("Write image time:\t\t {:.4f} s".format(time.time() - write_time))

    # converting object image to polygon
    level_time = time.time()
    polygonize(out_obj_file, out_vec_file)
    write_dep_csv(global_dep_list, out_csv_file)
    print("Polygonize time:\t\t {:.4f} s".format(time.time() - level_time))

    # extracting polygons for each individual level
    if bool_level_shp:
        level_time = time.time()
        extract_levels(level_image, obj_image, min_size, no_data, out_img_dir, out_shp_dir, in_sink, False)
        print("Extract level time:\t\t {:.4f} s".format(time.time() - level_time))
        shutil.rmtree(out_img_dir)
    else:
        shutil.rmtree(out_shp_dir)
        shutil.rmtree(out_img_dir)
    del level_image
    del obj_image

    end_time = time.time()
    print("Total run time:\t\t\t {:.4f} s".format(end_time - init_time))
    return out_obj_file, out_level_file
Example #29
    del obj_image

    end_time = time.time()
    print("Total run time:\t\t\t {:.4f} s".format(end_time - init_time))
    return out_obj_file, out_level_file


# #####################################  main script
if __name__ == '__main__':

    # ************************ change the following parameters if needed ******************************** #
    # set input files
    in_dem = os.path.join(os.getcwd(), "lidar/data/dem.tif")
    in_sink = os.path.join(os.getcwd(), "lidar/data/sink.tif")
    # parameters for level set method
    min_size = 1000         # minimum number of pixels as a depression
    min_depth = 0.3         # minimum depression depth
    interval = 0.3          # slicing interval, top-down approach
    bool_level_shp = True  # whether or not to extract polygons for each individual level
    # set output directory
    out_dir = os.path.join(os.path.expanduser("~"), "temp")  # create a temp folder under user home directory
    # **************************************************************************************************#

    dep_id_path, dep_level_path = DelineateDepressions(in_sink, min_size, min_depth, interval, out_dir, bool_level_shp)
    print("Results are saved in: {}".format(out_dir))

    dep_id = rd.LoadGDAL(dep_id_path)
    dep_id_fig = rd.rdShow(dep_id, ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5))
    dep_level = rd.LoadGDAL(dep_level_path)
    dep_level_fig = rd.rdShow(dep_level, ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5))
Example #30
def gui():
    """An interactive Graphical User Interface (GUI) for the lidar package."""

    # identify the sample data directory of the package
    package_name = "lidar"
    data_dir = pkg_resources.resource_filename(package_name, "data/")

    # use the sample dem. Change it to your own dem if needed
    in_dem = os.path.join(data_dir, "dem.tif")
    # set output directory. By default, use the temp directory under user's home directory
    out_dir = os.path.join(os.path.expanduser("~"), "temp")

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    with sg.FlexForm("lidar package GUI") as form:
        form_rows = [
            [
                sg.Text(
                    "Level-set Method for Delineating Topographic Hierarchy",
                    size=(50, 1),
                    font=("Arial", 14),
                    text_color="black",
                )
            ],
            [sg.Text("Select DEM:", font=("Arial", 14))],
            [sg.InputText(in_dem, size=(60, 1)),
             sg.FileBrowse()],
            [sg.Text("Delineation Mode:", font=("Arial", 14))],
            [
                sg.Radio("Depressions", "RADIO1", default=True),
                sg.Radio("Mounts", "RADIO1"),
            ],
            [sg.Text("DEM Filtering:", font=("Arial", 14))],
            [
                sg.Text("Select Filter:"),
                sg.InputCombo([
                    "None", "Mean Filter", "Median Filter", "Gaussian Filter"
                ]),
                sg.Text("Kernel Size: "),
                sg.InputText(default_text="3", size=(10, 1)),
            ],
            [sg.Text("Level-set Parameters:", font=("Arial", 14))],
            [
                sg.Text("Minimum size:"),
                sg.InputText(default_text="1000", size=(10, 1)),
                sg.Text("Minimum depth:"),
                sg.InputText(default_text="1.0", size=(10, 1)),
            ],
            [
                sg.Text("Slicing interval:"),
                sg.InputText(default_text="0.5", size=(10, 1)),
                sg.Text("Output shapefiles:"),
                sg.InputCombo(["Yes", "No"], default_value="No"),
            ],
            [sg.Text("Display Results:", font=("Arial", 14))],
            [sg.InputCombo(["Yes", "No"], default_value="No")],
            [sg.Text("Select Output Directory:", font=("Arial", 14))],
            [sg.InputText(out_dir, size=(60, 1)),
             sg.FolderBrowse()],
            [sg.Submit(), sg.Cancel()],
        ]
        button, (
            in_dem,
            mode_dep,
            mode_mnt,
            filter_type,
            kernel_szie,
            min_size,
            min_depth,
            interval,
            bool_shp,
            display,
            out_dir,
        ) = form.LayoutAndRead(form_rows)

        if button == "Submit":

            kernel_szie = int(kernel_szie)
            min_size = int(min_size)
            min_depth = float(min_depth)
            interval = float(interval)
            if bool_shp == "Yes":
                bool_shp = True
            else:
                bool_shp = False
            if display == "Yes":
                display = True
            else:
                display = False
            if mode_mnt and in_dem == os.path.join(data_dir, "dem.tif"):
                in_dem = os.path.join(data_dir, "dsm.tif")

            out_dem_name = filter_type.split(" ")[0].lower() + ".tif"
            out_dem = os.path.join(out_dir, out_dem_name)

            sg.Popup(
                "Please Wait!",
                "The program is running! You will receive another message when it is done!",
            )

            if filter_type == "Mean Filter":
                in_dem = MeanFilter(in_dem,
                                    kernel_size=kernel_szie,
                                    out_file=out_dem)
            elif filter_type == "Median Filter":
                in_dem = MedianFilter(in_dem,
                                      kernel_size=kernel_szie,
                                      out_file=out_dem)
            elif filter_type == "Gaussian Filter":
                in_dem = GaussianFilter(in_dem,
                                        sigma=kernel_szie,
                                        out_file=out_dem)

            if mode_dep:
                sink_path = ExtractSinks(in_dem, min_size, out_dir)
                dep_id_path, dep_level_path = DelineateDepressions(
                    sink_path, min_size, min_depth, interval, out_dir,
                    bool_shp)
            else:
                sink_path = os.path.join(out_dir, "sink.tif")
                dep_id_path, dep_level_path = DelineateMounts(
                    in_dem, min_size, min_depth, interval, out_dir, bool_shp)

            if display:
                # loading data and results
                dem = rd.LoadGDAL(in_dem)
                sink = rd.LoadGDAL(sink_path)
                dep_id = rd.LoadGDAL(dep_id_path)
                dep_level = rd.LoadGDAL(dep_level_path)

                # plotting results
                dem_fig = rd.rdShow(dem,
                                    ignore_colours=[0],
                                    axes=False,
                                    cmap="jet",
                                    figsize=(6, 5.5))
                sink_fig = rd.rdShow(sink,
                                     ignore_colours=[0],
                                     axes=False,
                                     cmap="jet",
                                     figsize=(6, 5.5))
                dep_id_fig = rd.rdShow(dep_id,
                                       ignore_colours=[0],
                                       axes=False,
                                       cmap="jet",
                                       figsize=(6, 5.5))
                dep_level_fig = rd.rdShow(
                    dep_level,
                    ignore_colours=[0],
                    axes=False,
                    cmap="jet",
                    figsize=(6, 5.5),
                )

                del (
                    dem,
                    sink,
                    dep_id,
                    dep_level,
                    dem_fig,
                    sink_fig,
                    dep_id_fig,
                    dep_level_fig,
                )

            sg.Popup("Success!",
                     "The results are saved in: {}".format(out_dir))