Example 1
import glob
import os

import geopandas as gpd
import whitebox


def extract_value_metrique(path_couche_point, path_metrique):
    '''
    :param path_couche_point: Path to the point layer; the values will be extracted at the points (str)
    :param path_metrique: Path to the directory containing the image(s) whose values must be extracted (str)
    :return: The input point layer, with the image values for each point (.shp)
    '''

    wbt = whitebox.WhiteboxTools()
    # Read the metric rasters
    print('Reading the metrics...')
    ls = glob.glob(path_metrique + os.sep + '*.tif')
    dic_ordre = {}

    # Build the semicolon-separated metric string expected by the extraction tool
    print('Building the metric string...')
    for i, metrique in enumerate(ls):
        nom = os.path.basename(metrique).split('_')[0]
        dic_ordre[str(i + 1)] = nom
    chaine_metrique = ';'.join(ls)
    print(dic_ordre)

    # Extract the values of each image in the directory, in chain order; the values are added to the layer
    print('Extracting the values...')
    wbt.extract_raster_values_at_points(chaine_metrique,
                                        points=path_couche_point)

    # Read the resulting point layer
    print('Opening the SHP...')
    shp = gpd.read_file(path_couche_point)
    # del shp['VALUE']

    # Rename the 'VALUE' columns to the actual image names
    print('Creating the new columns...')
    for col in shp.columns:
        if col in ('id', 'geometry'):
            continue
        elif col.startswith('VALUE'):
            num = col[5:]
            nom_colonne = dic_ordre[num]
            shp[nom_colonne] = shp[col].round(4)

    print('Dropping the old columns...')
    for col in [c for c in shp.columns if c.startswith('VALUE')]:
        del shp[col]

    # Save the resulting layer in the same location
    print('Saving...')
    shp.to_file(path_couche_point)
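A minimal usage sketch for the function above; both paths are hypothetical, and the point shapefile is overwritten in place:

# Hypothetical inputs: a point shapefile and a folder of '*.tif' metric rasters
extract_value_metrique('data/plots.shp', 'data/metrics')
# 'data/plots.shp' now holds one rounded value column per raster, named after
# each raster's file-name prefix.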
Example 2
def determine_treated_areas(srcfolder):
    wbt = whitebox.WhiteboxTools()
    wbt.work_dir = str(Path(srcfolder).resolve())
    wbt.verbose = False

    pbar = tqdm(list(Path(wbt.work_dir).glob("*/BMP*.shp")))
    for shpfile in pbar:
        pbar.set_description(f"Processing {shpfile.stem}")
        demfile = (shpfile.parent / "DEM.tif").resolve()
        dp = dem.DEMProcessor(wbt, demfile, shpfile.parent)
        pbar.set_description(f"Processing {shpfile.stem} (Watersheds)")
        dp.watershed(shpfile, suffix=f"04_watershed_{shpfile.stem}")
Example 3
def flow_accumulation(srcfolder):
    wbt = whitebox.WhiteboxTools()
    wbt.work_dir = str(Path(srcfolder).resolve())
    wbt.verbose = False

    pbar = tqdm(list(Path(wbt.work_dir).glob("*/DEM.tif")))
    for demfile in pbar:
        pbar.set_description(f"Processing {demfile.parent.name} (flow acc.)")
        dp = dem.DEMProcessor(wbt, demfile.resolve(), demfile.parent)
        dp.flow_accumulation()

    return 0
Example 4
def compile_pourpoints(srcfolder, dstfolder, bmps_to_exclude):

    skippers = (
        pandas.read_json(bmps_to_exclude, dtype={"huc": str})
        .set_index(["huc", "layer"])
        .loc[:, "object_id"]
        .to_dict()
    )

    wbt = whitebox.WhiteboxTools()
    wbt.work_dir = str(Path(".").resolve())
    wbt.verbose = False

    dstpath = Path(dstfolder)
    pbar = tqdm(list(Path(srcfolder).glob("*.gdb")))
    for gdb in pbar:
        pbar.set_description(gdb.stem)
        with TemporaryDirectory() as td:
            for lyrinfo in LAYERS:
                tmpfile = Path(td) / f"{lyrinfo['name']}.shp"
                raw_shapes = geopandas.read_file(gdb, layer=lyrinfo["name"])
                if not raw_shapes.empty:
                    huc = gdb.stem.split("_")[-1]
                    to_skip = skippers.get((huc, lyrinfo["name"]), None)
                    to_skip = validate.at_least_empty_list(to_skip)
                    points = (
                        raw_shapes.loc[lambda df: ~df.index.isin(to_skip)]
                        .pipe(lyrinfo["fxn"])
                        .assign(snapped=lyrinfo["snap"])
                        .reset_index()
                        .drop(columns=["index"])
                    )
                    points.to_file(tmpfile)
                    if lyrinfo["snap"]:
                        demfile = dstpath / gdb.stem / "DEM.tif"
                        dp = dem.DEMProcessor(wbt, demfile, dstpath / gdb.stem)
                        dp.snap_points(tmpfile, tmpfile)

            outdir = dstpath / gdb.stem
            _gdfs = [geopandas.read_file(shp) for shp in Path(td).glob("*.shp")]
            if any(not gdf.empty for gdf in _gdfs):
                all_points = pandas.concat(
                    _gdfs,
                    ignore_index=True,
                    axis=0,
                )
                outdir.parent.mkdir(exist_ok=True, parents=True)
                for col in ["isin80s", "isin2010", "isin2016"]:
                    subset = (
                        all_points.loc[lambda df: df[col].eq(1)]
                        .reset_index(drop=True)
                        .rename_axis("usid", axis="index")
                    )
                    outfile = outdir / f"BMP_{col}.shp"
                    if not subset.empty:
                        subset.to_file(outfile)
    return 0
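A hedged invocation sketch for compile_pourpoints; the folder names and the exclusion file are hypothetical, and LAYERS, validate, and dem are modules of the original project:

# 'bmps_to_exclude.json' is assumed to be a records-style JSON table with
# "huc", "layer", and "object_id" columns, matching the read_json call above.
compile_pourpoints('raw_gdbs', 'processed', 'bmps_to_exclude.json')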
def slope(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    print('Computing the slope...')
    wbt.slope(dem, output)
    print('Done')
    print()
def echantillonnage(masque_ext, masque_zone, dossier_output, path_metrique):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    liste_raster = [masque_ext, masque_zone]
    liste_valeur_in = []
    liste_valeur_out = []

    # For each mask in the list
    for raster in liste_raster:

        suffixe = ''
        if raster == liste_raster[0]:
            suffixe = 'in'
        elif raster == liste_raster[1]:
            suffixe = 'out'

        # Create the random-sample raster over the mask
        random_raster = os.path.join(dossier_output,
                                     'random_values_{}.tif'.format(suffixe))
        wbt.random_sample(raster, random_raster, 2000)

        # Convert the sample raster to points
        random_point = os.path.join(dossier_output,
                                    'random_values_{}.shp'.format(suffixe))
        wbt.raster_to_vector_points(random_raster, random_point)

        # Extract the metric values at each point location. The values are stored
        # in a new attribute of the layer
        wbt.extract_raster_values_at_points(path_metrique,
                                            random_point,
                                            out_text=False)

        # Extract and format the values into a dictionary. The 'in_zone' key holds
        # the list of values inside the zones; the 'out_zone' key holds the list
        # of values outside the zones.
        with fiona.open(
                os.path.join(dossier_output,
                             'random_values_{}.shp'.format(suffixe)),
                'r') as points:
            for feature in points:
                valeur = feature['properties']['VALUE1']
                if raster == liste_raster[0]:
                    liste_valeur_in.append(valeur)
                else:
                    liste_valeur_out.append(valeur)

    dict_valeurs = {'in_zone': liste_valeur_in, 'out_zone': liste_valeur_out}
    return dict_valeurs
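A usage sketch, assuming two binary mask rasters (inside and outside the zones of interest), an output folder, and one metric raster; all paths are hypothetical:

valeurs = echantillonnage('data/masque_in.tif', 'data/masque_out.tif',
                          'out/echantillons', 'data/metrique.tif')
print(len(valeurs['in_zone']), len(valeurs['out_zone']))  # up to 2000 samples per mask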
Example 7
def snap(dataurl, out_url, out_url2):
    wbt = whitebox.WhiteboxTools()
    wbt.set_verbose_mode(False)
    wbt.work_dir = out_url
    wbt.snap_pour_points("point.shp",
                         "Flow_acc.tif",
                         "snap_point.shp",
                         snap_dist=0.01)
    wbt.watershed("Flow_dir.tif", "snap_point.shp", "Watershed.tif")
    # wbt.longest_flowpath("DEM_fill.tif","Watershed.tif",'LongestFlowpath.shp')
    # wbt.raster_to_vector_lines("Watershed.tif","Watershed.shp")

    # Convert basin raster file to polygon
    mask = None
    with rasterio.open(os.path.join(out_url, "Watershed.tif")) as src:
        image = src.read(1)  # first band
        results = (
            {'properties': {'raster_val': v}, 'geometry': s}
            for i, (s, v) in enumerate(
                shapes(image, mask=mask, transform=src.transform))
        )

    geoms = list(results)
    gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms)
    # Filter nodata value
    gpd_polygonized_raster = gpd_polygonized_raster[
        gpd_polygonized_raster['raster_val'] == 1]
    # Convert to geojson
    boundary = gpd_polygonized_raster.to_json()
    gpd_polygonized_raster.to_file(driver='ESRI Shapefile',
                                   filename=os.path.join(
                                       out_url, "basin_boundary.shp"))
    wbt.clip_raster_to_polygon("DEM_out.tif", "basin_boundary.shp",
                               "DEM_watershed.tif")
    wbt.hypsometric_analysis("DEM_watershed.tif", "hypso.html")
    wbt.slope_vs_elevation_plot("DEM_watershed.tif", "Slope_elevation.html")
    wbt.extract_raster_statistics("DEM_out.tif",
                                  "Watershed.tif",
                                  output=None,
                                  stat="total",
                                  out_table="stat.html")
    wbt.raster_histogram("DEM_watershed.tif", "hist.html")
    X, Y = hyspoparser.hypso(os.path.join(out_url, "hypso.html"))
    stat = hyspoparser.stat(os.path.join(out_url, "stat.html"))

    return boundary, X, Y, stat
def fpdems(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Smoothing
    print('Filtering the DEM...')
    wbt.feature_preserving_smoothing(dem, output)
    print('Done')
    print()
def EdgeDensity(dem, output, size, seuil):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the edge density
    print('Creating the edge density...')
    wbt.edge_density(dem, output, size, seuil)
    print('Done')
    print()
def Downslope_Ind(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the downslope index
    print('Creating the downslope index...')
    wbt.downslope_index(dem, output)
    print('Done')
    print()
def AverNormVectAngDev(dem, output, size):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Average normal vector angular deviation
    print('Creating the average normal vector angular deviation...')
    wbt.average_normal_vector_angular_deviation(dem, output, size)
    print('Done')
    print()
def sphericalStdDevNormals(dem, output, size):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the spherical standard deviation of normals
    print('Creating the spherical standard deviation of normals...')
    wbt.spherical_std_dev_of_normals(dem, output, size)
    print('Done')
    print()
def tan_curvature(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the tangential curvature
    print('Creating the tangential curvature...')
    wbt.tangential_curvature(dem, output)
    print('Done')
    print()
def CircularVarofAspect(dem, output, size):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the circular variance of aspect
    print('Creating the circular variance of aspect...')
    wbt.circular_variance_of_aspect(dem, output, size)
    print('Done')
    print()
def TWI(slope, sca, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the TWI
    print('Creating the TWI...')
    wbt.wetness_index(sca, slope, output)
    print('Done')
    print()
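These thin wrappers compose into a small terrain pipeline. The wetness index needs a specific contributing area (SCA) raster and a slope raster, both derived from a hydrologically corrected DEM; a sketch with hypothetical paths, using the breachDepression and SCA wrappers defined further down:

# All paths are hypothetical; each wrapper creates its output directory.
breachDepression('data/dem.tif', 'derived/dem_breached.tif')
SCA('derived/dem_breached.tif', 'derived/sca.tif')
slope('derived/dem_breached.tif', 'derived/slope.tif')
TWI('derived/slope.tif', 'derived/sca.tif', 'derived/twi.tif')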
Example 16
def breachDepression(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    head_output = os.path.dirname(output)
    if not os.path.exists(head_output):
        os.makedirs(head_output)

    # Create the corrected DEM
    print('Creating the corrected DEM')
    wbt.breach_depressions(dem, output)
    print('Done')
    print()
Example 17
def clip(dataurl, out_url, outurl2, min_length):
    if not min_length:
        min_length = 0.01

    wbt = whitebox.WhiteboxTools()
    wbt.work_dir = out_url
    wbt.clip_raster_to_polygon(dataurl, "working_area.shp", "DEM_clipped.tif")
    wbt.breach_depressions("DEM_clipped.tif", "DEM_breach.tif")
    wbt.fill_depressions("DEM_breach.tif", "DEM_fill.tif")
    wbt.flow_accumulation_full_workflow("DEM_fill.tif",
                                        "DEM_out.tif",
                                        "Flow_dir.tif",
                                        "Flow_acc.tif",
                                        log=True)
    wbt.basins("Flow_dir.tif", "Basins.tif")
    wbt.extract_streams("Flow_acc.tif", "streams.tif", threshold=-1)
    wbt.remove_short_streams("Flow_dir.tif",
                             "streams.tif",
                             "streams_del.tif",
                             min_length=min_length)
    wbt.find_main_stem("Flow_dir.tif", "streams_del.tif", "main_stream.tif")
    wbt.raster_streams_to_vector("streams_del.tif", "Flow_dir.tif",
                                 "riverswht.shp")
    wbt.raster_streams_to_vector("main_stream.tif", "Flow_dir.tif",
                                 "main_stream.shp")
    wbt.horton_stream_order("Flow_dir.tif", "streams_del.tif", "Horton.tif")
    wbt.strahler_stream_order("Flow_dir.tif", "streams_del.tif",
                              "Strahler.tif")
    wbt.raster_streams_to_vector("Horton.tif", "Flow_dir.tif", "Horton.shp")
    wbt.raster_streams_to_vector("Strahler.tif", "Flow_dir.tif",
                                 "Strahler.shp")
    # wbt.long_profile("Flow_dir.tif","streams_del.tif","DEM_fill.tif","Profile.html")
    # wbt.longest_flowpath("DEM_fill.tif","Basins.tif","longest_path.shp")
    file = gpd.read_file(os.path.join(out_url, "riverswht.shp"))
    file1 = gpd.read_file(os.path.join(out_url, "Horton.shp"))
    file2 = gpd.read_file(os.path.join(out_url, "Strahler.shp"))
    file.to_file(os.path.join(out_url, "riverswht.geojson"), driver="GeoJSON")
    file1.to_file(os.path.join(out_url, "Horton.geojson"), driver="GeoJSON")
    file2.to_file(os.path.join(out_url, "Strahler.geojson"), driver="GeoJSON")
    riverswht = file.to_json()
    Horton = file1.to_json()
    Strahler = file2.to_json()
    return riverswht, Horton, Strahler
def breachDepression(input, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    head_output = os.path.dirname(output)
    if not os.path.exists(head_output):
        os.makedirs(head_output)

    # Create the corrected DEM
    print('Creating the corrected DEM')
    # wbt.breach_depressions_least_cost(dem=input, output=output, dist=size, fill=filling)
    wbt.breach_depressions(dem=input, output=output)
    print('Done')
    print()
def SCA(dem, output):

    wbt = whitebox.WhiteboxTools()
    wbt.verbose = False

    # Create the output directory
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Compute the SCA
    print('Creating the SCA...')
    wbt.fd8_flow_accumulation(dem,
                              output,
                              out_type='specific contributing area')
    print('Done')
    print()
def relative_TPI(input, output, size):

    wbt = whitebox.WhiteboxTools()
    os.environ['RUST_BACKTRACE'] = '1'  # surface Rust backtraces from the WhiteboxTools binary

    # Create the output directory
    path = os.path.dirname(input)
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))

    # Working directory
    wbt.set_working_dir(path)
    wbt.verbose = False

    # Compute the relative TPI
    print('Creating the relative TPI...')
    wbt.relative_topographic_position(input, output, size, size)
    print('Done')
    print()
Example 21
def csv_points_to_shp(in_csv,
                      out_shp,
                      latitude="latitude",
                      longitude="longitude"):
    """Converts a csv file containing points (latitude, longitude) into a shapefile.

    Args:
        in_csv (str): File path or HTTP URL to the input csv file. For example, https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv
        out_shp (str): File path to the output shapefile.
        latitude (str, optional): Column name for the latitude column. Defaults to 'latitude'.
        longitude (str, optional): Column name for the longitude column. Defaults to 'longitude'.

    """
    import whitebox

    if in_csv.startswith("http") and in_csv.endswith(".csv"):
        out_dir = os.path.join(os.path.expanduser("~"), "Downloads")
        out_name = os.path.basename(in_csv)

        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        download_from_url(in_csv, out_dir=out_dir)
        in_csv = os.path.join(out_dir, out_name)

    wbt = whitebox.WhiteboxTools()
    in_csv = os.path.abspath(in_csv)
    out_shp = os.path.abspath(out_shp)

    if not os.path.exists(in_csv):
        raise FileNotFoundError("The provided csv file does not exist.")

    with open(in_csv, encoding="utf-8") as csv_file:
        reader = csv.DictReader(csv_file)
        fields = reader.fieldnames
        xfield = fields.index(longitude)
        yfield = fields.index(latitude)

    wbt.csv_points_to_vector(in_csv,
                             out_shp,
                             xfield=xfield,
                             yfield=yfield,
                             epsg=4326)
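A usage sketch based on the sample URL given in the docstring; the output path is hypothetical:

# The CSV is downloaded to ~/Downloads before conversion, as handled above.
csv_points_to_shp(
    "https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv",
    "world_cities.shp",  # hypothetical output path
)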
Example 22
def compile_pourpoints(srcfolder, dstfolder, dry_run=False, overwrite=False):
    wbt = whitebox.WhiteboxTools()
    wbt.work_dir = str(Path(".").resolve())
    wbt.verbose = False

    dstpath = Path(dstfolder)
    pbar = tqdm(list(Path(srcfolder).glob("*.gdb")))
    for gdb in pbar:
        pbar.set_description(gdb.stem)
        with TemporaryDirectory() as td:
            for lyrinfo in LAYERS:
                tmpfile = Path(td) / f"{lyrinfo['name']}.shp"
                points = (
                    geopandas.read_file(gdb, layer=lyrinfo["name"])
                    .pipe(lyrinfo["fxn"])
                    .assign(snapped=lyrinfo["snap"])
                    .reset_index()
                    .drop(columns=["index"])
                )
                if not points.empty:
                    points.to_file(tmpfile)
                    if lyrinfo["snap"]:
                        demfile = dstpath / gdb.stem / "DEM.tif"
                        dp = dem.DEMProcessor(wbt, demfile, dstpath / gdb.stem)
                        dp.snap_points(tmpfile, tmpfile)

            outdir = dstpath / gdb.stem
            if not dry_run:
                all_points = pandas.concat(
                    [geopandas.read_file(shp) for shp in Path(td).glob("*.shp")],
                    ignore_index=True,
                    axis=0,
                )
                outdir.parent.mkdir(exist_ok=True, parents=True)
                for col in ["isin80s", "isin2010", "isin2016"]:
                    subset = all_points.loc[lambda df: df[col].eq(1)]
                    outfile = outdir / f"BMP_{col}.shp"
                    if not subset.empty and (overwrite or not outfile.exists()):
                        subset.to_file(outfile)
    return 0
Example 23
import os
import re
import shutil
import sys
import ast
import whitebox

wbt = whitebox.WhiteboxTools()


def to_camelcase(name):
    '''
    Convert snake_case name to CamelCase name 
    '''
    return ''.join(x.title() for x in name.split('_'))


def to_label(name):
    '''
    Convert snake_case name to Title case label 
    '''
    return ' '.join(x.title() for x in name.split('_'))


def to_snakecase(name):
    '''
    Convert CamelCase name to snake_case name 
    '''
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
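A quick check of the three converters on a WhiteboxTools-style tool name:

print(to_camelcase('breach_depressions'))  # BreachDepressions
print(to_label('breach_depressions'))      # Breach Depressions
print(to_snakecase('BreachDepressions'))   # breach_depressions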
Example 24
def catch(x, y, upload_folder, user_folder, file_name):
    # x, y, upload_folder, user_folder, file_name = read_in()

    # x, y, upload_folder, user_folder, file_name = [638311.1290535209, 4148774.824472582, 'file_uploads',
    #                                                'bbbbb',
    #                                                'dem.tar.gz']

    if file_name == 'srtm_turkey':
        try:
            xy_cor = transform(Proj(init='epsg:3857'), Proj(
                init='epsg:23036'), *zip([float(x), float(y)]))
            x, y = xy_cor[0][0], xy_cor[1][0]
            print(xy_cor)
            print(x, y)
            start = datetime.datetime.now()
            # bas = catchment_routine.CreateCacthment(480189.932, 4100069.151)
            # bas = catchment_routine.CreateCacthment(664421.0251895901, 4124028.181024239)
            bas = catchment_routine.CreateCacthment(x, y)
            bas.process_path = path
            bas.basin_dem = os.path.join(bas.process_path, "DEM_ED50_re.tif")
            # bas.process_path = r  "./file_uploads/srtm"
            upload_path = "./file_uploads"
            bas.init_grid()
            bas.readwindow()
            bas.conditioning()
            # bas.resample()
            bas.calculate_flow_dir()
            bas.calculate_accumlation()
            bas.dem_export(os.path.join(upload_path, user_folder))
            bas.snaptoacc()
            bas.snap_xy()
            ccc = bas.run_for_catchment()
            bas.to_shape(os.path.join(upload_path, user_folder))
            end = datetime.datetime.now() - start

        except BaseException as be:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
            with open("python_err_log.csv", "a") as c_file:
                c_writer = csv.writer(c_file)
                c_writer.writerow([str(be)])
                c_writer.writerow(["error"])
    else:
        file_name = extract_and_retreive(
            os.path.join(path, file_name))

        xnew = x
        ynew = y

        point_geom = Point(float(xnew), float(ynew))
        point = gpd.GeoDataFrame(
            index=[0], crs='epsg:23036', geometry=[point_geom])
        point.to_file(filename=os.path.join(
            path, "point.shp"), driver="ESRI Shapefile")

        wbt = whitebox.WhiteboxTools()
        wbt.set_verbose_mode(False)
        wbt.work_dir = path
        at = path

        wbt.breach_depressions("dem.tif", "DEM_breach.tif")
        wbt.fill_depressions("DEM_breach.tif", "DEM_fill.tif")
        wbt.flow_accumulation_full_workflow(
            "DEM_fill.tif", "DEM_out.tif", "Flow_dir.tif", "Flow_acc.tif", log=False)
        # wbt.basins("Flow_dir.tif", "Basins.tif")
        # wbt.extract_streams("Flow_acc.tif", "streams.tif", threshold=-1)
        # wbt.find_main_stem(
        #     "Flow_dir.tif", "streams.tif", "main_stream.tif")
        # wbt.raster_streams_to_vector(
        #     "streams.tif", "Flow_dir.tif", "riverswht.shp")
        # wbt.raster_streams_to_vector(
        #     "main_stream.tif", "Flow_dir.tif", "main_stream.shp")
        # wbt.horton_stream_order(
        #     "Flow_dir.tif", "streams.tif", "Horton.tif")
        # wbt.strahler_stream_order(
        #     "Flow_dir.tif", "streams.tif", "Strahler.tif")
        # wbt.raster_streams_to_vector(
        #     "Horton.tif", "Flow_dir.tif", "Horton.shp")
        # wbt.raster_streams_to_vector(
        #     "Strahler.tif", "Flow_dir.tif", "Strahler.shp")
        wbt.snap_pour_points("point.shp", "Flow_acc.tif",
                             "snap_point.shp", snap_dist=200)
        wbt.watershed("Flow_dir.tif", "snap_point.shp", "Watershed.tif")
        mask = None
        with rasterio.open(os.path.join(at, "Watershed.tif")) as src:
            image = src.read(1)  # first band
            results = (
                {'properties': {'raster_val': v}, 'geometry': s}
                for i, (s, v) in enumerate(
                    shp(image, mask=mask, transform=src.transform))
            )

        geoms = list(results)
        gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms)
        # Filter nodata value
        gpd_polygonized_raster = gpd_polygonized_raster[gpd_polygonized_raster['raster_val'] == 1]
        # Convert to geojson

        gpd_polygonized_raster.crs = 'epsg:23036'
        gpd_polygonized_raster.to_file(
            driver='ESRI Shapefile', filename=os.path.join(at, "basin_boundary_23063.shp"))

        gpd_polygonized_raster = gpd_polygonized_raster.to_crs(
            'epsg:4326')  # world.to_crs(epsg=3395) would also work
        gpd_polygonized_raster.to_file(
            driver='ESRI Shapefile', filename=os.path.join(at, "basin_boundary.shp"))

        wbt.clip_raster_to_polygon(
            "DEM_out.tif", "basin_boundary_23063.shp", "DEM_watershed.tif")
        wbt.hypsometric_analysis("DEM_watershed.tif", "hypso.html")
        #wbt.slope_vs_elevation_plot(
        #    "DEM_watershed.tif", "Slope_elevation.html")
        wbt.zonal_statistics(
            "DEM_out.tif", "Watershed.tif", output=None, stat="total", out_table="stat.html")
        #wbt.raster_histogram("DEM_watershed.tif", "hist.html")

        gpd_polygonized_raster["area"] = gpd_polygonized_raster['geometry'].area
        Area = gpd_polygonized_raster['geometry'].area * 10000
        Area = Area.max()
        try:
            Centroid = [gpd_polygonized_raster.centroid.x[1],
                        gpd_polygonized_raster.centroid.y[1]]
        except (KeyError, IndexError):
            Centroid = [gpd_polygonized_raster.centroid.x[0],
                        gpd_polygonized_raster.centroid.y[0]]
        boundary = gpd_polygonized_raster.to_json()

        y = json.loads(boundary)
        # data = boundary['features'][0]['geometry']['coordinates']

        # logfile2.write(str(y['features'][0]['geometry']['coordinates']))

        data = y['features'][0]['geometry']['coordinates']

        try:
            coords_alt = y['features'][1]['geometry']['coordinates']
            # Keep whichever feature carries the larger coordinate payload
            if len(str(coords_alt)) > len(str(data)):
                boundary = Polygon(coords_alt)
            else:
                boundary = Polygon(data)
        except (IndexError, KeyError):
            boundary = Polygon(data)

        X, Y = hy.hypso(os.path.join(at, "hypso.html"))
        stat = hy.stat(os.path.join(at, "stat.html"))
        # logfile = open(
        #     r'D:\Github\model_experiment\NAM\datadir\basin_log33.txt', 'a+')
        # logfile.write(str(stat))
        basin_object = []

        df_res = pd.DataFrame()
        df_res['X'] = X
        df_res['Y'] = Y
        j = df_res.to_json(orient='records')
        basin_object.append({"Polygon": json.dumps(boundary),
                             "hypso": j,
                             "stats": json.dumps(stat),
                             "status": 'success',
                             "Area": json.dumps(Area),
                             "Centroid": json.dumps(Centroid)})
        basin_object.append({"Polygon": json.dumps(boundary)})
        basin_object = json.dumps(basin_object)
        return basin_object