Example #1
def RetrieveData(Date, args):
    """
    This function retrieves MOD09 reflectance data for a given date from the
    http://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, band,
        resolution, hdf_library
    ] = args

    # Collect the data from the MODIS server; this returns the data and the lat and lon (in meters) of those tiles
    try:
        Collect_data(TilesHorizontal, TilesVertical, Date, output_folder, band,
                     resolution, hdf_library)
    except:
        print("Was not able to download the file")

    # Define the output name of the collect data function
    name_collect = os.path.join(output_folder, 'Merged.tif')
    try:
        # Reproject the MODIS product to epsg_to
        epsg_to = '4326'
        name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

        # Clip the data to the user's extent
        data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

        # Save results as Gtiff
        ReffileName = os.path.join(
            output_folder,
            'ReflectanceBand%d_MOD09GQ_-_daily_' % band + Date.strftime('%Y') +
            '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')
        DC.Save_as_tiff(name=ReffileName,
                        data=data,
                        geo=geo,
                        projection='WGS84')

        # remove the side products
        os.remove(os.path.join(output_folder, name_collect))
        os.remove(os.path.join(output_folder, name_reprojected))
    except:
        print('data for %02d-%02d-%d is not available' %
              (Date.day, Date.month, Date.year))

    return True
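A minimal call sketch for the function above (hedged: the folder, tile numbers, and band are illustrative placeholders, and the surrounding watools MOD09 module must provide Collect_data, RC, and DC as in the listing):

import datetime

output_folder = r"C:/data/MOD09"           # placeholder output directory
args = [output_folder,
        [8, 8],                            # TilesVertical (placeholder)
        [28, 28],                          # TilesHorizontal (placeholder)
        [35.0, 36.0],                      # lonlim [xmin, xmax]
        [14.0, 15.0],                      # latlim [ymin, ymax]
        1,                                 # band
        250,                               # resolution (m)
        None]                              # hdf_library (None: download from the server)
RetrieveData(datetime.datetime(2018, 7, 1), args)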
Example #2
def RetrieveData(Date, args):
    """
    This function retrieves MOD11 LST data for a given date from the
    https://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
        TimeStep, hdf_library
    ] = args

    # Collect the data from the MODIS server; this returns the data and the lat and lon (in meters) of those tiles
    try:
        Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                     TimeStep, hdf_library)
    except:
        print("Was not able to download the file")

    # Define the output name of the collect data function
    name_collect = os.path.join(output_folder, 'Merged.tif')

    # Reproject the MODIS product to WGS84 (EPSG:4326)
    name_reprojected = RC.reproject_modis_wgs84(name_collect, method=2)

    # Clip the data to the user's extent
    data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

    # Save results as Gtiff
    if TimeStep == 8:
        LSTfileName = os.path.join(
            output_folder, 'LST_MOD11A2_K_8-daily_' + Date.strftime('%Y') +
            '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')
    if TimeStep == 1:
        LSTfileName = os.path.join(
            output_folder, 'LST_MOD11A1_K_daily_' + Date.strftime('%Y') + '.' +
            Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')

    DC.Save_as_tiff(name=LSTfileName, data=data, geo=geo, projection='WGS84')

    # remove the side products
    os.remove(os.path.join(output_folder, name_collect))
    os.remove(os.path.join(output_folder, name_reprojected))

    return True
Example #3
def RetrieveData(Date, args):
    """
    This function retrieves MOD16 ET data for a given date from the
    ftp://ftp.ntsg.umt.edu/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, latlim, lonlim,
        timestep, hdf_library
    ] = args

    # Collect the data from the MODIS server; this returns the data and the lat and lon (in meters) of those tiles
    try:
        Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                     timestep, hdf_library)
    except:
        print("Was not able to download the file")

    # Define the output name of the collect data function
    name_collect = os.path.join(output_folder, 'Merged.tif')

    # Reproject the MODIS product to epsg_to
    epsg_to = '4326'
    name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

    # Clip the data to the user's extent
    data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

    if timestep == 'monthly':
        ETfileName = os.path.join(
            output_folder, 'ET_MOD16A2_mm-month-1_monthly_' +
            Date.strftime('%Y') + '.' + Date.strftime('%m') + '.01.tif')
    elif timestep == '8-daily':
        ETfileName = os.path.join(
            output_folder,
            'ET_MOD16A2_mm-8days-1_8-daily_' + Date.strftime('%Y') + '.' +
            Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')

    DC.Save_as_tiff(name=ETfileName, data=data, geo=geo, projection='WGS84')

    # remove the side products
    os.remove(os.path.join(output_folder, name_collect))
    os.remove(os.path.join(output_folder, name_reprojected))

    return ()
Example #4
def Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim):
    """
    This function retrieves GWF data for a given date from the
    ftp.wateraccounting.unesco-ihe.org server.

    Keyword arguments:
    output_folder -- directory in which the downloaded file is stored
    filename_Out -- name of the clipped end file
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    try:
        # Collect account and FTP information
        username, password = WebAccounts.Accounts(Type='FTP_WA')
        ftpserver = "ftp.wateraccounting.unesco-ihe.org"

        # Set the file names and directories
        filename = "Gray_Water_Footprint.tif"
        local_filename = os.path.join(output_folder, filename)

        # Download data from FTP
        ftp = FTP(ftpserver)
        ftp.login(username, password)
        directory = "/WaterAccounting_Guest/Static_WA_Datasets/"
        ftp.cwd(directory)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write)
        lf.close()

        # Clip the requested extent out of the global dataset
        dataset, Geo_out = RC.clip_data(local_filename, latlim, lonlim)

        # make geotiff file
        DC.Save_as_tiff(name=filename_Out,
                        data=dataset,
                        geo=Geo_out,
                        projection="WGS84")

        # delete old tif file
        os.remove(local_filename)

    except:
        print("file not exists")

    return
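A hedged usage sketch (the path and bounding box are placeholder assumptions; a valid FTP_WA account must already be configured in WebAccounts):

import os

output_folder = r"C:/data/GWF"                                   # placeholder
filename_Out = os.path.join(output_folder, "Gray_Water_Footprint_clipped.tif")
latlim = [14.0, 15.0]                                            # [ymin, ymax]
lonlim = [35.0, 36.0]                                            # [xmin, xmax]
Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim)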
Example #5
def RetrieveData(args):
    """
    This function retrieves JRC data for a given date from the
    http://storage.googleapis.com/global-surface-water/downloads/ server.

    Keyword arguments:
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, Names_to_download, lonlim, latlim] = args

    # Collect the data from the JRC server; this returns the data and the lat and lon of those tiles
    try:
        Collect_data(Names_to_download, output_folder)
    except:
        print("Was not able to download the file")

    # Clip the data to the user's extent
    if len(Names_to_download) == 1:
        trash_folder = os.path.join(output_folder, "Trash")
        data_in = os.path.join(trash_folder, Names_to_download[0])
        data_end, geo_end = RC.clip_data(data_in, latlim, lonlim)
    else:

        data_end = np.zeros([
            int((latlim[1] - latlim[0]) / 0.00025),
            int((lonlim[1] - lonlim[0]) / 0.00025)
        ])

        for Name_to_merge in Names_to_download:
            trash_folder = os.path.join(output_folder, "Trash")
            data_in = os.path.join(trash_folder, Name_to_merge)
            geo_out, proj, size_X, size_Y = RC.Open_array_info(data_in)
            lat_min_merge = np.maximum(latlim[0],
                                       geo_out[3] + size_Y * geo_out[5])
            lat_max_merge = np.minimum(latlim[1], geo_out[3])
            lon_min_merge = np.maximum(lonlim[0], geo_out[0])
            lon_max_merge = np.minimum(lonlim[1],
                                       geo_out[0] + size_X * geo_out[1])

            lonmerge = [lon_min_merge, lon_max_merge]
            latmerge = [lat_min_merge, lat_max_merge]
            data_one, geo_one = RC.clip_data(data_in, latmerge, lonmerge)

            Ystart = int((geo_one[3] - latlim[1]) / geo_one[5])
            Yend = int(Ystart + np.shape(data_one)[0])
            Xstart = int((geo_one[0] - lonlim[0]) / geo_one[1])
            Xend = int(Xstart + np.shape(data_one)[1])

            data_end[Ystart:Yend, Xstart:Xend] = data_one

        geo_end = tuple([lonlim[0], geo_one[1], 0, latlim[1], 0, geo_one[5]])

    # Save results as Gtiff
    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')
    DC.Save_as_tiff(name=fileName_out,
                    data=data_end,
                    geo=geo_end,
                    projection='WGS84')
    shutil.rmtree(trash_folder)
    return True
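The merge branch above places each clipped tile into a target grid of 0.00025 degree pixels by converting the tile's geotransform into row/column offsets. A standalone sketch of that index arithmetic (the bounding box, tile geotransform, and tile data are illustrative; rounding is added to guard against float truncation):

import numpy as np

pixel = 0.00025                                  # JRC Occurrence pixel size in degrees
lonlim, latlim = [35.0, 36.0], [14.0, 15.0]      # illustrative bounding box
rows = int(round((latlim[1] - latlim[0]) / pixel))
cols = int(round((lonlim[1] - lonlim[0]) / pixel))
target = np.zeros([rows, cols])

# Geotransform of one clipped tile: (xmin, dx, 0, ymax, 0, -dy)
geo_one = (35.5, pixel, 0.0, 15.0, 0.0, -pixel)
tile = np.ones([2000, 2000])                     # illustrative clipped tile data

Ystart = int(round((geo_one[3] - latlim[1]) / geo_one[5]))   # rows below the target's top edge
Xstart = int(round((geo_one[0] - lonlim[0]) / geo_one[1]))   # columns right of the target's left edge
target[Ystart:Ystart + tile.shape[0], Xstart:Xstart + tile.shape[1]] = tile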
Example #6
def DownloadData(output_folder, latlim, lonlim, parameter, resolution):
    """
    This function downloads DEM data from HydroSHED

    Keyword arguments:
    output_folder -- directory of the result
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    parameter -- 'dir_3s', 'dem_3s', 'dir_15s', or 'dem_15s'
    resolution -- '3s' or '15s' (set based on the chosen parameter)
    """
    # Define parameter dependent variables
    if parameter == "dir_3s":
        para_name = "DIR"
        unit = "-"
        resolution = '3s'
        parameter = 'dir'

    if parameter == "dem_3s":
        para_name = "DEM"
        unit = "m"
        resolution = '3s'
        parameter = 'dem'

    if parameter == "dir_15s":
        para_name = "DIR"
        unit = "-"
        resolution = '15s'
        parameter = 'dir'

    if parameter == "dem_15s":
        para_name = "DEM"
        unit = "m"
        resolution = '15s'
        parameter = 'dem'

    # Convert the latlim and lonlim into the names of the tiles that must be downloaded
    if resolution == '3s':

        name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim,
                                                       parameter)

        # Memory for the map x and y shape (starts with zero)
        size_X_tot = 0
        size_Y_tot = 0

    if resolution == '15s':
        name = Find_Document_names_15s(latlim, lonlim, parameter, resolution)

    nameResults = []
    # Create a temporary folder for processing
    output_folder_trash = os.path.join(output_folder, "Temp")
    if not os.path.exists(output_folder_trash):
        os.makedirs(output_folder_trash)

    # Download, extract, and convert all the files to tiff files
    for nameFile in name:

        try:
            # Download the data from
            # http://earlywarning.usgs.gov/hydrodata/
            output_file, file_name = Download_Data(nameFile,
                                                   output_folder_trash,
                                                   parameter, para_name,
                                                   resolution)

            # extract zip data
            DC.Extract_Data(output_file, output_folder_trash)

            # Convert the data with an adf extension to a tiff extension.
            # The input is the file name and the directory in which the data must be stored
            file_name_tiff = file_name.split('.')[0] + '_trans_temporary.tif'
            file_name_extract = file_name.split('_')[0:3]
            if resolution == '3s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1]

            if resolution == '15s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1] + '_15s'

            input_adf = os.path.join(output_folder_trash, file_name_extract2,
                                     file_name_extract2, 'hdr.adf')
            output_tiff = os.path.join(output_folder_trash, file_name_tiff)

            # convert data from adf to a tiff file
            output_tiff = DC.Convert_adf_to_tiff(input_adf, output_tiff)

            geo_out, proj, size_X, size_Y = RC.Open_array_info(output_tiff)
            if int(size_X) != int(6000) or int(size_Y) != int(6000):
                data = np.ones((6000, 6000)) * -9999

                # Create the latitude bound
                Vfile = str(nameFile)[1:3]
                SignV = str(nameFile)[0]
                SignVer = 1
                # If the sign before the filename is a south sign, then the latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(nameFile)[4:7]
                SignH = str(nameFile)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, then the longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                Expected_X_min = Bound1
                Expected_Y_max = Bound2 + 5

                Xid_start = int(
                    np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
                Xid_end = int(
                    np.round(
                        ((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) /
                        geo_out[1]))
                Yid_start = int(
                    np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
                Yid_end = int(
                    np.round((Expected_Y_max - (geo_out[3] +
                                                (size_Y * geo_out[5]))) /
                             (-geo_out[5])))

                data[Yid_start:Yid_end,
                     Xid_start:Xid_end] = RC.Open_tiff_array(output_tiff)
                if np.max(data) == 255:
                    data[data == 255] = -9999
                data[data < -9999] = -9999

                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

        except:

            if resolution == '3s':
                # If the tile does not exist, create a replacement zero tile (sea tiles)
                output = nameFile.split('.')[0] + "_trans_temporary.tif"
                output_tiff = os.path.join(output_folder_trash, output)
                file_name = nameFile
                data = np.ones((6000, 6000)) * -9999
                data = data.astype(np.float32)

                # Create the latitude bound
                Vfile = str(file_name)[1:3]
                SignV = str(file_name)[0]
                SignVer = 1
                # If the sign before the filename is a south sign, then the latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(file_name)[4:7]
                SignH = str(file_name)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, then the longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                # Geospatial data for the tile
                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

            if resolution == '15s':

                print('no 15s data is in dataset')

        if resolution == '3s':

            # clip data
            Data, Geo_data = RC.clip_data(output_tiff, latlim, lonlim)
            size_Y_out = int(np.shape(Data)[0])
            size_X_out = int(np.shape(Data)[1])

            # Total size of the product so far
            size_Y_tot = int(size_Y_tot + size_Y_out)
            size_X_tot = int(size_X_tot + size_X_out)

            if nameFile == name[0]:
                Geo_x_end = Geo_data[0]
                Geo_y_end = Geo_data[3]
            else:
                Geo_x_end = np.min([Geo_x_end, Geo_data[0]])
                Geo_y_end = np.max([Geo_y_end, Geo_data[3]])

            # create name for chunk
            FileNameEnd = "%s_temporary.tif" % (nameFile)
            nameForEnd = os.path.join(output_folder_trash, FileNameEnd)
            nameResults.append(str(nameForEnd))

            # save chunk as tiff file
            DC.Save_as_tiff(name=nameForEnd,
                            data=Data,
                            geo=Geo_data,
                            projection="WGS84")

    if resolution == '3s':
        #size_X_end = int(size_X_tot) #!
        #size_Y_end = int(size_Y_tot) #!

        size_X_end = int(size_X_tot / len(rangeLat)) + 1  #!
        size_Y_end = int(size_Y_tot / len(rangeLon)) + 1  #!

        # Define the georeference of the end matrix
        geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]]

        latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]]
        lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end]

        # merge chunk together resulting in 1 tiff map
        datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end,
                               size_X_end)

        datasetTot[datasetTot < -9999] = -9999

    if resolution == '15s':
        output_file_merged = os.path.join(output_folder_trash, 'merged.tif')
        datasetTot, geo_out = Merge_DEM_15s(output_folder_trash,
                                            output_file_merged, latlim, lonlim)

    # name of the end result
    output_DEM_name = "%s_HydroShed_%s_%s.tif" % (para_name, unit, resolution)

    Save_name = os.path.join(output_folder, output_DEM_name)

    # Make geotiff file
    DC.Save_as_tiff(name=Save_name,
                    data=datasetTot,
                    geo=geo_out,
                    projection="WGS84")
    os.chdir(output_folder)

    # Delete the temporary folder
    shutil.rmtree(output_folder_trash)
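The bound reconstruction above turns a HydroSHEDS 3s tile name such as 'n35e045...' into the lower-left corner of a 5x5 degree tile. A small standalone sketch of the same parsing (the sample file names are illustrative, not actual download names):

def tile_bounds(name):
    """Return (lon_min, lat_min) of a 5x5 degree HydroSHEDS 3s tile from its name."""
    lat = int(name[1:3]) * (-1 if name[0] == "s" else 1)
    lon = int(name[4:7]) * (-1 if name[3] == "w" else 1)
    return lon, lat

print(tile_bounds("n35e045_dem_grid.zip"))   # (45, 35): tile spans 45-50E, 35-40N
print(tile_bounds("s10w075_dem_grid.zip"))   # (-75, -10)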
Example #7
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version,
                 Product):
    """
    This script downloads SSEBop ET data from the UNESCO-IHE FTP server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    if version == "FTP":
        # Check the latitude and longitude and otherwise set lat or lon to the greatest extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print(
                'Latitude above 80N or below -59.2S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print(
                'Longitude must be between 180E and 180W. Now value is set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude and otherwise set lat or lon to the greatest extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print(
                'Latitude above 80N or below -59.2S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print(
                'Longitude must be between 180E and 180W. Now value is set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Define the directory and create it if it does not exist
    if Product == "ETact":
        output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
        freq_use = "MS"
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration',
                                     'FEWS', 'Daily')
        freq_use = "D"

    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq=freq_use)

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Loop over the dates
    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month
        day = Date.day

        if version == "FTP":

            # Date as printed in filename
            Filename_out = os.path.join(
                output_folder,
                'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

            # Define end filename
            Filename_dir = os.path.join("%s" % year,
                                        "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

            # Temporary filename for the downloaded global file
            local_filename = os.path.join(output_folder, Filename_only)

        if version == "V4":

            # Date as printed in filename
            if Product == "ETpot":
                Filename_out = os.path.join(
                    output_folder,
                    'ETpot_FEWS_mm-day-1_daily_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = 'et%02s%02d%02d.tar.gz' % (str(year)[2:],
                                                               month, day)
                # The end file name after downloading and unzipping
                Filename_only = "et%02s%02d%02d.bil" % (str(year)[2:], month,
                                                        day)
                # Create bin folder
                temp_folder = os.path.join(output_folder, "Temp")
                if not os.path.exists(temp_folder):
                    os.makedirs(temp_folder)
                local_filename = os.path.join(temp_folder, Filename_only)

            if Product == "ETact":
                Filename_out = os.path.join(
                    output_folder,
                    'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = "m%s%02d.zip" % (str(year), month)
                # The end file name after downloading and unzipping
                Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (
                    str(year), month)

                # Temporary filename for the downloaded global file
                local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from FTP server if the file not exists
        if not os.path.exists(Filename_out):
            try:

                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    if Product == "ETpot":
                        Download_SSEBop_from_Web(temp_folder,
                                                 Filename_only_zip, Product)
                    if Product == "ETact":
                        Download_SSEBop_from_Web(output_folder,
                                                 Filename_only_zip, Product)

                if Product == "ETpot":
                    Array_ETpot = RC.Open_bil_array(local_filename)
                    Array_ETpot = Array_ETpot / 100
                    Geo_out = tuple([-180.5, 1, 0, 90.5, 0, -1])
                    dest = DC.Save_as_MEM(Array_ETpot, Geo_out, "WGS84")
                    data, Geo_out = RC.clip_data(dest, latlim, lonlim)
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")

                if Product == "ETact":
                    # Clip dataset
                    RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim,
                                         lonlim)
                    os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        if Product == "ETact":
            zipfiles = glob.glob("*.zip")
            for zipfile in zipfiles:
                os.remove(os.path.join(output_folder, zipfile))
            xmlfiles = glob.glob("*.xml")
            for xmlfile in xmlfiles:
                os.remove(os.path.join(output_folder, xmlfile))
        if Product == "ETpot":
            import shutil
            Temp_dir = os.path.join(output_folder, "Temp")
            shutil.rmtree(Temp_dir)

    return
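A minimal call sketch (paths and dates are placeholders; the V4 branch relies on the Download_SSEBop_from_Web helper from the same module and a working internet connection):

DownloadData(Dir=r"C:/data",              # placeholder root directory
             Startdate="2015-01-01",
             Enddate="2015-12-31",
             latlim=[14.0, 15.0],         # [ymin, ymax]
             lonlim=[35.0, 36.0],         # [xmin, xmax]
             Waitbar=1,
             version="V4",
             Product="ETact")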
Example #8
def DownloadData(output_folder, latlim, lonlim, dataset, level=None):
    """
    This function downloads SoilGrids data from SoilGrids.org

    Keyword arguments:
    output_folder -- directory of the result
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    level -- "sl1" .... "sl7"
    dataset -- ground dataset
    """
    # Define parameter dependent variables
    if dataset == "BLDFIE":
        nameEnd = os.path.join(output_folder,
                               'BulkDensity_%s_SoilGrids_kg-m-3.tif' % level)
        level_name = "%s_" % level
    if dataset == "CLYPPT":
        nameEnd = os.path.join(
            output_folder,
            'ClayContentMassFraction_%s_SoilGrids_percentage.tif' % level)
        level_name = "%s_" % level
    if dataset == "ORCDRC":
        nameEnd = os.path.join(
            output_folder,
            'SoilOrganicCarbonContent_%s_SoilGrids_g_kg.tif' % level)
        level_name = "%s_" % level
    if dataset == "OCSTHA":
        nameEnd = os.path.join(
            output_folder,
            'SoilOrganicCarbonStock_%s_SoilGrids_tonnes-ha-1.tif' % level)
        level_name = "%s_" % level
    if dataset == "CRFVOL":
        nameEnd = os.path.join(
            output_folder,
            'CoarseFragmentVolumetric_%s_SoilGrids_percentage.tif' % level)
        level_name = "%s_" % level
    if dataset == "SLTPPT":
        nameEnd = os.path.join(
            output_folder,
            'SiltContentMassFraction_%s_SoilGrids_percentage.tif' % level)
        level_name = "%s_" % level
    if dataset == "SNDPPT":
        nameEnd = os.path.join(
            output_folder,
            'SandContentMassFraction_%s_SoilGrids_percentage.tif' % level)
        level_name = "%s_" % level
    if dataset == "BDRICM":
        nameEnd = os.path.join(output_folder,
                               'DepthToBedrock_SoilGrids_cm.tif')
        level_name = ""
    if dataset == "BDTICM":
        nameEnd = os.path.join(output_folder,
                               'AbsoluteDepthToBedrock_SoilGrids_cm.tif')
        level_name = ""
    if dataset == "BDRLOG":
        nameEnd = os.path.join(
            output_folder,
            'PredictedProbabilityOfOccurrence_SoilGrids_percentage.tif')
        level_name = ""

    if not os.path.exists(nameEnd):

        # Create trash folder
        output_folder_trash = os.path.join(output_folder, "Trash")
        if not os.path.exists(output_folder_trash):
            os.makedirs(output_folder_trash)

        # Download, extract, and converts all the files to tiff files
        try:
            # Download the data from SoilGrids.org
            output_file = Download_Data(output_folder_trash, level_name,
                                        dataset)

            # Clip the data
            RC.clip_data(output_file, latlim, lonlim, output_name=nameEnd)

        except:
            print("Was not able to create the wanted dataset")

        try:
            shutil.rmtree(output_folder_trash)
        except:
            print("Was not able to remove the trash bin")

    return ()
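A hedged usage sketch (the output directory is a placeholder; 'sl3' is one of the seven standard depth levels named in the docstring):

DownloadData(output_folder=r"C:/data/SoilGrids",   # placeholder
             latlim=[14.0, 15.0],                  # [ymin, ymax]
             lonlim=[35.0, 36.0],                  # [xmin, xmax]
             dataset="CLYPPT",                     # clay content mass fraction
             level="sl3")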
Example #9
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Type, Waitbar):
    """
    This script downloads ETmonitor ET data from the UNESCO-IHE FTP server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and otherwise set lat or lon to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print(
            'Latitude above 90N or below 90S is not possible. Value set to maximum'
        )
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print(
            'Longitude must be between 180E and 180W. Now value is set to maximum'
        )
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2008-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2012-12-31')

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq="MS")

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Define directory and create it if not exists
    if Type == "act":
        output_folder = os.path.join(Dir, 'Evaporation', 'ETmonitor',
                                     'Monthly')
    if Type == "pot":
        output_folder = os.path.join(Dir, 'ETpot', 'ETmonitor', 'Monthly')
    if Type == "ei":
        output_folder = os.path.join(Dir, 'Ei', 'ETmonitor', 'Monthly')
    if Type == "es":
        output_folder = os.path.join(Dir, 'Es', 'ETmonitor', 'Monthly')
    if Type == "ew":
        output_folder = os.path.join(Dir, 'Ew', 'ETmonitor', 'Monthly')
    if Type == "tr":
        output_folder = os.path.join(Dir, 'Transpiration', 'ETmonitor',
                                     'Monthly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month

        # Define end filename and Date as printed in filename
        if Type == "act":
            Filename_in = "ET_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                    month)
            Filename_out = os.path.join(
                output_folder,
                'ETa_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        if Type == "pot":
            Filename_in = "ETpot_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                       month)
            Filename_out = os.path.join(
                output_folder,
                'ETpot_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        if Type == "ei":
            Filename_in = "Ei_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                    month)
            Filename_out = os.path.join(
                output_folder,
                'Ei_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        if Type == "es":
            Filename_in = "Es_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                    month)
            Filename_out = os.path.join(
                output_folder,
                'Es_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        if Type == "ew":
            Filename_in = "Ew_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                    month)
            Filename_out = os.path.join(
                output_folder,
                'Ew_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        if Type == "tr":
            Filename_in = "Tr_ETmonitor_mm-month_%d_%02d_01.tif" % (year,
                                                                    month)
            Filename_out = os.path.join(
                output_folder,
                'Tr_ETmonitor_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, Filename_in)

        # Download the data from FTP server if the file not exists
        if not os.path.exists(Filename_out):
            try:
                Download_ETmonitor_from_WA_FTP(local_filename, Filename_in,
                                               Type)

                # Reproject the dataset to WGS84 (EPSG:4326)
                name_reprojected_ETmonitor = RC.reproject_modis_wgs84(
                    local_filename)

                # Clip dataset
                RC.clip_data(name_reprojected_ETmonitor,
                             latlim,
                             lonlim,
                             output_name=Filename_out)
                os.remove(name_reprojected_ETmonitor)
                os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    return
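A matching call sketch for the ETmonitor downloader (placeholder path and dates; the FTP_WA account must be configured in WebAccounts):

DownloadData(Dir=r"C:/data",              # placeholder
             Startdate="2009-01-01",
             Enddate="2009-12-31",
             latlim=[14.0, 15.0],         # [ymin, ymax]
             lonlim=[35.0, 36.0],         # [xmin, xmax]
             Type="act",                  # actual ET
             Waitbar=1)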
Example #10
def Add_Reservoirs(output_nc, Diff_Water_Volume, Regions):

    import numpy as np

    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC

    # Extract data from NetCDF file
    Discharge_dict = RC.Open_nc_dict(output_nc, "dischargedict_dynamic")
    River_dict = RC.Open_nc_dict(output_nc, "riverdict_static")
    DEM_dict = RC.Open_nc_dict(output_nc, "demdict_static")
    Distance_dict = RC.Open_nc_dict(output_nc, "distancedict_static")
    Rivers = RC.Open_nc_array(output_nc, "rivers")
    acc_pixels = RC.Open_nc_array(output_nc, "accpix")

    # Open data array info based on example data
    geo_out, epsg, size_X, size_Y, size_Z, time = RC.Open_nc_info(output_nc)

    # Create ID Matrix
    y, x = np.indices((size_Y, size_X))
    ID_Matrix = np.int32(
        np.ravel_multi_index(np.vstack((y.ravel(), x.ravel())),
                             (size_Y, size_X),
                             mode='clip').reshape(x.shape)) + 1
    del x, y

    Acc_Pixels_Rivers = Rivers * acc_pixels
    ID_Rivers = Rivers * ID_Matrix

    Amount_of_Reservoirs = len(Regions)

    Reservoir_is_in_River = np.ones([len(Regions), 3]) * -9999

    for reservoir in range(0, Amount_of_Reservoirs):

        region = Regions[reservoir, :]

        dest = DC.Save_as_MEM(Acc_Pixels_Rivers, geo_out, projection='WGS84')
        Rivers_Acc_Pixels_reservoir, Geo_out = RC.clip_data(
            dest, latlim=[region[2], region[3]], lonlim=[region[0], region[1]])

        dest = DC.Save_as_MEM(ID_Rivers, geo_out, projection='WGS84')
        Rivers_ID_reservoir, Geo_out = RC.clip_data(
            dest, latlim=[region[2], region[3]], lonlim=[region[0], region[1]])

        size_Y_reservoir, size_X_reservoir = np.shape(
            Rivers_Acc_Pixels_reservoir)
        IDs_Edges = []
        IDs_Edges = np.append(IDs_Edges, Rivers_Acc_Pixels_reservoir[0, :])
        IDs_Edges = np.append(IDs_Edges, Rivers_Acc_Pixels_reservoir[:, 0])
        IDs_Edges = np.append(
            IDs_Edges,
            Rivers_Acc_Pixels_reservoir[int(size_Y_reservoir) - 1, :])
        IDs_Edges = np.append(
            IDs_Edges, Rivers_Acc_Pixels_reservoir[:,
                                                   int(size_X_reservoir) - 1])
        Value_Reservoir = np.max(np.unique(IDs_Edges))

        y_pix_res, x_pix_res = np.argwhere(
            Rivers_Acc_Pixels_reservoir == Value_Reservoir)[0]
        ID_reservoir = Rivers_ID_reservoir[y_pix_res, x_pix_res]

        # Find exact reservoir area in river directory
        for River_part in River_dict.items():
            if len(np.argwhere(River_part[1] == ID_reservoir)) > 0:
                Reservoir_is_in_River[reservoir, 0] = np.argwhere(
                    River_part[1] == ID_reservoir)  #River_part_good
                Reservoir_is_in_River[reservoir,
                                      1] = River_part[0]  #River_Add_Reservoir
                Reservoir_is_in_River[reservoir, 2] = 1  #Reservoir_is_in_River

    numbers = abs(Reservoir_is_in_River[:, 1].argsort() -
                  len(Reservoir_is_in_River) + 1)

    for number in range(0, len(Reservoir_is_in_River)):

        row_reservoir = np.argwhere(numbers == number)[0][0]

        if not Reservoir_is_in_River[row_reservoir, 2] == -9999:

            # Get discharge into the reservoir:
            Flow_in_res_m3 = Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][:,
                                    int(Reservoir_is_in_River[row_reservoir,
                                                              0])]

            # Get difference reservoir
            Change_Reservoir_m3 = Diff_Water_Volume[row_reservoir, :, 2]

            # Total Change outflow
            Change_outflow_m3 = np.minimum(Flow_in_res_m3, Change_Reservoir_m3)

            Difference = Change_outflow_m3 - Change_Reservoir_m3
            if abs(np.sum(Difference)) > 10000 and np.sum(
                    Change_Reservoir_m3[Change_outflow_m3 > 0]) > 0:
                Change_outflow_m3[Change_outflow_m3 < 0] = Change_outflow_m3[
                    Change_outflow_m3 < 0] * np.sum(
                        Change_outflow_m3[Change_outflow_m3 > 0]) / np.sum(
                            Change_Reservoir_m3[Change_outflow_m3 > 0])

            # Find the key name (which is also the length of the river dictionary)
            i = len(River_dict)

            #River_with_reservoirs_dict[i]=list((River_dict[River_Add_Reservoir][River_part_good[0][0]:]).flat) < make the dictionary arrays this way, then the array is 1D
            River_dict[i] = River_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            River_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = River_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            DEM_dict[i] = DEM_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            DEM_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = DEM_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            Distance_dict[i] = Distance_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            Distance_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = Distance_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            Discharge_dict[i] = Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][:,
                                    int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            Discharge_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = Discharge_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:, :int(Reservoir_is_in_River[row_reservoir, 0]) +
                             1]
            Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir,
                1])][:, 1:int(Reservoir_is_in_River[row_reservoir, 0]) +
                     1] = Discharge_dict[int(
                         Reservoir_is_in_River[row_reservoir, 1]
                     )][:, 1:int(Reservoir_is_in_River[row_reservoir, 0]) +
                        1] - Change_outflow_m3[:, None]
            Next_ID = River_dict[int(Reservoir_is_in_River[row_reservoir,
                                                           1])][0]

            times = 0
            while len(River_dict) > times:
                for River_part in River_dict.items():
                    if River_part[-1][-1] == Next_ID:
                        Next_ID = River_part[-1][0]
                        item = River_part[0]
                        # At least 10 percent of the incoming discharge will always pass the dam
                        Change_outflow_m3[:, None] = np.minimum(
                            0.9 * Discharge_dict[item][:, -1:],
                            Change_outflow_m3[:, None])

                        Discharge_dict[item][:, 1:] = Discharge_dict[
                            item][:, 1:] - Change_outflow_m3[:, None]
                        print(item)
                        times = 0
                    times += 1

    return (Discharge_dict, River_dict, DEM_dict, Distance_dict)
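The ID matrix above simply numbers every grid cell from 1 upward in row-major order; a tiny standalone sketch of that construction (the grid size is illustrative):

import numpy as np

size_Y, size_X = 3, 4                       # illustrative grid
y, x = np.indices((size_Y, size_X))
ID_Matrix = np.int32(np.ravel_multi_index(np.vstack((y.ravel(), x.ravel())),
                                          (size_Y, size_X),
                                          mode='clip').reshape(x.shape)) + 1
print(ID_Matrix)
# [[ 1  2  3  4]
#  [ 5  6  7  8]
#  [ 9 10 11 12]]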
Example #11
def Find_Area_Volume_Relation(region, input_JRC, input_nc):

    # Find relation between V and A

    import numpy as np
    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC
    from scipy.optimize import curve_fit
    import matplotlib.pyplot as plt

    def func(x, a, b):
        """
        This function is used for finding the relation between area and volume

        """
        return (a * x**b)

    def func3(x, a, b, c, d):
        """
        This function is used for finding the relation between area and volume

        """
        return (a * (x - c)**b + d)

    #Array, Geo_out = RC.clip_data(input_JRC,latlim=[14.528,14.985],lonlim =[35.810,36.005])
    Array, Geo_out = RC.clip_data(
        input_JRC,
        latlim=[region[2], region[3]],
        lonlim=[region[0], region[1]
                ])  # This reservoir was not filled when SRTM was taken
    size_Y = int(np.shape([Array])[-2])
    size_X = int(np.shape([Array])[-1])

    Water_array = np.zeros(np.shape(Array))
    buffer_zone = 4
    Array[Array > 0] = 1
    for i in range(0, size_Y):
        for j in range(0, size_X):
            Water_array[i, j] = np.max(Array[
                np.maximum(0, i -
                           buffer_zone):np.minimum(size_Y, i + buffer_zone +
                                                   1),
                np.maximum(0, j -
                           buffer_zone):np.minimum(size_X, j + buffer_zone +
                                                   1)])
    del Array

    # Open DEM and reproject
    DEM_Array = RC.Open_nc_array(input_nc, "dem")
    Geo_out_dem, proj_dem, size_X_dem, size_Y_dem, size_Z_dem, time = RC.Open_nc_info(
        input_nc)

    # Save Example as memory file
    dest_example = DC.Save_as_MEM(Water_array, Geo_out, projection='WGS84')
    dest_dem = DC.Save_as_MEM(DEM_Array, Geo_out_dem, projection='WGS84')

    # reproject DEM by using example
    dest_out = RC.reproject_dataset_example(dest_dem, dest_example, method=2)
    DEM = dest_out.GetRasterBand(1).ReadAsArray()

    # find DEM water heights
    DEM_water = np.zeros(np.shape(Water_array))
    DEM_water[Water_array != 1] = np.nan
    DEM_water[Water_array == 1.] = DEM[Water_array == 1.]

    # Get array with areas
    import watools.Functions.Start.Area_converter as Area
    dlat, dlon = Area.Calc_dlat_dlon(Geo_out, size_X, size_Y)
    area_in_m2 = dlat * dlon

    # find volume and Area
    min_DEM_water = int(np.round(np.nanmin(DEM_water)))
    max_DEM_water = int(np.round(np.nanmax(DEM_water)))

    Reservoir_characteristics = np.zeros([1, 5])
    i = 0

    for height in range(min_DEM_water + 1, max_DEM_water):
        DEM_water_below_height = np.zeros(np.shape(DEM_water))
        DEM_water[np.isnan(DEM_water)] = 1000000
        DEM_water_below_height[DEM_water < height] = 1
        pixels = np.sum(DEM_water_below_height)

        area = np.sum(DEM_water_below_height * area_in_m2)
        if height == min_DEM_water + 1:
            volume = 0.5 * area
            histogram = pixels
            Reservoir_characteristics[:] = [
                height, pixels, area, volume, histogram
            ]
        else:
            area_previous = Reservoir_characteristics[i, 2]
            volume_previous = Reservoir_characteristics[i, 3]
            volume = volume_previous + 0.5 * (
                area - area_previous) + 1 * area_previous
            histogram_previous = Reservoir_characteristics[i, 1]
            histogram = pixels - histogram_previous
            Reservoir_characteristics_one = [
                height, pixels, area, volume, histogram
            ]
            Reservoir_characteristics = np.append(
                Reservoir_characteristics, Reservoir_characteristics_one)
            i += 1
            Reservoir_characteristics = np.resize(Reservoir_characteristics,
                                                  (i + 1, 5))

    maxi = int(len(Reservoir_characteristics[:, 3]))

    # find the minimum value for the reservoir height (the DEM has the same value if the reservoir was already filled when SRTM was created)
    Histogram = Reservoir_characteristics[:, 4]
    hist_mean = np.mean(Histogram)
    hist_std = np.std(Histogram)

    mini_tresh = hist_std * 5 + hist_mean

    Check_hist = np.zeros([len(Histogram)])
    Check_hist[Histogram > mini_tresh] = Histogram[Histogram > mini_tresh]
    if np.max(Check_hist) != 0.0:
        col = np.argwhere(Histogram == np.max(Check_hist))[0][0]
        mini = col + 1
    else:
        mini = 0

    fitted = 0

    # find starting point reservoirs
    V0 = Reservoir_characteristics[mini, 3]
    A0 = Reservoir_characteristics[mini, 2]

    # Calculate the best maxi reservoir characteristics, based on the normal V = a*x**b relation
    while fitted == 0:
        try:
            if mini == 0:
                popt1, pcov1 = curve_fit(
                    func, Reservoir_characteristics[mini:maxi, 2],
                    Reservoir_characteristics[mini:maxi, 3])
            else:
                popt1, pcov1 = curve_fit(
                    func, Reservoir_characteristics[mini:maxi, 2] - A0,
                    Reservoir_characteristics[mini:maxi, 3] - V0)
            fitted = 1
        except:
            maxi -= 1

        if maxi < mini:
            print('ERROR: was not able to find optimal fit')
            fitted = 1

    # Remove last couple of pixels of maxi
    maxi_end = int(np.round(maxi - 0.2 * (maxi - mini)))

    done = 0
    times = 0

    while done == 0 and times < 20 and maxi_end > mini:
        try:
            if mini == 0:
                popt, pcov = curve_fit(
                    func, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])
            else:
                popt, pcov = curve_fit(
                    func3, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])

        except:
            maxi_end = int(maxi)
            if mini == 0:
                popt, pcov = curve_fit(
                    func, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])
            else:
                popt, pcov = curve_fit(
                    func3, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])

        if mini == 0:
            plt.plot(Reservoir_characteristics[mini:maxi_end, 2],
                     Reservoir_characteristics[mini:maxi_end, 3], 'ro')
            t = np.arange(0., np.max(Reservoir_characteristics[:, 2]), 1000)
            plt.plot(t, popt[0] * (t)**popt[1], 'g--')
            plt.axis([
                0,
                np.max(Reservoir_characteristics[mini:maxi_end, 2]), 0,
                np.max(Reservoir_characteristics[mini:maxi_end, 3])
            ])
            plt.show()
            done = 1

        else:
            plt.plot(Reservoir_characteristics[mini:maxi_end, 2],
                     Reservoir_characteristics[mini:maxi_end, 3], 'ro')
            t = np.arange(0., np.max(Reservoir_characteristics[:, 2]), 1000)
            plt.plot(t, popt[0] * (t - popt[2])**popt[1] + popt[3], 'g--')
            plt.axis([
                0,
                np.max(Reservoir_characteristics[mini:maxi_end, 2]), 0,
                np.max(Reservoir_characteristics[mini:maxi_end, 3])
            ])
            plt.show()
            Volume_error = popt[3] / V0 * 100 - 100
            print('error Volume = %s percent' % Volume_error)
            print('error Area = %s percent' % (A0 / popt[2] * 100 - 100))

            if Volume_error < 30 and Volume_error > -30:
                done = 1
            else:
                times += 1
                maxi_end -= 1
                print('Another run is done in order to improve the result')

    if done == 0:
        popt = np.append(popt1, [A0, V0])

    if len(popt) == 2:
        popt = np.append(popt, [0, 0])

    return (popt)
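The area-volume relation fitted above is V = a*A**b (func) or V = a*(A - c)**b + d (func3). A minimal, self-contained sketch of the same power-law fit on synthetic data (the coefficients and noise level are illustrative assumptions, not reservoir values):

import numpy as np
from scipy.optimize import curve_fit

def func(x, a, b):
    # Same power-law form as in the listing: V = a * A**b
    return a * x**b

# Synthetic area (m2) / volume (m3) pairs following V = 0.4 * A**1.3
rng = np.random.default_rng(0)
A = np.linspace(1e4, 1e6, 50)
V = 0.4 * A**1.3 * (1 + 0.01 * rng.standard_normal(50))

# A rough initial guess helps the power-law fit converge
popt, pcov = curve_fit(func, A, V, p0=[1.0, 1.2], maxfev=10000)
print("a = %.3f, b = %.3f" % tuple(popt))   # should be close to 0.4 and 1.3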