Code example #1

# Module-level imports (assumed; RC and DC are aliased this way elsewhere in watools)
import os
import numpy as np
import pandas as pd
import netCDF4
import watools.General.raster_conversions as RC
import watools.General.data_conversions as DC
def main(files_DEM_dir, files_DEM, files_Basin, files_Runoff, files_Extraction, startdate, enddate, input_nc, resolution, Format_DEM_dir, Format_DEM, Format_Basin, Format_Runoff, Format_Extraction):

    # Define a year to get the epsg and geo
    Startdate_timestamp = pd.Timestamp(startdate)
    year = Startdate_timestamp.year

    ############################## Drainage Direction #####################################

    # Open Array DEM dir as netCDF
    if Format_DEM_dir == "NetCDF":
        file_DEM_dir = os.path.join(files_DEM_dir, "%d.nc" %year)
        DataCube_DEM_dir = RC.Open_nc_array(file_DEM_dir, "Drainage_Direction")
        geo_out_example, epsg_example, size_X_example, size_Y_example, size_Z_example, Time_example = RC.Open_nc_info(file_DEM_dir)

        # Create memory file for reprojection
        gland = DC.Save_as_MEM(DataCube_DEM_dir, geo_out_example, epsg_example)
        dataset_example = file_name_DEM_dir = gland

    # Open Array DEM dir as TIFF
    if Format_DEM_dir == "TIFF":
        file_name_DEM_dir = os.path.join(files_DEM_dir,"DIR_HydroShed_-_%s.tif" %resolution)
        DataCube_DEM_dir = RC.Open_tiff_array(file_name_DEM_dir)
        geo_out_example, epsg_example, size_X_example, size_Y_example = RC.Open_array_info(file_name_DEM_dir)
        dataset_example = file_name_DEM_dir

    # Calculate Area per pixel in m2
    import watools.Functions.Start.Area_converter as AC
    DataCube_Area = AC.Degrees_to_m2(file_name_DEM_dir)

    ################################## DEM ##########################################

    # Open Array DEM as netCDF
    if Format_DEM == "NetCDF":
        file_DEM = os.path.join(files_DEM, "%d.nc" %year)
        DataCube_DEM = RC.Open_nc_array(file_DEM, "Elevation")

    # Open Array DEM as TIFF
    if Format_DEM == "TIFF":
        file_name_DEM = os.path.join(files_DEM,"DEM_HydroShed_m_%s.tif" %resolution)
        destDEM = RC.reproject_dataset_example(file_name_DEM, dataset_example, method=1)
        DataCube_DEM = destDEM.GetRasterBand(1).ReadAsArray()
        
    ################################ Landuse ##########################################

    # Open Array Basin as netCDF
    if Format_Basin == "NetCDF":
        file_Basin = os.path.join(files_Basin, "%d.nc" %year)
        DataCube_Basin = RC.Open_nc_array(file_Basin, "Landuse")
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Basin, "Landuse")
        dest_basin = DC.Save_as_MEM(DataCube_Basin, geo_out, str(epsg))
        destLU = RC.reproject_dataset_example(dest_basin, dataset_example, method=1)
        DataCube_LU_CR = destLU.GetRasterBand(1).ReadAsArray()
        DataCube_Basin = np.zeros([size_Y_example, size_X_example])
        DataCube_Basin[DataCube_LU_CR > 0] = 1

    # Open Array Basin as TIFF
    if Format_Basin == "TIFF":
        file_name_Basin = files_Basin
        destLU = RC.reproject_dataset_example(file_name_Basin, dataset_example, method=1)
        DataCube_LU_CR = destLU.GetRasterBand(1).ReadAsArray()
        DataCube_Basin = np.zeros([size_Y_example, size_X_example])
        DataCube_Basin[DataCube_LU_CR > 0] = 1

    ################################ Surface Runoff ##########################################

    # Open Array runoff as netCDF
    if Format_Runoff == "NetCDF":
        DataCube_Runoff = RC.Open_ncs_array(files_Runoff, "Surface_Runoff", startdate, enddate)
        size_Z_example = DataCube_Runoff.shape[0]
        file_Runoff = os.path.join(files_Runoff, "%d.nc" %year)
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Runoff, "Surface_Runoff")
        DataCube_Runoff_CR = np.ones([size_Z_example, size_Y_example, size_X_example]) * np.nan
        for i in range(0, size_Z):
            DataCube_Runoff_one = DataCube_Runoff[i,:,:]
            dest_Runoff_one = DC.Save_as_MEM(DataCube_Runoff_one, geo_out, str(epsg))
            dest_Runoff = RC.reproject_dataset_example(dest_Runoff_one, dataset_example, method=4)
            DataCube_Runoff_CR[i,:,:] = dest_Runoff.GetRasterBand(1).ReadAsArray()

        DataCube_Runoff_CR[:, DataCube_LU_CR == 0] = -9999
        DataCube_Runoff_CR[DataCube_Runoff_CR < 0] = -9999

    # Open Array runoff as TIFF
    if Format_Runoff == "TIFF":
        DataCube_Runoff_CR = RC.Get3Darray_time_series_monthly(files_Runoff, startdate, enddate, Example_data = dataset_example)

    ################################ Surface Withdrawal ##########################################

    # Open Array Extraction as netCDF
    if Format_Extraction == "NetCDF":
        DataCube_Extraction = RC.Open_ncs_array(files_Extraction, "Surface_Withdrawal", startdate, enddate)
        size_Z_example = DataCube_Extraction.shape[0]
        file_Extraction = os.path.join(files_Extraction, "%d.nc" %year)
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Extraction, "Surface_Withdrawal")
        DataCube_Extraction_CR = np.ones([size_Z_example, size_Y_example, size_X_example]) * np.nan
        for i in range(0, size_Z):
            DataCube_Extraction_one = DataCube_Extraction[i,:,:]
            dest_Extraction_one = DC.Save_as_MEM(DataCube_Extraction_one, geo_out, str(epsg))
            dest_Extraction = RC.reproject_dataset_example(dest_Extraction_one, dataset_example, method=4)
            DataCube_Extraction_CR[i,:,:] = dest_Extraction.GetRasterBand(1).ReadAsArray()

        DataCube_Extraction_CR[:, DataCube_LU_CR == 0] = -9999
        DataCube_Extraction_CR[DataCube_Extraction_CR < 0] = -9999

    # Open Array Extraction as TIFF
    if Format_Extraction == "TIFF":
        DataCube_Extraction_CR = RC.Get3Darray_time_series_monthly(files_Extraction, startdate, enddate, Example_data = dataset_example)

    ################################ Create input netcdf ##########################################
    # Save data in one NetCDF file
    geo_out_example = np.array(geo_out_example)

    # Latitude and longitude
    lon_ls = np.arange(size_X_example)*geo_out_example[1]+geo_out_example[0] + 0.5 * geo_out_example[1]
    lat_ls = np.arange(size_Y_example)*geo_out_example[5]+geo_out_example[3] - 0.5 * geo_out_example[5]

    lat_n = len(lat_ls)
    lon_n = len(lon_ls)

    # Create NetCDF file
    nc_file = netCDF4.Dataset(input_nc, 'w')
    nc_file.set_fill_on()

    # Create dimensions
    lat_dim = nc_file.createDimension('latitude', lat_n)
    lon_dim = nc_file.createDimension('longitude', lon_n)

    # Create NetCDF variables
    crso = nc_file.createVariable('crs', 'i4')
    crso.long_name = 'Lon/Lat Coords in WGS84'
    crso.standard_name = 'crs'
    crso.grid_mapping_name = 'latitude_longitude'
    crso.projection = epsg_example
    crso.longitude_of_prime_meridian = 0.0
    crso.semi_major_axis = 6378137.0
    crso.inverse_flattening = 298.257223563
    crso.geo_reference = geo_out_example

    lat_var = nc_file.createVariable('latitude', 'f8', ('latitude',))
    lat_var.units = 'degrees_north'
    lat_var.standard_name = 'latitude'
    lat_var.pixel_size = geo_out_example[5]

    lon_var = nc_file.createVariable('longitude', 'f8', ('longitude',))
    lon_var.units = 'degrees_east'
    lon_var.standard_name = 'longitude'
    lon_var.pixel_size = geo_out_example[1]

    Dates = pd.date_range(startdate,enddate,freq = 'MS')
    time_or=np.zeros(len(Dates))
    i = 0
    for Date in Dates:
        time_or[i] = Date.toordinal()
        i += 1
    nc_file.createDimension('time', None)
    timeo = nc_file.createVariable('time', 'f4', ('time',))
    timeo.units = 'Monthly'
    timeo.standard_name = 'time'

    # Variables
    demdir_var = nc_file.createVariable('demdir', 'i',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    demdir_var.long_name = 'Flow Direction Map'
    demdir_var.grid_mapping = 'crs'

    dem_var = nc_file.createVariable('dem', 'f8',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    dem_var.long_name = 'Altitude'
    dem_var.units = 'meters'
    dem_var.grid_mapping = 'crs'

    basin_var = nc_file.createVariable('basin', 'i',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    basin_var.long_name = 'Basin'
    basin_var.units = '-'
    basin_var.grid_mapping = 'crs'

    area_var = nc_file.createVariable('area', 'f8',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    area_var.long_name = 'area in squared meters'
    area_var.units = 'squared_meters'
    area_var.grid_mapping = 'crs'

    runoff_var = nc_file.createVariable('Runoff_M', 'f8',
                                   ('time', 'latitude', 'longitude'),
                                   fill_value=-9999)
    runoff_var.long_name = 'Runoff'
    runoff_var.units = 'm3/month'
    runoff_var.grid_mapping = 'crs'

    extraction_var = nc_file.createVariable('Extraction_M', 'f8',
                                    ('time', 'latitude', 'longitude'),
                                    fill_value=-9999)
    extraction_var.long_name = 'Surface water Extraction'
    extraction_var.units = 'm3/month'
    extraction_var.grid_mapping = 'crs'


    # Load data
    lat_var[:] = lat_ls
    lon_var[:] = lon_ls
    timeo[:] = time_or

    # Static variables
    demdir_var[:, :] = DataCube_DEM_dir[:, :]
    dem_var[:, :] = DataCube_DEM[:, :]
    basin_var[:, :] = DataCube_Basin[:, :]
    area_var[:, :] = DataCube_Area[:, :]
    for i in range(len(Dates)):
        runoff_var[i,:,:] = DataCube_Runoff_CR[i,:,:]
    for i in range(len(Dates)):
        extraction_var[i,:,:] = DataCube_Extraction_CR[i,:,:]

    # Close file
    nc_file.close()
    return()
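
A minimal call sketch for the function above, assuming the TIFF branches; every path, date, and the '15s' resolution label below are placeholders, not values taken from the watools project.

# Hypothetical invocation: all paths, dates and the resolution label are assumptions.
main(files_DEM_dir="HydroSHED/DIR",           # expects DIR_HydroShed_-_15s.tif inside
     files_DEM="HydroSHED/DEM",               # expects DEM_HydroShed_m_15s.tif inside
     files_Basin="Basin/basin_mask.tif",      # single basin/landuse TIFF
     files_Runoff="Runoff/Monthly",           # folder with monthly surface runoff TIFFs
     files_Extraction="Extraction/Monthly",   # folder with monthly surface withdrawal TIFFs
     startdate="2005-01-01",
     enddate="2005-12-31",
     input_nc="input_routing.nc",             # NetCDF file that this function creates
     resolution="15s",
     Format_DEM_dir="TIFF", Format_DEM="TIFF", Format_Basin="TIFF",
     Format_Runoff="TIFF", Format_Extraction="TIFF")
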
Code example #2
File: DataAccess.py  Project: wateraccounting/watools
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version,
                 Product):
    """
    This scripts downloads SSEBop ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    lonlim -- [ymin, ymax] (values must be between -90 and 90)
    latlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    if version == "FTP":
        # Check the latitude and longitude; otherwise clamp them to the largest possible extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print(
                'Latitude above 80N or below 59.2S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print(
                'Longitude must be between 180W and 180E. Value set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude; otherwise clamp them to the largest possible extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print(
                'Latitude above 80N or below 60S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print(
                'Longitude must be between 180W and 180E. Value set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Define the output directory and create it if it does not exist
    if Product == "ETact":
        output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
        freq_use = "MS"
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration',
                                     'FEWS', 'Daily')
        freq_use = "D"

    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq=freq_use)

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Loop over the dates
    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month
        day = Date.day

        if version == "FTP":

            # Date as printed in filename
            Filename_out = os.path.join(
                output_folder,
                'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

            # Define end filename
            Filename_dir = os.path.join("%s" % year,
                                        "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

        if version == "V4":

            # Date as printed in filename
            if Product == "ETpot":
                Filename_out = os.path.join(
                    output_folder,
                    'ETpot_FEWS_mm-day-1_daily_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = 'et%02s%02d%02d.tar.gz' % (str(year)[2:],
                                                               month, day)
                # The end file name after downloading and unzipping
                Filename_only = "et%02s%02d%02d.bil" % (str(year)[2:], month,
                                                        day)
                # Create a temporary folder for the unzipped .bil files
                temp_folder = os.path.join(output_folder, "Temp")
                if not os.path.exists(temp_folder):
                    os.makedirs(temp_folder)
                local_filename = os.path.join(temp_folder, Filename_only)

            if Product == "ETact":
                Filename_out = os.path.join(
                    output_folder,
                    'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = "m%s%02d.zip" % (str(year), month)
                # The end file name after downloading and unzipping
                Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (
                    str(year), month)

                # Temporary filename for the downloaded global file
                local_filename = os.path.join(output_folder, Filename_only)

        # Download the data if the file does not exist yet
        if not os.path.exists(Filename_out):
            try:

                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    if Product == "ETpot":
                        Download_SSEBop_from_Web(temp_folder,
                                                 Filename_only_zip, Product)
                    if Product == "ETact":
                        Download_SSEBop_from_Web(output_folder,
                                                 Filename_only_zip, Product)

                if Product == "ETpot":
                    Array_ETpot = RC.Open_bil_array(local_filename)
                    Array_ETpot = Array_ETpot / 100
                    Geo_out = tuple([-180.5, 1, 0, 90.5, 0, -1])
                    dest = DC.Save_as_MEM(Array_ETpot, Geo_out, "WGS84")
                    data, Geo_out = RC.clip_data(dest, latlim, lonlim)
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")

                if Product == "ETact":
                    # Clip dataset
                    RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim,
                                         lonlim)
                    os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        if Product == "ETact":
            zipfiles = glob.glob("*.zip")
            for zipfile in zipfiles:
                os.remove(os.path.join(output_folder, zipfile))
            xmlfiles = glob.glob("*.xml")
            for xmlfile in xmlfiles:
                os.remove(os.path.join(output_folder, xmlfile))
        if Product == "ETpot":
            import shutil
            Temp_dir = os.path.join(output_folder, "Temp")
            shutil.rmtree(Temp_dir)

    return
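
A call sketch for DownloadData, following the argument formats given in the docstring; the output directory and bounding box are placeholders.

# Sketch only: directory and bounding box are placeholders.
DownloadData(Dir="C:/WA_data/",
             Startdate="2010-01-01", Enddate="2010-12-31",
             latlim=[28.0, 34.0],     # [ymin, ymax] in degrees latitude
             lonlim=[29.0, 35.0],     # [xmin, xmax] in degrees longitude
             Waitbar=1,               # 1 prints a console progress bar
             version="V4",            # "FTP" or "V4"
             Product="ETact")         # "ETact" (monthly SSEBop) or "ETpot" (daily FEWS)
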
Code example #3
File: Part3_Reservoirs.py  Project: CMicha/watools
def Add_Reservoirs(output_nc, Diff_Water_Volume, Regions):

    import numpy as np

    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC

    # Extract data from NetCDF file
    Discharge_dict = RC.Open_nc_dict(output_nc, "dischargedict_dynamic")
    River_dict = RC.Open_nc_dict(output_nc, "riverdict_static")
    DEM_dict = RC.Open_nc_dict(output_nc, "demdict_static")
    Distance_dict = RC.Open_nc_dict(output_nc, "distancedict_static")
    Rivers = RC.Open_nc_array(output_nc, "rivers")
    acc_pixels = RC.Open_nc_array(output_nc, "accpix")

    # Open data array info based on example data
    geo_out, epsg, size_X, size_Y, size_Z, time = RC.Open_nc_info(output_nc)

    # Create ID Matrix
    y, x = np.indices((size_Y, size_X))
    ID_Matrix = np.int32(
        np.ravel_multi_index(np.vstack((y.ravel(), x.ravel())),
                             (size_Y, size_X),
                             mode='clip').reshape(x.shape)) + 1
    del x, y

    Acc_Pixels_Rivers = Rivers * acc_pixels
    ID_Rivers = Rivers * ID_Matrix

    Amount_of_Reservoirs = len(Regions)

    Reservoir_is_in_River = np.ones([len(Regions), 3]) * -9999

    for reservoir in range(0, Amount_of_Reservoirs):

        region = Regions[reservoir, :]

        dest = DC.Save_as_MEM(Acc_Pixels_Rivers, geo_out, projection='WGS84')
        Rivers_Acc_Pixels_reservoir, Geo_out = RC.clip_data(
            dest, latlim=[region[2], region[3]], lonlim=[region[0], region[1]])

        dest = DC.Save_as_MEM(ID_Rivers, geo_out, projection='WGS84')
        Rivers_ID_reservoir, Geo_out = RC.clip_data(
            dest, latlim=[region[2], region[3]], lonlim=[region[0], region[1]])

        size_Y_reservoir, size_X_reservoir = np.shape(
            Rivers_Acc_Pixels_reservoir)
        IDs_Edges = []
        IDs_Edges = np.append(IDs_Edges, Rivers_Acc_Pixels_reservoir[0, :])
        IDs_Edges = np.append(IDs_Edges, Rivers_Acc_Pixels_reservoir[:, 0])
        IDs_Edges = np.append(
            IDs_Edges,
            Rivers_Acc_Pixels_reservoir[int(size_Y_reservoir) - 1, :])
        IDs_Edges = np.append(
            IDs_Edges, Rivers_Acc_Pixels_reservoir[:,
                                                   int(size_X_reservoir) - 1])
        Value_Reservoir = np.max(np.unique(IDs_Edges))

        y_pix_res, x_pix_res = np.argwhere(
            Rivers_Acc_Pixels_reservoir == Value_Reservoir)[0]
        ID_reservoir = Rivers_ID_reservoir[y_pix_res, x_pix_res]

        # Find the exact reservoir location in the river dictionary
        for River_part in River_dict.items():
            if len(np.argwhere(River_part[1] == ID_reservoir)) > 0:
                Reservoir_is_in_River[reservoir, 0] = np.argwhere(
                    River_part[1] == ID_reservoir)[0][0]  # index of the reservoir within the river branch
                Reservoir_is_in_River[reservoir,
                                      1] = River_part[0]  # key of the river branch holding the reservoir
                Reservoir_is_in_River[reservoir, 2] = 1  # flag: reservoir found in a river

    numbers = abs(Reservoir_is_in_River[:, 1].argsort() -
                  len(Reservoir_is_in_River) + 1)

    for number in range(0, len(Reservoir_is_in_River)):

        row_reservoir = np.argwhere(numbers == number)[0][0]

        if not Reservoir_is_in_River[row_reservoir, 2] == -9999:

            # Get discharge into the reservoir:
            Flow_in_res_m3 = Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][:,
                                    int(Reservoir_is_in_River[row_reservoir,
                                                              0])]

            # Get difference reservoir
            Change_Reservoir_m3 = Diff_Water_Volume[row_reservoir, :, 2]

            # Total Change outflow
            Change_outflow_m3 = np.minimum(Flow_in_res_m3, Change_Reservoir_m3)

            Difference = Change_outflow_m3 - Change_Reservoir_m3
            if abs(np.sum(Difference)) > 10000 and np.sum(
                    Change_Reservoir_m3[Change_outflow_m3 > 0]) > 0:
                Change_outflow_m3[Change_outflow_m3 < 0] = Change_outflow_m3[
                    Change_outflow_m3 < 0] * np.sum(
                        Change_outflow_m3[Change_outflow_m3 > 0]) / np.sum(
                            Change_Reservoir_m3[Change_outflow_m3 > 0])

            # Find the key name (which is also the length of the river dictionary)
            i = len(River_dict)

            #River_with_reservoirs_dict[i]=list((River_dict[River_Add_Reservoir][River_part_good[0][0]:]).flat) < make the dictionary arrays this way, then the array is 1D
            River_dict[i] = River_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            River_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = River_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            DEM_dict[i] = DEM_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            DEM_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = DEM_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            Distance_dict[i] = Distance_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            Distance_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = Distance_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:int(Reservoir_is_in_River[row_reservoir, 0]) + 1]

            Discharge_dict[i] = Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir, 1])][:,
                                    int(Reservoir_is_in_River[row_reservoir,
                                                              0]):]
            Discharge_dict[int(
                Reservoir_is_in_River[row_reservoir, 1])] = Discharge_dict[int(
                    Reservoir_is_in_River[
                        row_reservoir,
                        1])][:, :int(Reservoir_is_in_River[row_reservoir, 0]) +
                             1]
            Discharge_dict[int(Reservoir_is_in_River[
                row_reservoir,
                1])][:, 1:int(Reservoir_is_in_River[row_reservoir, 0]) +
                     1] = Discharge_dict[int(
                         Reservoir_is_in_River[row_reservoir, 1]
                     )][:, 1:int(Reservoir_is_in_River[row_reservoir, 0]) +
                        1] - Change_outflow_m3[:, None]
            Next_ID = River_dict[int(Reservoir_is_in_River[row_reservoir,
                                                           1])][0]

            times = 0
            while len(River_dict) > times:
                for River_part in River_dict.items():
                    if River_part[-1][-1] == Next_ID:
                        Next_ID = River_part[-1][0]
                        item = River_part[0]
                        # At least 10 percent of the incoming discharge will always pass the dam
                        Change_outflow_m3[:, None] = np.minimum(
                            0.9 * Discharge_dict[item][:, -1:],
                            Change_outflow_m3[:, None])

                        Discharge_dict[item][:, 1:] = Discharge_dict[
                            item][:, 1:] - Change_outflow_m3[:, None]
                        print(item)
                        times = 0
                    times += 1

    return (Discharge_dict, River_dict, DEM_dict, Distance_dict)
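
A sketch of the inputs Add_Reservoirs expects, inferred from how Regions and Diff_Water_Volume are indexed above; the bounding box reuses the coordinates of the commented-out clip in code example #5, and the volume changes are made-up placeholders.

import numpy as np

# One row per reservoir: [lon_min, lon_max, lat_min, lat_max] in degrees
Regions = np.array([[35.810, 36.005, 14.528, 14.985]])

# Diff_Water_Volume[reservoir, timestep, 2] holds the volume change per timestep in m3
n_timesteps = 12
Diff_Water_Volume = np.zeros((len(Regions), n_timesteps, 3))
Diff_Water_Volume[0, :, 2] = 1.0e6            # placeholder volume change

Discharge_dict, River_dict, DEM_dict, Distance_dict = Add_Reservoirs(
    "input_routing.nc", Diff_Water_Volume, Regions)   # output_nc path is a placeholder
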
Code example #4

# Module-level imports (assumed); Get_epsg and Open_nc_info are helpers defined in the
# same watools raster_conversions module as this function.
import os
import numpy as np
from osgeo import gdal, osr
def reproject_dataset_example(dataset, dataset_example, method=1):
    """
    A sample function to reproject and resample a GDAL dataset from within
    Python. The user can define the wanted projection and shape by defining an example dataset.

    Keywords arguments:
    dataset -- 'C:/file/to/path/file.tif' or a gdal file (gdal.Open(filename))
        string that defines the input tiff file or gdal file
    dataset_example -- 'C:/file/to/path/file.tif' or a gdal file (gdal.Open(filename))
        string that defines the input tiff file or gdal file
    method -- 1,2,3,4 default = 1
        1 = Nearest Neighbour, 2 = Bilinear, 3 = lanzcos, 4 = average
    """
    # open dataset that must be transformed
    try:
        if os.path.splitext(dataset)[-1] == '.tif':
            g = gdal.Open(dataset)
        else:
            g = dataset
    except:
        g = dataset
    epsg_from = Get_epsg(g)

    #exceptions
    if epsg_from == 9001:
        epsg_from = 5070

    # open dataset that is used for transforming the dataset
    try:
        if os.path.splitext(dataset_example)[-1] == '.tif':
            gland = gdal.Open(dataset_example)
            epsg_to = Get_epsg(gland)
        elif os.path.splitext(dataset_example)[-1] == '.nc':
            import watools.General.data_conversions as DC
            geo_out, epsg_to, size_X, size_Y, size_Z, Time = Open_nc_info(
                dataset_example)
            data = np.zeros([size_Y, size_X])
            gland = DC.Save_as_MEM(data, geo_out, str(epsg_to))
        else:
            gland = dataset_example
            epsg_to = Get_epsg(gland)
    except:
        gland = dataset_example
        epsg_to = Get_epsg(gland)

    # Set the EPSG codes
    osng = osr.SpatialReference()
    osng.ImportFromEPSG(epsg_to)
    wgs84 = osr.SpatialReference()
    wgs84.ImportFromEPSG(epsg_from)

    # Get shape and geo transform from example
    geo_land = gland.GetGeoTransform()
    col = gland.RasterXSize
    rows = gland.RasterYSize

    # Create new raster
    mem_drv = gdal.GetDriverByName('MEM')
    dest1 = mem_drv.Create('', col, rows, 1, gdal.GDT_Float32)
    dest1.SetGeoTransform(geo_land)
    dest1.SetProjection(osng.ExportToWkt())

    # Perform the projection/resampling
    if method == 1:
        gdal.ReprojectImage(g, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(),
                            gdal.GRA_NearestNeighbour)
    if method == 2:
        gdal.ReprojectImage(g, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(),
                            gdal.GRA_Bilinear)
    if method == 3:
        gdal.ReprojectImage(g, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(),
                            gdal.GRA_Lanczos)
    if method == 4:
        gdal.ReprojectImage(g, dest1, wgs84.ExportToWkt(), osng.ExportToWkt(),
                            gdal.GRA_Average)
    return (dest1)
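
A usage sketch for reproject_dataset_example; the file names are placeholders and method=2 selects the bilinear option listed in the docstring.

# Resample a source GeoTIFF onto the grid and projection of an example GeoTIFF (placeholder names).
dest = reproject_dataset_example("source_dem.tif", "example_grid.tif", method=2)
resampled = dest.GetRasterBand(1).ReadAsArray()   # numpy array on the example grid
geo_out = dest.GetGeoTransform()                  # geotransform of the example grid
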
Code example #5
File: Part3_Reservoirs.py  Project: CMicha/watools
def Find_Area_Volume_Relation(region, input_JRC, input_nc):

    # Find relation between V and A

    import numpy as np
    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC
    from scipy.optimize import curve_fit
    import matplotlib.pyplot as plt

    def func(x, a, b):
        """
        Power-law relation between reservoir area and volume: V = a * A**b
        """
        return (a * x**b)

    def func3(x, a, b, c, d):
        """
        Shifted power-law relation between reservoir area and volume: V = a * (A - c)**b + d
        """
        return (a * (x - c)**b + d)

    #Array, Geo_out = RC.clip_data(input_JRC,latlim=[14.528,14.985],lonlim =[35.810,36.005])
    Array, Geo_out = RC.clip_data(
        input_JRC,
        latlim=[region[2], region[3]],
        lonlim=[region[0], region[1]
                ])  # This reservoir was not filled when SRTM was taken
    size_Y = int(np.shape([Array])[-2])
    size_X = int(np.shape([Array])[-1])

    Water_array = np.zeros(np.shape(Array))
    buffer_zone = 4
    Array[Array > 0] = 1
    for i in range(0, size_Y):
        for j in range(0, size_X):
            Water_array[i, j] = np.max(Array[
                np.maximum(0, i -
                           buffer_zone):np.minimum(size_Y, i + buffer_zone +
                                                   1),
                np.maximum(0, j -
                           buffer_zone):np.minimum(size_X, j + buffer_zone +
                                                   1)])
    del Array

    # Open DEM and reproject
    DEM_Array = RC.Open_nc_array(input_nc, "dem")
    Geo_out_dem, proj_dem, size_X_dem, size_Y_dem, size_Z_dem, time = RC.Open_nc_info(
        input_nc)

    # Save Example as memory file
    dest_example = DC.Save_as_MEM(Water_array, Geo_out, projection='WGS84')
    dest_dem = DC.Save_as_MEM(DEM_Array, Geo_out_dem, projection='WGS84')

    # reproject DEM by using example
    dest_out = RC.reproject_dataset_example(dest_dem, dest_example, method=2)
    DEM = dest_out.GetRasterBand(1).ReadAsArray()

    # find DEM water heights
    DEM_water = np.zeros(np.shape(Water_array))
    DEM_water[Water_array != 1] = np.nan
    DEM_water[Water_array == 1.] = DEM[Water_array == 1.]

    # Get array with areas
    import watools.Functions.Start.Area_converter as Area
    dlat, dlon = Area.Calc_dlat_dlon(Geo_out, size_X, size_Y)
    area_in_m2 = dlat * dlon

    # find volume and Area
    min_DEM_water = int(np.round(np.nanmin(DEM_water)))
    max_DEM_water = int(np.round(np.nanmax(DEM_water)))

    Reservoir_characteristics = np.zeros([1, 5])
    i = 0

    for height in range(min_DEM_water + 1, max_DEM_water):
        DEM_water_below_height = np.zeros(np.shape(DEM_water))
        DEM_water[np.isnan(DEM_water)] = 1000000
        DEM_water_below_height[DEM_water < height] = 1
        pixels = np.sum(DEM_water_below_height)

        area = np.sum(DEM_water_below_height * area_in_m2)
        if height == min_DEM_water + 1:
            volume = 0.5 * area
            histogram = pixels
            Reservoir_characteristics[:] = [
                height, pixels, area, volume, histogram
            ]
        else:
            area_previous = Reservoir_characteristics[i, 2]
            volume_previous = Reservoir_characteristics[i, 3]
            volume = volume_previous + 0.5 * (
                area - area_previous) + 1 * area_previous
            histogram_previous = Reservoir_characteristics[i, 1]
            histogram = pixels - histogram_previous
            Reservoir_characteristics_one = [
                height, pixels, area, volume, histogram
            ]
            Reservoir_characteristics = np.append(
                Reservoir_characteristics, Reservoir_characteristics_one)
            i += 1
            Reservoir_characteristics = np.resize(Reservoir_characteristics,
                                                  (i + 1, 5))

    maxi = int(len(Reservoir_characteristics[:, 3]))

    # Find the minimum value for the reservoir height (the DEM has the same value if the reservoir was already filled when SRTM was created)
    Histogram = Reservoir_characteristics[:, 4]
    hist_mean = np.mean(Histogram)
    hist_std = np.std(Histogram)

    mini_thresh = hist_std * 5 + hist_mean

    Check_hist = np.zeros([len(Histogram)])
    Check_hist[Histogram > mini_thresh] = Histogram[Histogram > mini_thresh]
    if np.max(Check_hist) != 0.0:
        col = np.argwhere(Histogram == np.max(Check_hist))[0][0]
        mini = col + 1
    else:
        mini = 0

    fitted = 0

    # find starting point reservoirs
    V0 = Reservoir_characteristics[mini, 3]
    A0 = Reservoir_characteristics[mini, 2]

    # Calculate the best maxi reservoir characteristics, based on the normal V = a*x**b relation
    while fitted == 0:
        try:
            if mini == 0:
                popt1, pcov1 = curve_fit(
                    func, Reservoir_characteristics[mini:maxi, 2],
                    Reservoir_characteristics[mini:maxi, 3])
            else:
                popt1, pcov1 = curve_fit(
                    func, Reservoir_characteristics[mini:maxi, 2] - A0,
                    Reservoir_characteristics[mini:maxi, 3] - V0)
            fitted = 1
        except:
            maxi -= 1

        if maxi < mini:
            print('ERROR: was not able to find optimal fit')
            fitted = 1

    # Remove last couple of pixels of maxi
    maxi_end = int(np.round(maxi - 0.2 * (maxi - mini)))

    done = 0
    times = 0

    while done == 0 and times < 20 and maxi_end > mini:  # retry the fit until it converges or the bounds are exhausted
        try:
            if mini == 0:
                popt, pcov = curve_fit(
                    func, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])
            else:
                popt, pcov = curve_fit(
                    func3, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])

        except:
            maxi_end = int(maxi)
            if mini == 0:
                popt, pcov = curve_fit(
                    func, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])
            else:
                popt, pcov = curve_fit(
                    func3, Reservoir_characteristics[mini:maxi_end, 2],
                    Reservoir_characteristics[mini:maxi_end, 3])

        if mini == 0:
            plt.plot(Reservoir_characteristics[mini:maxi_end, 2],
                     Reservoir_characteristics[mini:maxi_end, 3], 'ro')
            t = np.arange(0., np.max(Reservoir_characteristics[:, 2]), 1000)
            plt.plot(t, popt[0] * (t)**popt[1], 'g--')
            plt.axis([
                0,
                np.max(Reservoir_characteristics[mini:maxi_end, 2]), 0,
                np.max(Reservoir_characteristics[mini:maxi_end, 3])
            ])
            plt.show()
            done = 1

        else:
            plt.plot(Reservoir_characteristics[mini:maxi_end, 2],
                     Reservoir_characteristics[mini:maxi_end, 3], 'ro')
            t = np.arange(0., np.max(Reservoir_characteristics[:, 2]), 1000)
            plt.plot(t, popt[0] * (t - popt[2])**popt[1] + popt[3], 'g--')
            plt.axis([
                0,
                np.max(Reservoir_characteristics[mini:maxi_end, 2]), 0,
                np.max(Reservoir_characteristics[mini:maxi_end, 3])
            ])
            plt.show()
            Volume_error = popt[3] / V0 * 100 - 100
            print('error Volume = %s percent' % Volume_error)
            print('error Area = %s percent' % (A0 / popt[2] * 100 - 100))

            if Volume_error < 30 and Volume_error > -30:
                done = 1
            else:
                times += 1
                maxi_end -= 1
                print('Another run is done in order to improve the result')

    if done == 0:
        popt = np.append(popt1, [A0, V0])

    if len(popt) == 2:
        popt = np.append(popt, [0, 0])

    return (popt)
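
A sketch of how the returned popt could be used, assuming the shifted power-law form of func3; the JRC and NetCDF paths are placeholders, and the bounding box is the one from the commented-out clip above.

# Placeholder paths; region = [lon_min, lon_max, lat_min, lat_max]
region = [35.810, 36.005, 14.528, 14.985]
popt = Find_Area_Volume_Relation(region, "JRC_occurrence.tif", "input_routing.nc")

# popt holds (a, b, c, d); the fallback paths above pad it so the same form applies
a, b, c, d = popt
Area_m2 = 2.5e6                            # example reservoir surface area in m2
Volume_m3 = a * (Area_m2 - c)**b + d       # fitted area-volume relation (func3 form),
                                           # valid for areas larger than the offset c
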