Example #1
def Convert_dict_to_array(River_dict, Array_dict, Reference_data):

    import numpy as np
    import os
    import watools.General.raster_conversions as RC

    if os.path.splitext(Reference_data)[-1] == '.nc':
        # Get raster information
        geo_out, proj, size_X, size_Y, size_Z, Time = RC.Open_nc_info(
            Reference_data)
    else:
        # Get raster information
        geo_out, proj, size_X, size_Y = RC.Open_array_info(Reference_data)

    # Create ID Matrix
    y, x = np.indices((size_Y, size_X))
    ID_Matrix = np.int32(
        np.ravel_multi_index(np.vstack((y.ravel(), x.ravel())),
                             (size_Y, size_X),
                             mode='clip').reshape(x.shape)) + 1

    # Get tiff array time dimension:
    time_dimension = int(np.shape(Array_dict[0])[0])

    # create an empty array
    DataCube = np.ones([time_dimension, size_Y, size_X]) * np.nan

    for river_part in range(0, len(River_dict)):
        for river_pixel in range(1, len(River_dict[river_part])):
            river_pixel_ID = River_dict[river_part][river_pixel]
            if len(np.argwhere(ID_Matrix == river_pixel_ID)) > 0:
                row, col = np.argwhere(ID_Matrix == river_pixel_ID)[0][:]
                DataCube[:, row, col] = Array_dict[river_part][:, river_pixel]

    return (DataCube)
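A minimal usage sketch for Convert_dict_to_array; the dictionary layout (one list of pixel IDs per river part, with value arrays shaped [time, pixel]) is inferred from the loops above, and the reference raster path is hypothetical.

import numpy as np

# One river part covering three pixel IDs of the reference grid
River_dict = {0: [5, 6, 7]}
# Matching values: 2 time steps x 3 river pixels
Array_dict = {0: np.arange(6, dtype=float).reshape(2, 3)}

# Returns a cube shaped (time, size_Y, size_X) of the reference raster,
# NaN everywhere except the listed river pixels
DataCube = Convert_dict_to_array(River_dict, Array_dict, "example_dem.tif")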
Example #2
def Run(input_nc, output_nc, input_JRC):

    # Define names
    #Name_py_Discharge_dict_CR2 = os.path.join(Dir_Basin, 'Simulations', 'Simulation_%d' %Simulation, 'Sheet_5', 'Discharge_dict_CR2_simulation%d.npy' %(Simulation))
    #Name_py_River_dict_CR2 = os.path.join(Dir_Basin, 'Simulations', 'Simulation_%d' %Simulation, 'Sheet_5', 'River_dict_CR2_simulation%d.npy' %(Simulation))
    #Name_py_DEM_dict_CR2 = os.path.join(Dir_Basin, 'Simulations', 'Simulation_%d' %Simulation, 'Sheet_5', 'DEM_dict_CR2_simulation%d.npy' %(Simulation))
    #Name_py_Distance_dict_CR2 = os.path.join(Dir_Basin, 'Simulations', 'Simulation_%d' %Simulation, 'Sheet_5', 'Distance_dict_CR2_simulation%d.npy' %(Simulation))

    #if not (os.path.exists(Name_py_Discharge_dict_CR2) and os.path.exists(Name_py_River_dict_CR2) and os.path.exists(Name_py_DEM_dict_CR2) and os.path.exists(Name_py_Distance_dict_CR2)):
    # Copy dicts as a starting point before adding the reservoirs
    import watools.General.raster_conversions as RC
    import numpy as np
    from datetime import date

    Discharge_dict_CR2 = RC.Open_nc_dict(output_nc, "dischargedict_dynamic")

    DEM_dataset = RC.Open_nc_array(input_nc, "dem")
    time = RC.Open_nc_array(output_nc, "time")

    Startdate = date.fromordinal(int(time[0]))
    Enddate = date.fromordinal(int(time[-1]))

    # Define names for reservoirs calculations
    #Name_py_Diff_Water_Volume =  os.path.join(Dir_Basin,'Simulations','Simulation_%d' %Simulation, 'Sheet_5','Diff_Water_Volume_CR2_simulation%d.npy' %(Simulation))
    #Name_py_Regions =  os.path.join(Dir_Basin,'Simulations','Simulation_%d' %Simulation, 'Sheet_5','Regions_simulation%d.npy' %(Simulation))

    geo_out, proj, size_X, size_Y = RC.Open_array_info(input_JRC)

    Boundaries = dict()
    Boundaries['Lonmin'] = geo_out[0]
    Boundaries['Lonmax'] = geo_out[0] + size_X * geo_out[1]
    Boundaries['Latmin'] = geo_out[3] + size_Y * geo_out[5]
    Boundaries['Latmax'] = geo_out[3]

    Regions = Calc_Regions(input_nc, output_nc, input_JRC, Boundaries)

    Amount_months = len(Discharge_dict_CR2[0])

    Diff_Water_Volume = np.zeros([len(Regions), Amount_months, 3])
    reservoir = 0

    for region in Regions:

        popt = Find_Area_Volume_Relation(region, input_JRC, input_nc)

        Area_Reservoir_Values = GEE_calc_reservoir_area(
            region, Startdate, Enddate)

        Diff_Water_Volume[reservoir, :, :] = Calc_Diff_Storage(
            Area_Reservoir_Values, popt)
        reservoir += 1

    ################# 7.3 Add storage reservoirs and change outflows ##################
    Discharge_dict_CR2, River_dict_CR2, DEM_dict_CR2, Distance_dict_CR2 = Add_Reservoirs(
        output_nc, Diff_Water_Volume, Regions)
    return (Discharge_dict_CR2, River_dict_CR2, DEM_dict_CR2,
            Distance_dict_CR2)
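The Boundaries block above is plain GDAL geotransform arithmetic; here it is in isolation, with a made-up north-up geotransform (origin at 10E, 53N, 0.01-degree pixels).

geo_out = (10.0, 0.01, 0.0, 53.0, 0.0, -0.01)
size_X, size_Y = 300, 200

Boundaries = {
    'Lonmin': geo_out[0],                        # west edge:  10.0
    'Lonmax': geo_out[0] + size_X * geo_out[1],  # east edge:  13.0
    'Latmin': geo_out[3] + size_Y * geo_out[5],  # south edge: 51.0
    'Latmax': geo_out[3],                        # north edge: 53.0
}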
Example #3
File: Theta_FC.py Project: CMicha/watools
def Calc_Property(Dir, latlim, lonlim, SL):

    import os
    import numpy as np
    import watools
    # Module aliases used below (watools convention)
    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC

    # Define level
    if SL == "sl3":
        level = "Topsoil"
    elif SL == "sl6":
        level = "Subsoil"
    else:
        raise ValueError("SL must be 'sl3' (topsoil) or 'sl6' (subsoil)")

    # Check if the saturated water content needs to be downloaded first
    filename_out_thetasat = os.path.join(Dir, 'SoilGrids', 'Theta_Sat',
                                         'Theta_Sat2_%s_SoilGrids_kg-kg.tif' % level)
    if not os.path.exists(filename_out_thetasat):
        if SL == "sl3":
            watools.Products.SoilGrids.Theta_Sat2.Topsoil(Dir, latlim, lonlim)
        elif SL == "sl6":
            watools.Products.SoilGrids.Theta_Sat2.Subsoil(Dir, latlim, lonlim)

    filedir_out_thetafc = os.path.join(Dir, 'SoilGrids', 'Theta_FC')
    if not os.path.exists(filedir_out_thetafc):
        os.makedirs(filedir_out_thetafc)

    # Define theta field capacity output
    filename_out_thetafc = os.path.join(filedir_out_thetafc,
                                        'Theta_FC2_%s_SoilGrids_cm3-cm3.tif' % level)

    if not os.path.exists(filename_out_thetafc):

        # Get info layer
        geo_out, proj, size_X, size_Y = RC.Open_array_info(filename_out_thetasat)

        # Open dataset
        theta_sat = RC.Open_tiff_array(filename_out_thetasat)

        # Calculate theta field capacity (values below the 0.301 threshold
        # fall back to the 0.042 floor)
        theta_FC = np.where(theta_sat < 0.301,
                            0.042,
                            np.arccosh(theta_sat + 0.7) - 0.32 * (theta_sat + 0.7) + 0.2)

        # Save as tiff
        DC.Save_as_tiff(filename_out_thetafc, theta_FC, geo_out, proj)

    return
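A standalone check of the field-capacity relation used above, on a few synthetic saturated water contents. Note that np.where evaluates both branches, hence the errstate guard around arccosh.

import numpy as np

theta_sat = np.array([0.25, 0.35, 0.45])
with np.errstate(invalid='ignore'):  # arccosh is undefined below the threshold
    theta_FC = np.where(theta_sat < 0.301,
                        0.042,
                        np.arccosh(theta_sat + 0.7) - 0.32 * (theta_sat + 0.7) + 0.2)
print(theta_FC)  # first value falls back to the 0.042 floor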
Example #4
def fuel_wood(output_folder, lu_fh, AREA, ndm_fhs, fraction_fhs, ndmdates):
    """
    Calculate natural livestock feed production

    INPUTS
    ----------
    lu_fh : str
        filehandle for land use map
    ndm_fhs: nd array
        array of filehandles of NDM maps
    abv_grnd_biomass_ratio: dict
        dictionnary 'LULC':[above ground biomass]
    """
    Data_Path_Fuel = "Fuel"
    out_folder = os.path.join(output_folder, Data_Path_Fuel)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    area_ha = AREA * 100
    LULC = RC.Open_tiff_array(lu_fh)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)

    fuel_classes = [1, 8, 9, 10, 11, 12, 13]
    fuel_mask = np.zeros(LULC.shape)
    for fc in fuel_classes:
        fuel_mask[np.where(LULC == fc)] = 1

    fuel_fhs_landscape = []
    fuel_fhs_incremental = []

    for d in range(len(ndm_fhs)):
        ndm_fh = ndm_fhs[d]
        fraction_fh = fraction_fhs[d]
        yield_fract = RC.Open_tiff_array(fraction_fh)
        date1 = ndmdates[d]
        year = '%d' % date1.year
        month = '%02d' % date1.month
        #        year = ndm_fh[-14:-10]
        #        month = ndm_fh[-9:-7]
        out_fh_l = out_folder + '\\fuel_prod_landscape_%s_%s.tif' % (year,
                                                                     month)
        out_fh_i = out_folder + '\\fuel_prod_incremental_%s_%s.tif' % (year,
                                                                       month)
        NDM = becgis.open_as_array(ndm_fh, nan_values=True)

        NDM_fuel_incremental = NDM * .05 * fuel_mask * yield_fract * area_ha / 1e6
        NDM_fuel_landscape = NDM * .05 * fuel_mask * (
            1 - yield_fract) * area_ha / 1e6
        DC.Save_as_tiff(out_fh_i, NDM_fuel_incremental, geo_out)
        DC.Save_as_tiff(out_fh_l, NDM_fuel_landscape, geo_out)
        fuel_fhs_landscape.append(out_fh_l)
        fuel_fhs_incremental.append(out_fh_i)

    return fuel_fhs_landscape, fuel_fhs_incremental
Example #5
def monthly_to_yearly(in_files):

    import glob
    import re
    import numpy as np
    from PIL import Image  # Pillow, used to read the monthly tiffs
    # Module aliases used below (watools convention)
    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC

    #month_range = pd.date_range(start= state_date, end= end_date, freq= 'MS').strftime("%Y.%m").tolist()
    #print(month_range)
    month_list = [
        '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'
    ]
    month_word = [
        'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct',
        'Nov', 'Dec'
    ]

    files = glob.glob(in_files)
    #print(files)

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"
    count = 0
    for i in month_list:
        files_list = []
        data = []
        for file in files:
            if re.search(r".*\." + i + r"\.01.*", file):
                files_list.append(file)

        for j in files_list:
            photo = Image.open(j)
            month = np.array(photo)
            data.append(month)

        #print(data)
        arr_month = np.array(data)

        # Average the same calendar month across all stacked years
        # (axis 0 is the stack axis)
        month_avg = np.average(arr_month, axis=0)
        #print(month_avg)
        #print(month_avg.shape)

        # Save tiff file
        m_word = month_word[month_list.index(i)]
        DC.Save_as_tiff(
            r"D:\chapter3analysis\precipitation\Average\{}.tif".format(m_word),
            month_avg, geo_out, proj)

        print(month_word[count])
        count += 1
Example #6
def recycle(output_folder, et_bg_fhs, recy_ratio, lu_fh, et_type):
    Data_Path_rec = "temp_et_recycle"
    out_folder = os.path.join(output_folder, Data_Path_rec)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    recycle_fhs = []
    for et_fh in et_bg_fhs:
        out_fh = out_folder + "\\recycled_et_" + et_type + et_fh[
            -11:-4] + ".tif"
        et = becgis.open_as_array(et_fh, nan_values=True)
        et_recy = et * recy_ratio
        DC.Save_as_tiff(out_fh, et_recy, geo_out)
        recycle_fhs.append(out_fh)
    return recycle_fhs
Example #7
def split_yield(output_folder, p_fhs, et_blue_fhs, et_green_fhs,
                ab=(1.0, 1.0)):
    Data_Path_split = "split_y"
    out_folder = os.path.join(output_folder, Data_Path_split)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    sp_yield_fhs = []
    geo_out, proj, size_X, size_Y = RC.Open_array_info(p_fhs[0])
    for m in range(len(p_fhs)):
        out_fh = out_folder + '\\split_yield' + et_blue_fhs[m][-12:]
        P = RC.Open_tiff_array(p_fhs[m])
        ETBLUE = RC.Open_tiff_array(et_blue_fhs[m])
        ETGREEN = RC.Open_tiff_array(et_green_fhs[m])
        etbfraction = ETBLUE / (ETBLUE + ETGREEN)
        pfraction = P / np.nanmax(P)
        fraction = sh3.split_Yield(pfraction, etbfraction, ab[0], ab[1])
        DC.Save_as_tiff(out_fh, fraction, geo_out)
        sp_yield_fhs.append(out_fh)
    return sp_yield_fhs
Example #8
def lapse_rate(Dir, temperature_map, DEMmap):
    """
    This function downscales the GLDAS temperature map by using the DEM map

    Keyword arguments:
    temperature_map -- 'C:/' path to the temperature map
    DEMmap -- 'C:/' path to the DEM map
    """

    # calculate average altitudes corresponding to T resolution
    dest = RC.reproject_dataset_example(DEMmap, temperature_map, method=4)
    DEM_ave_out_name = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_ave.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(temperature_map)
    DEM_ave_data = dest.GetRasterBand(1).ReadAsArray()
    DC.Save_as_tiff(DEM_ave_out_name, DEM_ave_data, geo_out, proj)
    dest = None

    # determine lapse-rate [degrees Celsius per meter]
    lapse_rate_number = 0.0065

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(DEM_ave_out_name, DEMmap, method=2)
    dem_avg = dest.GetRasterBand(1).ReadAsArray()
    dem_avg[dem_avg < 0] = 0
    dest = None

    # Open the temperature dataset
    dest = RC.reproject_dataset_example(temperature_map, DEMmap, method=2)
    T = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    # Open DEM map (as float so the nodata value can be set to NaN)
    demmap = RC.Open_tiff_array(DEMmap).astype(np.float32)
    dem_avg[demmap <= 0] = 0
    demmap[demmap == -32768] = np.nan

    # calculate first part
    T = T + ((dem_avg - demmap) * lapse_rate_number)

    return T
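A worked number for the lapse-rate correction above: a pixel 400 m below the average altitude of its coarse cell comes out 2.6 degrees warmer.

lapse_rate_number = 0.0065   # degrees Celsius per metre
T_coarse = 18.0              # temperature at the coarse GLDAS resolution [C]
dem_avg = 1200.0             # average altitude of the coarse cell [m]
dem_pixel = 800.0            # altitude of the fine DEM pixel [m]

T_pixel = T_coarse + (dem_avg - dem_pixel) * lapse_rate_number
print(T_pixel)  # 18.0 + 400 * 0.0065 = 20.6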
Example #9
def Degrees_to_m2(Reference_data):
    """
    This functions calculated the area of each pixel in squared meter.

    Parameters
    ----------
    Reference_data: str
        Path to a tiff file or nc file or memory file of which the pixel area must be defined

    Returns
    -------
    area_in_m2: array
        Array containing the area of each pixel in squared meters

    """
    try:
        # Get the extension of the example data
        filename, file_extension = os.path.splitext(Reference_data)

        # Get raster information
        if str(file_extension) == '.tif':
            geo_out, proj, size_X, size_Y = RC.Open_array_info(Reference_data)
        elif str(file_extension) == '.nc':
            geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(
                Reference_data)

    except Exception:
        # Reference_data is an in-memory gdal dataset rather than a file path
        geo_out = Reference_data.GetGeoTransform()
        size_X = Reference_data.RasterXSize   # gdal attributes, not methods
        size_Y = Reference_data.RasterYSize

    # Calculate the difference in latitude and longitude in meters
    dlat, dlon = Calc_dlat_dlon(geo_out, size_X, size_Y)

    # Calculate the area in squared meters
    area_in_m2 = dlat * dlon

    return (area_in_m2)
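Calc_dlat_dlon itself is not shown on this page; the sketch below approximates the same pixel-area idea on a sphere (radius 6371 km), which is close to what a geodesic computation gives for small pixels. The helper and its name are illustrative, not part of watools.

import numpy as np

def approx_pixel_area_m2(geo_out, size_X, size_Y, R=6371000.0):
    # Latitude of every row centre [rad]
    lat_edges = geo_out[3] + np.arange(size_Y + 1) * geo_out[5]
    lat_centers = np.deg2rad(0.5 * (lat_edges[:-1] + lat_edges[1:]))
    dlat = abs(np.deg2rad(geo_out[5])) * R                        # north-south size [m]
    dlon = abs(np.deg2rad(geo_out[1])) * R * np.cos(lat_centers)  # east-west size [m]
    return (dlat * dlon)[:, None] * np.ones((size_Y, size_X))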
Example #10
def monthly_to_yearly(state_date, end_date, in_files, out_file):

    import glob
    import os
    import numpy as np
    import pandas as pd
    from PIL import Image  # Pillow, used to read the monthly tiffs
    # Module aliases used below (watools convention)
    import watools.General.raster_conversions as RC
    import watools.General.data_conversions as DC

    month_range = pd.date_range(start=state_date, end=end_date,
                                freq='MS').strftime("%Y.%m").tolist()
    #print(month_range)
    files_list = []
    data = []

    files = glob.glob(in_files)
    #print(files)

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"

    for i in month_range:
        file_name = 'P_CHIRPS.v2.0_mm-month-1_monthly_' + i + '.01.tif'
        # glob.glob returns full paths, so match on the basename
        matches = [f for f in files if os.path.basename(f) == file_name]
        if matches:
            files_list.append(matches[0])
        else:
            print("No such file")

    for j in files_list:
        photo = Image.open(j)
        month = np.array(photo)
        data.append(month)

    #print(data)
    arr_year = np.array(data)

    #print(year_sum)

    year_sum = arr_year.sum(axis=0)

    # Save tiff file
    DC.Save_as_tiff(out_file, year_sum, geo_out, proj)
Example #11
def Calc_surface_withdrawal(Dir_Basin, nc_outname, Startdate, Enddate,
                            Example_dataset, ETref_Product, P_Product):

    from netCDF4 import Dataset

    import watools.Functions.Four as Four
    import watools.General.raster_conversions as RC

    # Open variables in netcdf
    fh = Dataset(nc_outname)
    Variables_NC = [var for var in fh.variables]
    fh.close()

    # Open or calculate Blue Evapotranspiration
    if not "Blue_Evapotranspiration" in Variables_NC:
        # Calc ET blue and green
        DataCube_ETblue, DataCube_ETgreen = Four.SplitET.Blue_Green(
            Dir_Basin, nc_outname, ETref_Product, P_Product, Startdate,
            Enddate)
    else:
        DataCube_ETblue = RC.Open_nc_array(nc_outname,
                                           "Blue_Evapotranspiration",
                                           Startdate, Enddate)

    # Open data array info based on example data
    geo_out, epsg, size_X, size_Y = RC.Open_array_info(Example_dataset)

    # Open array with surface water fractions
    DataCube_frac_sw = RC.Open_nc_array(nc_outname,
                                        "Fraction_Surface_Water_Supply")

    # Total amount of ETblue taken out of rivers
    DataCube_surface_withdrawal = DataCube_ETblue * DataCube_frac_sw[
        None, :, :]

    return (DataCube_surface_withdrawal)
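The final line above relies on numpy broadcasting; in miniature:

import numpy as np

cube = np.ones((3, 2, 2))                    # (time, lat, lon)
frac = np.array([[0.5, 1.0], [0.0, 0.25]])   # 2D surface-water fraction
withdrawal = cube * frac[None, :, :]         # fraction applied to every time slice
print(withdrawal.shape)                      # (3, 2, 2)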
Example #12
def adjust_P(Dir, pressure_map, DEMmap):
    """
    This function downscales the GLDAS air pressure map by using the DEM map

    Keyword arguments:
    pressure_map -- 'C:/' path to the pressure map
    DEMmap -- 'C:/' path to the DEM map
    """

    # calculate average latitudes
    destDEMave = RC.reproject_dataset_example(DEMmap, pressure_map, method=4)
    DEM_ave_out_name = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_ave.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(pressure_map)
    DEM_ave_data = destDEMave.GetRasterBand(1).ReadAsArray()
    DC.Save_as_tiff(DEM_ave_out_name, DEM_ave_data, geo_out, proj)

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(DEM_ave_out_name, DEMmap, method=2)
    dem_avg = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(pressure_map, DEMmap, method=2)
    P = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    # Open DEM as float so the nodata value can be set to NaN
    demmap = RC.Open_tiff_array(DEMmap).astype(np.float32)
    dem_avg[demmap <= 0] = 0
    demmap[demmap == -32768] = np.nan

    # calculate second part
    P = P + (101.3 * ((293 - 0.0065 * (demmap - dem_avg)) / 293)**5.26 - 101.3)

    os.remove(DEM_ave_out_name)

    return P
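A worked number for the pressure correction above (an FAO-56 style barometric formula): a pixel 500 m above the coarse-cell mean ends up roughly 5.8 kPa lower.

dem_pixel, dem_avg = 1500.0, 1000.0   # altitudes [m]
P_coarse = 90.0                       # pressure at the coarse resolution [kPa]

P_pixel = P_coarse + (101.3 * ((293 - 0.0065 * (dem_pixel - dem_avg)) / 293) ** 5.26 - 101.3)
print(round(P_pixel, 2))              # about 84.2 kPa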
Example #13
def main(Dir, Startdate='', Enddate='', latlim=[-60, 60], lonlim=[-180, 180],
         pixel_size=False, cores=False, LANDSAF=0, SourceLANDSAF='', Waitbar=1):
    """
    This function creates monthly reference ET (ETref) data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine.
             It can be 'False' to avoid using parallel computing
             routines.
    Waitbar -- 1 (Default) will print the waitbar
    """

    print('Create monthly Reference ET data for period %s till %s' % (Startdate, Enddate))

    # An array of monthly dates which will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                    suffix='Complete', length=50)

    # Calculate the ETref day by day for every month
    for Date in Dates:

        # Collect date data
        Y = Date.year
        M = Date.month
        Mday = calendar.monthrange(Y, M)[1]
        Days = pd.date_range(Date, Date + pd.Timedelta(days=Mday), freq='D')
        StartTime = Date.strftime('%Y-%m') + '-01'
        EndTime = Date.strftime('%Y-%m') + '-' + str(Mday)

        # Get ETref on daily basis
        daily(Dir=Dir, Startdate=StartTime, Enddate=EndTime, latlim=latlim,
              lonlim=lonlim, pixel_size=pixel_size, cores=cores,
              LANDSAF=LANDSAF, SourceLANDSAF=SourceLANDSAF, Waitbar=0)

        # Load DEM
        if not pixel_size:
            nameDEM = 'DEM_HydroShed_m_3s.tif'
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', nameDEM)
        else:
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_reshaped_for_ETref.tif')

        # Get some geo-data to save results
        geo_ET, proj, size_X, size_Y = RC.Open_array_info(DEMmap)

        dataMonth = np.zeros([size_Y, size_X])

        # Sum the daily ETref maps over the month
        for Day in Days[:-1]:
            DirDay = os.path.join(Dir, 'ETref', 'Daily',
                                  'ETref_mm-day-1_daily_' + Day.strftime('%Y.%m.%d') + '.tif')
            dataDay = gdal.Open(DirDay)
            Dval = dataDay.GetRasterBand(1).ReadAsArray().astype(np.float32)
            Dval[Dval < 0] = 0
            dataMonth = dataMonth + Dval
            dataDay = None

        # make geotiff file
        output_folder_month = os.path.join(Dir, 'ETref', 'Monthly')
        if not os.path.exists(output_folder_month):
            os.makedirs(output_folder_month)
        DirMonth = os.path.join(output_folder_month,
                                'ETref_mm-month-1_monthly_' + Date.strftime('%Y.%m.%d') + '.tif')

        # Create the tiff file
        DC.Save_as_tiff(DirMonth, dataMonth, geo_ET, proj)

        # Update Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                        suffix='Complete', length=50)
Example #14
def Save_as_NC(namenc,
               DataCube,
               Var,
               Reference_filename,
               Startdate='',
               Enddate='',
               Time_steps='',
               Scaling_factor=1):
    """
    This function save the array as a netcdf file

    Keyword arguments:
    namenc -- string, complete path of the output file with .nc extension
    DataCube -- [array], dataset of the nc file, can be a 2D or 3D array [time, lat, lon], must be same size as reference data
    Var -- string, the name of the variable
    Reference_filename -- string, complete path to the reference file name
    Startdate -- 'YYYY-mm-dd', needs to be filled when you want to save a 3D array,  defines the Start datum of the dataset
    Enddate -- 'YYYY-mm-dd', needs to be filled when you want to save a 3D array, defines the End datum of the dataset
    Time_steps -- 'monthly' or 'daily', needs to be filled when you want to save a 3D array, defines the timestep of the dataset
    Scaling_factor -- number, scaling_factor of the dataset, default = 1
    """
    # Import modules
    import os
    import numpy as np
    import pandas as pd
    import watools.General.raster_conversions as RC
    from netCDF4 import Dataset

    if not os.path.exists(namenc):

        # Get raster information
        geo_out, proj, size_X, size_Y = RC.Open_array_info(Reference_filename)

        # Create the lat/lon rasters
        lon = np.arange(size_X) * geo_out[1] + geo_out[0] - 0.5 * geo_out[1]
        lat = np.arange(size_Y) * geo_out[5] + geo_out[3] - 0.5 * geo_out[5]

        # Create the nc file
        nco = Dataset(namenc, 'w', format='NETCDF4_CLASSIC')
        nco.description = '%s data' % Var

        # Create dimensions, variables and attributes:
        nco.createDimension('longitude', size_X)
        nco.createDimension('latitude', size_Y)

        # Create time dimension if the parameter is time dependent
        if Startdate != '':
            if Time_steps == 'monthly':
                Dates = pd.date_range(Startdate, Enddate, freq='MS')
            if Time_steps == 'daily':
                Dates = pd.date_range(Startdate, Enddate, freq='D')
            time_or = np.array([Date.toordinal() for Date in Dates])
            nco.createDimension('time', None)
            timeo = nco.createVariable('time', 'f4', ('time', ))
            timeo.units = '%s' % Time_steps
            timeo.standard_name = 'time'

        # Create the lon variable
        lono = nco.createVariable('longitude', 'f8', ('longitude', ))
        lono.standard_name = 'longitude'
        lono.units = 'degrees_east'
        lono.pixel_size = geo_out[1]

        # Create the lat variable
        lato = nco.createVariable('latitude', 'f8', ('latitude', ))
        lato.standard_name = 'latitude'
        lato.units = 'degrees_north'
        lato.pixel_size = geo_out[5]

        # Create container variable for CRS: lon/lat WGS84 datum
        crso = nco.createVariable('crs', 'i4')
        crso.long_name = 'Lon/Lat Coords in WGS84'
        crso.grid_mapping_name = 'latitude_longitude'
        crso.projection = proj
        crso.longitude_of_prime_meridian = 0.0
        crso.semi_major_axis = 6378137.0
        crso.inverse_flattening = 298.257223563
        crso.geo_reference = geo_out

        # Create the data variable
        if Startdate != '':
            preco = nco.createVariable('%s' % Var,
                                       'f8', ('time', 'latitude', 'longitude'),
                                       zlib=True,
                                       least_significant_digit=1)
            timeo[:] = time_or
        else:
            preco = nco.createVariable('%s' % Var,
                                       'f8', ('latitude', 'longitude'),
                                       zlib=True,
                                       least_significant_digit=1)

        # Set the data variable information
        preco.scale_factor = Scaling_factor
        preco.add_offset = 0.00
        preco.grid_mapping = 'crs'
        preco.set_auto_maskandscale(False)

        # Set the lat/lon variable
        lono[:] = lon
        lato[:] = lat

        # Set the data variable (values are stored divided by the scale factor)
        if Startdate != '':
            for i in range(len(Dates)):
                preco[i, :, :] = DataCube[i, :, :] * 1. / float(Scaling_factor)
        else:
            preco[:, :] = DataCube[:, :] * 1. / float(Scaling_factor)

        nco.close()
    return ()
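A minimal sketch of calling Save_as_NC for a 3D (time, lat, lon) cube; the file names here are hypothetical, and the cube shape must match the reference raster's grid. With Scaling_factor=0.01 the values are stored multiplied by 100, and the scale_factor attribute lets readers undo it.

import numpy as np

DataCube = np.random.rand(12, 180, 360)   # 12 monthly fields on a 180x360 grid
Save_as_NC('precipitation_2001.nc', DataCube, 'Precipitation',
           'reference_grid.tif',          # must describe the same 360x180 grid
           Startdate='2001-01-01', Enddate='2001-12-31',
           Time_steps='monthly', Scaling_factor=0.01)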
Example #15
def Calc_Regions(input_nc, output_nc, input_JRC, Boundaries):

    import numpy as np
    import watools.General.raster_conversions as RC

    sensitivity = 700  # higher values (e.g. 900) are less sensitive, 1 is very sensitive

    # Get JRC array and information
    Array_JRC_occ = RC.Open_tiff_array(input_JRC)
    Geo_out, proj, size_X, size_Y = RC.Open_array_info(input_JRC)

    # Get Basin boundary based on LU
    Array_LU = RC.Open_nc_array(input_nc, "basin")
    LU_array = RC.resize_array_example(Array_LU, Array_JRC_occ)
    basin_array = np.zeros(np.shape(LU_array))
    basin_array[LU_array > 0] = 1
    del LU_array

    # find all pixels with water occurrence
    Array_JRC_occ[basin_array < 1] = 0
    Array_JRC_occ[Array_JRC_occ < 30] = 0
    Array_JRC_occ[Array_JRC_occ >= 30] = 1
    del basin_array

    # sum larger areas to find lakes (block sums over 30x30 pixel windows)
    x_size = int(np.round(np.shape(Array_JRC_occ)[0] / 30))
    y_size = int(np.round(np.shape(Array_JRC_occ)[1] / 30))
    sum_array = np.zeros([x_size, y_size])

    for i in range(0, len(sum_array)):
        for j in range(0, len(sum_array[1])):
            sum_array[i, j] = np.sum(Array_JRC_occ[i * 30:(i + 1) * 30,
                                                   j * 30:(j + 1) * 30])

    del Array_JRC_occ

    lakes = np.argwhere(sum_array >= sensitivity)
    lake_info = np.zeros([1, 4])

    i = 0
    k = 1

    # find all neighboring pixels
    for lake in lakes:
        added = 0
        for j in range(0, k):
            if (lake[0] >= lake_info[j, 0] and lake[0] <= lake_info[j, 1]
                    and lake[1] >= lake_info[j, 2]
                    and lake[1] <= lake_info[j, 3]):
                lake_info[j, 0] = np.maximum(
                    np.minimum(lake_info[j, 0], lake[0] - 8), 0)
                lake_info[j, 1] = np.minimum(
                    np.maximum(lake_info[j, 1], lake[0] + 8), x_size)
                lake_info[j, 2] = np.maximum(
                    np.minimum(lake_info[j, 2], lake[1] - 8), 0)
                lake_info[j, 3] = np.minimum(
                    np.maximum(lake_info[j, 3], lake[1] + 8), y_size)
                added = 1

        if added == 0:
            lake_info_one = np.zeros([4])
            lake_info_one[0] = np.maximum(0, lake[0] - 8)
            lake_info_one[1] = np.minimum(x_size, lake[0] + 8)
            lake_info_one[2] = np.maximum(0, lake[1] - 8)
            lake_info_one[3] = np.minimum(y_size, lake[1] + 8)
            lake_info = np.append(lake_info, lake_info_one)
            lake_info = np.resize(lake_info, (k + 1, 4))
            k += 1

    # merge all overlapping regions
    p = 0
    lake_info_end = np.zeros([1, 4])

    for i in range(1, k):
        added = 0
        lake_info_one = lake_info[i, :]
        lake_y_region = list(
            range(int(lake_info_one[0]), int(lake_info_one[1] + 1)))
        lake_x_region = list(
            range(int(lake_info_one[2]), int(lake_info_one[3] + 1)))

        for j in range(0, p + 1):
            y_region_end = list(range(int(lake_info_end[j, 0]),
                                      int(lake_info_end[j, 1] + 1)))
            x_region_end = list(range(int(lake_info_end[j, 2]),
                                      int(lake_info_end[j, 3] + 1)))

            # The regions overlap when appending the two index ranges
            # produces duplicates (the combined length shrinks after unique)
            merged_y = np.unique(np.append(lake_y_region, y_region_end))
            merged_x = np.unique(np.append(lake_x_region, x_region_end))
            y_overlap = len(lake_y_region) + len(y_region_end) != len(merged_y)
            x_overlap = len(lake_x_region) + len(x_region_end) != len(merged_x)

            if y_overlap and x_overlap:
                lake_info_end[j, 0] = np.min(merged_y)
                lake_info_end[j, 1] = np.max(merged_y)
                lake_info_end[j, 2] = np.min(merged_x)
                lake_info_end[j, 3] = np.max(merged_x)
                added = 1

        if added == 0:
            lake_info_one = lake_info[i, :]
            lake_info_end = np.append(lake_info_end, lake_info_one)
            lake_info_end = np.resize(lake_info_end, (p + 2, 4))

            p += 1

    # calculate the area
    Regions = np.zeros([p, 4])
    pixel_x_size = Geo_out[1] * 30
    pixel_y_size = Geo_out[5] * 30
    for region in range(1, p + 1):
        Regions[region - 1,
                0] = Geo_out[0] + pixel_x_size * lake_info_end[region, 2]
        Regions[region - 1,
                1] = Geo_out[0] + pixel_x_size * (lake_info_end[region, 3] + 1)
        Regions[region - 1,
                2] = Geo_out[3] + pixel_y_size * (lake_info_end[region, 1] + 1)
        Regions[region - 1,
                3] = Geo_out[3] + pixel_y_size * lake_info_end[region, 0]

    return (Regions)
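The double loop in Calc_Regions that sums 30x30 blocks can also be written as a single reshape, a common numpy idiom; this sketch trims the array to a multiple of 30 per axis, whereas the loop above simply lets the final slice run short.

import numpy as np

def block_sum_30(arr):
    x_size, y_size = arr.shape[0] // 30, arr.shape[1] // 30
    trimmed = arr[:x_size * 30, :y_size * 30]
    return trimmed.reshape(x_size, 30, y_size, 30).sum(axis=(1, 3))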
Example #16
def livestock_feed(output_folder, lu_fh, AREA, ndm_fhs, feed_dict, live_feed,
                   cattle_fh, fraction_fhs, ndmdates):
    """
    Calculate natural livestock feed production

    INPUTS
    ----------
    output_folder : str
        folder in which the "Feed" output folder is created
    lu_fh : str
        filehandle for land use map
    AREA : nd array
        pixel area map [km2] (AREA * 100 gives ha)
    ndm_fhs : nd array
        array of filehandles of NDM maps
    ndmdates : nd array
        array of dates for NDM maps
    feed_dict : dict
        dictionary 'pasture class':[list of LULC]
    live_feed : dict
        dictionary 'pasture class':[percent available as feed]
    cattle_fh : str
        filehandle for cattle map
    fraction_fhs : nd array
        array of filehandles of yield-fraction maps
    """
    Data_Path_Feed = "Feed"
    out_folder = os.path.join(output_folder, Data_Path_Feed)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    area_ha = AREA * 100
    LULC = RC.Open_tiff_array(lu_fh)
    #  cattle = RC.Open_tiff_array(cattle_fh)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)

    f_pct = np.zeros(LULC.shape)
    for lu_type in list(feed_dict.keys()):
        classes = feed_dict[lu_type]
        mask = np.logical_or.reduce([LULC == value for value in classes])
        f_pct[mask] = live_feed[lu_type]
    feed_fhs_landscape = []
    feed_fhs_incremental = []
    for d in range(len(ndm_fhs)):
        ndm_fh = ndm_fhs[d]
        fraction_fh = fraction_fhs[d]
        date1 = ndmdates[d]
        year = '%d' % date1.year
        month = '%02d' % date1.month

        yield_fract = RC.Open_tiff_array(fraction_fh)

        out_fh_l = out_folder + '\\feed_prod_landscape_%s_%s.tif' % (year,
                                                                     month)
        out_fh_i = out_folder + '\\feed_prod_incremental_%s_%s.tif' % (year,
                                                                       month)
        #        out_fh2 = out_folder+'\\Feed_prod_pH_%s_%s.tif' %(year, month)
        NDM = becgis.open_as_array(ndm_fh, nan_values=True)
        NDM_feed = NDM * f_pct
        NDM_feed_incremental = NDM_feed * yield_fract * area_ha / 1e6
        NDM_feed_landscape = (NDM_feed * (1 - yield_fract)) * area_ha / 1e6
        DC.Save_as_tiff(out_fh_l, NDM_feed_landscape, geo_out)
        DC.Save_as_tiff(out_fh_i, NDM_feed_incremental, geo_out)
        #        NDM_feed_perHead = NDM_feed / cattle
        #        DC.Save_as_tiff(out_fh2, NDM_feed, geo_out)
        feed_fhs_landscape.append(out_fh_l)
        feed_fhs_incremental.append(out_fh_i)
    return feed_fhs_landscape, feed_fhs_incremental
Example #17
def ETref(Date, args):
    """
    This function starts to calculate ETref (daily) data based on Hydroshed, GLDAS, and (CFSR/LANDSAF) in parallel or single core

    Keyword arguments:
    Date -- panda timestamp
    args -- includes all the parameters that are needed for the ETref
	"""

    # unpack the arguments
    [Dir, lonlim, latlim, pixel_size, LANDSAF] = args

    # Set the paths
    nameTmin = 'Tair-min_GLDAS-NOAH_C_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    tmin_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'tair_f_inst', 'min', nameTmin)

    nameTmax = 'Tair-max_GLDAS-NOAH_C_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    tmax_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'tair_f_inst', 'max', nameTmax)

    nameHumid = 'Hum_GLDAS-NOAH_kg-kg_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    humid_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                             'qair_f_inst', 'mean', nameHumid)

    namePress = 'P_GLDAS-NOAH_kpa_daily_' + Date.strftime('%Y.%m.%d') + ".tif"
    press_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                             'psurf_f_inst', 'mean', namePress)

    nameWind = 'W_GLDAS-NOAH_m-s-1_daily_' + Date.strftime('%Y.%m.%d') + ".tif"
    wind_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'wind_f_inst', 'mean', nameWind)

    if LANDSAF == 1:

        nameShortClearname = 'ShortWave_Clear_Daily_W-m2_' + Date.strftime(
            '%Y-%m-%d') + '.tif'
        input2_str = os.path.join(Dir, 'Landsaf_Clipped',
                                  'Shortwave_Clear_Sky', nameShortClearname)

        nameShortNetname = 'ShortWave_Net_Daily_W-m2_' + Date.strftime(
            '%Y-%m-%d') + '.tif'
        input1_str = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Net',
                                  nameShortNetname)

        input3_str = 'not'

    else:
        if Date < pd.Timestamp('2011-04-01'):

            nameDownLong = 'DLWR_CFSR_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input2_str = os.path.join(Dir, 'Radiation', 'CFSR', nameDownLong)

            nameDownShort = 'DSWR_CFSR_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input1_str = os.path.join(Dir, 'Radiation', 'CFSR', nameDownShort)

            nameUpLong = 'ULWR_CFSR_W-m2_' + Date.strftime('%Y.%m.%d') + ".tif"
            input3_str = os.path.join(Dir, 'Radiation', 'CFSR', nameUpLong)

        else:
            nameDownLong = 'DLWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input2_str = os.path.join(Dir, 'Radiation', 'CFSRv2', nameDownLong)

            nameDownShort = 'DSWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input1_str = os.path.join(Dir, 'Radiation', 'CFSRv2',
                                      nameDownShort)

            nameUpLong = 'ULWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input3_str = os.path.join(Dir, 'Radiation', 'CFSRv2', nameUpLong)

    # The day of year
    DOY = Date.dayofyear

    # Load DEM
    if not pixel_size:
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_3s.tif')
    else:
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_3s.tif')
        dest, ulx, lry, lrx, uly, epsg_to = RC.reproject_dataset_epsg(
            DEMmap_str, pixel_spacing=pixel_size, epsg_to=4326, method=2)
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_reshaped_for_ETref.tif')
        DEM_data = dest.GetRasterBand(1).ReadAsArray()
        geo_dem = [ulx, pixel_size, 0.0, uly, 0.0, -pixel_size]
        DC.Save_as_tiff(name=DEMmap_str,
                        data=DEM_data,
                        geo=geo_dem,
                        projection='4326')

    # Calc ETref
    ETref = calc_ETref(Dir, tmin_str, tmax_str, humid_str, press_str, wind_str,
                       input1_str, input2_str, input3_str, DEMmap_str, DOY)

    # Make directory for the MODIS ET data
    output_folder = os.path.join(Dir, 'ETref', 'Daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create the output names
    NameETref = 'ETref_mm-day-1_daily_' + Date.strftime('%Y.%m.%d') + '.tif'
    NameEnd = os.path.join(output_folder, NameETref)

    # Collect geotiff information
    geo_out, proj, size_X, size_Y = RC.Open_array_info(DEMmap_str)

    # Create daily ETref tiff files
    DC.Save_as_tiff(name=NameEnd, data=ETref, geo=geo_out, projection=proj)
Example #18
def DownloadData(output_folder, latlim, lonlim, parameter, resolution):
    """
    This function downloads DEM data from HydroSHED

    Keyword arguments:
    output_folder -- directory of the result
	latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Resample -- 1 = The data will be resampled to 0.001 degree spatial
                    resolution
             -- 0 = The data will have the same pixel size as the data obtained
                    from the internet
    """
    # Define parameter dependent variables
    if parameter == "dir_3s":
        para_name = "DIR"
        unit = "-"
        resolution = '3s'
        parameter = 'dir'

    if parameter == "dem_3s":
        para_name = "DEM"
        unit = "m"
        resolution = '3s'
        parameter = 'dem'

    if parameter == "dir_15s":
        para_name = "DIR"
        unit = "-"
        resolution = '15s'
        parameter = 'dir'

    if parameter == "dem_15s":
        para_name = "DEM"
        unit = "m"
        resolution = '15s'
        parameter = 'dem'

    # converts the latlim and lonlim into names of the tiles which must be
    # downloaded
    if resolution == '3s':

        name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim,
                                                       parameter)

        # Memory for the map x and y shape (starts with zero)
        size_X_tot = 0
        size_Y_tot = 0

    if resolution == '15s':
        name = Find_Document_names_15s(latlim, lonlim, parameter, resolution)

    nameResults = []
    # Create a temporary folder for processing
    output_folder_trash = os.path.join(output_folder, "Temp")
    if not os.path.exists(output_folder_trash):
        os.makedirs(output_folder_trash)

    # Download, extract, and converts all the files to tiff files
    for nameFile in name:

        try:
            # Download the data from
            # http://earlywarning.usgs.gov/hydrodata/
            output_file, file_name = Download_Data(nameFile,
                                                   output_folder_trash,
                                                   parameter, para_name,
                                                   resolution)

            # extract zip data
            DC.Extract_Data(output_file, output_folder_trash)

            # Convert the data with an .adf extension to a .tif extension.
            # The input is the file name and the directory where the data must be stored
            file_name_tiff = file_name.split('.')[0] + '_trans_temporary.tif'
            file_name_extract = file_name.split('_')[0:3]
            if resolution == '3s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1]

            if resolution == '15s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1] + '_15s'

            input_adf = os.path.join(output_folder_trash, file_name_extract2,
                                     file_name_extract2, 'hdr.adf')
            output_tiff = os.path.join(output_folder_trash, file_name_tiff)

            # convert data from adf to a tiff file
            output_tiff = DC.Convert_adf_to_tiff(input_adf, output_tiff)

            geo_out, proj, size_X, size_Y = RC.Open_array_info(output_tiff)
            if int(size_X) != int(6000) or int(size_Y) != int(6000):
                data = np.ones((6000, 6000)) * -9999

                # Create the latitude bound
                Vfile = str(nameFile)[1:3]
                SignV = str(nameFile)[0]
                SignVer = 1
                # If the tile name starts with a south sign, then latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(nameFile)[4:7]
                SignH = str(nameFile)[3]
                SignHor = 1
                # If the tile name contains a west sign, then longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                Expected_X_min = Bound1
                Expected_Y_max = Bound2 + 5

                Xid_start = int(
                    np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
                Xid_end = int(
                    np.round(
                        ((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) /
                        geo_out[1]))
                Yid_start = int(
                    np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
                Yid_end = int(
                    np.round((Expected_Y_max - (geo_out[3] +
                                                (size_Y * geo_out[5]))) /
                             (-geo_out[5])))

                data[Yid_start:Yid_end,
                     Xid_start:Xid_end] = RC.Open_tiff_array(output_tiff)
                if np.max(data) == 255:
                    data[data == 255] = -9999
                data[data < -9999] = -9999

                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

        except Exception:

            if resolution == '3s':
                # If the tile does not exist, create a replacing zero tile (sea tiles)
                output = nameFile.split('.')[0] + "_trans_temporary.tif"
                output_tiff = os.path.join(output_folder_trash, output)
                file_name = nameFile
                data = np.ones((6000, 6000)) * -9999
                data = data.astype(np.float32)

                # Create the latitude bound
                Vfile = str(file_name)[1:3]
                SignV = str(file_name)[0]
                SignVer = 1
                # If the tile name starts with a south sign, then latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(file_name)[4:7]
                SignH = str(file_name)[3]
                SignHor = 1
                # If the tile name contains a west sign, then longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                # Geospatial data for the tile
                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

            if resolution == '15s':

                print('no 15s data is in dataset')

        if resolution == '3s':

            # clip data
            Data, Geo_data = RC.clip_data(output_tiff, latlim, lonlim)
            size_Y_out = int(np.shape(Data)[0])
            size_X_out = int(np.shape(Data)[1])

            # Total size of the product so far
            size_Y_tot = int(size_Y_tot + size_Y_out)
            size_X_tot = int(size_X_tot + size_X_out)

            if nameFile == name[0]:
                Geo_x_end = Geo_data[0]
                Geo_y_end = Geo_data[3]
            else:
                Geo_x_end = np.min([Geo_x_end, Geo_data[0]])
                Geo_y_end = np.max([Geo_y_end, Geo_data[3]])

            # create name for chunk
            FileNameEnd = "%s_temporary.tif" % (nameFile)
            nameForEnd = os.path.join(output_folder_trash, FileNameEnd)
            nameResults.append(str(nameForEnd))

            # save chunk as tiff file
            DC.Save_as_tiff(name=nameForEnd,
                            data=Data,
                            geo=Geo_data,
                            projection="WGS84")

    if resolution == '3s':
        #size_X_end = int(size_X_tot) #!
        #size_Y_end = int(size_Y_tot) #!

        size_X_end = int(size_X_tot / len(rangeLat)) + 1  #!
        size_Y_end = int(size_Y_tot / len(rangeLon)) + 1  #!

        # Define the georeference of the end matrix
        geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]]

        latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]]
        lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end]

        # merge chunk together resulting in 1 tiff map
        datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end,
                               size_X_end)

        datasetTot[datasetTot < -9999] = -9999

    if resolution == '15s':
        output_file_merged = os.path.join(output_folder_trash, 'merged.tif')
        datasetTot, geo_out = Merge_DEM_15s(output_folder_trash,
                                            output_file_merged, latlim, lonlim)

    # name of the end result
    output_DEM_name = "%s_HydroShed_%s_%s.tif" % (para_name, unit, resolution)

    Save_name = os.path.join(output_folder, output_DEM_name)

    # Make geotiff file
    DC.Save_as_tiff(name=Save_name,
                    data=datasetTot,
                    geo=geo_out,
                    projection="WGS84")
    os.chdir(output_folder)

    # Delete the temporary folder
    shutil.rmtree(output_folder_trash)
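A standalone illustration of the tile-name parsing inside DownloadData: HydroSHEDS tile names encode the lower-left corner of each tile, e.g. 's10w060' means latitude -10, longitude -60 (the file-name suffix below is made up).

def tile_corner(name):
    lat = int(name[1:3]) * (-1 if name[0] == 's' else 1)
    lon = int(name[4:7]) * (-1 if name[3] == 'w' else 1)
    return lat, lon

print(tile_corner('s10w060_dem_grid.zip'))  # (-10, -60)
print(tile_corner('n40e005_dem_grid.zip'))  # (40, 5)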
Example #19
def Merge_DEM_15s(output_folder_trash, output_file_merged, latlim, lonlim):

    os.chdir(output_folder_trash)
    tiff_files = glob.glob('*.tif')
    lonmin = lonlim[0]
    lonmax = lonlim[1]
    latmin = latlim[0]
    latmax = latlim[1]
    resolution_geo = 0.00416667  # 15 arc seconds in degrees

    size_x_tot = int(np.round((lonmax - lonmin) / resolution_geo))
    size_y_tot = int(np.round((latmax - latmin) / resolution_geo))

    data_tot = np.ones([size_y_tot, size_x_tot]) * -9999.

    for tiff_file in tiff_files:
        inFile = os.path.join(output_folder_trash, tiff_file)
        geo, proj, size_X, size_Y = RC.Open_array_info(inFile)
        resolution_geo = geo[1]

        lonmin_one = geo[0]
        lonmax_one = geo[0] + size_X * geo[1]
        latmin_one = geo[3] + size_Y * geo[5]
        latmax_one = geo[3]

        # Clip the tile bounds to the requested bounding box
        lonmin_clip = max(lonmin_one, lonmin)
        lonmax_clip = min(lonmax_one, lonmax)
        latmin_clip = max(latmin_one, latmin)
        latmax_clip = min(latmax_one, latmax)

        size_x_clip = int(
            np.round((lonmax_clip - lonmin_clip) / resolution_geo))
        size_y_clip = int(
            np.round((latmax_clip - latmin_clip) / resolution_geo))

        Data = RC.Open_tiff_array(inFile)
        lonmin_tiff = geo[0]
        latmax_tiff = geo[3]
        lon_tiff_position = int(
            np.round((lonmin_clip - lonmin_tiff) / resolution_geo))
        lat_tiff_position = int(
            np.round((latmax_tiff - latmax_clip) / resolution_geo))
        lon_data_tot_position = int(
            np.round((lonmin_clip - lonmin) / resolution_geo))
        lat_data_tot_position = int(
            np.round((latmax - latmax_clip) / resolution_geo))

        Data[Data < -9999.] = -9999.

        # Fill only the cells of the running mosaic that still hold the
        # -9999 fill value (basic slices are views, so this writes through
        # to data_tot)
        target = data_tot[lat_data_tot_position:lat_data_tot_position + size_y_clip,
                          lon_data_tot_position:lon_data_tot_position + size_x_clip]
        source = Data[lat_tiff_position:lat_tiff_position + size_y_clip,
                      lon_tiff_position:lon_tiff_position + size_x_clip]
        empty = target == -9999
        target[empty] = source[empty]

    geo_out = [lonmin, resolution_geo, 0.0, latmax, 0.0, -1 * resolution_geo]
    geo_out = tuple(geo_out)
    data_tot[data_tot < -9999.] = -9999.

    return (data_tot, geo_out)
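The mosaic rule in Merge_DEM_15s in miniature: tile values are copied only into cells of the running grid that still hold the -9999 fill value.

import numpy as np

grid = np.array([[-9999., 1.], [-9999., -9999.]])
tile = np.array([[5., 6.], [7., 8.]])
empty = grid == -9999
grid[empty] = tile[empty]
print(grid)  # [[5. 1.] [7. 8.]]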
Example #20
def main(Dir,
         Startdate='',
         Enddate='',
         latlim=[-50, 50],
         lonlim=[-180, 180],
         cores=False,
         Waitbar=1):
    """
    This function downloads RFE V2.0 (monthly) data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine.
             It can be 'False' to avoid using parallel computing
             routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Download data
    print('\nDownload monthly RFE precipitation data for period %s till %s' %
          (Startdate, Enddate))

    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('2001-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Make directory
    output_folder = os.path.join(Dir, 'Precipitation', 'RFE', 'Monthly/')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    for Date in Dates:
        month = Date.month
        year = Date.year
        end_day = calendar.monthrange(year, month)[1]
        Startdate_one_month = '%d-%02d-01' % (year, month)
        Enddate_one_month = '%d-%02d-%02d' % (year, month, end_day)

        DownloadData(Dir, Startdate_one_month, Enddate_one_month, latlim,
                     lonlim, 0, cores)

        Dates_daily = pd.date_range(Startdate_one_month,
                                    Enddate_one_month,
                                    freq='D')

        # Make directory
        input_folder_daily = os.path.join(Dir, 'Precipitation', 'RFE',
                                          'Daily/')
        # i tracks the first date whose daily file exists; that file
        # initializes the monthly sum
        i = 0

        for Date_daily in Dates_daily:
            file_name = ('P_RFE.v2.0_mm-day-1_daily_%s.tif'
                         % Date_daily.strftime('%Y.%m.%d'))
            file_name_daily_path = os.path.join(input_folder_daily, file_name)
            if os.path.exists(file_name_daily_path):
                if Date_daily == Dates_daily[i]:
                    Raster_monthly = RC.Open_tiff_array(file_name_daily_path)
                else:
                    Raster_monthly += RC.Open_tiff_array(file_name_daily_path)
            else:
                if Date_daily == Dates_daily[i]:
                    i += 1

        geo_out, proj, size_X, size_Y = RC.Open_array_info(
            file_name_daily_path)
        file_name = ('P_RFE.v2.0_mm-month-1_monthly_%s.01.tif'
                     % Date.strftime('%Y.%m'))
        file_name_output = os.path.join(output_folder, file_name)
        DC.Save_as_tiff(file_name_output,
                        Raster_monthly,
                        geo_out,
                        projection="WGS84")

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)
Example #21
def main(files_DEM_dir, files_DEM, files_Basin, files_Runoff, files_Extraction,
         startdate, enddate, input_nc, resolution, Format_DEM_dir, Format_DEM,
         Format_Basin, Format_Runoff, Format_Extraction):

    # Define a year to get the epsg and geo
    Startdate_timestamp = pd.Timestamp(startdate)
    year = Startdate_timestamp.year

    ############################## Drainage Direction #####################################

    # Open Array DEM dir as netCDF
    if Format_DEM_dir == "NetCDF":
        file_DEM_dir = os.path.join(files_DEM_dir, "%d.nc" %year)
        DataCube_DEM_dir = RC.Open_nc_array(file_DEM_dir, "Drainage_Direction")
        geo_out_example, epsg_example, size_X_example, size_Y_example, size_Z_example, Time_example = RC.Open_nc_info(file_DEM_dir)

        # Create memory file for reprojection
        gland = DC.Save_as_MEM(DataCube_DEM_dir, geo_out_example, epsg_example)
        dataset_example = file_name_DEM_dir = gland

    # Open Array DEM dir as TIFF
    if Format_DEM_dir == "TIFF":
        file_name_DEM_dir = os.path.join(files_DEM_dir,"DIR_HydroShed_-_%s.tif" %resolution)
        DataCube_DEM_dir = RC.Open_tiff_array(file_name_DEM_dir)
        geo_out_example, epsg_example, size_X_example, size_Y_example = RC.Open_array_info(file_name_DEM_dir)
        dataset_example = file_name_DEM_dir

    # Calculate Area per pixel in m2
    import watools.Functions.Start.Area_converter as AC
    DataCube_Area = AC.Degrees_to_m2(file_name_DEM_dir)

    ################################## DEM ##########################################

    # Open Array DEM as netCDF
    if Format_DEM == "NetCDF":
        file_DEM = os.path.join(files_DEM, "%d.nc" %year)
        DataCube_DEM = RC.Open_nc_array(file_DEM, "Elevation")

    # Open Array DEM as TIFF
    if Format_DEM == "TIFF":
        file_name_DEM = os.path.join(files_DEM,"DEM_HydroShed_m_%s.tif" %resolution)
        destDEM = RC.reproject_dataset_example(file_name_DEM, dataset_example, method=1)
        DataCube_DEM = destDEM.GetRasterBand(1).ReadAsArray()
        
    ################################ Landuse ##########################################

    # Open Array Basin as netCDF
    if Format_Basin == "NetCDF":
        file_Basin = os.path.join(files_Basin, "%d.nc" %year)
        DataCube_Basin = RC.Open_nc_array(file_Basin, "Landuse")
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Basin, "Landuse")
        dest_basin = DC.Save_as_MEM(DataCube_Basin, geo_out, str(epsg))
        destLU = RC.reproject_dataset_example(dest_basin, dataset_example, method=1)
        DataCube_LU_CR = destLU.GetRasterBand(1).ReadAsArray()
        DataCube_Basin = np.zeros([size_Y_example, size_X_example])
        DataCube_Basin[DataCube_LU_CR > 0] = 1

    # Open Array Basin as TIFF
    if Format_Basin == "TIFF":
        file_name_Basin = files_Basin
        destLU = RC.reproject_dataset_example(file_name_Basin, dataset_example, method=1)
        DataCube_LU_CR = destLU.GetRasterBand(1).ReadAsArray()
        DataCube_Basin = np.zeros([size_Y_example, size_X_example])
        DataCube_Basin[DataCube_LU_CR > 0] = 1

    ################################ Surface Runoff ##########################################

    # Open Array runoff as netCDF
    if Format_Runoff == "NetCDF":
        DataCube_Runoff = RC.Open_ncs_array(files_Runoff, "Surface_Runoff", startdate, enddate)
        size_Z_example = DataCube_Runoff.shape[0]
        file_Runoff = os.path.join(files_Runoff, "%d.nc" %year)
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Runoff, "Surface_Runoff")
        DataCube_Runoff_CR = np.ones([size_Z_example, size_Y_example, size_X_example]) * np.nan
        for i in range(0, size_Z):
            DataCube_Runoff_one = DataCube_Runoff[i,:,:]
            dest_Runoff_one = DC.Save_as_MEM(DataCube_Runoff_one, geo_out, str(epsg))
            dest_Runoff = RC.reproject_dataset_example(dest_Runoff_one, dataset_example, method=4)
            DataCube_Runoff_CR[i,:,:] = dest_Runoff.GetRasterBand(1).ReadAsArray()

        DataCube_Runoff_CR[:, DataCube_LU_CR == 0] = -9999
        DataCube_Runoff_CR[DataCube_Runoff_CR < 0] = -9999

    # Open Array runoff as TIFF
    if Format_Runoff == "TIFF":
        DataCube_Runoff_CR = RC.Get3Darray_time_series_monthly(files_Runoff, startdate, enddate, Example_data = dataset_example)

    ################################ Surface Withdrawal ##########################################

    # Open Array Extraction as netCDF
    if Format_Extraction == "NetCDF":
        DataCube_Extraction = RC.Open_ncs_array(files_Extraction, "Surface_Withdrawal", startdate, enddate)
        size_Z_example = DataCube_Extraction.shape[0]
        file_Extraction = os.path.join(files_Extraction, "%d.nc" %year)
        geo_out, epsg, size_X, size_Y, size_Z, Time = RC.Open_nc_info(file_Extraction, "Surface_Withdrawal")
        DataCube_Extraction_CR = np.ones([size_Z_example, size_Y_example, size_X_example]) * np.nan
        for i in range(0, size_Z):
            DataCube_Extraction_one = DataCube_Extraction[i,:,:]
            dest_Extraction_one = DC.Save_as_MEM(DataCube_Extraction_one, geo_out, str(epsg))
            dest_Extraction = RC.reproject_dataset_example(dest_Extraction_one, dataset_example, method=4)
            DataCube_Extraction_CR[i,:,:] = dest_Extraction.GetRasterBand(1).ReadAsArray()

        DataCube_Extraction_CR[:, DataCube_LU_CR == 0] = -9999
        DataCube_Extraction_CR[DataCube_Extraction_CR < 0] = -9999

    # Open Array Extraction as TIFF
    if Format_Extraction == "TIFF":
        DataCube_Extraction_CR = RC.Get3Darray_time_series_monthly(files_Extraction, startdate, enddate, Example_data = dataset_example)

    ################################ Create input netcdf ##########################################
    # Save data in one NetCDF file
    geo_out_example = np.array(geo_out_example)

    # Latitude and longitude
    lon_ls = np.arange(size_X_example)*geo_out_example[1]+geo_out_example[0] + 0.5 * geo_out_example[1]
    lat_ls = np.arange(size_Y_example)*geo_out_example[5]+geo_out_example[3] - 0.5 * geo_out_example[5]

    lat_n = len(lat_ls)
    lon_n = len(lon_ls)

    # Create NetCDF file
    nc_file = netCDF4.Dataset(input_nc, 'w')
    nc_file.set_fill_on()

    # Create dimensions
    lat_dim = nc_file.createDimension('latitude', lat_n)
    lon_dim = nc_file.createDimension('longitude', lon_n)

    # Create NetCDF variables
    crso = nc_file.createVariable('crs', 'i4')
    crso.long_name = 'Lon/Lat Coords in WGS84'
    crso.standard_name = 'crs'
    crso.grid_mapping_name = 'latitude_longitude'
    crso.projection = epsg_example
    crso.longitude_of_prime_meridian = 0.0
    crso.semi_major_axis = 6378137.0
    crso.inverse_flattening = 298.257223563
    crso.geo_reference = geo_out_example

    lat_var = nc_file.createVariable('latitude', 'f8', ('latitude',))
    lat_var.units = 'degrees_north'
    lat_var.standard_name = 'latitude'
    lat_var.pixel_size = geo_out_example[5]

    lon_var = nc_file.createVariable('longitude', 'f8', ('longitude',))
    lon_var.units = 'degrees_east'
    lon_var.standard_name = 'longitude'
    lon_var.pixel_size = geo_out_example[1]

    Dates = pd.date_range(startdate, enddate, freq='MS')
    time_or = np.zeros(len(Dates))
    i = 0
    for Date in Dates:
        time_or[i] = Date.toordinal()
        i += 1
    nc_file.createDimension('time', None)
    timeo = nc_file.createVariable('time', 'f4', ('time',))
    timeo.units = 'Monthly'
    timeo.standard_name = 'time'

    # Variables
    demdir_var = nc_file.createVariable('demdir', 'i',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    demdir_var.long_name = 'Flow Direction Map'
    demdir_var.grid_mapping = 'crs'

    dem_var = nc_file.createVariable('dem', 'f8',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    dem_var.long_name = 'Altitude'
    dem_var.units = 'meters'
    dem_var.grid_mapping = 'crs'

    basin_var = nc_file.createVariable('basin', 'i',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    basin_var.long_name = 'Basin mask'
    basin_var.units = '-'
    basin_var.grid_mapping = 'crs'

    area_var = nc_file.createVariable('area', 'f8',
                                           ('latitude', 'longitude'),
                                           fill_value=-9999)
    area_var.long_name = 'area in squared meters'
    area_var.units = 'squared_meters'
    area_var.grid_mapping = 'crs'

    runoff_var = nc_file.createVariable('Runoff_M', 'f8',
                                   ('time', 'latitude', 'longitude'),
                                   fill_value=-9999)
    runoff_var.long_name = 'Runoff'
    runoff_var.units = 'm3/month'
    runoff_var.grid_mapping = 'crs'

    extraction_var = nc_file.createVariable('Extraction_M', 'f8',
                                    ('time', 'latitude', 'longitude'),
                                    fill_value=-9999)
    extraction_var.long_name = 'Surface water Extraction'
    extraction_var.units = 'm3/month'
    extraction_var.grid_mapping = 'crs'


    # Load data
    lat_var[:] = lat_ls
    lon_var[:] = lon_ls
    timeo[:] = time_or

    # Static variables
    demdir_var[:, :] = DataCube_DEM_dir[:, :]
    dem_var[:, :] = DataCube_DEM[:, :]
    basin_var[:, :] = DataCube_Basin[:, :]
    area_var[:, :] = DataCube_Area[:, :]
    for i in range(len(Dates)):
        runoff_var[i,:,:] = DataCube_Runoff_CR[i,:,:]
    for i in range(len(Dates)):
        extraction_var[i,:,:] = DataCube_Extraction_CR[i,:,:]

    # Close file
    nc_file.close()
    return()
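
The netCDF layout written by main() follows a common CF-style pattern: a scalar crs variable carries the grid mapping, coordinate variables define the axes, and each data variable points back to the mapping via a grid_mapping attribute. A self-contained sketch of the same pattern with dummy data (file name and grid sizes are arbitrary):

import numpy as np
import netCDF4

nc_file = netCDF4.Dataset('sketch.nc', 'w')
nc_file.createDimension('latitude', 2)
nc_file.createDimension('longitude', 3)

crso = nc_file.createVariable('crs', 'i4')          # scalar grid-mapping variable
crso.grid_mapping_name = 'latitude_longitude'

lat_var = nc_file.createVariable('latitude', 'f8', ('latitude',))
lat_var.units = 'degrees_north'
lon_var = nc_file.createVariable('longitude', 'f8', ('longitude',))
lon_var.units = 'degrees_east'

dem_var = nc_file.createVariable('dem', 'f8', ('latitude', 'longitude'),
                                 fill_value=-9999)
dem_var.grid_mapping = 'crs'                        # links the data to the crs

lat_var[:] = [10.5, 10.0]
lon_var[:] = [30.0, 30.5, 31.0]
dem_var[:, :] = np.arange(6).reshape(2, 3)
nc_file.close()
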
Example #22
def Nearest_Interpolate(Dir_in, Startdate, Enddate, Dir_out=None):
    """
    This function calculates monthly tiff files from the 16-daily tiff files (it computes the day-weighted average).

    Parameters
    ----------
    Dir_in : str
        Path to the input data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'
    Dir_out : str
        Path to the output data, default is same as Dir_in

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Change working directory
    os.chdir(Dir_in)

    # Find all 16-daily files
    files = glob.glob('*16-daily*.tif')

    # Create an array with the filename keys (DOY and year) of all 16-daily files
    i = 0
    DOY_Year = np.zeros([len(files), 3])
    for File in files:

        # Get the time characteristics from the filename
        year = File.split('.')[-4][-4:]
        month = File.split('.')[-3]
        day = File.split('.')[-2]

        # Create pandas Timestamp
        date_file = '%s-%02s-%02s' % (year, month, day)
        Datum = pd.Timestamp(date_file)

        # Get day of year
        DOY = Datum.strftime('%j')

        # Save data in array
        DOY_Year[i, 0] = i
        DOY_Year[i, 1] = DOY
        DOY_Year[i, 2] = year

        # Loop over files
        i += 1

    # Check enddate:
    Enddate_split = Enddate.split('-')
    month_range = calendar.monthrange(int(Enddate_split[0]),
                                      int(Enddate_split[1]))[1]
    Enddate = '%d-%02d-%02d' % (int(Enddate_split[0]), int(
        Enddate_split[1]), month_range)

    # Check startdate:
    Startdate_split = Startdate.split('-')
    Startdate = '%d-%02d-01' % (int(Startdate_split[0]), int(
        Startdate_split[1]))

    # Define end and start date
    Dates = pd.date_range(Startdate, Enddate, freq='MS')
    DatesEnd = pd.date_range(Startdate, Enddate, freq='M')

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"

    # Get the No Data Value
    dest = gdal.Open(files[0])
    NDV = dest.GetRasterBand(1).GetNoDataValue()

    # Loop over months and create monthly tiff files
    i = 0
    for date in Dates:
        # Get Start and end DOY of the current month
        DOY_month_start = date.strftime('%j')
        DOY_month_end = DatesEnd[i].strftime('%j')

        # Search for the files that are between those DOYs
        year = date.year
        DOYs = DOY_Year[DOY_Year[:, 2] == year]
        DOYs_oneMonth = DOYs[np.logical_and(
            (DOYs[:, 1] + 16) >= int(DOY_month_start),
            DOYs[:, 1] <= int(DOY_month_end))]

        # Create empty arrays
        Monthly = np.zeros([size_Y, size_X])
        Weight_tot = np.zeros([size_Y, size_X])
        Data_one_month = np.ones([size_Y, size_X]) * np.nan

        # Loop over the files that are within the DOYs
        for EightDays in DOYs_oneMonth[:, 1]:

            # Calculate the amount of days in this month of each file
            Weight = np.ones([size_Y, size_X])

            # For start of month
            if np.min(DOYs_oneMonth[:, 1]) == EightDays:
                Weight = Weight * int(EightDays + 16 - int(DOY_month_start))

            # For end of month
            elif np.max(DOYs_oneMonth[:, 1]) == EightDays:
                Weight = Weight * (int(DOY_month_end) - EightDays + 1)

            # For the middle of the month
            else:
                Weight = Weight * 16

            row = DOYs_oneMonth[np.argwhere(
                DOYs_oneMonth[:, 1] == EightDays)[0][0], :][0]

            # Open the array of current file
            input_name = os.path.join(Dir_in, files[int(row)])
            Data = RC.Open_tiff_array(input_name)

            # Remove NDV
            Weight[Data == NDV] = 0
            Data[Data == NDV] = np.nan

            # Multiply weight time data (per day)
            Data = Data * Weight

            # Calculate the total weight and data
            Weight_tot += Weight
            Monthly[~np.isnan(Data)] += Data[~np.isnan(Data)]

        # Go to next month
        i += 1

        # Calculate the average
        Data_one_month[Weight_tot != 0.] = Monthly[
            Weight_tot != 0.] / Weight_tot[Weight_tot != 0.]

        # Define output directory
        if Dir_out is None:
            Dir_out = Dir_in

        # Define output name
        output_name = os.path.join(
            Dir_out, files[int(row)].replace('16-daily', 'monthly'))
        output_name = output_name[:-9] + '%02d.01.tif' % (date.month)

        # Save tiff file
        DC.Save_as_tiff(output_name, Data_one_month, geo_out, proj)

    return
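
The weighting above just counts how many days of each 16-day composite fall inside the month. A scalar sketch of the same bookkeeping (the DOY values are invented):

# February of a non-leap year: DOY 32..59
DOY_month_start, DOY_month_end = 32, 59
composites = [17, 33, 49]          # start DOYs of the 16-daily composites

weights = {}
for doy in composites:
    if doy == min(composites):                    # clipped at the month start
        weights[doy] = doy + 16 - DOY_month_start
    elif doy == max(composites):                  # clipped at the month end
        weights[doy] = DOY_month_end - doy + 1
    else:                                         # fully inside the month
        weights[doy] = 16

print(weights)    # {17: 1, 33: 16, 49: 11}, i.e. 28 weighted days in total
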
Example #23
def CollectLANDSAF(SourceLANDSAF, Dir, Startdate, Enddate, latlim, lonlim):
    """
    This function collects and clips LANDSAF data

    Keyword arguments:
    SourceLANDSAF -- 'C:/'  path to the LANDSAF source data (The directory includes SIS and SID)
    Dir -- 'C:/' path to the WA map
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    # Make an array of the days of which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # make directories
    SISdir = os.path.join(Dir, 'Landsaf_Clipped', 'SIS')
    if os.path.exists(SISdir) is False:
        os.makedirs(SISdir)

    SIDdir = os.path.join(Dir, 'Landsaf_Clipped', 'SID')
    if os.path.exists(SIDdir) is False:
        os.makedirs(SIDdir)

    ShortwaveBasin(SourceLANDSAF,
                   Dir,
                   latlim,
                   lonlim,
                   Dates=[Startdate, Enddate])
    DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                              'DEM_HydroShed_m_3s.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(DEMmap_str)

    # Open DEM map
    demmap = RC.Open_tiff_array(DEMmap_str)
    demmap[demmap < 0] = 0

    # make lat and lon arrays
    dlat = geo_out[5]
    dlon = geo_out[1]
    lat = geo_out[3] + (np.arange(size_Y) + 0.5) * dlat
    lon = geo_out[0] + (np.arange(size_X) + 0.5) * dlon

    for date in Dates:
        # day of year
        day = date.dayofyear
        Horizontal, Sloping, sinb, sinb_hor, fi, slope, ID = SlopeInfluence(
            demmap, lat, lon, day)

        SIDname = os.path.join(
            SIDdir, 'SAF_SID_Daily_W-m2_' + date.strftime('%Y-%m-%d') + '.tif')
        SISname = os.path.join(
            SISdir, 'SAF_SIS_Daily_W-m2_' + date.strftime('%Y-%m-%d') + '.tif')

        #PREPARE SID MAPS
        SIDdest = RC.reproject_dataset_example(SIDname, DEMmap_str, method=3)
        SIDdata = SIDdest.GetRasterBand(1).ReadAsArray()

        #PREPARE SIS MAPS
        SISdest = RC.reproject_dataset_example(SISname, DEMmap_str, method=3)
        SISdata = SISdest.GetRasterBand(1).ReadAsArray()

        # Calculate ShortWave net
        Short_Wave_Net = SIDdata * (Sloping /
                                    Horizontal) + SISdata * 86400 / 1e6

        # Calculate ShortWave Clear
        Short_Wave = Sloping
        Short_Wave_Clear = Short_Wave * (0.75 + demmap * 2 * 10**-5)

        # make directories
        PathClear = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Clear_Sky')
        if os.path.exists(PathClear) is False:
            os.makedirs(PathClear)

        PathNet = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Net')
        if os.path.exists(PathNet) is False:
            os.makedirs(PathNet)

        # name Shortwave Clear and Net
        nameFileNet = 'ShortWave_Net_Daily_W-m2_' + date.strftime(
            '%Y-%m-%d') + '.tif'
        nameNet = os.path.join(PathNet, nameFileNet)

        nameFileClear = 'ShortWave_Clear_Daily_W-m2_' + date.strftime(
            '%Y-%m-%d') + '.tif'
        nameClear = os.path.join(PathClear, nameFileClear)

        # Save net and clear short wave radiation
        DC.Save_as_tiff(nameNet, Short_Wave_Net, geo_out, proj)
        DC.Save_as_tiff(nameClear, Short_Wave_Clear, geo_out, proj)
    return
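
The lat/lon vectors above are pixel centres derived from the GDAL geotransform. A standalone sketch with an invented geotransform:

import numpy as np

# (xmin, dx, 0, ymax, 0, dy), with dy negative as GDAL reports it
geo_out = (30.0, 0.5, 0.0, 12.0, 0.0, -0.5)
size_X, size_Y = 4, 3

lon = geo_out[0] + (np.arange(size_X) + 0.5) * geo_out[1]
lat = geo_out[3] + (np.arange(size_Y) + 0.5) * geo_out[5]

print(lon)   # [30.25 30.75 31.25 31.75]  pixel-centre longitudes
print(lat)   # [11.75 11.25 10.75]        pixel-centre latitudes, north to south
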
Example #24
def Calc_Rainy_Days(Dir_Basin, Data_Path_P, Startdate, Enddate):
    """
    This functions calculates the amount of rainy days based on daily precipitation data.

    Parameters
    ----------
    Dir_Basin : str
        Path to all the output data of the Basin
    Data_Path_P : str
        Path to the daily rainfall data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'

    Returns
    -------
    Data_Path_RD : str
        Path from the Dir_Basin to the rainy days data

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Create an output directory to store the rainy days tiffs
    Data_Path_RD = os.path.join(Dir_Basin, 'Rainy_Days')
    if not os.path.exists(Data_Path_RD):
        os.mkdir(Data_Path_RD)

    # Define the dates that must be created
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Set working directory to the rainfall folder
    os.chdir(Data_Path_P)

    # Open all the daily data and store the data in a 3D array
    for Date in Dates:
        # Define the year and month and amount of days in month
        year = Date.year
        month = Date.month
        daysinmonth = calendar.monthrange(year, month)[1]

        # Set the third (time) dimension of array starting at 0
        i = 0

        # Find all files of that month
        files = glob.glob('*daily_%d.%02d.*.tif' %(year, month))

        # Check if the amount of files corresponds with the amount of days in month
        if len(files) != daysinmonth:
            print('ERROR: Not all Rainfall days for month %d and year %d are downloaded' % (month, year))

        # Loop over the days and store data in raster
        for File in files:
            dir_file = os.path.join(Data_Path_P, File)

            # Get array information and create empty numpy array for daily rainfall when looping the first file
            if File == files[0]:

                # Open geolocation info and define projection
                geo_out, proj, size_X, size_Y = RC.Open_array_info(dir_file)
                if int(proj.split('"')[-2]) == 4326:
                    proj = "WGS84"

                # Create empty array for the whole month
                P_Daily = np.zeros([daysinmonth,size_Y, size_X])

            # Open data and put the data in 3D array
            Data = RC.Open_tiff_array(dir_file)

            # Set negative (no-data) values to zero
            Data[Data < 0] = 0

            # Add the precipitation to the monthly cube
            P_Daily[i, :, :] = Data
            i += 1

        # Define a rainy day
        P_Daily[P_Daily > 0.201] = 1
        P_Daily[P_Daily != 1] = 0

        # Sum the amount of rainy days
        RD_one_month = np.nansum(P_Daily,0)

        # Define output name
        Outname = os.path.join(Data_Path_RD, 'Rainy_Days_NumOfDays_monthly_%d.%02d.01.tif' %(year, month))

        # Save tiff file
        DC.Save_as_tiff(Outname, RD_one_month, geo_out, proj)

    return(Data_Path_RD)
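
The rainy-day count reduces to a threshold and a sum along the time axis. A toy check with a synthetic daily cube (the 0.201 mm threshold is the one used in the function above):

import numpy as np

# 4 synthetic days over a 2x2 grid, in mm/day
P_Daily = np.array([[[0.0, 0.3], [1.2, 0.1]],
                    [[0.5, 0.0], [0.0, 0.25]],
                    [[0.0, 0.0], [2.0, 0.0]],
                    [[0.21, 0.0], [0.0, 0.0]]])

P_Daily[P_Daily > 0.201] = 1       # a day with > 0.201 mm counts as rainy
P_Daily[P_Daily != 1] = 0
RD_one_month = np.nansum(P_Daily, 0)

print(RD_one_month)                # [[2. 1.] [2. 1.]]
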
Example #25
def calc_ETref(Dir, tmin_str, tmax_str, humid_str, press_str, wind_str,
               down_short_str, down_long_str, up_long_str, DEMmap_str, DOY):
    """
    This function calculates the ETref by using all the input parameters (path)
    according to FAO standards
    see: http://www.fao.org/docrep/x0490e/x0490e08.htm#TopOfPage

    Keyword arguments:
    tmin_str -- 'C:/'  path to the minimal temperature tiff file [degrees Celcius], e.g. from GLDAS
    tmax_str -- 'C:/'  path to the maximal temperature tiff file [degrees Celcius], e.g. from GLDAS
    humid_str -- 'C:/'  path to the humidity tiff file [kg/kg], e.g. from GLDAS
    press_str -- 'C:/'  path to the air pressure tiff file [kPa], e.g. from GLDAS
    wind_str -- 'C:/'  path to the wind velocity tiff file [m/s], e.g. from GLDAS
    down_short_str -- 'C:/'  path to the downward shortwave radiation tiff file [W*m-2], e.g. from CFSR/LANDSAF
    down_long_str -- 'C:/'  path to the downward longwave radiation tiff file [W*m-2], e.g. from CFSR/LANDSAF
    up_long_str -- 'C:/'  path to the upward longwave radiation tiff file [W*m-2], e.g. from CFSR/LANDSAF
    DEMmap_str -- 'C:/'  path to the DEM tiff file [m] e.g. from HydroSHED
    DOY -- Day of the year
    """

    # Get some geo-data to save results
    GeoT, Projection, xsize, ysize = RC.Open_array_info(DEMmap_str)
    #NDV, xsize, ysize, GeoT, Projection, DataType = GetGeoInfo(DEMmap_str)
    raster_shape = [xsize, ysize]

    # Create array to store results
    ETref = np.zeros(raster_shape)

    # gap fill
    tmin_str_GF = RC.gap_filling(tmin_str, -9999)
    tmax_str_GF = RC.gap_filling(tmax_str, -9999)
    humid_str_GF = RC.gap_filling(humid_str, -9999)
    press_str_GF = RC.gap_filling(press_str, -9999)
    wind_str_GF = RC.gap_filling(wind_str, -9999)
    down_short_str_GF = RC.gap_filling(down_short_str, np.nan)
    down_long_str_GF = RC.gap_filling(down_long_str, np.nan)
    if up_long_str != 'not':
        up_long_str_GF = RC.gap_filling(up_long_str, np.nan)
    else:
        up_long_str_GF = 'nan'

    # dictionary containing all the paths to the input maps
    inputs = dict({
        'tmin': tmin_str_GF,
        'tmax': tmax_str_GF,
        'humid': humid_str_GF,
        'press': press_str_GF,
        'wind': wind_str_GF,
        'down_short': down_short_str_GF,
        'down_long': down_long_str_GF,
        'up_long': up_long_str_GF
    })

    # dictionary containing numpy arrays of all initial and intermediate variables
    input_array = dict({
        'tmin': None,
        'tmax': None,
        'humid': None,
        'press': None,
        'wind': None,
        'albedo': None,
        'down_short': None,
        'down_long': None,
        'up_short': None,
        'up_long': None,
        'net_radiation': None,
        'ea': None,
        'es': None,
        'delta': None
    })

    #APPLY LAPSE RATE CORRECTION ON TEMPERATURE
    tmin = lapse_rate(Dir, inputs['tmin'], DEMmap_str)
    tmax = lapse_rate(Dir, inputs['tmax'], DEMmap_str)

    #PROCESS PRESSURE MAPS
    press = adjust_P(Dir, inputs['press'], DEMmap_str)

    #PREPARE HUMIDITY MAPS
    dest = RC.reproject_dataset_example(inputs['humid'], DEMmap_str, method=2)
    humid = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    #CORRECT WIND MAPS
    dest = RC.reproject_dataset_example(inputs['wind'], DEMmap_str, method=2)
    wind = dest.GetRasterBand(1).ReadAsArray() * 0.75
    dest = None

    #PROCESS GLDAS DATA
    input_array['ea'], input_array['es'], input_array['delta'] = process_GLDAS(
        tmax, tmin, humid, press)

    ea = input_array['ea']
    es = input_array['es']
    delta = input_array['delta']

    if up_long_str == 'not':

        #CORRECT WIND MAPS
        dest = RC.reproject_dataset_example(down_short_str,
                                            DEMmap_str,
                                            method=2)
        Short_Net_data = dest.GetRasterBand(1).ReadAsArray() * 0.75
        dest = None

        dest = RC.reproject_dataset_example(down_long_str,
                                            DEMmap_str,
                                            method=2)
        Short_Clear_data = dest.GetRasterBand(1).ReadAsArray() * 0.75
        dest = None

        # Calculate Long wave Net radiation
        Rnl = 4.903e-9 * (
            ((tmin + 273.16)**4 +
             (tmax + 273.16)**4) / 2) * (0.34 - 0.14 * np.sqrt(ea)) * (
                 1.35 * Short_Net_data / Short_Clear_data - 0.35)

        # Calculate Net Radiation and convert to MJ*d-1*m-2
        net_radiation = (Short_Net_data * 0.77 + Rnl) * 86400 / 10**6

    else:
        #OPEN DOWNWARD SHORTWAVE RADIATION
        dest = RC.reproject_dataset_example(inputs['down_short'],
                                            DEMmap_str,
                                            method=2)
        down_short = dest.GetRasterBand(1).ReadAsArray()
        dest = None
        down_short, tau, bias = slope_correct(down_short, press, ea,
                                              DEMmap_str, DOY)

        #OPEN OTHER RADS
        up_short = down_short * 0.23

        dest = RC.reproject_dataset_example(inputs['down_long'],
                                            DEMmap_str,
                                            method=2)
        down_long = dest.GetRasterBand(1).ReadAsArray()
        dest = None

        dest = RC.reproject_dataset_example(inputs['up_long'],
                                            DEMmap_str,
                                            method=2)
        up_long = dest.GetRasterBand(1).ReadAsArray()
        dest = None

        #OPEN NET RADIATION AND CONVERT W*m-2 TO MJ*d-1*m-2
        net_radiation = ((down_short - up_short) +
                         (down_long - up_long)) * 86400 / 10**6

    #CALCULATE ETref
    ETref = (0.408 * delta * net_radiation + 0.665 * 10**-3 * press *
             (900 / ((tmax + tmin) / 2 + 273)) * wind *
             (es - ea)) / (delta + 0.665 * 10**-3 * press * (1 + 0.34 * wind))

    # Set limits ETref
    ETref[ETref < 0] = 0
    ETref[ETref > 400] = np.nan

    # return a reference ET map (numpy array)
    return ETref
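
The closing expression is the FAO-56 Penman-Monteith equation. A scalar sanity check with plausible (purely illustrative) values:

tmin, tmax = 15.0, 28.0    # air temperature, degrees Celsius
press = 101.3              # air pressure, kPa
wind = 2.0                 # wind speed, m/s
es, ea = 2.8, 1.6          # saturation / actual vapour pressure, kPa
delta = 0.17               # slope of the vapour pressure curve, kPa/degC
net_radiation = 15.0       # MJ m-2 d-1

ETref = (0.408 * delta * net_radiation + 0.665 * 10**-3 * press *
         (900 / ((tmax + tmin) / 2 + 273)) * wind *
         (es - ea)) / (delta + 0.665 * 10**-3 * press * (1 + 0.34 * wind))

print(round(ETref, 1))     # 5.4 mm/day here, a plausible reference ET
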
Example #26
def Calc_Property(Dir, latlim, lonlim, SL):

    import watools.Collect.SoilGrids as SG

    # Download needed layers
    SG.Clay_Content(Dir, latlim, lonlim, level=SL)
    #SG.Organic_Carbon_Content(Dir, latlim, lonlim, level=SL)
    SG.Bulk_Density(Dir, latlim, lonlim, level=SL)

    # Define path to layers
    filename_clay = os.path.join(
        Dir, 'SoilGrids', 'Clay_Content',
        'ClayContentMassFraction_%s_SoilGrids_percentage.tif' % SL)
    #filename_om = os.path.join(Dir, 'SoilGrids', 'Soil_Organic_Carbon_Content' ,'SoilOrganicCarbonContent_%s_SoilGrids_g_kg.tif' %SL)
    filename_bulkdensity = os.path.join(
        Dir, 'SoilGrids', 'Bulk_density',
        'BulkDensity_%s_SoilGrids_kg-m-3.tif' % SL)

    # Define path for output
    if SL == "sl3":
        level = "Topsoil"
    elif SL == "sl6":
        level = "Subsoil"

    filedir_out_densbulk = os.path.join(Dir, 'SoilGrids', 'Bulk_density')
    if not os.path.exists(filedir_out_densbulk):
        os.makedirs(filedir_out_densbulk)
    filedir_out_thetasat = os.path.join(Dir, 'SoilGrids', 'Theta_Sat')
    if not os.path.exists(filedir_out_thetasat):
        os.makedirs(filedir_out_thetasat)

    #filename_out_densbulk = os.path.join(filedir_out_densbulk ,'Bulk_Density_%s_SoilGrids_g-cm-3.tif' %level)
    filename_out_thetasat = os.path.join(
        filedir_out_thetasat, 'Theta_Sat2_%s_SoilGrids_kg-kg.tif' % level)

    #if not (os.path.exists(filename_out_densbulk) and os.path.exists(filename_out_thetasat)):
    if not os.path.exists(filename_out_thetasat):

        # Open datasets
        dest_clay = gdal.Open(filename_clay)
        #dest_om = gdal.Open(filename_om)
        dest_bulk = gdal.Open(filename_bulkdensity)

        # Open Array info
        geo_out, proj, size_X, size_Y = RC.Open_array_info(filename_clay)

        # Open Arrays
        Clay = dest_clay.GetRasterBand(1).ReadAsArray()
        #OM = dest_om.GetRasterBand(1).ReadAsArray()
        Clay = np.float_(Clay)
        Clay[Clay > 100] = np.nan
        #OM = np.float_(OM)
        #OM[OM<0]=np.nan
        #OM = OM/1000

        # Calculate bulk density
        #bulk_dens = 1/(0.6117 + 0.3601 * Clay/100 + 0.002172 * np.power(OM * 100, 2)+ 0.01715 * np.log(OM * 100))
        bulk_dens = dest_bulk.GetRasterBand(1).ReadAsArray()
        bulk_dens = bulk_dens / 1000

        # Calculate theta saturated
        theta_sat = 0.85 * (1 - (bulk_dens / 2.65)) + 0.13 * Clay / 100

        # Save data
        #DC.Save_as_tiff(filename_out_densbulk, bulk_dens, geo_out, "WGS84")
        DC.Save_as_tiff(filename_out_thetasat, theta_sat, geo_out, "WGS84")

    return ()
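
The saturated water content estimate above is a simple pedotransfer function of bulk density and clay content. A scalar sketch (values invented):

bulk_dens = 1.4    # g/cm3, i.e. SoilGrids kg/m3 divided by 1000
Clay = 25.0        # clay content, percent

# scaled porosity term plus a clay correction
theta_sat = 0.85 * (1 - (bulk_dens / 2.65)) + 0.13 * Clay / 100
print(round(theta_sat, 3))    # 0.433
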
Example #27
File: main.py Project: CMicha/watools
def Calculate(WA_HOME_folder, Basin, P_Product, ET_Product, ETref_Product,
              DEM_Product, Water_Occurence_Product, Inflow_Text_Files,
              WaterPIX_filename, Reservoirs_GEE_on_off, Supply_method,
              Startdate, Enddate, Simulation):
    '''
    This function consists of the following sections:
    1. Set General Parameters
    2. Download Data
    3. Convert the RAW data to NETCDF files
    4. Run SurfWAT

    '''
    # import General modules
    import os
    import gdal
    import numpy as np
    import pandas as pd
    from netCDF4 import Dataset

    # import WA plus modules
    from watools.General import raster_conversions as RC
    from watools.General import data_conversions as DC
    import watools.Functions.Five as Five
    import watools.Functions.Start as Start
    import watools.Functions.Start.Get_Dictionaries as GD

    ######################### 1. Set General Parameters ##############################

    # Get environmental variable for the Home folder
    if WA_HOME_folder == '':
        WA_env_paths = os.environ["WA_HOME"].split(';')
        Dir_Home = WA_env_paths[0]
    else:
        Dir_Home = WA_HOME_folder

    # Create the Basin folder
    Dir_Basin = os.path.join(Dir_Home, Basin)
    output_dir = os.path.join(Dir_Basin, "Simulations",
                              "Simulation_%d" % Simulation)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Get the boundaries of the basin based on the shapefile of the watershed
    # Boundaries, Shape_file_name_shp = Start.Boundaries.Determine(Basin)
    Boundaries, Example_dataset = Start.Boundaries.Determine_LU_Based(
        Basin, Dir_Home)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(Example_dataset)

    # Define resolution of SRTM
    Resolution = '15s'

    # Find the maximum moving window value
    ET_Blue_Green_Classes_dict, Moving_Window_Per_Class_dict = GD.get_bluegreen_classes(
        version='1.0')
    Additional_Months_tail = np.max(list(
        Moving_Window_Per_Class_dict.values()))

    ############## Cut dates into pieces if it is needed ######################

    # Check the years that needs to be calculated
    years = list(
        range(int(Startdate.split('-')[0]),
              int(Enddate.split('-')[0]) + 1))

    for year in years:

        # Create .nc file if not exists
        nc_outname = os.path.join(output_dir, "%d.nc" % year)
        if not os.path.exists(nc_outname):
            DC.Create_new_NC_file(nc_outname, Example_dataset, Basin)

        # Open variables in netcdf
        fh = Dataset(nc_outname)
        Variables_NC = [var for var in fh.variables]
        fh.close()

        # Create Start and End date for time chunk
        Startdate_part = '%d-01-01' % int(year)
        Enddate_part = '%s-12-31' % int(year)

        if int(year) == int(years[0]):
            Startdate_Moving_Average = pd.Timestamp(Startdate) - pd.DateOffset(
                months=Additional_Months_tail)
            Startdate_Moving_Average_String = Startdate_Moving_Average.strftime(
                '%Y-%m-%d')
        else:
            Startdate_Moving_Average_String = Startdate_part

        ############################# 2. Download Data ###################################

        # Download data
        if not "Precipitation" in Variables_NC:
            Data_Path_P_Monthly = Start.Download_Data.Precipitation(
                Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
                [Boundaries['Lonmin'], Boundaries['Lonmax']], Startdate_part,
                Enddate_part, P_Product)

        if not "Actual_Evapotranspiration" in Variables_NC:
            Data_Path_ET = Start.Download_Data.Evapotranspiration(
                Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
                [Boundaries['Lonmin'], Boundaries['Lonmax']], Startdate_part,
                Enddate_part, ET_Product)

        if (WaterPIX_filename == "" or Supply_method == "Fraction") \
                and not ("Reference_Evapotranspiration" in Variables_NC):

            Data_Path_ETref = Start.Download_Data.ETreference(
                Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
                [Boundaries['Lonmin'], Boundaries['Lonmax']],
                Startdate_Moving_Average_String, Enddate_part, ETref_Product)

        if Reservoirs_GEE_on_off == 1 and not ("Water_Occurrence"
                                               in Variables_NC):
            Data_Path_JRC_occurrence = Start.Download_Data.JRC_occurrence(
                Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
                [Boundaries['Lonmin'], Boundaries['Lonmax']],
                Water_Occurence_Product)

            input_JRC = os.path.join(Data_Path_JRC_occurrence,
                                     "JRC_Occurrence_percent.tif")

        else:
            input_JRC = None

        # WaterPIX input
        Data_Path_DEM_Dir = Start.Download_Data.DEM_Dir(
            Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
            [Boundaries['Lonmin'], Boundaries['Lonmax']], Resolution,
            DEM_Product)

        Data_Path_DEM = Start.Download_Data.DEM(
            Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
            [Boundaries['Lonmin'], Boundaries['Lonmax']], Resolution,
            DEM_Product)

        ###################### 3. Convert the RAW data to NETCDF files ##############################
        # The sequence of converting the data into netcdf is:
        # Precipitation
        # Evapotranspiration
        # Reference Evapotranspiration
        # DEM flow directions

        #______________________________Precipitation_______________________________

        # 1.) Precipitation data
        if not "Precipitation" in Variables_NC:
            # Get the data of Precipitation and save as nc
            DataCube_Prec = RC.Get3Darray_time_series_monthly(
                Data_Path_P_Monthly,
                Startdate_part,
                Enddate_part,
                Example_data=Example_dataset)

            DC.Add_NC_Array_Variable(nc_outname, DataCube_Prec,
                                     "Precipitation", "mm/month", 0.01)
            del DataCube_Prec

        #_______________________________Evaporation________________________________

        # 2.) Evapotranspiration data
        if not "Actual_Evapotranspiration" in Variables_NC:
            # Get the data of Evaporation and save as nc
            DataCube_ET = RC.Get3Darray_time_series_monthly(
                Data_Path_ET,
                Startdate_part,
                Enddate_part,
                Example_data=Example_dataset)
            DC.Add_NC_Array_Variable(nc_outname, DataCube_ET,
                                     "Actual_Evapotranspiration", "mm/month",
                                     0.01)
            del DataCube_ET

        #_______________________Reference Evaporation______________________________

        # 3.) Reference Evapotranspiration data
        if (WaterPIX_filename == "" or Supply_method == "Fraction") and not \
                ("Reference_Evapotranspiration" in Variables_NC):
            # Get the data of Precipitation and save as nc
            DataCube_ETref = RC.Get3Darray_time_series_monthly(
                Data_Path_ETref,
                Startdate_part,
                Enddate_part,
                Example_data=Example_dataset)
            DC.Add_NC_Array_Variable(nc_outname, DataCube_ETref,
                                     "Reference_Evapotranspiration",
                                     "mm/month", 0.01)
            del DataCube_ETref

        #____________________________fraction surface water _______________________

        if not "Fraction_Surface_Water_Supply" in Variables_NC:
            DataCube_frac_sw = np.ones([size_Y, size_X]) * np.nan

            import watools.Functions.Start.Get_Dictionaries as GD

            # Open LU dataset
            DataCube_LU = RC.Open_nc_array(nc_outname, "Landuse")

            # Get dictionaries and keys
            lulc = GD.get_sheet5_classes()
            lulc_dict = list(GD.get_sheet5_classes().keys())
            consumed_frac_dict = GD.sw_supply_fractions()

            for key in lulc_dict:
                Numbers = lulc[key]
                for LU_nmbr in Numbers:
                    DataCube_frac_sw[DataCube_LU ==
                                     LU_nmbr] = consumed_frac_dict[key]

            DC.Add_NC_Array_Static(nc_outname, DataCube_frac_sw,
                                   "Fraction_Surface_Water_Supply", "fraction",
                                   0.01)
            del DataCube_frac_sw, DataCube_LU

        ################### 4. Calculate Runoff (2 methods: a = Budyko and b = WaterPIX) #####################

        ################ 4a. Calculate Runoff based on Precipitation and Evapotranspiration ##################

        if (Supply_method == "Fraction"
                and not "Surface_Runoff" in Variables_NC):

            # Calculate runoff based on Budyko
            DataCube_Runoff = Five.Fraction_Based.Calc_surface_runoff(
                Dir_Basin, nc_outname, Startdate_part, Enddate_part,
                Example_dataset, ETref_Product, P_Product)

            # Save the runoff as netcdf
            DC.Add_NC_Array_Variable(nc_outname, DataCube_Runoff,
                                     "Surface_Runoff", "mm/month", 0.01)
            del DataCube_Runoff

        ###################### 4b. Get Runoff from WaterPIX ###########################
        if (Supply_method == "WaterPIX"
                and not "Surface_Runoff" in Variables_NC):

            # Get WaterPIX data
            WaterPIX_Var = 'TotalRunoff_M'
            DataCube_Runoff = Five.Read_WaterPIX.Get_Array(
                WaterPIX_filename, WaterPIX_Var, Example_dataset,
                Startdate_part, Enddate_part)

            # Save the runoff as netcdf
            DC.Add_NC_Array_Variable(nc_outname, DataCube_Runoff,
                                     "Surface_Runoff", "mm/month", 0.01)
            del DataCube_Runoff

        ####################### 5. Calculate Extraction (2 methods: a = Fraction, b = WaterPIX) ##################

        ###################### 5a. Get extraction from fraction method by using budyko ###########################
        if (Supply_method == "Fraction"
                and not "Surface_Withdrawal" in Variables_NC):
            DataCube_surface_withdrawal = Five.Fraction_Based.Calc_surface_withdrawal(
                Dir_Basin, nc_outname, Startdate_part, Enddate_part,
                Example_dataset, ETref_Product, P_Product)

            # Save the runoff as netcdf
            DC.Add_NC_Array_Variable(nc_outname, DataCube_surface_withdrawal,
                                     "Surface_Withdrawal", "mm/month", 0.01)
            del DataCube_surface_withdrawal

        #################################### 5b. Get extraction from WaterPIX ####################################
        if (Supply_method == "WaterPIX"
                and not "Surface_Withdrawal" in Variables_NC):
            WaterPIX_Var = 'Supply_M'
            DataCube_Supply = Five.Read_WaterPIX.Get_Array(
                WaterPIX_filename, WaterPIX_Var, Example_dataset, Startdate,
                Enddate)

            # Open array with surface water fractions
            DataCube_frac_sw = RC.Open_nc_array(
                nc_outname, "Fraction_Surface_Water_Supply")

            # Total amount of ETblue taken out of rivers
            DataCube_surface_withdrawal = DataCube_Supply * DataCube_frac_sw[
                None, :, :]

            # Save the runoff as netcdf
            DC.Add_NC_Array_Variable(nc_outname, DataCube_surface_withdrawal,
                                     "Surface_Withdrawal", "mm/month", 0.01)
            del DataCube_surface_withdrawal

        ################################## 5. Run SurfWAT #####################################

        import watools.Models.SurfWAT as SurfWAT

        # Define formats of input data
        Format_DEM = "TIFF"  # or "TIFF"
        Format_Runoff = "NetCDF"  # or "TIFF"
        Format_Extraction = "NetCDF"  # or "TIFF"
        Format_DEM_dir = "TIFF"  # or "TIFF"
        Format_Basin = "NetCDF"  # or "TIFF"

        # Give path (for tiff) or file (netcdf)
        input_nc = os.path.join(Dir_Basin, "Simulations",
                                "Simulation_%s" % Simulation,
                                "SurfWAT_in_%d.nc" % year)
        output_nc = os.path.join(Dir_Basin, "Simulations",
                                 "Simulation_%s" % Simulation,
                                 "SurfWAT_out_%d.nc" % year)

        # Create Input File for SurfWAT
        SurfWAT.Create_input_nc.main(Data_Path_DEM_Dir, Data_Path_DEM,
                                     os.path.dirname(nc_outname),
                                     os.path.dirname(nc_outname),
                                     os.path.dirname(nc_outname), Startdate,
                                     Enddate, input_nc, Resolution,
                                     Format_DEM_dir, Format_DEM, Format_Basin,
                                     Format_Runoff, Format_Extraction)

        # Run SurfWAT
        SurfWAT.Run_SurfWAT.main(input_nc, output_nc, input_JRC,
                                 Inflow_Text_Files, Reservoirs_GEE_on_off)
    '''
    ################################# Plot graph ##################################

    # Draw graph
    Five.Channel_Routing.Graph_DEM_Distance_Discharge(Discharge_dict_CR3, Distance_dict_CR2, DEM_dict_CR2, River_dict_CR2, Startdate, Enddate, Example_dataset)

    ######################## Change data to fit the LU data #######################

    # Discharge
    # Define info for the nc files
    info = ['monthly','m3-month-1', ''.join([Startdate[5:7], Startdate[0:4]]) , ''.join([Enddate[5:7], Enddate[0:4]])]

    Name_NC_Discharge = DC.Create_NC_name('DischargeEnd', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_Discharge):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_Discharge_CR = DC.Convert_dict_to_array(River_dict_CR2, Discharge_dict_CR3, Example_dataset)
        DC.Save_as_NC(Name_NC_Discharge, DataCube_Discharge_CR, 'Discharge_End_CR', Example_dataset, Startdate, Enddate, 'monthly')
        del DataCube_Discharge_CR


    '''
    '''

    # DEM
    Name_NC_DEM = DC.Create_NC_name('DEM', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_DEM_CR = RC.Open_nc_array(Name_NC_DEM_CR)
        DataCube_DEM = RC.resize_array_example(DataCube_DEM_CR, LU_data, method=1)
        DC.Save_as_NC(Name_NC_DEM, DataCube_DEM, 'DEM', LU_dataset)
        del DataCube_DEM

    # flow direction
    Name_NC_DEM_Dir = DC.Create_NC_name('DEM_Dir', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM_Dir):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_DEM_Dir_CR = RC.Open_nc_array(Name_NC_DEM_Dir_CR)
        DataCube_DEM_Dir = RC.resize_array_example(DataCube_DEM_Dir_CR, LU_data, method=1)
        DC.Save_as_NC(Name_NC_DEM_Dir, DataCube_DEM_Dir, 'DEM_Dir', LU_dataset)
        del DataCube_DEM_Dir

    # Precipitation
    # Define info for the nc files
    info = ['monthly','mm', ''.join([Startdate[5:7], Startdate[0:4]]) , ''.join([Enddate[5:7], Enddate[0:4]])]

    Name_NC_Prec = DC.Create_NC_name('Prec', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_Prec):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_Prec = RC.Get3Darray_time_series_monthly(Dir_Basin, Data_Path_P_Monthly, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_Prec, DataCube_Prec, 'Prec', LU_dataset, Startdate, Enddate, 'monthly', 0.01)
        del DataCube_Prec

    # Evapotranspiration
    Name_NC_ET = DC.Create_NC_name('ET', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_ET):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_ET = RC.Get3Darray_time_series_monthly(Dir_Basin, Data_Path_ET, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_ET, DataCube_ET, 'ET', LU_dataset, Startdate, Enddate, 'monthly', 0.01)
        del DataCube_ET

    # Reference Evapotranspiration data
    Name_NC_ETref = DC.Create_NC_name('ETref', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_ETref):

        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_ETref = RC.Get3Darray_time_series_monthly(Dir_Basin, Data_Path_ETref, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_ETref, DataCube_ETref, 'ETref', LU_dataset, Startdate, Enddate, 'monthly', 0.01)
        del DataCube_ETref

    # Rivers
    Name_NC_Rivers = DC.Create_NC_name('Rivers', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_Rivers):

        # Get the data of Reference Evapotranspiration and save as nc
        Rivers_CR = RC.Open_nc_array(Name_NC_Rivers_CR)
        DataCube_Rivers = RC.resize_array_example(Rivers_CR, LU_data)
        DC.Save_as_NC(Name_NC_Rivers, DataCube_Rivers, 'Rivers', LU_dataset)
        del DataCube_Rivers, Rivers_CR

    # Discharge
    # Define info for the nc files
    info = ['monthly','m3', ''.join([Startdate[5:7], Startdate[0:4]]) , ''.join([Enddate[5:7], Enddate[0:4]])]

    Name_NC_Routed_Discharge = DC.Create_NC_name('Routed_Discharge', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_Routed_Discharge):

        # Get the data of Reference Evapotranspiration and save as nc
        Routed_Discharge_CR = RC.Open_nc_array(Name_NC_Discharge)
        DataCube_Routed_Discharge = RC.resize_array_example(Routed_Discharge_CR, LU_data)
        DC.Save_as_NC(Name_NC_Routed_Discharge, DataCube_Routed_Discharge, 'Routed_Discharge', LU_dataset, Startdate, Enddate, 'monthly')
        del DataCube_Routed_Discharge, Routed_Discharge_CR





    # Get raster information
    geo_out, proj, size_X, size_Y = RC.Open_array_info(Example_dataset)

    Rivers = RC.Open_nc_array(Name_NC_Rivers_CR)

    # Create ID Matrix
    y,x = np.indices((size_Y, size_X))
    ID_Matrix = np.int32(np.ravel_multi_index(np.vstack((y.ravel(),x.ravel())),(size_Y,size_X),mode='clip').reshape(x.shape)) + 1

    # Get tiff array time dimension:
    time_dimension = int(np.shape(Discharge_dict_CR3[0])[0])

    # create an empty array
    Result = np.zeros([time_dimension, size_Y, size_X])

    for river_part in range(0,len(River_dict_CR2)):
        for river_pixel in range(1,len(River_dict_CR2[river_part])):
            river_pixel_ID = River_dict_CR2[river_part][river_pixel]
            if len(np.argwhere(ID_Matrix == river_pixel_ID))>0:
                row, col = np.argwhere(ID_Matrix == river_pixel_ID)[0][:]
                Result[:,row,col] = Discharge_dict_CR3[river_part][:,river_pixel]
        print(river_part)


    Outflow = Discharge_dict_CR3[0][:,1]

    for i in range(0,time_dimension):
        output_name = r'C:/testmap/rtest_%s.tif' %i
        Result_one = Result[i, :, :]
        DC.Save_as_tiff(output_name, Result_one, geo_out, "WGS84")

    import os

    # Get environmental variable for the Home folder
    WA_env_paths = os.environ["WA_HOME"].split(';')
    Dir_Home = WA_env_paths[0]

    # Create the Basin folder
    Dir_Basin = os.path.join(Dir_Home, Basin)
    info = ['monthly','m3-month-1', ''.join([Startdate[5:7], Startdate[0:4]]) , ''.join([Enddate[5:7], Enddate[0:4]])]
    Name_Result = DC.Create_NC_name('DischargeEnd', Simulation, Dir_Basin, 5, info)
    Result[np.logical_and(Result == 0.0, Rivers == 0.0)] = np.nan

    DC.Save_as_NC(Name_Result, Result, 'DischargeEnd', Example_dataset, Startdate, Enddate, 'monthly')



    '''

    return ()
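
A hypothetical call to Calculate(); every argument below is a placeholder chosen to match the parameter names, not a value taken from any real run:

# Calculate(WA_HOME_folder='', Basin='MyBasin', P_Product='CHIRPS',
#           ET_Product='ETensV1.0', ETref_Product='GLDAS',
#           DEM_Product='HydroSHED', Water_Occurence_Product='JRC',
#           Inflow_Text_Files=[], WaterPIX_filename='',
#           Reservoirs_GEE_on_off=0, Supply_method='Fraction',
#           Startdate='2010-01-01', Enddate='2010-12-31', Simulation=1)
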
Example #28
def slope_correct(down_short_hor, pressure, ea, DEMmap, DOY):
    """
    This function downscales the CFSR solar radiation by using the DEM map
    The Slope correction is based on Allen et al. (2006)
    'Analytical integrated functions for daily solar radiation on slope'

    Keyword arguments:
    down_short_hor -- numpy array with the horizontal downwards shortwave radiation
    pressure -- numpy array with the air pressure
    ea -- numpy array with the actual vapour pressure
    DEMmap -- 'C:/' path to the DEM map
    DOY -- day of the year
    """

    # Get Geo Info
    GeoT, Projection, xsize, ysize = RC.Open_array_info(DEMmap)

    minx = GeoT[0]
    miny = GeoT[3] + xsize * GeoT[4] + ysize * GeoT[5]

    x = np.flipud(np.arange(xsize) * GeoT[1] + minx + GeoT[1] / 2)
    y = np.flipud(np.arange(ysize) * -GeoT[5] + miny + -GeoT[5] / 2)

    # Calculate Extraterrestrial Solar Radiation [W m-2]
    demmap = RC.Open_tiff_array(DEMmap)
    demmap[demmap < 0] = 0

    # apply the slope correction
    Ra_hor, Ra_slp, sinb, sinb_hor, fi, slope, ID = SlopeInfluence(
        demmap, y, x, DOY)

    # Calculate atmospheric transmissivity
    Rs_hor = down_short_hor

    # EQ 39
    tau = Rs_hor / Ra_hor

    #EQ 41
    KB_hor = np.zeros(tau.shape) * np.nan

    indice = np.where(tau.flat >= 0.42)
    KB_hor.flat[indice] = 1.56 * tau.flat[indice] - 0.55

    indice = np.logical_and(tau.flat > 0.175, tau.flat < 0.42)
    KB_hor.flat[indice] = 0.022 - 0.280 * tau.flat[indice] + 0.828 * tau.flat[
        indice]**2 + 0.765 * tau.flat[indice]**3

    indice = np.where(tau.flat <= 0.175)
    KB_hor.flat[indice] = 0.016 * tau.flat[indice]

    # EQ 42
    KD_hor = tau - KB_hor

    Kt = 0.7

    #EQ 18
    W = 0.14 * ea * pressure + 2.1

    KB0 = 0.98 * np.exp((-0.00146 * pressure / Kt / sinb) - 0.075 *
                        (W / sinb)**0.4)
    KB0_hor = 0.98 * np.exp((-0.00146 * pressure / Kt / sinb_hor) - 0.075 *
                            (W / sinb_hor)**0.4)

    #EQ 34
    fB = KB0 / KB0_hor * Ra_slp / Ra_hor
    fia = (1 - KB_hor) * (
        1 + (KB_hor /
             (KB_hor + KD_hor))**0.5 * np.sin(slope / 2)**3) * fi + fB * KB_hor

    Rs = Rs_hor * (fB * (KB_hor / tau) + fia * (KD_hor / tau) + 0.23 *
                   (1 - fi))

    Rs[np.isnan(Rs)] = Rs_hor[np.isnan(Rs)]

    Rs_equiv = Rs / np.cos(slope)

    bias = np.nansum(Rs_hor) / np.nansum(Rs_equiv)

    return Rs_equiv, tau, bias
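
The beam index of EQ 41 is a piecewise function of the transmissivity tau; np.piecewise expresses the same three branches compactly. A sketch (tau values invented):

import numpy as np

tau = np.array([0.10, 0.30, 0.60])

KB_hor = np.piecewise(
    tau,
    [tau >= 0.42, (tau > 0.175) & (tau < 0.42), tau <= 0.175],
    [lambda t: 1.56 * t - 0.55,
     lambda t: 0.022 - 0.280 * t + 0.828 * t**2 + 0.765 * t**3,
     lambda t: 0.016 * t])

print(KB_hor.round(4))    # [0.0016 0.0332 0.386 ], one value per branch
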
Example #29
def RetrieveData(args):
    """
    This function retrieves the JRC Global Surface Water occurrence data from the
    http://storage.googleapis.com/global-surface-water/downloads/ server.

    Keyword arguments:
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, Names_to_download, lonlim, latlim] = args

    # Download the requested tiles from the JRC server into the output folder
    try:
        Collect_data(Names_to_download, output_folder)
    except Exception:
        print("Was not able to download the file")

    # Clip the data to the user's extent
    if len(Names_to_download) == 1:
        trash_folder = os.path.join(output_folder, "Trash")
        data_in = os.path.join(trash_folder, Names_to_download[0])
        data_end, geo_end = RC.clip_data(data_in, latlim, lonlim)
    else:

        data_end = np.zeros([
            int((latlim[1] - latlim[0]) / 0.00025),
            int((lonlim[1] - lonlim[0]) / 0.00025)
        ])

        for Name_to_merge in Names_to_download:
            trash_folder = os.path.join(output_folder, "Trash")
            data_in = os.path.join(trash_folder, Name_to_merge)
            geo_out, proj, size_X, size_Y = RC.Open_array_info(data_in)
            lat_min_merge = np.maximum(latlim[0],
                                       geo_out[3] + size_Y * geo_out[5])
            lat_max_merge = np.minimum(latlim[1], geo_out[3])
            lon_min_merge = np.maximum(lonlim[0], geo_out[0])
            lon_max_merge = np.minimum(lonlim[1],
                                       geo_out[0] + size_X * geo_out[1])

            lonmerge = [lon_min_merge, lon_max_merge]
            latmerge = [lat_min_merge, lat_max_merge]
            data_one, geo_one = RC.clip_data(data_in, latmerge, lonmerge)

            Ystart = int((geo_one[3] - latlim[1]) / geo_one[5])
            Yend = int(Ystart + np.shape(data_one)[0])
            Xstart = int((geo_one[0] - lonlim[0]) / geo_one[1])
            Xend = int(Xstart + np.shape(data_one)[1])

            data_end[Ystart:Yend, Xstart:Xend] = data_one

        geo_end = tuple([lonlim[0], geo_one[1], 0, latlim[1], 0, geo_one[5]])

    # Save results as Gtiff
    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')
    DC.Save_as_tiff(name=fileName_out,
                    data=data_end,
                    geo=geo_end,
                    projection='WGS84')
    shutil.rmtree(trash_folder)
    return True
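
The mosaicking above places each clipped tile into the output grid by turning its geotransform into row/column offsets. A toy version of that index arithmetic (grid and tile are invented, with a 0.5 degree cell instead of the 0.00025 degree JRC cell):

import numpy as np

latlim, lonlim = [10.0, 12.0], [30.0, 33.0]
cell = 0.5
data_end = np.zeros([int((latlim[1] - latlim[0]) / cell),
                     int((lonlim[1] - lonlim[0]) / cell)])

# One clipped tile covering lat 10.5-12.0, lon 31.0-32.5
geo_one = (31.0, cell, 0, 12.0, 0, -cell)
data_one = np.ones([3, 3])

Ystart = int((geo_one[3] - latlim[1]) / geo_one[5])   # rows down from the top
Xstart = int((geo_one[0] - lonlim[0]) / geo_one[1])   # columns from the left
data_end[Ystart:Ystart + 3, Xstart:Xstart + 3] = data_one

print(Ystart, Xstart)     # 0 2
print(data_end.sum())     # 9.0
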
Example #30
def NPP_GPP_Based(Dir_Basin, Data_Path_GPP, Data_Path_NPP, Startdate, Enddate):
    """
    This function calculates monthly NDM based on the yearly NPP and monthly GPP.

    Parameters
    ----------
    Dir_Basin : str
        Path to all the output data of the Basin
    Data_Path_GPP : str
        Path from the Dir_Basin to the GPP data
    Data_Path_NPP : str
        Path from the Dir_Basin to the NPP data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'

    Returns
    -------
    Data_Path_NDM : str
        Path from the Dir_Basin to the normalized dry matter data

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Define output folder for Normalized Dry Matter
    Data_Path_NDM = os.path.join(Dir_Basin, "NDM")
    if not os.path.exists(Data_Path_NDM):
        os.mkdir(Data_Path_NDM)

    # Define monthly time steps that will be created
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Define the years that will be calculated
    Year_Start = int(Startdate[0:4])
    Year_End = int(Enddate[0:4])
    Years = list(range(Year_Start, Year_End + 1))

    # Loop over the years
    for year in Years:

        # Change working directory to the NPP folder
        os.chdir(Data_Path_NPP)

        # Open yearly NPP data
        yearly_NPP_File = glob.glob('*yearly*%d.01.01.tif' % int(year))[0]
        Yearly_NPP = RC.Open_tiff_array(yearly_NPP_File)

        # Get the No Data Value of the NPP file
        dest = gdal.Open(yearly_NPP_File)
        NDV = dest.GetRasterBand(1).GetNoDataValue()

        # Set the No Data Value to Nan
        Yearly_NPP[Yearly_NPP == NDV] = np.nan

        # Change working directory to the GPP folder
        os.chdir(Data_Path_GPP)

        # Find all the monthly files of that year
        monthly_GPP_Files = glob.glob('*monthly*%d.*.01.tif' % int(year))

        # Check that there are 12 files; otherwise some months are missing
        if len(monthly_GPP_Files) != 12:
            print('ERROR: Some monthly GPP Files are missing')

        # Get the projection information of the GPP inputs
        geo_out, proj, size_X, size_Y = RC.Open_array_info(
            monthly_GPP_Files[0])
        geo_out_NPP, proj_NPP, size_X_NPP, size_Y_NPP = RC.Open_array_info(
            os.path.join(Data_Path_NPP, yearly_NPP_File))

        if int(proj.split('"')[-2]) == 4326:
            proj = "WGS84"

        # Get the No Data Value of the GPP files
        dest = gdal.Open(monthly_GPP_Files[0])
        NDV = dest.GetRasterBand(1).GetNoDataValue()

        # Create an empty numpy array
        Yearly_GPP = np.zeros([size_Y, size_X])

        # Calculate the total yearly GPP
        for monthly_GPP_File in monthly_GPP_Files:

            # Open array
            Data = RC.Open_tiff_array(monthly_GPP_File)

            # Remove nan values
            Data[Data == NDV] = np.nan
            Data[np.isnan(Data)] = 0
            # Add data to yearly sum
            Yearly_GPP += Data

        # Check if size is the same of NPP and GPP otherwise resize
        if not (size_X_NPP == size_X and size_Y_NPP == size_Y):
            Yearly_NPP = RC.resize_array_example(Yearly_NPP, Yearly_GPP)

        # Loop over the monthly dates
        for Date in Dates:

            # If the Date is in the same year as the yearly NPP and GPP
            if Date.year == year:

                # Create empty GPP array
                monthly_GPP = np.ones([size_Y, size_X]) * np.nan

                # Get current month
                month = Date.month

                # Get the GPP file of the current year and month
                monthly_GPP_File = glob.glob('*monthly_%d.%02d.01.tif' %
                                             (int(year), int(month)))[0]
                monthly_GPP = RC.Open_tiff_array(monthly_GPP_File)
                monthly_GPP[monthly_GPP == NDV] = np.nan

                # Calculate the NDM based on the monthly and yearly NPP and GPP (fraction of GPP)
                Monthly_NDM = Yearly_NPP * monthly_GPP / Yearly_GPP * (
                    30. / 12.) * 10000  # kg/ha

                # Define output name
                output_name = os.path.join(
                    Data_Path_NDM, 'NDM_MOD17_kg_ha-1_monthly_%d.%02d.01.tif' %
                    (int(year), int(month)))

                # Save the NDM as tiff file
                DC.Save_as_tiff(output_name, Monthly_NDM, geo_out, proj)

    return (Data_Path_NDM)
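
The NDM conversion distributes the yearly NPP over the months according to each month's share of the yearly GPP, then rescales to kg/ha. A scalar sketch of the same formula (all values invented):

Yearly_NPP = 0.9      # yearly NPP, kg m-2 (illustrative)
Yearly_GPP = 2.0      # yearly GPP, kg m-2
monthly_GPP = 0.25    # GPP of one month, kg m-2

Monthly_NDM = Yearly_NPP * monthly_GPP / Yearly_GPP * (30. / 12.) * 10000
print(Monthly_NDM)    # 2812.5 kg/ha for this synthetic month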