Example #1
def Calc_Property(Dir, latlim, lonlim, SL):

    import os

    import numpy as np
    import watools
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Define level
    if SL == "sl3":
        level = "Topsoil"
    elif SL == "sl6":
        level = "Subsoil"

    # check if you need to download
    filename_out_thetasat = os.path.join(
        Dir, 'SoilGrids', 'Theta_Sat',
        'Theta_Sat_%s_SoilGrids_kg-kg.tif' % level)
    if not os.path.exists(filename_out_thetasat):
        if SL == "sl3":
            watools.Products.SoilGrids.Theta_Sat.Topsoil(Dir, latlim, lonlim)
        elif SL == "sl6":
            watools.Products.SoilGrids.Theta_Sat.Subsoil(Dir, latlim, lonlim)

    filedir_out_thetares = os.path.join(Dir, 'SoilGrids', 'Theta_Res')
    if not os.path.exists(filedir_out_thetares):
        os.makedirs(filedir_out_thetares)

    # Define theta residual output
    filename_out_thetares = os.path.join(
        filedir_out_thetares, 'Theta_Res_%s_SoilGrids_kg-kg.tif' % level)

    if not os.path.exists(filename_out_thetares):

        # Get info layer
        geo_out, proj, size_X, size_Y = RC.Open_array_info(
            filename_out_thetasat)

        # Open dataset
        theta_sat = RC.Open_tiff_array(filename_out_thetasat)

        # Calculate theta residual
        theta_Res = np.where(
            theta_sat < 0.351, 0.01, 0.4 * np.arccosh(theta_sat + 0.65) -
            0.05 * np.power(theta_sat + 0.65, 2.5) + 0.02)

        # Save as tiff
        DC.Save_as_tiff(filename_out_thetares, theta_Res, geo_out, proj)
    return
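The residual-moisture relation used above can be checked in isolation. Below is a minimal sketch that applies the same expression to a small made-up array; only the 0.351 threshold and the arccosh/power terms are taken from the function, the input values are invented.

import numpy as np

# Made-up saturated water content values [kg/kg], all above the 0.351 threshold
theta_sat = np.array([0.36, 0.42, 0.48, 0.55])

# Same piecewise relation as in Calc_Property above; values below the
# threshold would be set to the constant 0.01
theta_res = np.where(
    theta_sat < 0.351, 0.01,
    0.4 * np.arccosh(theta_sat + 0.65) -
    0.05 * np.power(theta_sat + 0.65, 2.5) + 0.02)

print(theta_res)  # residual water content [kg/kg]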
Example #2
def Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim):
    """
    This function retrieves GWF data for a given date from the
    ftp.wateraccounting.unesco-ihe.org server.

    Keyword arguments:
    output_folder -- folder in which the downloaded file is stored
    filename_Out -- name of the clipped output file
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    """

    try:
        # Collect account and FTP information
        username, password = WebAccounts.Accounts(Type='FTP_WA')
        ftpserver = "ftp.wateraccounting.unesco-ihe.org"

        # Set the file names and directories
        filename = "Gray_Water_Footprint.tif"
        local_filename = os.path.join(output_folder, filename)

        # Download data from FTP
        ftp = FTP(ftpserver)
        ftp.login(username, password)
        directory = "/WaterAccounting_Guest/Static_WA_Datasets/"
        ftp.cwd(directory)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write)
        lf.close()

        # Clip extend out of world data
        dataset, Geo_out = RC.clip_data(local_filename, latlim, lonlim)

        # make geotiff file
        DC.Save_as_tiff(name=filename_Out,
                        data=dataset,
                        geo=Geo_out,
                        projection="WGS84")

        # delete old tif file
        os.remove(local_filename)

    except Exception:
        print("The file could not be downloaded")

    return
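The download step above is the standard ftplib pattern. A minimal, self-contained sketch of that pattern is given below; the host, credentials, directory and file name are placeholders, not the real Water Accounting server layout.

from ftplib import FTP
import os

def fetch_via_ftp(host, user, password, directory, filename, output_folder):
    """Download a single file over FTP (generic sketch)."""
    local_filename = os.path.join(output_folder, filename)
    ftp = FTP(host)
    ftp.login(user, password)
    ftp.cwd(directory)
    # Stream the remote file to disk in binary mode
    with open(local_filename, "wb") as lf:
        ftp.retrbinary("RETR " + filename, lf.write)
    ftp.quit()
    return local_filename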
Example #3
def RetrieveData(Date, args):
    """
    This function retrieves MOD15 FPAR data for a given date from the
    http://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
        hdf_library
    ] = args

    # Collect the data from the MODIS webpage; returns the data and the lat/lon (in meters) of those tiles
    try:
        Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                     hdf_library)
    except:
        print("Was not able to download the file")

    # Define the output name of the collect data function
    name_collect = os.path.join(output_folder, 'Merged.tif')

    # Reproject the MODIS product to WGS84 (EPSG 4326)
    name_reprojected = RC.reproject_modis_wgs84(name_collect, method=2)

    # Clip the data to the user's extent
    data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

    # Save the file as tiff

    FPARfileName = os.path.join(
        output_folder,
        'SnowFrac_MOD10_unitless_8-daily_' + Date.strftime('%Y') + '.' +
        Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')
    DC.Save_as_tiff(name=FPARfileName, data=data, geo=geo, projection='WGS84')

    # remove the side products
    os.remove(os.path.join(output_folder, name_collect))
    os.remove(os.path.join(output_folder, name_reprojected))

    return True
Example #4
def split_yield(output_folder, p_fhs, et_blue_fhs, et_green_fhs,
                ab=(1.0, 1.0)):
    Data_Path_split = "split_y"
    out_folder = os.path.join(output_folder, Data_Path_split)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    sp_yield_fhs = []
    geo_out, proj, size_X, size_Y = RC.Open_array_info(p_fhs[0])
    for m in range(len(p_fhs)):
        out_fh = os.path.join(out_folder, 'split_yield' + et_blue_fhs[m][-12:])
        P = RC.Open_tiff_array(p_fhs[m])
        ETBLUE = RC.Open_tiff_array(et_blue_fhs[m])
        ETGREEN = RC.Open_tiff_array(et_green_fhs[m])
        etbfraction = ETBLUE / (ETBLUE + ETGREEN)
        pfraction = P / np.nanmax(P)
        fraction = sh3.split_Yield(pfraction, etbfraction, ab[0], ab[1])
        DC.Save_as_tiff(out_fh, fraction, geo_out, proj)
        sp_yield_fhs.append(out_fh)
    return sp_yield_fhs
Example #5
def lapse_rate(Dir, temperature_map, DEMmap):
    """
    This function downscales the GLDAS temperature map by using the DEM map

    Keyword arguments:
    temperature_map -- 'C:/' path to the temperature map
    DEMmap -- 'C:/' path to the DEM map
    """

    # calculate average altitudes corresponding to T resolution
    dest = RC.reproject_dataset_example(DEMmap, temperature_map, method=4)
    DEM_ave_out_name = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_ave.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(temperature_map)
    DEM_ave_data = dest.GetRasterBand(1).ReadAsArray()
    DC.Save_as_tiff(DEM_ave_out_name, DEM_ave_data, geo_out, proj)
    dest = None

    # determine lapse-rate [degrees Celsius per meter]
    lapse_rate_number = 0.0065

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(DEM_ave_out_name, DEMmap, method=2)
    dem_avg = dest.GetRasterBand(1).ReadAsArray()
    dem_avg[dem_avg < 0] = 0
    dest = None

    # Open the temperature dataset
    dest = RC.reproject_dataset_example(temperature_map, DEMmap, method=2)
    T = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    # Open Demmap
    demmap = RC.Open_tiff_array(DEMmap)
    dem_avg[demmap <= 0] = 0
    demmap[demmap == -32768] = np.nan

    # calculate first part
    T = T + ((dem_avg - demmap) * lapse_rate_number)

    return T
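The core of lapse_rate is a constant lapse rate applied to the difference between the coarse-cell average elevation and the fine-scale DEM. A toy version with made-up arrays:

import numpy as np

lapse_rate_number = 0.0065  # degrees Celsius per meter

T_coarse = np.array([[20.0, 18.5]])   # coarse temperature resampled to the DEM grid [C]
dem_avg = np.array([[500.0, 900.0]])  # average elevation of the coarse cell [m]
dem = np.array([[350.0, 1200.0]])     # fine-scale DEM [m]

# Pixels above the cell-average elevation become cooler, pixels below warmer
T_downscaled = T_coarse + (dem_avg - dem) * lapse_rate_number
print(T_downscaled)  # [[20.975 16.55 ]]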
Example #6
def RetrieveData(Date, args):
    """
    This function retrieves MOD16 ET data for a given date from the
    ftp://ftp.ntsg.umt.edu/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim, timestep,
     hdf_library, Size_pix] = args

    # Collect the data from the MODIS webpage; returns the data and the lat/lon (in meters) of those tiles
    try:
        Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                     timestep, hdf_library, Size_pix)
    except:
        print("Was not able to download the file")

    # Define the output name of the collect data function
    name_collect = os.path.join(output_folder, 'Merged.tif')

    # Reproject the MODIS product to epsg_to
    epsg_to = '4326'
    name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

    # Clip the data to the user's extent
    data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

    if timestep == 'monthly':
        ETfileName = os.path.join(
            output_folder, 'ET_MOD16A2_mm-month-1_monthly_' +
            Date.strftime('%Y') + '.' + Date.strftime('%m') + '.01.tif')
    elif timestep == '8-daily':
        ETfileName = os.path.join(
            output_folder, 'ET_MOD16A2_mm-8days-1_8-daily_' +
            Date.strftime('%Y') + '.' + Date.strftime('%m') + '.' +
            Date.strftime('%d') + '.tif')

    DC.Save_as_tiff(name=ETfileName, data=data, geo=geo, projection='WGS84')

    # remove the side products
    os.remove(os.path.join(output_folder, name_collect))
    os.remove(os.path.join(output_folder, name_reprojected))

    return
Example #7
def Clip_Dataset(local_filename, Filename_out, latlim, lonlim):

    # Open Dataset
    SEBS_Array = spio.loadmat(local_filename)['ETm']

    # Define area
    XID = [
        int(np.floor((180 + lonlim[0]) / 0.05)),
        int(np.ceil((180 + lonlim[1]) / 0.05))
    ]
    YID = [
        int(np.ceil((90 - latlim[1]) / 0.05)),
        int(np.floor((90 - latlim[0]) / 0.05))
    ]

    # Define Georeference
    geo = tuple([-180 + 0.05 * XID[0], 0.05, 0, 90 - 0.05 * YID[0], 0, -0.05])

    # Clip Array
    SEBS_Array_clipped = SEBS_Array[YID[0]:YID[1], XID[0]:XID[1]] * 0.1

    # Save tiff file
    DC.Save_as_tiff(Filename_out, SEBS_Array_clipped, geo, "WGS84")
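The XID/YID arithmetic in Clip_Dataset maps a lat/lon box onto a global 0.05-degree grid whose origin is the upper-left corner (90N, 180W). A stand-alone check with an invented extent:

import numpy as np

lonlim, latlim = [30.0, 35.0], [-2.5, 2.5]  # example extent

XID = [int(np.floor((180 + lonlim[0]) / 0.05)),
       int(np.ceil((180 + lonlim[1]) / 0.05))]
YID = [int(np.ceil((90 - latlim[1]) / 0.05)),
       int(np.floor((90 - latlim[0]) / 0.05))]

# GDAL-style geotransform of the clipped block
geo = (-180 + 0.05 * XID[0], 0.05, 0, 90 - 0.05 * YID[0], 0, -0.05)
print(XID, YID, geo)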
Example #8
def monthly_to_yearly(start_date, end_date, in_files, out_file):

    month_range = pd.date_range(start=start_date, end=end_date,
                                freq='MS').strftime("%Y.%m").tolist()
    files_list = []
    data = []

    files = glob.glob(in_files)

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"

    # Match the expected monthly file names against the files found on disk
    names = {os.path.basename(f): f for f in files}
    for i in month_range:
        fname = 'P_CHIRPS.v2.0_mm-month-1_monthly_' + i + '.01.tif'
        if fname in names:
            files_list.append(names[fname])
        else:
            print("No such file")

    for j in files_list:
        photo = Image.open(j)
        month = np.array(photo)
        data.append(month)

    arr_year = np.array(data)
    year_sum = arr_year.sum(axis=0)

    # Save tiff file
    DC.Save_as_tiff(out_file, year_sum, geo_out, proj)
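The aggregation in monthly_to_yearly is a plain sum over a stack of monthly rasters. A toy equivalent with random arrays standing in for the CHIRPS GeoTIFFs:

import numpy as np

rng = np.random.default_rng(0)
# Twelve fake monthly rasters of 4 x 5 pixels [mm/month]
data = [rng.uniform(0, 300, size=(4, 5)) for _ in range(12)]

arr_year = np.array(data)        # shape (12, 4, 5)
year_sum = arr_year.sum(axis=0)  # yearly total per pixel [mm/year]
print(year_sum.shape)            # (4, 5)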
Example #9
def adjust_P(Dir, pressure_map, DEMmap):
    """
    This function downscales the GLDAS air pressure map by using the DEM map

    Keyword arguments:
    pressure_map -- 'C:/' path to the pressure map
    DEMmap -- 'C:/' path to the DEM map
    """

    # calculate average altitudes corresponding to the pressure-map resolution
    destDEMave = RC.reproject_dataset_example(DEMmap, pressure_map, method=4)
    DEM_ave_out_name = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_ave.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(pressure_map)
    DEM_ave_data = destDEMave.GetRasterBand(1).ReadAsArray()
    DC.Save_as_tiff(DEM_ave_out_name, DEM_ave_data, geo_out, proj)

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(DEM_ave_out_name, DEMmap, method=2)
    dem_avg = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    # open maps as numpy arrays
    dest = RC.reproject_dataset_example(pressure_map, DEMmap, method=2)
    P = dest.GetRasterBand(1).ReadAsArray()
    dest = None

    demmap = RC.Open_tiff_array(DEMmap)
    dem_avg[demmap <= 0] = 0
    demmap[demmap == -32768] = np.nan

    # adjust the pressure to the DEM elevation (barometric height formula)
    P = P + (101.3 * ((293 - 0.0065 * (demmap - dem_avg)) / 293)**5.26 - 101.3)

    os.remove(DEM_ave_out_name)

    return P
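The correction in adjust_P is the barometric height formula evaluated for the elevation difference between the DEM and the cell-average elevation. A quick check with made-up numbers:

P_coarse = 95.0  # coarse-scale pressure [kPa]
dem = 1200.0     # fine-scale elevation [m]
dem_avg = 900.0  # cell-average elevation [m]

# Same expression as in adjust_P above
P = P_coarse + (101.3 * ((293 - 0.0065 * (dem - dem_avg)) / 293) ** 5.26 - 101.3)
print(round(P, 2))  # roughly 91.5: pressure drops for pixels above the average elevation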
Example #10
def Calc_Property(Dir, latlim, lonlim, SL):

    import watools.Collect.SoilGrids as SG

    # Download needed layers
    SG.Clay_Content(Dir, latlim, lonlim, level=SL)
    #SG.Organic_Carbon_Content(Dir, latlim, lonlim, level=SL)
    SG.Bulk_Density(Dir, latlim, lonlim, level=SL)

    # Define path to layers
    filename_clay = os.path.join(
        Dir, 'SoilGrids', 'Clay_Content',
        'ClayContentMassFraction_%s_SoilGrids_percentage.tif' % SL)
    #filename_om = os.path.join(Dir, 'SoilGrids', 'Soil_Organic_Carbon_Content' ,'SoilOrganicCarbonContent_%s_SoilGrids_g_kg.tif' %SL)
    filename_bulkdensity = os.path.join(
        Dir, 'SoilGrids', 'Bulk_density',
        'BulkDensity_%s_SoilGrids_kg-m-3.tif' % SL)

    # Define path for output
    if SL == "sl3":
        level = "Topsoil"
    elif SL == "sl6":
        level = "Subsoil"

    filedir_out_densbulk = os.path.join(Dir, 'SoilGrids', 'Bulk_density')
    if not os.path.exists(filedir_out_densbulk):
        os.makedirs(filedir_out_densbulk)
    filedir_out_thetasat = os.path.join(Dir, 'SoilGrids', 'Theta_Sat')
    if not os.path.exists(filedir_out_thetasat):
        os.makedirs(filedir_out_thetasat)

    #filename_out_densbulk = os.path.join(filedir_out_densbulk ,'Bulk_Density_%s_SoilGrids_g-cm-3.tif' %level)
    filename_out_thetasat = os.path.join(
        filedir_out_thetasat, 'Theta_Sat2_%s_SoilGrids_kg-kg.tif' % level)

    #if not (os.path.exists(filename_out_densbulk) and os.path.exists(filename_out_thetasat)):
    if not os.path.exists(filename_out_thetasat):

        # Open datasets
        dest_clay = gdal.Open(filename_clay)
        #dest_om = gdal.Open(filename_om)
        dest_bulk = gdal.Open(filename_bulkdensity)

        # Open Array info
        geo_out, proj, size_X, size_Y = RC.Open_array_info(filename_clay)

        # Open Arrays
        Clay = dest_clay.GetRasterBand(1).ReadAsArray()
        #OM = dest_om.GetRasterBand(1).ReadAsArray()
        Clay = Clay.astype(np.float64)
        Clay[Clay > 100] = np.nan
        #OM = np.float_(OM)
        #OM[OM<0]=np.nan
        #OM = OM/1000

        # Calculate bulk density
        #bulk_dens = 1/(0.6117 + 0.3601 * Clay/100 + 0.002172 * np.power(OM * 100, 2)+ 0.01715 * np.log(OM * 100))
        bulk_dens = dest_bulk.GetRasterBand(1).ReadAsArray()
        bulk_dens = bulk_dens / 1000

        # Calculate theta saturated
        theta_sat = 0.85 * (1 - (bulk_dens / 2.65)) + 0.13 * Clay / 100

        # Save data
        #DC.Save_as_tiff(filename_out_densbulk, bulk_dens, geo_out, "WGS84")
        DC.Save_as_tiff(filename_out_thetasat, theta_sat, geo_out, "WGS84")

    return
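The pedotransfer step in this example derives porosity from bulk density (with a particle density of 2.65 g/cm3) and adds a clay correction. A stand-alone check with invented values:

import numpy as np

clay = np.array([10.0, 25.0, 40.0])       # clay content [%]
bulk_dens = np.array([1.60, 1.45, 1.30])  # bulk density [g/cm3]

# Same relation as in Calc_Property above
theta_sat = 0.85 * (1 - bulk_dens / 2.65) + 0.13 * clay / 100
print(theta_sat)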
Example #11
def RetrieveData(Date, args):
    """
    This function retrieves CHIRPS data for a given date from the
    ftp://chg-ftpout.geog.ucsb.edu server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, TimeCase, xID, yID, lonlim, latlim] = args

    # open ftp server
    ftp = FTP("chg-ftpout.geog.ucsb.edu", "", "")
    ftp.login()

    # Define FTP path to directory
    if TimeCase == 'daily':
        pathFTP = 'pub/org/chg/products/CHIRPS-2.0/global_daily/tifs/p05/%s/' % Date.strftime(
            '%Y')
    elif TimeCase == 'monthly':
        pathFTP = 'pub/org/chg/products/CHIRPS-2.0/global_monthly/tifs/'
    else:
        raise KeyError("The input time interval is not supported")

    # find the document name in this directory
    ftp.cwd(pathFTP)
    listing = []

    # read all the file names in the directory
    ftp.retrlines("LIST", listing.append)

    # create all the input name (filename) and output (outfilename, filetif, DiFileEnd) names
    if TimeCase == 'daily':
        filename = 'chirps-v2.0.%s.%02s.%02s.tif.gz' % (
            Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d'))
        outfilename = os.path.join(
            output_folder, 'chirps-v2.0.%s.%02s.%02s.tif' %
            (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
        DirFileEnd = os.path.join(
            output_folder, 'P_CHIRPS.v2.0_mm-day-1_daily_%s.%02s.%02s.tif' %
            (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
    elif TimeCase == 'monthly':
        filename = 'chirps-v2.0.%s.%02s.tif.gz' % (Date.strftime('%Y'),
                                                   Date.strftime('%m'))
        outfilename = os.path.join(
            output_folder, 'chirps-v2.0.%s.%02s.tif' %
            (Date.strftime('%Y'), Date.strftime('%m')))
        DirFileEnd = os.path.join(
            output_folder,
            'P_CHIRPS.v2.0_mm-month-1_monthly_%s.%02s.%02s.tif' %
            (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
    else:
        raise KeyError("The input time interval is not supported")

    # download the global rainfall file
    try:
        local_filename = os.path.join(output_folder, filename)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write, 8192)
        lf.close()

        # unzip the file
        zip_filename = os.path.join(output_folder, filename)
        DC.Extract_Data_gz(zip_filename, outfilename)

        # open tiff file
        dataset = RC.Open_tiff_array(outfilename)

        # clip dataset to the given extent
        data = dataset[yID[0]:yID[1], xID[0]:xID[1]]
        data[data < 0] = -9999

        # save dataset as geotiff file
        geo = [lonlim[0], 0.05, 0, latlim[1], 0, -0.05]
        DC.Save_as_tiff(name=DirFileEnd,
                        data=data,
                        geo=geo,
                        projection="WGS84")

        # delete old tif file
        os.remove(outfilename)

    except Exception:
        print("The file could not be downloaded")
    return True
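The file names above are assembled from strftime pieces. Note that '%02s' pads with spaces rather than zeros; the code works only because strftime('%m') and strftime('%d') are already zero-padded strings. A small demonstration:

import pandas as pd

Date = pd.Timestamp('2009-03-07')

filename = 'chirps-v2.0.%s.%02s.%02s.tif.gz' % (
    Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d'))
print(filename)    # chirps-v2.0.2009.03.07.tif.gz

print('%02s' % 3)  # ' 3' -- space-padded, not zero-padded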
Example #12
def RetrieveData_monthly(Date, args):
    """
    This function retrieves GLDAS CLSM monthly data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """

    # Open all the parameters
    [path, url, Var, VarStr, VarInfo, TimeCase, xID, yID, lonlim, latlim,
     CaseParameters, username, password, types] = args

    # Reset the begin parameters for downloading
    downloaded = 0
    N = 0
    data_end = []

    # Check GLDAS version
    version = url[-3:]

    # Open all variable info
    for T in types:
        if T == 'mean':
            VarStr = VarInfo.names[Var]
        else:
            VarStr = VarInfo.names[Var] + '-' + T

        # Define the output file for this variable and date
        BasinDir = os.path.join(
            path[T], VarStr + '_GLDAS-CLSM_' + VarInfo.units[Var] +
            '_monthly_' + Date.strftime('%Y.%m.%d') + '.tif')

        # Check if the output file already exists
        if not os.path.isfile(BasinDir):

            # Create the time dimension
            if version == '2.0':
                zID_start = int(((Date - pd.Timestamp("1948-1-1")).days))
                Y = int(Date.year)
                M = int(Date.month)
                Mday = calendar.monthrange(Y,M)[1]
                zID_end = zID_start + Mday
                if zID_end == 24472:
                    zID_end = 24470
                    Mday = Mday - 2


            # define total url
            url_GLDAS = url + '.ascii?%s[%s:1:%s][%s:1:%s][%s:1:%s]' %(Var,zID_start,zID_end,yID[0],yID[1],xID[0],xID[1])

            # if not downloaded try to download file
            while downloaded == 0:
                try:

                    # open URL
                    try:
                        dataset = requests.get(url_GLDAS, allow_redirects=False,stream = True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
                        dataset = requests.get(url_GLDAS, allow_redirects=False,stream = True, verify = False)
                    try:
                        get_dataset = requests.get(dataset.headers['location'], auth = (username,password),stream = True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
                        get_dataset = requests.get(dataset.headers['location'], auth = (username,password),stream = True, verify = False)

                    # download data (first save as text file)
                    pathtext = os.path.join(path[T],'temp%s.txt' %str(zID_start))
                    z = open(pathtext,'w')
                    z.write(get_dataset.text)
                    z.close()

                    # Reshape data
                    datashape = [Mday,yID[1] - yID[0] + 1,xID[1] - xID[0] + 1]
                    data_start = np.genfromtxt(pathtext,dtype = float,skip_header = 1,skip_footer = 6,delimiter = ',')
                    data_list = np.asarray(data_start[:,1:])
                    data_end = np.resize(data_list,(Mday, datashape[1], datashape[2]))
                    os.remove(pathtext)

                    # Add the VarFactor
                    if VarInfo.factors[Var] < 0:
                        data_end[data_end != -9999] = data_end[data_end != -9999] + VarInfo.factors[Var]
                    else:
                        data_end[data_end != -9999] = data_end[data_end != -9999] * VarInfo.factors[Var]
                    data_end[data_end < -9999] = -9999

                    # define geo
                    lonlimGLDAS = xID[0] * 0.25 - 180
                    latlimGLDAS = (yID[1] + 1) * 0.25 - 60

                    # Download was successful
                    downloaded = 1

                # If download was not successful
                except:

                    # Try another time
                    N = N + 1

                    # Stop trying after 10 times
                    if N == 10:
                        print('Data from ' + Date.strftime('%Y-%m-%d') + ' is not available')
                        downloaded = 1

            try:
                # Save to geotiff file
                if T == 'mean':
                    data_end[data_end<-100] = np.nan
                    data = np.flipud(np.nanmean(data_end, axis=0))
                if VarInfo.types[Var] == 'flux':
                    data = data * Mday

                geo = [lonlimGLDAS,0.25,0,latlimGLDAS,0,-0.25]
                DC.Save_as_tiff(name=BasinDir, data=data, geo=geo, projection="WGS84")

            except:
                print('GLDAS map from '+ Date.strftime('%Y-%m-%d') + ' is not created')

    return True
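The download loop above wraps a redirected, authenticated request in a retry loop capped at ten attempts. A compact sketch of the same bounded-retry pattern, with a placeholder URL and credentials (not the real GLDAS endpoint):

import requests

def download_with_retries(url, username, password, max_tries=10):
    """Bounded retry loop around a redirected, authenticated GET (sketch)."""
    for attempt in range(max_tries):
        try:
            # The first request answers with a redirect to the data server
            first = requests.get(url, allow_redirects=False, stream=True)
            final = requests.get(first.headers['location'],
                                 auth=(username, password), stream=True)
            return final.text
        except Exception:
            continue
    print('Data is not available after %d tries' % max_tries)
    return None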
Example #13
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores,
                 TimeCase, CaseParameters):
    """
    This function downloads ECMWF six-hourly, daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- ECMWF variable name (key used in VariablesInfo)
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) will print a waitbar
    cores -- amount of cores used for parallel computing
    TimeCase -- 'six_hourly', 'daily' or 'monthly'
    CaseParameters -- extra parameters of the chosen time case
    """

    # correct latitude and longitude limits
    latlim_corr_one = np.floor(latlim[0] / 0.125) * 0.125
    latlim_corr_two = np.ceil(latlim[1] / 0.125) * 0.125
    latlim_corr = [latlim_corr_one, latlim_corr_two]

    # correct latitude and longitude limits
    lonlim_corr_one = np.floor(lonlim[0] / 0.125) * 0.125
    lonlim_corr_two = np.ceil(lonlim[1] / 0.125) * 0.125
    lonlim_corr = [lonlim_corr_one, lonlim_corr_two]

    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    Varname_dir = VarInfo.file_name[Var]

    # Create Out directory
    out_dir = os.path.join(Dir, "Weather_Data", "Model", "ECMWF", TimeCase,
                           Varname_dir, "mean")
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    DownloadType = VarInfo.DownloadType[Var]

    # Set required data for the six hourly option
    if TimeCase == 'six_hourly':
        string1 = 'oper'

    # Set required data for the daily option
    elif TimeCase == 'daily':
        Dates = pd.date_range(Startdate, Enddate, freq='D')
    elif TimeCase == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='MS')

    if DownloadType == 1:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'sfc'
        string8 = 'an'

    if DownloadType == 2:
        string1 = 'oper'
        string4 = "12"
        string6 = "00:00:00/12:00:00"
        string2 = 'sfc'
        string8 = 'fc'

    if DownloadType == 3:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'pl'
        string8 = 'an'

    string7 = '%s/to/%s' % (Startdate, Enddate)

    parameter_number = VarInfo.number_para[Var]
    string3 = '%03d.128' % (parameter_number)
    string5 = '0.125/0.125'
    string9 = 'ei'
    string10 = '%s/%s/%s/%s' % (latlim_corr[1], lonlim_corr[0], latlim_corr[0],
                                lonlim_corr[1])  #N, W, S, E

    # Download data by using the ECMWF API
    import watools.Collect.ECMWF.ECMWFdownload as Download
    print('Use API ECMWF to collect the data, please wait')
    Download.API(Dir, DownloadType, string1, string2, string3, string4,
                 string5, string6, string7, string8, string9, string10)

    # Open the downloaded data
    NC_filename = os.path.join(Dir, 'data_interim.nc')
    fh = Dataset(NC_filename, mode='r')

    # Get the NC variable parameter
    parameter_var = VarInfo.var_name[Var]
    Var_unit = VarInfo.units[Var]
    factors_add = VarInfo.factors_add[Var]
    factors_mul = VarInfo.factors_mul[Var]

    # Open the NC data
    Data = fh.variables[parameter_var][:]
    Data_time = fh.variables['time'][:]
    lons = fh.variables['longitude'][:]
    lats = fh.variables['latitude'][:]

    # Define the georeference information
    Geo_four = np.nanmax(lats)
    Geo_one = np.nanmin(lons)
    Geo_out = tuple([Geo_one, 0.125, 0.0, Geo_four, 0.0, -0.125])

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    for date in Dates:

        # Define the year, month and day
        year = date.year
        month = date.month
        day = date.day

        # Hours since 1900-01-01
        start = datetime.datetime(year=1900, month=1, day=1)
        end = datetime.datetime(year, month, day)
        diff = end - start
        hours_from_start_begin = diff.total_seconds() / 60 / 60

        Date_good = np.zeros(len(Data_time))
        if TimeCase == 'daily':
            days_later = 1
        if TimeCase == 'monthly':
            days_later = calendar.monthrange(year, month)[1]

        Date_good[np.logical_and(
            Data_time >= hours_from_start_begin, Data_time <
            (hours_from_start_begin + 24 * days_later))] = 1

        Data_one = Data[np.int_(Date_good) == 1, :, :]

        # Calculate the time-averaged value and apply the unit conversion factors
        Data_end = factors_mul * np.nanmean(Data_one, 0) + factors_add

        if VarInfo.types[Var] == 'flux':
            Data_end = Data_end * days_later

        VarOutputname = VarInfo.file_name[Var]

        # Define the out name
        name_out = os.path.join(
            out_dir, "%s_ECMWF_ERA-Interim_%s_%s_%d.%02d.%02d.tif" %
            (VarOutputname, Var_unit, TimeCase, year, month, day))

        # Create Tiff files
        DC.Save_as_tiff(name_out, Data_end, Geo_out, "WGS84")

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    fh.close()

    return
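ERA-Interim time stamps count hours since 1900-01-01, so selecting a day or month reduces to an interval test on that axis. A stand-alone version of the arithmetic with a fabricated time axis:

import calendar
import datetime

import numpy as np

year, month, day = 2005, 2, 1
start = datetime.datetime(1900, 1, 1)
hours_from_start = (datetime.datetime(year, month, day) -
                    start).total_seconds() / 3600

days_later = calendar.monthrange(year, month)[1]  # whole month
# Fake six-hourly time axis around the target month
Data_time = hours_from_start + np.arange(-24, 24 * (days_later + 1), 6)

in_month = np.logical_and(Data_time >= hours_from_start,
                          Data_time < hours_from_start + 24 * days_later)
print(in_month.sum(), 'six-hourly steps selected')  # 112 for February 2005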
Example #14
def RetrieveData(Date, args):

    # unpack the arguments
    [output_folder, latlim, lonlim, Var, Version] = args

    # Name of the model
    if Version == 1:
        version_name = 'CFSR'
    if Version == 2:
        version_name = 'CFSRv2'

    # Name of the outputfile
    if Var == 'dlwsfc':
        Outputname = 'DLWR_%s_W-m2_' %version_name + str(Date.strftime('%Y')) + '.' + str(Date.strftime('%m')) + '.' + str(Date.strftime('%d')) + '.tif'
    if Var == 'dswsfc':
        Outputname = 'DSWR_%s_W-m2_' %version_name + str(Date.strftime('%Y')) + '.' + str(Date.strftime('%m')) + '.' + str(Date.strftime('%d')) + '.tif'
    if Var == 'ulwsfc':
        Outputname = 'ULWR_%s_W-m2_' %version_name + str(Date.strftime('%Y')) + '.' + str(Date.strftime('%m')) + '.' + str(Date.strftime('%d')) + '.tif'
    if Var == 'uswsfc':
        Outputname = 'USWR_%s_W-m2_' %version_name + str(Date.strftime('%Y')) + '.' + str(Date.strftime('%m')) + '.' + str(Date.strftime('%d')) + '.tif'

    # Create the total end output name
    outputnamePath = os.path.join(output_folder, Outputname)

    # If the output file does not exist, create it
    if not os.path.exists(outputnamePath):

        local_filename = Download_data(Date, Version, output_folder, Var)

        # convert grb2 to netcdf (wgrib2 module is needed)
        for i in range(0,4):
            nameNC = 'Output' + str(Date.strftime('%Y')) + str(Date.strftime('%m')) + str(Date.strftime('%d')) + '-' + str(i+1) + '.nc'

            # Total path of the output
            FileNC6hour = os.path.join(output_folder, nameNC)

            # Band number of the grib data which is converted to .nc
            band = (int(Date.strftime('%d')) - 1) * 28 + (i + 1) * 7

            # Convert the data
            DC.Convert_grb2_to_nc(local_filename, FileNC6hour, band)

        if Version == 1:

            if Date < pd.Timestamp(2011, 1, 1):

                # Convert the latlim and lonlim into array
                Xstart = np.floor((lonlim[0] + 180.1562497) / 0.3125)
                Xend = np.ceil((lonlim[1] + 180.1562497) / 0.3125) + 1
                Ystart = np.floor((latlim[0] + 89.9171038899) / 0.3122121663)
                Yend = np.ceil((latlim[1] + 89.9171038899) / 0.3122121663)

                # Create a new dataset
                Datatot = np.zeros([576, 1152])

            else:
                Version = 2

        if Version == 2:

            # Convert the latlim and lonlim into array
            Xstart = np.floor((lonlim[0] + 180.102272725) / 0.204545)
            Xend = np.ceil((lonlim[1] + 180.102272725) / 0.204545) + 1
            Ystart = np.floor((latlim[0] + 89.9462116040955806) / 0.204423)
            Yend = np.ceil((latlim[1] + 89.9462116040955806) / 0.204423)

            # Create a new dataset
            Datatot = np.zeros([880, 1760])

        # Open 4 times 6 hourly dataset
        for i in range (0, 4):
            nameNC = 'Output' + str(Date.strftime('%Y')) + str(Date.strftime('%m')) + str(Date.strftime('%d')) + '-' + str(i + 1) + '.nc'
            FileNC6hour = os.path.join(output_folder, nameNC)
            f = Dataset(FileNC6hour, mode = 'r')
            Data = f.variables['Band1'][0:int(Datatot.shape[0]), 0:int(Datatot.shape[1])]
            f.close()
            data = np.array(Data)
            Datatot = Datatot + data

        # Calculate the average in W/m^2 over the day
        DatatotDay = Datatot / 4

        # Shift the map 180 degrees in longitude; the grid has twice as many
        # columns as rows, so Datatot.shape[0] equals half the column count
        DatatotDayEnd = np.zeros([int(Datatot.shape[0]), int(Datatot.shape[1])])
        DatatotDayEnd[:, 0:int(Datatot.shape[0])] = DatatotDay[:, int(Datatot.shape[0]):int(Datatot.shape[1])]
        DatatotDayEnd[:, int(Datatot.shape[0]):int(Datatot.shape[1])] = DatatotDay[:, 0:int(Datatot.shape[0])]

        # clip the data to the extent defined by the user
        DatasetEnd = DatatotDayEnd[int(Ystart):int(Yend), int(Xstart):int(Xend)]

        # save file
        if Version == 1:
            pixel_size = 0.3125
        if Version == 2:
            pixel_size = 0.204545
        geo = [lonlim[0], pixel_size, 0, latlim[1], 0, -pixel_size]
        DC.Save_as_tiff(data = np.flipud(DatasetEnd), name = outputnamePath, geo = geo, projection = "WGS84")

    return
Example #15
def RetrieveData(Date, args):
    """
    This function retrieves GLEAM ET data for a given date from the
    www.gleam.eu server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, latlim, lonlim, VarCode, TimeCase, Product] = args

    # Adjust latlim to GLEAM dataset
    latlim1 = [latlim[1] * -1, latlim[0] * -1]

    # select the spatial dataset
    Ystart = int(np.floor((latlim1[0] + 90) / 0.25))
    Yend = int(np.ceil((latlim1[1] + 90) / 0.25))
    Xstart = int(np.floor((lonlim[0] + 180) / 0.25))
    Xend = int(np.ceil((lonlim[1] + 180) / 0.25))

    Year = Date.year
    Month = Date.month

    if Product == "ET":
        filename = 'E_' + str(Year) + '_GLEAM_v3.2b.nc'
    if Product == "ETpot":
        filename = 'Ep_' + str(Year) + '_GLEAM_v3.2b.nc'

    local_filename = os.path.join(output_folder, filename)

    f = Dataset(local_filename, mode='r')

    if TimeCase == 'monthly':

        # defines the start and end of the month
        Datesend3 = Date.strftime('%Y-%m-01')
        Datesend4 = Date.strftime('%Y-%m-%d')
        Datestart = pd.date_range(Datesend3, Datesend4, freq='MS')

        # determine the DOY-1 and DOYend (those are use to define the temporal boundaries of the yearly data)
        DOY = int(Datestart[0].strftime('%j'))
        DOYend = int(Date.strftime('%j'))
        DOYDownload = DOY - 1
        Day = 1

        if Product == "ET":
            Data = f.variables['E'][DOYDownload:DOYend, Xstart:Xend,
                                    Ystart:Yend]
        if Product == "ETpot":
            Data = f.variables['Ep'][DOYDownload:DOYend, Xstart:Xend,
                                     Ystart:Yend]

        data = np.array(Data)
        f.close()

        # Sum ET data in time and change the no data value into -999
        dataSum = np.sum(data, axis=0)
        dataSum[dataSum < -100] = -999.000
        dataCor = np.swapaxes(dataSum, 0, 1)

    if TimeCase == 'daily':
        Day = Date.day

        # Define the DOY, DOY-1 is taken from the yearly dataset
        DOY = int(Date.strftime('%j'))
        DOYDownload = DOY - 1

        if Product == "ET":
            Data = f.variables['E'][DOYDownload, Xstart:Xend, Ystart:Yend]
        if Product == "ETpot":
            Data = f.variables['Ep'][DOYDownload, Xstart:Xend, Ystart:Yend]
        data = np.array(Data)
        f.close()

        data[data < -100] = -999.000
        dataCor = np.swapaxes(data, 0, 1)

    # The Georeference of the map
    geo_in = [lonlim[0], 0.25, 0.0, latlim[1], 0.0, -0.25]

    # Name of the map
    dataset_name = VarCode + '_' + str(Year) + '.' + str(Month).zfill(
        2) + '.' + str(Day).zfill(2) + '.tif'
    output_file = os.path.join(output_folder, dataset_name)

    # save data as tiff file
    DC.Save_as_tiff(name=output_file,
                    data=dataCor,
                    geo=geo_in,
                    projection="WGS84")

    return True
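GLEAM stores one calendar year per NetCDF file with a daily time axis, so the monthly slice above is defined by day-of-year bounds. The sketch below reproduces that index arithmetic without touching the NetCDF file:

import pandas as pd

Date = pd.Timestamp('2014-06-30')  # last day of the target month

month_start = pd.date_range(Date.strftime('%Y-%m-01'),
                            Date.strftime('%Y-%m-%d'), freq='MS')
DOY = int(month_start[0].strftime('%j'))  # first day of the month, 1-based
DOYend = int(Date.strftime('%j'))         # last day of the month, 1-based
DOYDownload = DOY - 1                     # 0-based start index

print(DOYDownload, DOYend)  # 151 181 -> slice [151:181] covers the 30 June days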
Example #16
def main(Dir,
         Startdate='',
         Enddate='',
         latlim=[-50, 50],
         lonlim=[-180, 180],
         cores=False,
         Waitbar=1):
    """
    This function downloads RFE V2.0 (monthly) data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine.
             It can be 'False' to avoid using parallel computing
             routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Download data
    print('\nDownload monthly RFE precipitation data for period %s till %s' %
          (Startdate, Enddate))

    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('2001-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Make directory
    output_folder = os.path.join(Dir, 'Precipitation', 'RFE', 'Monthly/')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    for Date in Dates:
        month = Date.month
        year = Date.year
        end_day = calendar.monthrange(year, month)[1]
        Startdate_one_month = '%s-%02d-01' % (year, month)
        Enddate_one_month = '%s-%02d-%02d' % (year, month, end_day)

        DownloadData(Dir, Startdate_one_month, Enddate_one_month, latlim,
                     lonlim, 0, cores)

        Dates_daily = pd.date_range(Startdate_one_month,
                                    Enddate_one_month,
                                    freq='D')

        # Make directory
        input_folder_daily = os.path.join(Dir, 'Precipitation', 'RFE',
                                          'Daily/')
        i = 0

        for Date_daily in Dates_daily:
            file_name = 'P_RFE.v2.0_mm-day-1_daily_%s.%02s.%02s.tif' % (
                Date_daily.strftime('%Y'), Date_daily.strftime('%m'),
                Date_daily.strftime('%d'))
            file_name_daily_path = os.path.join(input_folder_daily, file_name)
            if os.path.exists(file_name_daily_path):
                if Date_daily == Dates_daily[i]:
                    Raster_monthly = RC.Open_tiff_array(file_name_daily_path)
                else:
                    Raster_monthly += RC.Open_tiff_array(file_name_daily_path)
            else:
                if Date_daily == Dates_daily[i]:
                    i += 1

        geo_out, proj, size_X, size_Y = RC.Open_array_info(
            file_name_daily_path)
        file_name = 'P_RFE.v2.0_mm-month-1_monthly_%s.%02s.01.tif' % (
            Date.strftime('%Y'), Date.strftime('%m'))
        file_name_output = os.path.join(output_folder, file_name)
        DC.Save_as_tiff(file_name_output,
                        Raster_monthly,
                        geo_out,
                        projection="WGS84")

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)
Example #17
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This scripts downloads ASCAT SWI data from the VITO server.
    The output files display the Surface Water Index.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    """

    # Check the latitude and longitude and otherwise reset lat and lon.
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible.'
              ' Value set to maximum')
        latlim[0] = max(latlim[0], -90)
        latlim[1] = min(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W.'
              ' Now value is set to maximum')
        lonlim[0] = max(lonlim[0], -180)
        lonlim[1] = min(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2007-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2018-12-31')

    # Make a pandas timestamp of the date
    try:
        Enddate = pd.Timestamp(Enddate)
    except Exception:
        pass

    # define the daily dates
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    output_folder_temp = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily', 'Temp')
    if not os.path.exists(output_folder_temp):
        os.makedirs(output_folder_temp)

    # loop over dates
    for Date in Dates:

        # Define end filename
        End_filename = os.path.join(
            output_folder, 'SWI_ASCAT_V3_Percentage_daily_%d.%02d.%02d.tif' %
            (Date.year, Date.month, Date.day))

        # Define IDs
        xID = 1800 + np.int16(
            np.array([np.ceil((lonlim[0]) * 10),
                      np.floor((lonlim[1]) * 10)]))

        yID = np.int16(
            np.array([np.floor((-latlim[1]) * 10),
                      np.ceil((-latlim[0]) * 10)])) + 900

        # Download the data from FTP server if the file not exists
        if not os.path.exists(End_filename):
            try:
                data = Download_ASCAT_from_VITO(End_filename,
                                                output_folder_temp, Date, yID,
                                                xID)
                # make geotiff file
                geo = [lonlim[0], 0.1, 0, latlim[1], 0, -0.1]
                DC.Save_as_tiff(name=End_filename,
                                data=data,
                                geo=geo,
                                projection="WGS84")
            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    # remove the temporary folder
    shutil.rmtree(output_folder_temp)
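The xID/yID expressions above map the requested box onto the global 0.1-degree ASCAT grid, whose columns start at 180W and whose rows start at 90N. A quick stand-alone check:

import numpy as np

lonlim, latlim = [5.0, 10.0], [45.0, 50.0]  # example extent

xID = 1800 + np.int16(np.array([np.ceil(lonlim[0] * 10),
                                np.floor(lonlim[1] * 10)]))
yID = np.int16(np.array([np.floor(-latlim[1] * 10),
                         np.ceil(-latlim[0] * 10)])) + 900

print(xID, yID)  # [1850 1900] [400 450]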
Example #18
def NPP_GPP_Based(Dir_Basin, Data_Path_GPP, Data_Path_NPP, Startdate, Enddate):
    """
    This function calculates the monthly NDM based on the yearly NPP and monthly GPP.

    Parameters
    ----------
    Dir_Basin : str
        Path to all the output data of the Basin
    Data_Path_GPP : str
        Path from the Dir_Basin to the GPP data
    Data_Path_NPP : str
        Path from the Dir_Basin to the NPP data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'

    Returns
    -------
    Data_Path_NDM : str
        Path from the Dir_Basin to the normalized dry matter data

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Define output folder for Normalized Dry Matter
    Data_Path_NDM = os.path.join(Dir_Basin, "NDM")
    if not os.path.exists(Data_Path_NDM):
        os.mkdir(Data_Path_NDM)

    # Define monthly time steps that will be created
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Define the years that will be calculated
    Year_Start = int(Startdate[0:4])
    Year_End = int(Enddate[0:4])
    Years = list(range(Year_Start, Year_End + 1))

    # Loop over the years
    for year in Years:

        # Change working directory to the NPP folder
        os.chdir(Data_Path_NPP)

        # Open yearly NPP data
        yearly_NPP_File = glob.glob('*yearly*%d.01.01.tif' % int(year))[0]
        Yearly_NPP = RC.Open_tiff_array(yearly_NPP_File)

        # Get the No Data Value of the NPP file
        dest = gdal.Open(yearly_NPP_File)
        NDV = dest.GetRasterBand(1).GetNoDataValue()

        # Set the No Data Value to Nan
        Yearly_NPP[Yearly_NPP == NDV] = np.nan

        # Change working directory to the GPP folder
        os.chdir(Data_Path_GPP)

        # Find all the monthly files of that year
        monthly_GPP_Files = glob.glob('*monthly*%d.*.01.tif' % int(year))

        # Check that there are 12 monthly files; otherwise some are missing
        if not len(monthly_GPP_Files) == 12:
            print('ERROR: Some monthly GPP Files are missing')

        # Get the projection information of the GPP inputs
        geo_out, proj, size_X, size_Y = RC.Open_array_info(
            monthly_GPP_Files[0])
        geo_out_NPP, proj_NPP, size_X_NPP, size_Y_NPP = RC.Open_array_info(
            os.path.join(Data_Path_NPP, yearly_NPP_File))

        if int(proj.split('"')[-2]) == 4326:
            proj = "WGS84"

        # Get the No Data Value of the GPP files
        dest = gdal.Open(monthly_GPP_Files[0])
        NDV = dest.GetRasterBand(1).GetNoDataValue()

        # Create an empty numpy array
        Yearly_GPP = np.zeros([size_Y, size_X])

        # Calculate the total yearly GPP
        for monthly_GPP_File in monthly_GPP_Files:

            # Open array
            Data = RC.Open_tiff_array(monthly_GPP_File)

            # Remove nan values
            Data[Data == NDV] = np.nan
            Data[np.isnan(Data)] = 0
            # Add data to yearly sum
            Yearly_GPP += Data

        # Check if size is the same of NPP and GPP otherwise resize
        if not (size_X_NPP == size_X and size_Y_NPP == size_Y):
            Yearly_NPP = RC.resize_array_example(Yearly_NPP, Yearly_GPP)

        # Loop over the monthly dates
        for Date in Dates:

            # If the Date is in the same year as the yearly NPP and GPP
            if Date.year == year:

                # Get current month
                month = Date.month

                # Get the GPP file of the current year and month
                monthly_GPP_File = glob.glob('*monthly_%d.%02d.01.tif' %
                                             (int(year), int(month)))[0]
                monthly_GPP = RC.Open_tiff_array(monthly_GPP_File)
                monthly_GPP[monthly_GPP == NDV] = np.nan

                # Calculate the NDM based on the monthly and yearly NPP and GPP (fraction of GPP)
                Monthly_NDM = Yearly_NPP * monthly_GPP / Yearly_GPP * (
                    30. / 12.) * 10000  # kg/ha

                # Define output name
                output_name = os.path.join(
                    Data_Path_NDM, 'NDM_MOD17_kg_ha-1_monthly_%d.%02d.01.tif' %
                    (int(year), int(month)))

                # Save the NDM as tiff file
                DC.Save_as_tiff(output_name, Monthly_NDM, geo_out, proj)

    return (Data_Path_NDM)
Example #19
def DownloadData(output_folder, latlim, lonlim, parameter, resolution):
    """
    This function downloads DEM data from HydroSHED

    Keyword arguments:
    output_folder -- directory of the result
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    parameter -- 'dir_3s', 'dem_3s', 'dir_15s' or 'dem_15s'
    resolution -- '3s' or '15s' (set automatically from the parameter)
    """
    # Define parameter dependent variables
    if parameter == "dir_3s":
        para_name = "DIR"
        unit = "-"
        resolution = '3s'
        parameter = 'dir'

    if parameter == "dem_3s":
        para_name = "DEM"
        unit = "m"
        resolution = '3s'
        parameter = 'dem'

    if parameter == "dir_15s":
        para_name = "DIR"
        unit = "-"
        resolution = '15s'
        parameter = 'dir'

    if parameter == "dem_15s":
        para_name = "DEM"
        unit = "m"
        resolution = '15s'
        parameter = 'dem'

    # converts the latlim and lonlim into names of the tiles which must be
    # downloaded
    if resolution == '3s':

        name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim,
                                                       parameter)

        # Memory for the map x and y shape (starts with zero)
        size_X_tot = 0
        size_Y_tot = 0

    if resolution == '15s':
        name = Find_Document_names_15s(latlim, lonlim, parameter, resolution)

    nameResults = []
    # Create a temporary folder for processing
    output_folder_trash = os.path.join(output_folder, "Temp")
    if not os.path.exists(output_folder_trash):
        os.makedirs(output_folder_trash)

    # Download, extract, and converts all the files to tiff files
    for nameFile in name:

        try:
            # Download the data from
            # http://earlywarning.usgs.gov/hydrodata/
            output_file, file_name = Download_Data(nameFile,
                                                   output_folder_trash,
                                                   parameter, para_name,
                                                   resolution)

            # extract zip data
            DC.Extract_Data(output_file, output_folder_trash)

            # Convert the data with an .adf extension to a .tif extension.
            # The input is the file name and the directory where the data must be stored
            file_name_tiff = file_name.split('.')[0] + '_trans_temporary.tif'
            file_name_extract = file_name.split('_')[0:3]
            if resolution == '3s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1]

            if resolution == '15s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1] + '_15s'

            input_adf = os.path.join(output_folder_trash, file_name_extract2,
                                     file_name_extract2, 'hdr.adf')
            output_tiff = os.path.join(output_folder_trash, file_name_tiff)

            # convert data from adf to a tiff file
            output_tiff = DC.Convert_adf_to_tiff(input_adf, output_tiff)

            geo_out, proj, size_X, size_Y = RC.Open_array_info(output_tiff)
            if int(size_X) != int(6000) or int(size_Y) != int(6000):
                data = np.ones((6000, 6000)) * -9999

                # Create the latitude bound
                Vfile = str(nameFile)[1:3]
                SignV = str(nameFile)[0]
                SignVer = 1
                # If the sign before the filename is a south sign, latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(nameFile)[4:7]
                SignH = str(nameFile)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                Expected_X_min = Bound1
                Expected_Y_max = Bound2 + 5

                Xid_start = int(
                    np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
                Xid_end = int(
                    np.round(
                        ((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) /
                        geo_out[1]))
                Yid_start = int(
                    np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
                Yid_end = int(
                    np.round((Expected_Y_max - (geo_out[3] +
                                                (size_Y * geo_out[5]))) /
                             (-geo_out[5])))

                data[Yid_start:Yid_end,
                     Xid_start:Xid_end] = RC.Open_tiff_array(output_tiff)
                if np.max(data) == 255:
                    data[data == 255] = -9999
                data[data < -9999] = -9999

                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

        except:

            if resolution == '3s':
                # If tile not exist create a replacing zero tile (sea tiles)
                output = nameFile.split('.')[0] + "_trans_temporary.tif"
                output_tiff = os.path.join(output_folder_trash, output)
                file_name = nameFile
                data = np.ones((6000, 6000)) * -9999
                data = data.astype(np.float32)

                # Create the latitude bound
                Vfile = str(file_name)[1:3]
                SignV = str(file_name)[0]
                SignVer = 1
                # If the sign before the filename is a south sign, latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(file_name)[4:7]
                SignH = str(file_name)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                # Geospatial data for the tile
                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

            if resolution == '15s':

                print('no 15s data is in dataset')

        if resolution == '3s':

            # clip data
            Data, Geo_data = RC.clip_data(output_tiff, latlim, lonlim)
            size_Y_out = int(np.shape(Data)[0])
            size_X_out = int(np.shape(Data)[1])

            # Total size of the product so far
            size_Y_tot = int(size_Y_tot + size_Y_out)
            size_X_tot = int(size_X_tot + size_X_out)

            if nameFile == name[0]:
                Geo_x_end = Geo_data[0]
                Geo_y_end = Geo_data[3]
            else:
                Geo_x_end = np.min([Geo_x_end, Geo_data[0]])
                Geo_y_end = np.max([Geo_y_end, Geo_data[3]])

            # create name for chunk
            FileNameEnd = "%s_temporary.tif" % (nameFile)
            nameForEnd = os.path.join(output_folder_trash, FileNameEnd)
            nameResults.append(str(nameForEnd))

            # save chunk as tiff file
            DC.Save_as_tiff(name=nameForEnd,
                            data=Data,
                            geo=Geo_data,
                            projection="WGS84")

    if resolution == '3s':
        #size_X_end = int(size_X_tot) #!
        #size_Y_end = int(size_Y_tot) #!

        size_X_end = int(size_X_tot / len(rangeLat)) + 1  #!
        size_Y_end = int(size_Y_tot / len(rangeLon)) + 1  #!

        # Define the georeference of the end matrix
        geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]]

        latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]]
        lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end]

        # merge chunk together resulting in 1 tiff map
        datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end,
                               size_X_end)

        datasetTot[datasetTot < -9999] = -9999

    if resolution == '15s':
        output_file_merged = os.path.join(output_folder_trash, 'merged.tif')
        datasetTot, geo_out = Merge_DEM_15s(output_folder_trash,
                                            output_file_merged, latlim, lonlim)

    # name of the end result
    output_DEM_name = "%s_HydroShed_%s_%s.tif" % (para_name, unit, resolution)

    Save_name = os.path.join(output_folder, output_DEM_name)

    # Make geotiff file
    DC.Save_as_tiff(name=Save_name,
                    data=datasetTot,
                    geo=geo_out,
                    projection="WGS84")
    os.chdir(output_folder)

    # Delete the temporary folder
    shutil.rmtree(output_folder_trash)
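
The georeferencing of the HydroSHEDS chunks above is driven entirely by the tile naming scheme. A minimal sketch of that parsing, assuming 5x5-degree HydroSHEDS 3s tiles named like "n30e010" (the helper below is hypothetical, not part of watools):

def tile_geotransform(tile_name, res=1 / 1200.0, tile_deg=5):
    """Return a GDAL geotransform [ulx, xres, 0, uly, 0, -yres] for one tile."""
    lat = int(tile_name[1:3]) * (-1 if tile_name[0] == "s" else 1)
    lon = int(tile_name[4:7]) * (-1 if tile_name[3] == "w" else 1)
    # The name encodes the lower-left corner, so the upper-left latitude
    # is the encoded latitude plus the tile height
    return [lon, res, 0.0, lat + tile_deg, 0.0, -res]

print(tile_geotransform("n30e010"))  # [10, 0.000833..., 0.0, 35, 0.0, -0.000833...]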
Example #20
def Calc_Rainy_Days(Dir_Basin, Data_Path_P, Startdate, Enddate):
    """
    This functions calculates the amount of rainy days based on daily precipitation data.

    Parameters
    ----------
    Dir_Basin : str
        Path to all the output data of the Basin
    Data_Path_P : str
        Path to the daily rainfall data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'

    Returns
    -------
    Data_Path_RD : str
        Path from the Dir_Basin to the rainy days data

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Create an output directory to store the rainy days tiffs
    Data_Path_RD = os.path.join(Dir_Basin, 'Rainy_Days')
    if not os.path.exists(Data_Path_RD):
        os.mkdir(Data_Path_RD)

    # Define the dates that must be created
    Dates = pd.date_range(Startdate, Enddate, freq ='MS')

    # Set working directory to the rainfall folder
    os.chdir(Data_Path_P)

    # Open all the daily data and store the data in a 3D array
    for Date in Dates:
        # Define the year and month and amount of days in month
        year = Date.year
        month = Date.month
        daysinmonth = calendar.monthrange(year, month)[1]

        # Set the third (time) dimension of array starting at 0
        i = 0

        # Find all files of that month
        files = glob.glob('*daily_%d.%02d.*.tif' %(year, month))

        # Check if the amount of files corresponds with the amount of days in month
        if len(files) != daysinmonth:
            print('ERROR: Not all Rainfall days for month %d and year %d are downloaded' %(month, year))

        # Loop over the days and store data in raster
        for File in files:
            dir_file = os.path.join(Data_Path_P, File)

            # Get array information and create empty numpy array for daily rainfall when looping the first file
            if File == files[0]:

                # Open geolocation info and define projection
                geo_out, proj, size_X, size_Y = RC.Open_array_info(dir_file)
                if int(proj.split('"')[-2]) == 4326:
                    proj = "WGS84"

                # Create empty array for the whole month
                P_Daily = np.zeros([daysinmonth,size_Y, size_X])

            # Open data and put the data in 3D array
            Data = RC.Open_tiff_array(dir_file)

            # Remove the weird numbers
            Data[Data<0] = 0

            # Add the precipitation to the monthly cube
            P_Daily[i, :, :] = Data
            i += 1

        # Define a rainy day
        P_Daily[P_Daily > 0.201] = 1
        P_Daily[P_Daily != 1] = 0

        # Sum the amount of rainy days
        RD_one_month = np.nansum(P_Daily,0)

        # Define output name
        Outname = os.path.join(Data_Path_RD, 'Rainy_Days_NumOfDays_monthly_%d.%02d.01.tif' %(year, month))

        # Save tiff file
        DC.Save_as_tiff(Outname, RD_one_month, geo_out, proj)

    return Data_Path_RD
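
The function above flags a day as rainy when more than roughly 0.2 mm fell and then counts the flags along the time axis. A toy check of that logic with made-up numbers (np.where avoids the in-place double-thresholding, but gives the same result):

import numpy as np

P_Daily = np.array([[[0.0, 0.5],
                     [0.1, 2.3]],
                    [[0.3, 0.0],
                     [0.0, 1.1]]])        # 2 days x 2 x 2 pixels, mm/day
rainy = np.where(P_Daily > 0.201, 1, 0)   # flag rainy days
RD_one_month = np.nansum(rainy, axis=0)   # count rainy days per pixel
print(RD_one_month)                       # [[1 1]
                                          #  [0 2]]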
Example #21
def RetrieveData_three_hourly(Date, args):
    """
    This function retrieves GLDAS three-hourly data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
	
	# Open all the parameters
    [path, url, Var, VarStr, VarInfo, TimeCase, xID, yID, lonlim, latlim, CaseParameters, username, password,  types] = args

	# Open variable info parameters
    VarFactor = VarInfo.factors[Var]

	# Loop over the periods
    for period in CaseParameters:

        # Check whether the output file already exists
        BasinDir = path + '/' + VarStr + '_GLDAS-CLSM_' + \
            VarInfo.units[Var] + '_3hour_' + Date.strftime('%Y.%m.%d') + \
            '_'+str(period) + '.tif'

        if not os.path.isfile(BasinDir):

            # Reset the begin parameters for downloading
            downloaded = 0
            N=0

            while downloaded == 0:
                try:

                    # Define time
                    zID = int(((Date - pd.Timestamp("1979-1-2")).days) * 8) + (period - 1)

                    # total URL
                    url_GLDAS = url + '.ascii?%s[%s][%s:1:%s][%s:1:%s]' %(Var,zID,yID[0],yID[1],xID[0],xID[1])

                    # open URL
                    try:
                        dataset = requests.get(url_GLDAS, allow_redirects=False, stream = True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
                        dataset = requests.get(url_GLDAS, allow_redirects=False, stream = True, verify = False)

                    try:
                        get_dataset = requests.get(dataset.headers['Location'], auth = (username,password), stream = True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
                        get_dataset = requests.get(dataset.headers['location'], auth = (username,password),stream = True, verify = False)

                    # download data (first save as text file)
                    pathtext = os.path.join(path, 'temp%s.txt' % zID)
                    z = open(pathtext, 'w')
                    z.write(get_dataset.text)
                    z.close()

                    # Open text file and remove header and footer
                    data_start = np.genfromtxt(pathtext,dtype = float,skip_header = 1,skip_footer = 6,delimiter=',')
                    data = data_start[:,1:]

                    # Apply the VarFactor (an offset when negative, a scale factor otherwise)
                    if VarFactor < 0:
                        data = data + VarFactor
                    else:
                        data = data * VarFactor
                    if VarInfo.types[Var] == 'flux':
                        data = data / 8

                    # Set values lower than -9999 to the nodata value
                    data[data < -9999] = -9999

                    # Flag the download as successful
                    downloaded = 1

                # If the download was not successful
                except:
                    data=[]

                    # Try another time
                    N = N + 1

                    # Stop trying after 10 times
                    if N == 10:
                        print('Data from ' + Date.strftime('%Y-%m-%d') + ' is not available')
                        downloaded = 1

            # define geo
            lonlimGLDAS = xID[0] * 1.0 - 180
            latlimGLDAS = (yID[1] + 1) * 1.0 - 60

            # Save to geotiff file
            geo = [lonlimGLDAS,1.0,0,latlimGLDAS,0,-1.0]
            DC.Save_as_tiff(name=BasinDir, data=np.flipud(data[:,:]), geo=geo, projection="WGS84")

            # Delete data and text file
            del data
            os.remove(pathtext)

    return True
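
The time index zID above counts 3-hourly records from the start of the GLDAS-CLSM archive: eight records per day since 1979-01-02, plus the slot within the day. A quick sketch of that arithmetic, assuming the same archive start date the code uses:

import pandas as pd

def gldas_3h_index(date, period):
    """period runs 1..8 for the eight 3-hour slots of a day."""
    days = (pd.Timestamp(date) - pd.Timestamp("1979-01-02")).days
    return days * 8 + (period - 1)

print(gldas_3h_index("1979-01-02", 1))  # 0, the first record in the archive
print(gldas_3h_index("1979-01-03", 3))  # 10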
Example #22
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version,
                 Product):
    """
    This scripts downloads SSEBop ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    if version == "FTP":
        # Check the latitude and longitude, and otherwise clamp them to the greatest extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print(
                'Latitude above 80N or below 59.2S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print(
                'Longitude must be between 180E and 180W. Now value is set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude, and otherwise clamp them to the greatest extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print(
                'Latitude above 80N or below 60S is not possible. Value set to maximum'
            )
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print(
                'Longitude must be between 180E and 180W. Now value is set to maximum'
            )
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Define the output directory and create it if it does not exist
    if Product == "ETact":
        output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
        freq_use = "MS"
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration',
                                     'FEWS', 'Daily')
        freq_use = "D"

    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq=freq_use)

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Loop over the dates
    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month
        day = Date.day

        if version == "FTP":

            # Date as printed in filename
            Filename_out = os.path.join(
                output_folder,
                'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' %
                (Date.strftime('%Y'), Date.strftime('%m'),
                 Date.strftime('%d')))

            # Define end filename
            Filename_dir = os.path.join("%s" % year,
                                        "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

        if version == "V4":

            # Date as printed in filename
            if Product == "ETpot":
                Filename_out = os.path.join(
                    output_folder,
                    'ETpot_FEWS_mm-day-1_daily_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = 'et%02s%02d%02d.tar.gz' % (str(year)[2:],
                                                               month, day)
                # The end file name after downloading and unzipping
                Filename_only = "et%02s%02d%02d.bil" % (str(year)[2:], month,
                                                        day)
                # Create bin folder
                temp_folder = os.path.join(output_folder, "Temp")
                if not os.path.exists(temp_folder):
                    os.makedirs(temp_folder)
                local_filename = os.path.join(temp_folder, Filename_only)

            if Product == "ETact":
                Filename_out = os.path.join(
                    output_folder,
                    'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' %
                    (Date.strftime('%Y'), Date.strftime('%m'),
                     Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = "m%s%02d.zip" % (str(year), month)
                # The end file name after downloading and unzipping
                Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (
                    str(year), month)

                # Temporary filename for the downloaded global file
                local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from the server if the file does not exist yet
        if not os.path.exists(Filename_out):
            try:

                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    if Product == "ETpot":
                        Download_SSEBop_from_Web(temp_folder,
                                                 Filename_only_zip, Product)
                    if Product == "ETact":
                        Download_SSEBop_from_Web(output_folder,
                                                 Filename_only_zip, Product)

                if Product == "ETpot":
                    Array_ETpot = RC.Open_bil_array(local_filename)
                    Array_ETpot = Array_ETpot / 100
                    Geo_out = tuple([-180.5, 1, 0, 90.5, 0, -1])
                    dest = DC.Save_as_MEM(Array_ETpot, Geo_out, "WGS84")
                    data, Geo_out = RC.clip_data(dest, latlim, lonlim)
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")

                if Product == "ETact":
                    # Clip dataset
                    RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim,
                                         lonlim)
                    os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        if Product == "ETact":
            zipfiles = glob.glob("*.zip")
            for zipfile in zipfiles:
                os.remove(os.path.join(output_folder, zipfile))
            xmlfiles = glob.glob("*.xml")
            for xmlfile in xmlfiles:
                os.remove(os.path.join(output_folder, xmlfile))
        if Product == "ETpot":
            import shutil
            Temp_dir = os.path.join(output_folder, "Temp")
            shutil.rmtree(Temp_dir)

    return
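
A note on the clamping above: the element-wise np.maximum/np.minimum is required, because the reduction np.max(a, b) reads its second positional argument as the axis parameter, so the original np.max(latlim[0], -59.2) would not clamp anything. A minimal demonstration (behaviour as in current NumPy):

import numpy as np

print(np.maximum(-75.0, -59.2))  # -59.2 -> clamps the value, as intended
try:
    np.max(-75.0, -59.2)         # the second positional argument is *axis*
except TypeError as err:
    print("np.max fails:", err)  # a float axis raises a TypeError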
Example #23
def RetrieveData_monthly(Date, args):
    """
    This function retrieves MSWEP precipitation monthly data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [path, url, TimeCase, xID, yID, lonlim, latlim, username, password] = args

    # Check whether the output file already exists
    BasinDir = path + '/P_MSWEP_mm-month_monthly_' + Date.strftime(
        '%Y.%m.%d') + '.tif'

    # Define month and year of current month
    Y = Date.year
    M = Date.month

    # Check if the output file already exists
    if not os.path.isfile(BasinDir):

        # Reset the begin parameters for downloading
        downloaded = 0
        N = 0

        # Create the time dimension
        zID = (Y - 1979) * 12 + (M - 1)

        # define total url
        url_MSWEP = url + '.ascii?precipitation[%s][%s:1:%s][%s:1:%s]' % (
            zID, yID[0], yID[1], xID[0], xID[1])

        # if not downloaded try to download file
        while downloaded == 0:
            try:

                # open URL
                try:
                    dataset = requests.get(url_MSWEP,
                                           allow_redirects=False,
                                           stream=True)
                except:
                    from requests.packages.urllib3.exceptions import InsecureRequestWarning
                    requests.packages.urllib3.disable_warnings(
                        InsecureRequestWarning)
                    dataset = requests.get(url_MSWEP,
                                           allow_redirects=False,
                                           stream=True,
                                           verify=False)

                # download data (first save as text file)
                pathtext = os.path.join(path, 'temp%s.txt' % str(zID))
                z = open(pathtext, 'wb')
                z.write(dataset.content)
                z.close()

                # Open text file and remove header and footer
                data_start = np.genfromtxt(pathtext,
                                           dtype=float,
                                           skip_header=1,
                                           skip_footer=6,
                                           delimiter=',')
                data = data_start[1:, 1:]

                # Set values lower than -9998 to NaN
                data[data < -9998] = np.nan

                # Flag the download as successful
                downloaded = 1

            # If the download was not successful
            except:

                data = []

                # Try another time
                N = N + 1

                # Stop trying after 10 times
                if N == 10:
                    print('Data from ' + Date.strftime('%Y-%m-%d') +
                          ' is not available')
                    downloaded = 1

        # define geo
        lonlimMSWEP = xID[0] * 0.10 - 180
        latlimMSWEP = yID[1] * 0.10 - 90

        # Save to geotiff file
        geo = [lonlimMSWEP, 0.1, 0, latlimMSWEP, 0, -0.1]
        DC.Save_as_tiff(name=BasinDir,
                        data=data,
                        geo=geo,
                        projection="WGS84")

        # Delete data and text file
        del data
        os.remove(pathtext)

    return True
Example #24
def Nearest_Interpolate(Dir_in, Startdate, Enddate, Dir_out=None):
    """
    This function calculates monthly tiff files based on the 16-daily tiff files
    (it computes a day-weighted average).

    Parameters
    ----------
    Dir_in : str
        Path to the input data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'
    Dir_out : str
        Path to the output data, default is same as Dir_in

    """
    # import WA+ modules
    import watools.General.data_conversions as DC
    import watools.General.raster_conversions as RC

    # Change working directory
    os.chdir(Dir_in)

    # Find all 16-daily files
    files = glob.glob('*16-daily*.tif')

    # Create an array with the filenames and keys (DOY and year) of all the 16-daily files
    i = 0
    DOY_Year = np.zeros([len(files), 3])
    for File in files:

        # Get the time characteristics from the filename
        year = File.split('.')[-4][-4:]
        month = File.split('.')[-3]
        day = File.split('.')[-2]

        # Create pandas Timestamp
        date_file = '%s-%02s-%02s' % (year, month, day)
        Datum = pd.Timestamp(date_file)

        # Get day of year
        DOY = Datum.strftime('%j')

        # Save data in array
        DOY_Year[i, 0] = i
        DOY_Year[i, 1] = DOY
        DOY_Year[i, 2] = year

        # Loop over files
        i += 1

    # Check enddate:
    Enddate_split = Enddate.split('-')
    month_range = calendar.monthrange(int(Enddate_split[0]),
                                      int(Enddate_split[1]))[1]
    Enddate = '%d-%02d-%02d' % (int(Enddate_split[0]), int(
        Enddate_split[1]), month_range)

    # Check startdate:
    Startdate_split = Startdate.split('-')
    Startdate = '%d-%02d-01' % (int(Startdate_split[0]), int(
        Startdate_split[1]))

    # Define end and start date
    Dates = pd.date_range(Startdate, Enddate, freq='MS')
    DatesEnd = pd.date_range(Startdate, Enddate, freq='M')

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"

    # Get the No Data Value
    dest = gdal.Open(files[0])
    NDV = dest.GetRasterBand(1).GetNoDataValue()

    # Loop over months and create monthly tiff files
    i = 0
    for date in Dates:
        # Get Start and end DOY of the current month
        DOY_month_start = date.strftime('%j')
        DOY_month_end = DatesEnd[i].strftime('%j')

        # Search for the files that are between those DOYs
        year = date.year
        DOYs = DOY_Year[DOY_Year[:, 2] == year]
        DOYs_oneMonth = DOYs[np.logical_and(
            (DOYs[:, 1] + 16) >= int(DOY_month_start),
            DOYs[:, 1] <= int(DOY_month_end))]

        # Create empty arrays
        Monthly = np.zeros([size_Y, size_X])
        Weight_tot = np.zeros([size_Y, size_X])
        Data_one_month = np.ones([size_Y, size_X]) * np.nan

        # Loop over the files that are within the DOYs
        for EightDays in DOYs_oneMonth[:, 1]:

            # Calculate the number of days this composite contributes to the month
            Weight = np.ones([size_Y, size_X])

            # For start of month
            if np.min(DOYs_oneMonth[:, 1]) == EightDays:
                Weight = Weight * int(EightDays + 16 - int(DOY_month_start))

            # For end of month
            elif np.max(DOYs_oneMonth[:, 1]) == EightDays:
                Weight = Weight * (int(DOY_month_end) - EightDays + 1)

            # For the middle of the month
            else:
                Weight = Weight * 16

            row = DOYs_oneMonth[np.argwhere(
                DOYs_oneMonth[:, 1] == EightDays)[0][0], :][0]

            # Open the array of current file
            input_name = os.path.join(Dir_in, files[int(row)])
            Data = RC.Open_tiff_array(input_name)

            # Remove NDV
            Weight[Data == NDV] = 0
            Data[Data == NDV] = np.nan

            # Multiply the data by its weight (number of days)
            Data = Data * Weight

            # Calculate the total weight and data
            Weight_tot += Weight
            Monthly[~np.isnan(Data)] += Data[~np.isnan(Data)]

        # Go to next month
        i += 1

        # Calculate the average
        Data_one_month[Weight_tot != 0.] = Monthly[
            Weight_tot != 0.] / Weight_tot[Weight_tot != 0.]

        # Define output directory
        if Dir_out is None:
            Dir_out = Dir_in

        # Define output name
        output_name = os.path.join(
            Dir_out, files[int(row)].replace('16-daily', 'monthly'))
        output_name = output_name[:-9] + '%02d.01.tif' % (date.month)

        # Save tiff file
        DC.Save_as_tiff(output_name, Data_one_month, geo_out, proj)

    return
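
The weighting above gives each 16-daily composite as many days as it contributes to the month: the first selected composite counts from its overlap with the month start, the last counts up to the month end, and fully contained composites count 16 days. A back-of-envelope check for March (DOY 60-90) of a non-leap year, assuming composites starting on DOY 49, 65 and 81 were selected:

doys = [49, 65, 81]              # composite start DOYs selected for the month
start, end = 60, 90              # first and last DOY of March (non-leap year)

weights = []
for d in doys:
    if d == min(doys):           # composite straddling the month start
        weights.append(d + 16 - start)
    elif d == max(doys):         # composite straddling the month end
        weights.append(end - d + 1)
    else:                        # composite fully inside the month
        weights.append(16)

print(weights, sum(weights))     # [5, 16, 10] 31 -> the days of March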
Example #25
def ETref(Date, args):
    """
    This function calculates ETref (daily) data based on HydroSHEDS, GLDAS, and
    (CFSR/LANDSAF) data, in parallel or on a single core.

    Keyword arguments:
    Date -- pandas Timestamp
    args -- includes all the parameters that are needed for the ETref
    """

    # unpack the arguments
    [Dir, lonlim, latlim, pixel_size, LANDSAF] = args

    # Set the paths
    nameTmin = 'Tair-min_GLDAS-NOAH_C_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    tmin_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'tair_f_inst', 'min', nameTmin)

    nameTmax = 'Tair-max_GLDAS-NOAH_C_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    tmax_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'tair_f_inst', 'max', nameTmax)

    nameHumid = 'Hum_GLDAS-NOAH_kg-kg_daily_' + Date.strftime(
        '%Y.%m.%d') + ".tif"
    humid_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                             'qair_f_inst', 'mean', nameHumid)

    namePress = 'P_GLDAS-NOAH_kpa_daily_' + Date.strftime('%Y.%m.%d') + ".tif"
    press_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                             'psurf_f_inst', 'mean', namePress)

    nameWind = 'W_GLDAS-NOAH_m-s-1_daily_' + Date.strftime('%Y.%m.%d') + ".tif"
    wind_str = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', 'daily',
                            'wind_f_inst', 'mean', nameWind)

    if LANDSAF == 1:

        nameShortClearname = 'ShortWave_Clear_Daily_W-m2_' + Date.strftime(
            '%Y-%m-%d') + '.tif'
        input2_str = os.path.join(Dir, 'Landsaf_Clipped',
                                  'Shortwave_Clear_Sky', nameShortClearname)

        nameShortNetname = 'ShortWave_Net_Daily_W-m2_' + Date.strftime(
            '%Y-%m-%d') + '.tif'
        input1_str = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Net',
                                  nameShortNetname)

        input3_str = 'not'

    else:
        if Date < pd.Timestamp('2011-04-01'):

            nameDownLong = 'DLWR_CFSR_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input2_str = os.path.join(Dir, 'Radiation', 'CFSR', nameDownLong)

            nameDownShort = 'DSWR_CFSR_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input1_str = os.path.join(Dir, 'Radiation', 'CFSR', nameDownShort)

            nameUpLong = 'ULWR_CFSR_W-m2_' + Date.strftime('%Y.%m.%d') + ".tif"
            input3_str = os.path.join(Dir, 'Radiation', 'CFSR', nameUpLong)

        else:
            nameDownLong = 'DLWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input2_str = os.path.join(Dir, 'Radiation', 'CFSRv2', nameDownLong)

            nameDownShort = 'DSWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input1_str = os.path.join(Dir, 'Radiation', 'CFSRv2',
                                      nameDownShort)

            nameUpLong = 'ULWR_CFSRv2_W-m2_' + Date.strftime(
                '%Y.%m.%d') + ".tif"
            input3_str = os.path.join(Dir, 'Radiation', 'CFSRv2', nameUpLong)

    # The day of year
    DOY = Date.dayofyear

    # Load DEM
    if not pixel_size:
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_3s.tif')
    else:
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_3s.tif')
        dest, ulx, lry, lrx, uly, epsg_to = RC.reproject_dataset_epsg(
            DEMmap_str, pixel_spacing=pixel_size, epsg_to=4326, method=2)
        DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                                  'DEM_HydroShed_m_reshaped_for_ETref.tif')
        DEM_data = dest.GetRasterBand(1).ReadAsArray()
        geo_dem = [ulx, pixel_size, 0.0, uly, 0.0, -pixel_size]
        DC.Save_as_tiff(name=DEMmap_str,
                        data=DEM_data,
                        geo=geo_dem,
                        projection='4326')

    # Calc ETref
    ETref = calc_ETref(Dir, tmin_str, tmax_str, humid_str, press_str, wind_str,
                       input1_str, input2_str, input3_str, DEMmap_str, DOY)

    # Make directory for the ETref data
    output_folder = os.path.join(Dir, 'ETref', 'Daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create the output names
    NameETref = 'ETref_mm-day-1_daily_' + Date.strftime('%Y.%m.%d') + '.tif'
    NameEnd = os.path.join(output_folder, NameETref)

    # Collect geotiff information
    geo_out, proj, size_X, size_Y = RC.Open_array_info(DEMmap_str)

    # Create daily ETref tiff files
    DC.Save_as_tiff(name=NameEnd, data=ETref, geo=geo_out, projection=proj)
Example #26
def RetrieveData(Date, args):
    """
    This function retrieves TRMM/GPM IMERG precipitation data for a given date
    from the https://gpm1.gesdisc.eosdis.nasa.gov server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, TimeCase, xID, yID, lonlim, latlim] = args

    year = Date.year
    month = Date.month
    day = Date.day

    from watools import WebAccounts
    username, password = WebAccounts.Accounts(Type='NASA')

    # Create https
    if TimeCase == 'daily':
        URL = 'https://gpm1.gesdisc.eosdis.nasa.gov/opendap/GPM_L3/GPM_3IMERGDF.05/%d/%02d/3B-DAY.MS.MRG.3IMERG.%d%02d%02d-S000000-E235959.V05.nc4.ascii?precipitationCal[%d:1:%d][%d:1:%d]' % (
            year, month, year, month, day, xID[0], xID[1] - 1, yID[0],
            yID[1] - 1)
        DirFile = os.path.join(
            output_folder, "P_TRMM3B42.V7_mm-day-1_daily_%d.%02d.%02d.tif" %
            (year, month, day))
        Scaling = 1

    if TimeCase == 'monthly':
        URL = 'https://gpm1.gesdisc.eosdis.nasa.gov/opendap/hyrax/GPM_L3/GPM_3IMERGM.05/%d/3B-MO.MS.MRG.3IMERG.%d%02d01-S000000-E235959.%02d.V05B.HDF5.ascii?precipitation[%d:1:%d][%d:1:%d]' % (
            year, year, month, month, xID[0], xID[1] - 1, yID[0], yID[1] - 1)
        Scaling = calendar.monthrange(year, month)[1] * 24
        DirFile = os.path.join(
            output_folder,
            "P_GPM.IMERG_mm-month-1_monthly_%d.%02d.01.tif" % (year, month))

    if not os.path.isfile(DirFile):
        dataset = requests.get(URL, allow_redirects=False, stream=True)
        try:
            get_dataset = requests.get(dataset.headers['location'],
                                       auth=(username, password),
                                       stream=True)
        except:
            from requests.packages.urllib3.exceptions import InsecureRequestWarning
            requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
            get_dataset = requests.get(dataset.headers['location'],
                                       auth=(username, password),
                                       verify=False)

        # download data (first save as text file)
        pathtext = os.path.join(output_folder, 'temp.txt')
        z = open(pathtext, 'wb')
        z.write(get_dataset.content)
        z.close()

        # Open text file and remove header and footer
        data_start = np.genfromtxt(pathtext,
                                   dtype=float,
                                   skip_header=1,
                                   delimiter=',')
        data = data_start[:, 1:] * Scaling
        data[data < 0] = -9999
        data = data.transpose()
        data = np.flipud(data)

        # Delete .txt file
        os.remove(pathtext)

        # Make geotiff file
        geo = [lonlim[0], 0.1, 0, latlim[1], 0, -0.1]
        DC.Save_as_tiff(name=DirFile, data=data, geo=geo, projection="WGS84")

    return True
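
On the Scaling factor above: the daily IMERG files are assumed to already be in mm/day (Scaling = 1), while the monthly product is a rate in mm/hr, so multiplying by the number of hours in the month converts it to mm/month. For example:

import calendar

year, month = 2017, 2
scaling = calendar.monthrange(year, month)[1] * 24
print(scaling)  # 672 hours in February 2017, so mm/hr * 672 = mm/month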
Example #27
def main(Dir, Startdate='', Enddate='',
         latlim=[-60, 60], lonlim=[-180, 180], pixel_size=False, cores=False, LANDSAF=0, SourceLANDSAF='', Waitbar=1):
    """
    This function calculates monthly Reference ET (ETref) data based on the daily ETref data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine.
             It can be 'False' to avoid using parallel computing
             routines.
    Waitbar -- 1 (Default) will print the waitbar
    """

    print('Create monthly Reference ET data for period %s till %s' %(Startdate, Enddate))

    # An array of monthly dates which will be calculated
    Dates = pd.date_range(Startdate,Enddate,freq = 'MS')

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)

    # Calculate the ETref day by day for every month
    for Date in Dates:

        # Collect date data
        Y=Date.year
        M=Date.month
        Mday=calendar.monthrange(Y,M)[1]
        Days=pd.date_range(Date,Date+pd.Timedelta(days=Mday),freq='D')
        StartTime=Date.strftime('%Y')+'-'+Date.strftime('%m')+ '-01'
        EndTime=Date.strftime('%Y')+'-'+Date.strftime('%m')+'-'+str(Mday)

        # Get ETref on daily basis
        daily(Dir=Dir, Startdate=StartTime,Enddate=EndTime,latlim=latlim, lonlim=lonlim, pixel_size = pixel_size, cores=cores, LANDSAF=LANDSAF, SourceLANDSAF=SourceLANDSAF, Waitbar = 0)

        # Load DEM
        if not pixel_size:
            nameDEM='DEM_HydroShed_m_3s.tif'
            DEMmap=os.path.join(Dir,'HydroSHED','DEM',nameDEM )
        else:
            DEMmap=os.path.join(Dir,'HydroSHED','DEM','DEM_HydroShed_m_reshaped_for_ETref.tif')
        # Get some geo-data to save results
        geo_ET, proj, size_X, size_Y = RC.Open_array_info(DEMmap)

        dataMonth=np.zeros([size_Y,size_X])

        for Day in Days[:-1]:
            DirDay=os.path.join(Dir,'ETref','Daily','ETref_mm-day-1_daily_' + Day.strftime('%Y.%m.%d') + '.tif')
            dataDay=gdal.Open(DirDay)
            Dval=dataDay.GetRasterBand(1).ReadAsArray().astype(np.float32)
            Dval[Dval<0]=0
            dataMonth=dataMonth+Dval
            dataDay=None

        # make geotiff file
        output_folder_month=os.path.join(Dir,'ETref','Monthly')
        if not os.path.exists(output_folder_month):
            os.makedirs(output_folder_month)
        DirMonth=os.path.join(output_folder_month,'ETref_mm-month-1_monthly_'+Date.strftime('%Y.%m.%d') + '.tif')

        # Create the tiff file
        DC.Save_as_tiff(DirMonth,dataMonth, geo_ET, proj)

        # Create Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)
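
A note on the date arithmetic above: pd.date_range from the first of the month to first-of-month plus Mday days spans Mday + 1 entries, so Days[:-1] is exactly the list of days in that month. A quick check:

import calendar
import pandas as pd

Date = pd.Timestamp('2010-02-01')
Mday = calendar.monthrange(Date.year, Date.month)[1]
Days = pd.date_range(Date, Date + pd.Timedelta(days=Mday), freq='D')
print(len(Days[:-1]), Days[0].date(), Days[-2].date())
# 28 2010-02-01 2010-02-28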
Example #28
def CollectLANDSAF(SourceLANDSAF, Dir, Startdate, Enddate, latlim, lonlim):
    """
    This function collects and clips LANDSAF data

    Keyword arguments:
    SourceLANDSAF -- 'C:/'  path to the LANDSAF source data (The directory includes SIS and SID)
    Dir -- 'C:/' path to the WA map
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    # Make an array of the days for which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # make directories
    SISdir = os.path.join(Dir, 'Landsaf_Clipped', 'SIS')
    if not os.path.exists(SISdir):
        os.makedirs(SISdir)

    SIDdir = os.path.join(Dir, 'Landsaf_Clipped', 'SID')
    if not os.path.exists(SIDdir):
        os.makedirs(SIDdir)

    ShortwaveBasin(SourceLANDSAF,
                   Dir,
                   latlim,
                   lonlim,
                   Dates=[Startdate, Enddate])
    DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                              'DEM_HydroShed_m_3s.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(DEMmap_str)

    # Open DEM map
    demmap = RC.Open_tiff_array(DEMmap_str)
    demmap[demmap < 0] = 0

    # make lat and lon arrays
    dlat = geo_out[5]
    dlon = geo_out[1]
    lat = geo_out[3] + (np.arange(size_Y) + 0.5) * dlat
    lon = geo_out[0] + (np.arange(size_X) + 0.5) * dlon

    for date in Dates:
        # day of year
        day = date.dayofyear
        Horizontal, Sloping, sinb, sinb_hor, fi, slope, ID = SlopeInfluence(
            demmap, lat, lon, day)

        SIDname = os.path.join(
            SIDdir, 'SAF_SID_Daily_W-m2_' + date.strftime('%Y-%m-%d') + '.tif')
        SISname = os.path.join(
            SISdir, 'SAF_SIS_Daily_W-m2_' + date.strftime('%Y-%m-%d') + '.tif')

        #PREPARE SID MAPS
        SIDdest = RC.reproject_dataset_example(SIDname, DEMmap_str, method=3)
        SIDdata = SIDdest.GetRasterBand(1).ReadAsArray()

        #PREPARE SIS MAPS
        SISdest = RC.reproject_dataset_example(SISname, DEMmap_str, method=3)
        SISdata = SISdest.GetRasterBand(1).ReadAsArray()

        # Calculate ShortWave net
        Short_Wave_Net = SIDdata * (Sloping /
                                    Horizontal) + SISdata * 86400 / 1e6

        # Calculate ShortWave Clear
        Short_Wave = Sloping
        Short_Wave_Clear = Short_Wave * (0.75 + demmap * 2 * 10**-5)

        # make directories
        PathClear = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Clear_Sky')
        if not os.path.exists(PathClear):
            os.makedirs(PathClear)

        PathNet = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Net')
        if not os.path.exists(PathNet):
            os.makedirs(PathNet)

        # name Shortwave Clear and Net
        nameFileNet = 'ShortWave_Net_Daily_W-m2_' + date.strftime(
            '%Y-%m-%d') + '.tif'
        nameNet = os.path.join(PathNet, nameFileNet)

        nameFileClear = 'ShortWave_Clear_Daily_W-m2_' + date.strftime(
            '%Y-%m-%d') + '.tif'
        nameClear = os.path.join(PathClear, nameFileClear)

        # Save net and clear short wave radiation
        DC.Save_as_tiff(nameNet, Short_Wave_Net, geo_out, proj)
        DC.Save_as_tiff(nameClear, Short_Wave_Clear, geo_out, proj)
    return
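
The clear-sky factor above, (0.75 + demmap * 2e-5), matches the FAO-56 relation for clear-sky solar radiation, Rso = (0.75 + 2e-5 * z) * Ra, with z the elevation in metres. A minimal sketch:

def clear_sky_radiation(Ra, z):
    """FAO-56 clear-sky shortwave radiation (same units as Ra), z in metres."""
    return Ra * (0.75 + 2e-5 * z)

print(clear_sky_radiation(300.0, 1000.0))  # 231.0, i.e. 77% of Ra at 1000 m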
Example #29
def RetrieveData(args):
    """
    This function retrieves JRC data for a given date from the
    http://storage.googleapis.com/global-surface-water/downloads/ server.

    Keyword arguments:
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, Names_to_download, lonlim, latlim] = args

    # Collect the data from the JRC webpage; returns the data and the lat/lon extent of those tiles
    try:
        Collect_data(Names_to_download, output_folder)
    except:
        print("Was not able to download the file")

    # Clip the data to the user's extent
    if len(Names_to_download) == 1:
        trash_folder = os.path.join(output_folder, "Trash")
        data_in = os.path.join(trash_folder, Names_to_download[0])
        data_end, geo_end = RC.clip_data(data_in, latlim, lonlim)
    else:

        data_end = np.zeros([
            int((latlim[1] - latlim[0]) / 0.00025),
            int((lonlim[1] - lonlim[0]) / 0.00025)
        ])

        for Name_to_merge in Names_to_download:
            trash_folder = os.path.join(output_folder, "Trash")
            data_in = os.path.join(trash_folder, Name_to_merge)
            geo_out, proj, size_X, size_Y = RC.Open_array_info(data_in)
            lat_min_merge = np.maximum(latlim[0],
                                       geo_out[3] + size_Y * geo_out[5])
            lat_max_merge = np.minimum(latlim[1], geo_out[3])
            lon_min_merge = np.maximum(lonlim[0], geo_out[0])
            lon_max_merge = np.minimum(lonlim[1],
                                       geo_out[0] + size_X * geo_out[1])

            lonmerge = [lon_min_merge, lon_max_merge]
            latmerge = [lat_min_merge, lat_max_merge]
            data_one, geo_one = RC.clip_data(data_in, latmerge, lonmerge)

            Ystart = int((geo_one[3] - latlim[1]) / geo_one[5])
            Yend = int(Ystart + np.shape(data_one)[0])
            Xstart = int((geo_one[0] - lonlim[0]) / geo_one[1])
            Xend = int(Xstart + np.shape(data_one)[1])

            data_end[Ystart:Yend, Xstart:Xend] = data_one

        geo_end = tuple([lonlim[0], geo_one[1], 0, latlim[1], 0, geo_one[5]])

    # Save results as Gtiff
    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')
    DC.Save_as_tiff(name=fileName_out,
                    data=data_end,
                    geo=geo_end,
                    projection='WGS84')
    shutil.rmtree(trash_folder)
    return True
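
The Ystart/Xstart arithmetic above converts geographic offsets into pixel offsets: the difference between the tile's upper-left corner and the mosaic's upper-left corner, divided by the pixel size (the y step geo[5] is negative, which makes the row offset positive). A minimal sketch with hypothetical numbers:

def pixel_offset(geo_tile, lon_min, lat_max):
    """Return (row, col) of the tile's upper-left pixel inside the mosaic."""
    row = int((geo_tile[3] - lat_max) / geo_tile[5])  # geo[5] is negative
    col = int((geo_tile[0] - lon_min) / geo_tile[1])
    return row, col

geo_one = (5.0, 0.00025, 0, 52.0, 0, -0.00025)           # tile ULX = 5E, ULY = 52N
print(pixel_offset(geo_one, lon_min=4.0, lat_max=53.0))  # (4000, 4000)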
Example #30
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):

    # Create an array with the dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import watools.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount,
                                    total_amount,
                                    prefix='Progress:',
                                    suffix='Complete',
                                    length=50)

    # Define the minimum and maximum lat and lon tile indices of the ETensemble tiles
    Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))
    Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))
    Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))
    Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))

    # Create the Lat and Lon tiles that will be downloaded
    Lat_tiles = [Min_lat_tile, Max_lat_tile]
    Lon_tiles = [Min_lon_tile, Max_lon_tile]

    # Define the output folder and create it if it does not exist
    output_folder = os.path.join(Dir, 'Evaporation', 'ETensV1_0')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create Geotransform of the output files
    GEO_1 = lonlim[0]
    GEO_2 = 0.0025
    GEO_3 = 0.0
    GEO_4 = latlim[1]
    GEO_5 = 0.0
    GEO_6 = -0.0025
    geo = [GEO_1, GEO_2, GEO_3, GEO_4, GEO_5, GEO_6]
    geo_new = tuple(geo)

    # Define the parameter for downloading the data
    Downloaded = 0

    # Calculate the ET data date by date
    for Date in Dates:

        # Define the output name and folder
        file_name = 'ET_ETensemble250m_mm-month-1_monthly_%d.%02d.01.tif' % (
            Date.year, Date.month)
        output_file = os.path.join(output_folder, file_name)

        # Create the output file if it does not exist yet
        if not os.path.exists(output_file):

            # If the data has not been downloaded yet, download it first
            if Downloaded == 0:

                # Download the ETens data from the FTP server
                Download_ETens_from_WA_FTP(output_folder, Lat_tiles, Lon_tiles)

                # Unzip the folder
                Unzip_ETens_data(output_folder, Lat_tiles, Lon_tiles)
                Downloaded = 1

            # Create the ET data for the area of interest
            ET_data = Collect_dataset(output_folder, Date, Lat_tiles,
                                      Lon_tiles, latlim, lonlim)

            # Save this array as a tiff file
            DC.Save_as_tiff(output_file, ET_data, geo_new, projection='WGS84')

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount,
                                        total_amount,
                                        prefix='Progress:',
                                        suffix='Complete',
                                        length=50)
    '''
    # Remove all the raw dataset
    for v_tile in range(Lat_tiles[0], Lat_tiles[1]+1):
        for h_tile in range(Lon_tiles[0], Lon_tiles[1]+1):
            Tilename = "h%sv%s" %(h_tile, v_tile)
            filename = os.path.join(output_folder, Tilename)
            if os.path.exists(filename):
                shutil.rmtree(filename)

    # Remove all .zip files
    for f in os.listdir(output_folder):
        if re.search(".zip", f):
            os.remove(os.path.join(output_folder, f))
    '''
    return
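
A quick check of the tile arithmetic above, which assumes 10x10-degree ETensemble tiles counted down from 100N and east from 190W; the small 0.00125-degree offset (half a 0.0025-degree pixel) keeps a bound that falls exactly on a tile edge inside the lower tile:

import numpy as np

latlim, lonlim = [8.0, 12.0], [-5.0, 3.0]
Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))            # 8
Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))  # 9
Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))            # 18
Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))  # 19
print(Min_lat_tile, Max_lat_tile, Min_lon_tile, Max_lon_tile)   # 8 9 18 19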