def main(Dir, latlim, lonlim, Waitbar=1):
    """
    Downloads Globcover data from http://due.esrin.esa.int/page_globcover.php

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create Waitbar
    if Waitbar == 1:
        print('\nDownload Globcover landcover map')
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = 1
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Download and process the data
    DownloadData(Dir, latlim, lonlim)

    if Waitbar == 1:
        amount = 1
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
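# A minimal usage sketch for the Globcover collector above; the output
# directory and extent are illustrative assumptions, not values from the source.
if __name__ == "__main__":
    main(r"C:/data", latlim=[8.0, 9.0], lonlim=[38.0, 39.0], Waitbar=1)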
def main(Dir, latlim, lonlim, level='sl1', Waitbar=1):
    """
    Downloads SoilGrids data from ftp://ftp.soilgrids.org/data/recent/

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    level -- 'sl1' (Default) 'sl2' 'sl3' 'sl4' 'sl5' 'sl6' 'sl7'
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'SoilGrids', 'Silt_Content')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(output_folder, 'SiltContentMassFraction_%s_SoilGrids_percentage.tif' % level)

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print('\nDownload Silt Content Mass Fraction soil map of %s from SoilGrids.org' % level)
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim, "SLTPPT", level)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print("\nSilt Content Mass Fraction soil map of %s from SoilGrids.org already exists in output folder" % level)
def Collect_data(FTPprefix, Years, output_folder, Waitbar, Product):
    '''
    This function downloads all the needed GLEAM files from hydras.ugent.be
    as nc files.

    Keyword arguments:
    FTPprefix -- FTP path to the GLEAM data
    Years -- range of years to download
    output_folder -- 'C:/file/to/path/'
    '''
    # account of the SFTP server (only password is missing)
    server = 'hydras.ugent.be'
    portnumber = 2225
    username, password = WebAccounts.Accounts(Type='GLEAM')

    # Create Waitbar
    print('\nDownload GLEAM data')
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount2 = len(Years)
        amount2 = 0
        WaitbarConsole.printWaitBar(amount2, total_amount2, prefix='Progress:', suffix='Complete', length=50)

    for year in Years:
        directory = os.path.join(FTPprefix, '%d' % year)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(server, port=portnumber, username=username, password=password)
        ftp = ssh.open_sftp()
        ftp.chdir(directory)

        if Product == "ET":
            filename = 'E_' + str(year) + '_GLEAM_v3.3b.nc'
        if Product == "ETpot":
            filename = 'Ep_' + str(year) + '_GLEAM_v3.3b.nc'
        local_filename = os.path.join(output_folder, filename)

        if not os.path.exists(local_filename):
            ftp.get(filename, local_filename)

        if Waitbar == 1:
            amount2 += 1
            WaitbarConsole.printWaitBar(amount2, total_amount2, prefix='Progress:', suffix='Complete', length=50)

        ftp.close()
        ssh.close()

    return
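# Usage sketch for Collect_data. The FTP prefix mirrors the v3.3b layout used
# above; the year range and output folder are illustrative assumptions, and a
# GLEAM account must be configured in watertools' WebAccounts beforehand.
if __name__ == "__main__":
    Collect_data('data/v3.3b/', range(2003, 2006), r"C:/data/GLEAM/Daily", Waitbar=1, Product="ET")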
def main(Dir, latlim, lonlim, resolution='3s', Waitbar=1):
    """
    Downloads HydroSHED flow direction data from http://www.hydrosheds.org/download/

    this data includes a Digital Elevation Model Flow Direction
    The spatial resolution is 90m (3s) or 450m (15s) or 900m (30s)

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    resolution -- '3s' (default) or '15s' or '30s'
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'HydroSHED', 'DIR')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(Dir, 'HydroSHED', 'DIR', 'DIR_HydroShed_-_%s.tif' % resolution)
    parameter = "dir_%s" % resolution

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print('\nDownload HydroSHED Drainage Direction map with a resolution of %s' % resolution)
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim, parameter, resolution)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print("\nHydroSHED Drainage direction (%s) already exists in output folder" % resolution)
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This function downloads JRC data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the JRC water occurrence data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'JRC', 'Occurrence')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')

    if not os.path.exists(fileName_out):

        # Create Waitbar
        if Waitbar == 1:
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # This function defines the name of dataset that needs to be collected
        Names_to_download = Tiles_to_download(lonlim, latlim)

        # Pass variables to parallel function and run
        args = [output_folder, Names_to_download, lonlim, latlim]
        RetrieveData(args)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        print('JRC water occurrence map already exists')

    return
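# Usage sketch for the JRC water-occurrence collector above; the directory
# and extent are illustrative assumptions.
if __name__ == "__main__":
    DownloadData(r"C:/data", latlim=[8.0, 9.0], lonlim=[38.0, 39.0], Waitbar=1)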
def main(Dir, latlim, lonlim, Waitbar=1):
    """
    Downloads SoilGrids data from ftp://ftp.soilgrids.org/data/recent/

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'SoilGrids', 'Predicted_Probability_Of_Occurence')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(output_folder, 'PredictedProbabilityOfOccurrence_SoilGrids_percentage.tif')

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print('\nDownload Predicted Probability of Occurrence soil map from SoilGrids.org')
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim, "BDRLOG")

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print("\nPredicted Probability of Occurrence soil map from SoilGrids.org already exists in output folder")
def main(Dir, latlim, lonlim, Waitbar=1):
    """
    Downloads SRTM data from http://srtm.csi.cgiar.org/download

    this data includes a Digital Elevation Model (DEM)
    The spatial resolution is 90m (3s)

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'SRTM', 'DEM')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(output_folder, 'DEM_SRTM_m_3s.tif')

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print('\nDownload SRTM altitude map with a resolution of 3s')
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print("\nSRTM altitude map (3s) already exists in output folder")
def SetVariables(Dir, Startdate, Enddate, latlim, lonlim, pixel_size, cores, LANDSAF, Waitbar):
    """
    This function starts to calculate ETref (daily) data based on Hydroshed,
    GLDAS, and (CFSR/LANDSAF) in parallel or single core

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    pixel_size -- The output pixel size
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    LANDSAF -- 1 if LANDSAF data must be used
    Waitbar -- 1 (Default) will print the waitbar
    """
    # Make an array of the days of which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        # Import under its own name so the Waitbar flag is not shadowed
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Pass variables to parallel function and run
    args = [Dir, lonlim, latlim, pixel_size, LANDSAF]
    if not cores:
        for Date in Dates:
            ETref(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(ETref)(Date, args) for Date in Dates)
    return results
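# Usage sketch for SetVariables; the directory, period and extent are
# illustrative assumptions. cores=False keeps the run single-core, and
# LANDSAF=0 uses the CFSR-based radiation path.
if __name__ == "__main__":
    SetVariables(r"C:/data", '2005-01-01', '2005-01-31', [8.0, 9.0], [38.0, 39.0], pixel_size=False, cores=False, LANDSAF=0, Waitbar=1)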
def ALEXI_daily(Dates, output_folder, latlim, lonlim, Waitbar, total_amount, TimeStep):

    amount = 0
    for Date in Dates:

        # Date as printed in filename
        DirFile = os.path.join(output_folder, 'ETa_ALEXI_CSFR_mm-day-1_daily_%d.%02d.%02d.tif' % (Date.year, Date.month, Date.day))
        DOY = Date.timetuple().tm_yday

        # Define end filename
        filename = "EDAY_CERES_%d%03d.dat.gz" % (Date.year, DOY)

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, filename)

        # Define IDs
        yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20), np.floor((latlim[0] + 60) * 20)]))
        xID = np.int16(np.array([np.floor((lonlim[0]) * 20), np.ceil((lonlim[1]) * 20)]) + 3600)

        # Download the data from FTP server if the file not exists
        if not os.path.exists(DirFile):
            try:
                Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID, TimeStep)
            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Remove the temporary .dat files
    os.chdir(output_folder)
    files = glob.glob("*.dat")
    for f in files:
        os.remove(os.path.join(output_folder, f))
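# Worked example of the index arithmetic above (the extent is an illustrative
# assumption). The global ALEXI grid has 20 pixels per degree: rows run from
# 90N down to 60S (3000 rows), columns from 180W to 180E (7200 columns).
import numpy as np

latlim, lonlim = [8.0, 9.0], [38.0, 39.0]
yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20), np.floor((latlim[0] + 60) * 20)]))
xID = np.int16(np.array([np.floor(lonlim[0] * 20), np.ceil(lonlim[1] * 20)]) + 3600)
print(yID, xID)  # [1620 1640] [4360 4380]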
def main(Dir, latlim, lonlim, Waitbar=1):
    """
    This function downloads ESACCI daily data for a given variable, time
    interval, and spatial extent.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) Will print a waitbar
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'ESACCI', 'LU')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(output_folder, 'LU_ESACCI.tif')

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print('\nDownload ESACCI landuse map')
            import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim, Waitbar)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print("\nESACCI LU map already exists in output folder")
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This scripts downloads ASCAT SWI data from the VITO server.
    The output files display the Surface Water Index.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    """
    # Check the latitude and longitude and otherwise reset lat and lon.
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2007-01-01')
    if not Enddate:
        Enddate = pd.Timestamp.now()

    # Make a pandas Timestamp of the date
    try:
        Enddate = pd.Timestamp(Enddate)
    except:
        Enddate = Enddate

    # Amount of daily dates
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    output_folder_temp = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily', 'Temp')
    if not os.path.exists(output_folder_temp):
        os.makedirs(output_folder_temp)

    # loop over dates
    for Date in Dates:

        # Define end filename
        End_filename = os.path.join(output_folder, 'SWI_ASCAT_V3_Percentage_daily_%d.%02d.%02d.tif' % (Date.year, Date.month, Date.day))

        # Define IDs
        xID = 1800 + np.int16(np.array([np.ceil((lonlim[0]) * 10), np.floor((lonlim[1]) * 10)]))
        yID = np.int16(np.array([np.floor((-latlim[1]) * 10), np.ceil((-latlim[0]) * 10)])) + 900

        # Download the data from FTP server if the file not exists
        if not os.path.exists(End_filename):
            try:
                data = Download_ASCAT_from_VITO(End_filename, output_folder_temp, Date, yID, xID)
                # make geotiff file
                geo = [lonlim[0], 0.1, 0, latlim[1], 0, -0.1]
                DC.Save_as_tiff(name=End_filename, data=data, geo=geo, projection="WGS84")
            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # remove the temporary folder
    shutil.rmtree(output_folder_temp)
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads TRMM daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    TimeCase -- String equal to 'daily' or 'monthly'
    Waitbar -- 1 (Default) will print a waitbar
    """
    # String Parameters
    if TimeCase == 'daily':
        TimeFreq = 'D'
        output_folder = os.path.join(Dir, 'Precipitation', 'TRMM', 'Daily')
    elif TimeCase == 'monthly':
        TimeFreq = 'MS'
        output_folder = os.path.join(Dir, 'Precipitation', 'TRMM', 'Monthly')
    else:
        raise KeyError("The input time interval is not supported")

    # Make directory
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('1998-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if latlim[0] < -50 or latlim[1] > 50:
        print('Latitude above 50N or below 50S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Define IDs
    yID = np.int16(np.array([np.ceil((latlim[0] + 50) * 4), np.floor((latlim[1] + 50) * 4)]))
    xID = np.int16(np.array([np.floor((lonlim[0]) * 4), np.ceil((lonlim[1]) * 4)]) + 720)

    # Pass variables to parallel function and run
    args = [output_folder, TimeCase, xID, yID, lonlim, latlim]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)
    return results
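# Usage sketch for the TRMM collector above; the directory, period and extent
# are illustrative assumptions.
if __name__ == "__main__":
    DownloadData(r"C:/data", '2005-01-01', '2005-03-01', [8.0, 9.0], [38.0, 39.0], Waitbar=1, cores=False, TimeCase='monthly')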
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads MSWEP Version 2.1 daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 0 or 1 (1 is waitbar on)
    cores -- 1....8
    TimeCase -- String equal to 'daily' or 'monthly'
    """
    # Load account details
    username, password = WebAccounts.Accounts(Type='MSWEP')

    # Set required data for the daily option
    if TimeCase == 'daily':

        # Define output folder and create this one if not exists
        path = os.path.join(Dir, 'Precipitation', 'MSWEP', 'daily')
        if not os.path.exists(path):
            os.makedirs(path)

        # Startdate if not defined
        sd_date = '1979-01-01'

        # Define Time frequency
        TimeFreq = 'D'

        # Define URL by using personal account
        url = 'https://%s:%[email protected]/opendap/MSWEP_V2.1/global_daily_010deg/' % (username, password)

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_daily

    # Set required data for the monthly option
    elif TimeCase == 'monthly':

        # Define output folder and create this one if not exists
        path = os.path.join(Dir, 'Precipitation', 'MSWEP', 'monthly')
        if not os.path.exists(path):
            os.makedirs(path)

        # Startdate if not defined
        sd_date = '1979-01-01'

        # Define Time frequency
        TimeFreq = 'MS'

        # Define URL by using personal account
        url = 'https://%s:%[email protected]:443/opendap/MSWEP_V2.1/global_monthly_010deg.nc' % (username, password)

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_monthly

    # If none of the possible options is chosen
    else:
        raise KeyError("The input time interval is not supported")

    # Define IDs (latitude/longitude)
    yID = np.int16(np.array([np.ceil((latlim[0] + 90) * 10), np.floor((latlim[1] + 90) * 10)]))
    xID = np.int16(np.array([np.floor((lonlim[0] + 180) * 10), np.ceil((lonlim[1] + 180) * 10)]))

    # Check dates. If no dates are given, the maximum available period is used.
    if not Startdate:
        Startdate = pd.Timestamp(sd_date)
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Create all dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Create one parameter with all the required arguments
    args = [path, url, TimeCase, xID, yID, lonlim, latlim, username, password]

    # Pass variables to parallel function and run
    if not cores:
        for Date in Dates:
            RetrieveData_fcn(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData_fcn)(Date, args) for Date in Dates)
    return results
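# Usage sketch for the MSWEP collector above; the directory, period and extent
# are illustrative assumptions, and an MSWEP account must be configured in
# watertools' WebAccounts beforehand.
if __name__ == "__main__":
    DownloadData(r"C:/data", '2005-01-01', '2005-03-01', [8.0, 9.0], [38.0, 39.0], Waitbar=1, cores=False, TimeCase='monthly')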
def CollectData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores, Version):
    """
    This function collects daily CFSR data in geotiff format

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- 'dlwsfc','dswsfc','ulwsfc', or 'uswsfc'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a wait bar
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Version -- 1 or 2 (1 = CFSR, 2 = CFSRv2)
    """
    # Creates an array of the days of which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # For collecting CFSR data
    if Version == 1:
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -89.9171038899 or latlim[1] > 89.9171038899:
            print('Latitude above 89.917N or below 89.917S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -89.9171038899)
            latlim[1] = np.minimum(latlim[1], 89.9171038899)
        if lonlim[0] < -180 or lonlim[1] > 179.843249782:
            print('Longitude must be between 179.84E and 179.84W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 179.843249782)

        # Make directory for the CFSR data
        output_folder = os.path.join(Dir, 'Radiation', 'CFSR')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # For collecting CFSRv2 data
    if Version == 2:
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -89.9462116040955806 or latlim[1] > 89.9462116040955806:
            print('Latitude above 89.946N or below 89.946S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -89.9462116040955806)
            latlim[1] = np.minimum(latlim[1], 89.9462116040955806)
        if lonlim[0] < -180 or lonlim[1] > 179.8977275:
            print('Longitude must be between 179.90E and 179.90W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 179.8977275)

        # Make directory for the CFSRv2 data
        output_folder = os.path.join(Dir, 'Radiation', 'CFSRv2')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # Pass variables to parallel function and run
    args = [output_folder, latlim, lonlim, Var, Version]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    # Remove all intermediate .nc, .grb2 and .grib2 files
    for f in os.listdir(output_folder):
        if f.endswith(('.nc', '.grb2', '.grib2')):
            os.remove(os.path.join(output_folder, f))

    return results
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, nameDownload, hdf_library, remove_hdf):
    """
    This function downloads MOD15 8-daily data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    nameDownload -- The name of the subset that must be downloaded; can be
                    Fpar_500m or Lai_500m
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date to max
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-18')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Define the unit
    if nameDownload == 'Fpar_500m':
        unit = '-'
        dataset = 'FPAR'
    if nameDownload == 'Lai_500m':
        unit = 'm2-m2'
        dataset = 'LAI'

    # Make an array of the days of which the FPAR is taken
    Dates = Make_TimeStamps(Startdate, Enddate)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MODIS FPAR data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, dataset, 'MOD15')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = Get_tiles_from_txt(output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, unit, dataset, nameDownload, hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    # Remove all .hdf files
    if remove_hdf == 1:
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        #files = glob.glob("*.txt")
        #for f in files:
        #    os.remove(os.path.join(output_folder, f))

    return results
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Time='', GMT_Offset=0, Waitbar=1):
    """
    This function downloads MSGCPP 15-minute shortwave radiation (SDS) data
    from the KNMI server.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Time -- 'HH:MM' to download one timestep per day, '' for all 15-minute steps
    GMT_Offset -- offset in hours from GMT
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    output_folder = os.path.join(Dir, "MSGCPP", "SDS", "15min")
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    if isinstance(Enddate, str):
        Enddate = datetime.datetime(int(Enddate.split('-')[0]), int(Enddate.split('-')[1]), int(Enddate.split('-')[2]), 23, 59)
    else:
        Enddate = datetime.datetime(Enddate.year, Enddate.month, Enddate.day, 23, 59)

    if Time == '':
        Dates = pd.date_range(Startdate, Enddate, freq="15min") - datetime.timedelta(hours=GMT_Offset)
    else:
        Dates = pd.date_range(Startdate, Enddate, freq="D") - datetime.timedelta(hours=GMT_Offset)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Loop over dates
    for Date in Dates:

        if Time != '':
            Hour = int(Time.split(':')[0])
            Minute = int(Time.split(':')[1])
            Date = datetime.datetime(Date.year, Date.month, Date.day, Hour, Minute)

        filename_out = os.path.join(output_folder, "SDS_MSGCPP_W-m-2_15min_%d.%02d.%02d_H%02d.M%02d.tif" % (Date.year, Date.month, Date.day, Date.hour, Date.minute))

        if not os.path.exists(filename_out):

            # define url of the archive server
            url = r"http://msgcpp-ogc-archive.knmi.nl/msgar.cgi?&service=wcs&version=1.0.0&request=getcoverage&coverage=surface_downwelling_shortwave_flux_in_air&FORMAT=GeoTIFF&CRS=EPSG%%3A4326&BBOX=%s,%s,%s,%s&RESX=0.04310344827586207&RESY=0.04418103448275862&time=%d-%02d-%02dT%02d%%3A%02d%%3A00Z" % (lonlim[0], latlim[0], lonlim[1], latlim[1], Date.year, Date.month, Date.day, Date.hour, Date.minute)
            urllib.request.urlretrieve(url, filename=filename_out)

            # Fall back to the near-realtime server if the archive returned a stub
            statinfo = os.stat(filename_out)
            if statinfo.st_size < 3000:
                url = r"http://msgcpp-ogc-realtime.knmi.nl/msgrt.cgi?&service=wcs&version=1.0.0&request=getcoverage&coverage=surface_downwelling_shortwave_flux_in_air&FORMAT=GeoTIFF&CRS=EPSG%%3A4326&BBOX=%s,%s,%s,%s&RESX=0.04310344827586207&RESY=0.04418103448275862&time=%d-%02d-%02dT%02d%%3A%02d%%3A00Z" % (lonlim[0], latlim[0], lonlim[1], latlim[1], Date.year, Date.month, Date.day, Date.hour, Date.minute)
                urllib.request.urlretrieve(url, filename=filename_out)
                statinfo = os.stat(filename_out)
                if statinfo.st_size < 300:
                    os.remove(filename_out)

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
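# Usage sketch for the MSGCPP collector above; the directory, period, extent
# and GMT offset are illustrative assumptions. Passing Time downloads one
# 15-minute slot per day instead of all 96.
if __name__ == "__main__":
    DownloadData(r"C:/data", '2019-06-01', '2019-06-02', [8.0, 9.0], [38.0, 39.0], Time='12:00', GMT_Offset=3, Waitbar=1)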
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This scripts downloads HiHydroSoil Saturated Theta soil data from the
    UNESCO-IHE ftp server.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        amount = 0
        WaitbarConsole.printWaitBar(amount, 1, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'HiHydroSoil', 'ThetaSat')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Date as printed in filename
    Filename_out = os.path.join(output_folder, 'Theta_Saturated_Topsoil_HiHydroSoil.tif')

    # Define end filename
    Filename_in = os.path.join("wcsat_topsoil.tif")

    # Temporary filename for the downloaded global file
    local_filename = os.path.join(output_folder, Filename_in)

    if not os.path.exists(Filename_out):

        # Download the data from FTP server if the file not exists
        try:
            if not os.path.exists(local_filename):
                Download_HiHydroSoil_from_WA_FTP(local_filename, Filename_in)
            else:
                statinfo = os.stat(local_filename)
                if int(statinfo.st_size) < 3746200000:
                    Download_HiHydroSoil_from_WA_FTP(local_filename, Filename_in)

            # Clip dataset
            Clip_Dataset(local_filename, Filename_out, latlim, lonlim)
            os.remove(local_filename)

        except:
            print("Was not able to download file")

    # Adjust waitbar
    if Waitbar == 1:
        amount += 1
        WaitbarConsole.printWaitBar(amount, 1, prefix='Progress:', suffix='Complete', length=50)

    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase, Product):
    """
    This function downloads GLEAM ET data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2003-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2015-12-31')

    # Make an array of the years of which the ET is taken
    YearsDownloadstart = str(Startdate)[0:4]
    YearsDownloadend = str(Enddate)[0:4]
    Years = range(int(YearsDownloadstart), int(YearsDownloadend) + 1)

    # String Parameters
    if TimeCase == 'daily':
        VarCode = '%s_GLEAM.V3.3b_mm-day-1_daily' % Product
        FTPprefix = 'data/v3.3b/'
        TimeFreq = 'D'
        Folder_name = 'Daily'
    elif TimeCase == 'monthly':
        VarCode = '%s_GLEAM.V3.3b_mm-month-1_monthly' % Product
        FTPprefix = 'data/v3.3b/'
        TimeFreq = 'M'
        Folder_name = 'Monthly'

        # Get end of month for Enddate
        monthDownloadend = str(Enddate)[5:7]
        End_month = calendar.monthrange(int(YearsDownloadend), int(monthDownloadend))[1]
        Enddate = '%d-%02d-%d' % (int(YearsDownloadend), int(monthDownloadend), int(End_month))
    else:
        raise KeyError("The input time interval is not supported")

    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Make directory for the GLEAM ET data
    if Product == "ET":
        output_folder = os.path.join(Dir, 'Evaporation', 'GLEAM', Folder_name)
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration', 'GLEAM', Folder_name)
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # Check variables
    if latlim[0] < -50 or latlim[1] > 50:
        print('Latitude above 50N or below 50S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Collect the data from the GLEAM webpage for the required years
    try:
        Collect_data(FTPprefix, Years, output_folder, Waitbar, Product)
    except:
        print("Was not able to download the file")

    # Create Waitbar
    print('\nProcess the GLEAM data')
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Pass variables to parallel function and run
    args = [output_folder, latlim, lonlim, VarCode, TimeCase, Product]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    # Remove all downloaded .nc files
    os.chdir(output_folder)
    files = glob.glob("*.nc")
    for f in files:
        os.remove(os.path.join(output_folder, f))

    return results
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, timestep, Waitbar, cores, hdf_library, remove_hdf):
    """
    This function downloads MOD16 monthly or 8-daily ET data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2000-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2014-12-31')

    # Make an array of the days of which the ET is taken
    if timestep == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='M')
        TIMESTEP = 'Monthly'
        Size_pix = 1
    elif timestep == '8-daily':
        Dates = Make_TimeStamps(Startdate, Enddate)
        TIMESTEP = '8_Daily'
        Size_pix = 2

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Make directory for the MODIS ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'MOD16', TIMESTEP)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = watertools.Collect.MOD15.DataAccess.Get_tiles_from_txt(output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim, timestep, hdf_library, Size_pix]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        #files = glob.glob("*.txt")
        #for f in files:
        #    os.remove(os.path.join(output_folder, f))

    return results
def main(Dir, Startdate='', Enddate='', latlim=[-60, 60], lonlim=[-180, 180], pixel_size=False, cores=False, LANDSAF=0, SourceLANDSAF='', Waitbar=1):
    """
    This function creates monthly Reference ET (ETref) data by calculating the
    daily ETref maps for each month and summing them

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print the waitbar
    """
    print('Create monthly Reference ET data for period %s till %s' % (Startdate, Enddate))

    # An array of monthly dates which will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Calculate the ETref day by day for every month
    for Date in Dates:

        # Collect date data
        Y = Date.year
        M = Date.month
        Mday = calendar.monthrange(Y, M)[1]
        Days = pd.date_range(Date, Date + pd.Timedelta(days=Mday), freq='D')
        StartTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-01'
        EndTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-' + str(Mday)

        # Get ETref on daily basis
        daily(Dir=Dir, Startdate=StartTime, Enddate=EndTime, latlim=latlim, lonlim=lonlim, pixel_size=pixel_size, cores=cores, LANDSAF=LANDSAF, SourceLANDSAF=SourceLANDSAF, Waitbar=0)

        # Load DEM
        if not pixel_size:
            nameDEM = 'DEM_HydroShed_m_3s.tif'
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', nameDEM)
        else:
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_HydroShed_m_reshaped_for_ETref.tif')

        # Get some geo-data to save results
        geo_ET, proj, size_X, size_Y = RC.Open_array_info(DEMmap)

        # Sum the daily ETref maps over the month
        dataMonth = np.zeros([size_Y, size_X])
        for Day in Days[:-1]:
            DirDay = os.path.join(Dir, 'ETref', 'Daily', 'ETref_mm-day-1_daily_' + Day.strftime('%Y.%m.%d') + '.tif')
            dataDay = gdal.Open(DirDay)
            Dval = dataDay.GetRasterBand(1).ReadAsArray().astype(np.float32)
            Dval[Dval < 0] = 0
            dataMonth = dataMonth + Dval
            dataDay = None

        # make geotiff file
        output_folder_month = os.path.join(Dir, 'ETref', 'Monthly')
        if not os.path.exists(output_folder_month):
            os.makedirs(output_folder_month)
        DirMonth = os.path.join(output_folder_month, 'ETref_mm-month-1_monthly_' + Date.strftime('%Y.%m.%d') + '.tif')

        # Create the tiff file
        DC.Save_as_tiff(DirMonth, dataMonth, geo_ET, proj)

        # Adjust Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
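# Usage sketch for the monthly ETref routine above; the directory, period and
# extent are illustrative assumptions. The daily ETref maps for each month are
# produced first and then summed.
if __name__ == "__main__":
    main(r"C:/data", Startdate='2005-01-01', Enddate='2005-03-31', latlim=[8.0, 9.0], lonlim=[38.0, 39.0], Waitbar=1)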
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version, Product):
    """
    This scripts downloads SSEBop ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    if version == "FTP":
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print('Latitude above 80N or below 59.2S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print('Longitude must be between 180E and 180W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print('Latitude above 80.002N or below 60S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print('Longitude must be between 180E and 180W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Define directory and create it if not exists
    if Product == "ETact":
        output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
        freq_use = "MS"
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration', 'FEWS', 'Daily')
        freq_use = "D"
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq=freq_use)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Loop over the dates
    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month
        day = Date.day

        if version == "FTP":
            # Date as printed in filename
            Filename_out = os.path.join(output_folder, 'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' % (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
            # Define end filename
            Filename_dir = os.path.join("%s" % year, "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

        if version == "V4":
            # Date as printed in filename
            if Product == "ETpot":
                Filename_out = os.path.join(output_folder, 'ETpot_FEWS_mm-day-1_daily_%s.%02s.%02s.tif' % (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = 'et%02s%02d%02d.tar.gz' % (str(year)[2:], month, day)
                # The end file name after downloading and unzipping
                Filename_only = "et%02s%02d%02d.bil" % (str(year)[2:], month, day)
                # Create temporary folder
                temp_folder = os.path.join(output_folder, "Temp")
                if not os.path.exists(temp_folder):
                    os.makedirs(temp_folder)
                local_filename = os.path.join(temp_folder, Filename_only)

            if Product == "ETact":
                Filename_out = os.path.join(output_folder, 'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' % (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = "m%s%02d.zip" % (str(year), month)
                # The end file name after downloading and unzipping
                Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (str(year), month)
                # Temporary filename for the downloaded global file
                local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from FTP server if the file not exists
        if not os.path.exists(Filename_out):
            try:
                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    if Product == "ETpot":
                        Download_SSEBop_from_Web(temp_folder, Filename_only_zip, Product)
                    if Product == "ETact":
                        Download_SSEBop_from_Web(output_folder, Filename_only_zip, Product)

                if Product == "ETpot":
                    Array_ETpot = RC.Open_bil_array(local_filename)
                    Array_ETpot = Array_ETpot / 100
                    Geo_out = tuple([-180.5, 1, 0, 90.5, 0, -1])
                    dest = DC.Save_as_MEM(Array_ETpot, Geo_out, "WGS84")
                    data, Geo_out = RC.clip_data(dest, latlim, lonlim)
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")

                if Product == "ETact":
                    # Clip dataset
                    data, Geo_out = RC.clip_data(local_filename, latlim, lonlim)
                    data[data < -9999] = -9999
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")
                    os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        if Product == "ETact":
            zipfiles = glob.glob("*.zip")
            for zipfile in zipfiles:
                os.remove(os.path.join(output_folder, zipfile))
            xmlfiles = glob.glob("*.xml")
            for xmlfile in xmlfiles:
                os.remove(os.path.join(output_folder, xmlfile))
        if Product == "ETpot":
            import shutil
            Temp_dir = os.path.join(output_folder, "Temp")
            shutil.rmtree(Temp_dir)

    return
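# Usage sketch for the SSEBop collector above; the directory, period and
# extent are illustrative assumptions.
if __name__ == "__main__":
    DownloadData(r"C:/data", '2005-01-01', '2005-06-01', [8.0, 9.0], [38.0, 39.0], Waitbar=1, version="V4", Product="ETact")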
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase, CaseParameters):
    """
    This function downloads ECMWF six-hourly, daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- Variable code (see VariablesInfo for the supported codes)
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) will print a waitbar
    TimeCase -- 'six_hourly', 'daily' or 'monthly'
    """
    # correct latitude and longitude limits to the 0.125 degree grid
    latlim_corr_one = np.floor(latlim[0] / 0.125) * 0.125
    latlim_corr_two = np.ceil(latlim[1] / 0.125) * 0.125
    latlim_corr = [latlim_corr_one, latlim_corr_two]

    lonlim_corr_one = np.floor(lonlim[0] / 0.125) * 0.125
    lonlim_corr_two = np.ceil(lonlim[1] / 0.125) * 0.125
    lonlim_corr = [lonlim_corr_one, lonlim_corr_two]

    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    Varname_dir = VarInfo.file_name[Var]

    # Create Out directory
    out_dir = os.path.join(Dir, "Weather_Data", "Model", "ECMWF", TimeCase, Varname_dir, "mean")
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    DownloadType = VarInfo.DownloadType[Var]

    # Set required data for the six hourly option
    if TimeCase == 'six_hourly':
        string1 = 'oper'
    # Set required data for the daily option
    elif TimeCase == 'daily':
        Dates = pd.date_range(Startdate, Enddate, freq='D')
    elif TimeCase == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='MS')

    if DownloadType == 1:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'sfc'
        string8 = 'an'

    if DownloadType == 2:
        string1 = 'oper'
        string4 = "12"
        string6 = "00:00:00/12:00:00"
        string2 = 'sfc'
        string8 = 'fc'

    if DownloadType == 3:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'pl'
        string8 = 'an'

    string7 = '%s/to/%s' % (Startdate, Enddate)
    parameter_number = VarInfo.number_para[Var]
    string3 = '%03d.128' % (parameter_number)
    string5 = '0.125/0.125'
    string9 = 'ei'
    string10 = '%s/%s/%s/%s' % (latlim_corr[1], lonlim_corr[0], latlim_corr[0], lonlim_corr[1])  # N, W, S, E

    # Download data by using the ECMWF API
    import watertools.Collect.ECMWF.ECMWFdownload as Download
    print('Use API ECMWF to collect the data, please wait')
    Download.API(Dir, DownloadType, string1, string2, string3, string4, string5, string6, string7, string8, string9, string10)

    # Open the downloaded data
    NC_filename = os.path.join(Dir, 'data_interim.nc')
    fh = Dataset(NC_filename, mode='r')

    # Get the NC variable parameter
    parameter_var = VarInfo.var_name[Var]
    Var_unit = VarInfo.units[Var]
    factors_add = VarInfo.factors_add[Var]
    factors_mul = VarInfo.factors_mul[Var]

    # Open the NC data
    Data = fh.variables[parameter_var][:]
    Data_time = fh.variables['time'][:]
    lons = fh.variables['longitude'][:]
    lats = fh.variables['latitude'][:]

    # Define the georeference information
    Geo_four = np.nanmax(lats)
    Geo_one = np.nanmin(lons)
    Geo_out = tuple([Geo_one, 0.125, 0.0, Geo_four, 0.0, -0.125])

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    for date in Dates:

        # Define the year, month and day
        year = date.year
        month = date.month
        day = date.day

        # Hours since 1900-01-01
        start = datetime.datetime(year=1900, month=1, day=1)
        end = datetime.datetime(year, month, day)
        diff = end - start
        hours_from_start_begin = diff.total_seconds() / 60 / 60

        # Mark the timesteps that fall within this day or month
        Date_good = np.zeros(len(Data_time))
        if TimeCase == 'daily':
            days_later = 1
        if TimeCase == 'monthly':
            days_later = calendar.monthrange(year, month)[1]
        Date_good[np.logical_and(Data_time >= hours_from_start_begin, Data_time < (hours_from_start_begin + 24 * days_later))] = 1

        # Select the timesteps of this period
        Data_one = Data[np.int_(Date_good) == 1, :, :]

        # Calculate the average value and apply the unit conversion factors
        Data_end = factors_mul * np.nanmean(Data_one, 0) + factors_add

        # Flux variables are accumulated over the period instead of averaged
        if VarInfo.types[Var] == 'flux':
            Data_end = Data_end * days_later

        VarOutputname = VarInfo.file_name[Var]

        # Define the out name
        name_out = os.path.join(out_dir, "%s_ECMWF_ERA-Interim_%s_%s_%d.%02d.%02d.tif" % (VarOutputname, Var_unit, TimeCase, year, month, day))

        # Create Tiff files
        DC.Save_as_tiff(name_out, Data_end, Geo_out, "WGS84")

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    fh.close()
    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, TimeStep, Waitbar):
    """
    This scripts downloads ALEXI ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one week.
    The name of the file corresponds to the first day of the week.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    TimeStep -- 'daily' or 'weekly' (by using here monthly, an older dataset will be used)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -60 or latlim[1] > 70:
        print('Latitude above 70N or below 60S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -60)
        latlim[1] = np.minimum(latlim[1], 70)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        if TimeStep == 'weekly':
            Startdate = pd.Timestamp('2003-01-01')
        if TimeStep == 'daily':
            Startdate = pd.Timestamp('2005-01-01')
    if not Enddate:
        if TimeStep == 'weekly':
            Enddate = pd.Timestamp('2015-12-31')
        if TimeStep == 'daily':
            Enddate = pd.Timestamp('2016-12-31')

    # Make a pandas Timestamp of the date
    try:
        Enddate = pd.Timestamp(Enddate)
    except:
        Enddate = Enddate

    if TimeStep == 'weekly':

        # Define the Startdate of ALEXI
        DOY = datetime.datetime.strptime(Startdate, '%Y-%m-%d').timetuple().tm_yday
        Year = datetime.datetime.strptime(Startdate, '%Y-%m-%d').timetuple().tm_year

        # Change the startdate so it includes an ALEXI date
        DOYstart = int(math.ceil(DOY / 7.0) * 7 + 1)
        DOYstart = str('%s-%s' % (DOYstart, Year))
        Day = datetime.datetime.strptime(DOYstart, '%j-%Y')
        Month = '%02d' % Day.month
        Day = '%02d' % Day.day
        Date = (str(Year) + '-' + str(Month) + '-' + str(Day))
        DOY = datetime.datetime.strptime(Date, '%Y-%m-%d').timetuple().tm_yday

        # The new Startdate
        Date = pd.Timestamp(Date)

        # amount of Dates weekly
        Dates = pd.date_range(Date, Enddate, freq='7D')

        # Define directory and create it if not exists
        output_folder = os.path.join(Dir, 'Evaporation', 'ALEXI', 'Weekly')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    if TimeStep == 'daily':

        # Define Dates
        Dates = pd.date_range(Startdate, Enddate, freq='D')

        # Define directory and create it if not exists
        output_folder = os.path.join(Dir, 'Evaporation', 'ALEXI', 'Daily')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # Create Waitbar
    total_amount = len(Dates)
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if TimeStep == 'weekly':
        ALEXI_weekly(Date, Enddate, output_folder, latlim, lonlim, Year, Waitbar, total_amount, TimeStep)
    if TimeStep == 'daily':
        ALEXI_daily(Dates, output_folder, latlim, lonlim, Waitbar, total_amount, TimeStep)
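# Usage sketch for the ALEXI collector above; the directory, period and extent
# are illustrative assumptions. Note that the weekly branch parses Startdate
# with strptime, so it must be a 'yyyy-mm-dd' string.
if __name__ == "__main__":
    DownloadData(r"C:/data", '2005-01-01', '2005-03-31', [8.0, 9.0], [38.0, 39.0], TimeStep='weekly', Waitbar=1)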
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):

    # Create an array with the dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define the minimum and maximum lat and long ETensemble Tile
    Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))
    Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))
    Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))
    Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))

    # Create the Lat and Lon tiles that will be downloaded
    Lat_tiles = [Min_lat_tile, Max_lat_tile]
    Lon_tiles = [Min_lon_tile, Max_lon_tile]

    # Define output folder and create this if it not exists
    output_folder = os.path.join(Dir, 'Evaporation', 'ETensV1_0')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create Geotransform of the output files
    GEO_1 = lonlim[0]
    GEO_2 = 0.0025
    GEO_3 = 0.0
    GEO_4 = latlim[1]
    GEO_5 = 0.0
    GEO_6 = -0.0025
    geo = [GEO_1, GEO_2, GEO_3, GEO_4, GEO_5, GEO_6]
    geo_new = tuple(geo)

    # Define the parameter for downloading the data
    Downloaded = 0

    # Calculate the ET data date by date
    for Date in Dates:

        # Define the output name and folder
        file_name = 'ET_ETensemble250m_mm-month-1_monthly_%d.%02d.01.tif' % (Date.year, Date.month)
        output_file = os.path.join(output_folder, file_name)

        # If output file not exists create this
        if not os.path.exists(output_file):

            # If not downloaded yet, download the raw data
            if Downloaded == 0:

                # Download the ETens data from the FTP server
                Download_ETens_from_WA_FTP(output_folder, Lat_tiles, Lon_tiles)

                # Unzip the folder
                Unzip_ETens_data(output_folder, Lat_tiles, Lon_tiles)
                Downloaded = 1

            # Create the ET data for the area of interest
            ET_data = Collect_dataset(output_folder, Date, Lat_tiles, Lon_tiles, latlim, lonlim)

            # Save this array as a tiff file
            DC.Save_as_tiff(output_file, ET_data, geo_new, projection='WGS84')

        # Create Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    '''
    # Remove all the raw dataset
    for v_tile in range(Lat_tiles[0], Lat_tiles[1]+1):
        for h_tile in range(Lon_tiles[0], Lon_tiles[1]+1):
            Tilename = "h%sv%s" %(h_tile, v_tile)
            filename = os.path.join(output_folder, Tilename)
            if os.path.exists(filename):
                shutil.rmtree(filename)

    # Remove all .zip files
    for f in os.listdir(output_folder):
        if re.search(".zip", f):
            os.remove(os.path.join(output_folder, f))
    '''
    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, hdf_library, remove_hdf):
    """
    This function downloads MYD11 daily LST data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    hdf_library -- string, if all the hdf files are already stored on computer
                   define directory to the data here
    remove_hdf -- 1 (Default), if 1 remove all the downloaded hdf files in the end
    """
    import watertools

    # Check start and end date and otherwise set the date to max
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-18')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Make an array of the days of which the LST is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MODIS LST data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'LST', 'MYD11', 'daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = watertools.Collect.MOD15.DataAccess.Get_tiles_from_txt(output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, hdf_library]
    for Date in Dates:
        RetrieveData(Date, args)
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Remove all .hdf files
    if remove_hdf == 1:
        try:
            os.chdir(output_folder)
            files = glob.glob("*.hdf")
            for f in files:
                os.remove(os.path.join(output_folder, f))
        except:
            pass

    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, band,
                 resolution, cores, hdf_library, remove_hdf):
    """
    This function downloads MOD9 daily reflectance data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    band -- band number of the MOD9 product that is downloaded
    resolution -- resolution of the band that is downloaded
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    hdf_library -- string; if the hdf files are already stored on the computer,
                   define the directory to that data here
    remove_hdf -- 1 (Default); if 1, remove all the downloaded hdf files at the end
    """
    import watertools

    # Check start and end date; if not defined, set them to the maximum range
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-24')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Make an array of the days for which the reflectance is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                    suffix='Complete', length=50)

    # Check the latitude and longitude; otherwise clamp them to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MODIS reflectance data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'Reflectance', 'MOD9',
                                 'Band_%s_%s' % (band, resolution))
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = watertools.Collect.MOD15.DataAccess.Get_tiles_from_txt(
        output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
            band, resolution, hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount,
                                            prefix='Progress:',
                                            suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    # Remove all .hdf files
    if remove_hdf == 1:
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        #files = glob.glob("*.txt")
        #for f in files:
        #    os.remove(os.path.join(output_folder, f))

    return (results)
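# Example usage -- a minimal sketch; the path, band, resolution, and extent
# below are hypothetical:
# DownloadData(r'C:/data/', '2005-01-01', '2005-01-31',
#              latlim=[29.0, 31.0], lonlim=[30.0, 32.0], Waitbar=1,
#              band=1, resolution='250m', cores=False,
#              hdf_library=None, remove_hdf=1)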
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar,
                 CaseParameters, cores, TimeCase):
    """
    This function downloads GLDAS CLSM three-hourly, daily, or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- 'wind_f_inst' : (string) For all variable codes:
           VariablesInfo('day').descriptions.keys()
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) will print a waitbar
    CaseParameters -- See files: three_hourly.py, daily.py, and monthly.py
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    TimeCase -- 'three_hourly', 'daily', or 'monthly'
    """
    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    username, password = WebAccounts.Accounts(Type='NASA')

    # Set required data for the three hourly option
    if TimeCase == 'three_hourly':

        # Define output folder and create it if it does not exist
        path = os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS', TimeCase,
                            Var)
        if not os.path.exists(path):
            os.makedirs(path)

        # Startdate if not defined
        sd_date = '1979-01-02'

        # Define time frequency
        TimeFreq = 'D'

        # Define URL of the data server
        #url = 'http://%s:%[email protected]:80/dods/GLDAS_NOAH025SUBP_3H' %(username,password)
        url = 'https://hydro1.gesdisc.eosdis.nasa.gov/dods/GLDAS_CLM10SUBP_3H'

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_three_hourly
        types = ['mean']

    # Set required data for the daily option
    elif TimeCase == 'daily':
        types = ['mean']

        # Define output folder and create it if it does not exist
        path = {'mean': os.path.join(Dir, 'Weather_Data', 'Model',
                                     'GLDAS_CLSM', TimeCase, Var, 'mean')}
        for i in range(len(types)):
            if not os.path.exists(path[types[i]]):
                os.makedirs(path[types[i]])

        # Startdate if not defined
        sd_date = '1948-01-01'

        # Define time frequency
        TimeFreq = 'D'

        # Define URL of the data server
        url = 'https://hydro1.gesdisc.eosdis.nasa.gov/dods/GLDAS_CLSM025_D.2.0'

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_daily

    # Set required data for the monthly option
    elif TimeCase == 'monthly':
        types = ['mean']

        # Define output folder and create it if it does not exist
        path = {'mean': os.path.join(Dir, 'Weather_Data', 'Model',
                                     'GLDAS_CLSM', TimeCase, Var, 'mean')}
        for i in range(len(types)):
            if not os.path.exists(path[types[i]]):
                os.makedirs(path[types[i]])

        # Startdate if not defined
        sd_date = '1979-01-02'

        # Define time frequency
        TimeFreq = 'MS'

        # Define URL of the data server
        url = 'https://hydro1.gesdisc.eosdis.nasa.gov/dods/GLDAS_CLSM025_D.2.0'

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_monthly

    # If none of the possible options is chosen
    else:
        raise KeyError("The input time interval is not supported")

    if TimeCase == 'three_hourly':
        # Define IDs (latitude/longitude) on the 1.0 degree grid
        yID = np.int16(np.array([np.ceil((latlim[0] + 60)),
                                 np.floor((latlim[1] + 60))]))
        xID = np.int16(np.array([np.floor((lonlim[0] + 180)),
                                 np.ceil((lonlim[1] + 180))]))
    else:
        # Define IDs (latitude/longitude) on the 0.25 degree grid
        yID = np.int16(np.array([np.ceil((latlim[0] + 60) * 4),
                                 np.floor((latlim[1] + 60) * 4)]))
        xID = np.int16(np.array([np.floor((lonlim[0] + 180) * 4),
                                 np.ceil((lonlim[1] + 180) * 4)]))

    # Check dates. If no dates are given, the maximum available range is used.
    if not Startdate:
        Startdate = pd.Timestamp(sd_date)
    if not Enddate:
        Enddate = pd.Timestamp('Now')  # 'Now' is later than the last available date

    # Create all dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                    suffix='Complete', length=50)

    # Define the variable string name
    VarStr = VarInfo.names[Var]

    # Bundle all the required arguments into one parameter
    args = [path, url, Var, VarStr, VarInfo, TimeCase, xID, yID, lonlim,
            latlim, CaseParameters, username, password, types]

    # Pass variables to parallel function and run
    if not cores:
        for Date in Dates:
            RetrieveData_fcn(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount,
                                            prefix='Progress:',
                                            suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData_fcn)(Date, args)
                                         for Date in Dates)
    return results
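# Example usage -- a minimal sketch; the path, variable, and extent below are
# hypothetical, and CaseParameters must match the chosen TimeCase (see daily.py):
# DownloadData(r'C:/data/', 'wind_f_inst', '2005-01-01', '2005-12-31',
#              latlim=[29.0, 31.0], lonlim=[30.0, 32.0], Waitbar=1,
#              CaseParameters=False, cores=False, TimeCase='daily')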
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Type, Waitbar):
    """
    This script downloads ETmonitor data from the UNESCO-IHE ftp server. The
    output files display the total ET in mm for a period of one month. The
    name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Type -- 'act', 'pot', 'ei', 'es', 'ew', or 'tr'
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check the latitude and longitude; otherwise clamp them to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2008-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2012-12-31')

    # Create dates library
    Dates = pd.date_range(Startdate, Enddate, freq="MS")

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                    suffix='Complete', length=50)

    # Map each product type to its output folder and its input/output
    # filename prefixes
    TypeInfo = {
        "act": ('Evaporation', 'ET', 'ETa'),
        "pot": ('ETpot', 'ETpot', 'ETpot'),
        "ei": ('Ei', 'Ei', 'Ei'),
        "es": ('Es', 'Es', 'Es'),
        "ew": ('Ew', 'Ew', 'Ew'),
        "tr": ('Transpiration', 'Tr', 'Tr')}
    folder_name, prefix_in, prefix_out = TypeInfo[Type]

    # Define directory and create it if it does not exist
    output_folder = os.path.join(Dir, folder_name, 'ETmonitor', 'Monthly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month

        # Define the name of the global input file and the clipped output file
        Filename_in = "%s_ETmonitor_mm-month_%d_%02d_01.tif" % (prefix_in,
                                                                year, month)
        Filename_out = os.path.join(
            output_folder, '%s_ETmonitor_mm-month-1_monthly_%d.%02d.%02d.tif' %
            (prefix_out, Date.year, Date.month, Date.day))

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, Filename_in)

        # Download the data from the FTP server if the file does not exist
        if not os.path.exists(Filename_out):
            try:
                Download_ETmonitor_from_WA_FTP(local_filename, Filename_in,
                                               Type)

                # Reproject dataset
                epsg_to = '4326'
                name_reprojected_ETmonitor = RC.reproject_MODIS(
                    local_filename, epsg_to)

                # Clip dataset
                RC.Clip_Dataset_GDAL(name_reprojected_ETmonitor, Filename_out,
                                     latlim, lonlim)
                os.remove(name_reprojected_ETmonitor)
                os.remove(local_filename)

            except Exception:
                print("Was not able to download file with date %s" % Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                        suffix='Complete', length=50)

    return
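# Example usage -- a minimal sketch; the path and extent below are hypothetical:
# DownloadData(r'C:/data/', '2008-01-01', '2008-12-31',
#              latlim=[29.0, 31.0], lonlim=[30.0, 32.0], Type='act', Waitbar=1)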
def ALEXI_weekly(Date, Enddate, output_folder, latlim, lonlim, Year, Waitbar,
                 total_amount, TimeStep):
    """
    This function downloads the weekly ALEXI files between Date and Enddate
    from the FTP server and saves them in the output folder.
    """
    # Define the stop conditions
    Stop = Enddate.toordinal()
    End_date = 0
    amount = 0

    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole

    while End_date == 0:

        # Date as printed in the filename (the start of the week, 7 days earlier)
        Datesname = Date + pd.DateOffset(days=-7)
        DirFile = os.path.join(
            output_folder, 'ETa_ALEXI_CSFR_mm-week-1_weekly_%d.%02d.%02d.tif' %
            (Datesname.year, Datesname.month, Datesname.day))

        # Define end filename
        filename = "ALEXI_weekly_mm_%s_%s.tif" % (Date.strftime('%j'),
                                                  Date.strftime('%Y'))

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, filename)

        # Create the new date for the next download
        Datename = (str(Date.strftime('%Y')) + '-' + str(Date.strftime('%m')) +
                    '-' + str(Date.strftime('%d')))

        # Define IDs on the 0.05 degree ALEXI grid
        yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20),
                                        np.floor((latlim[0] + 60) * 20)]))
        xID = np.int16(np.array([np.floor((lonlim[0]) * 20),
                                 np.ceil((lonlim[1]) * 20)]) + 3600)

        # Download the data from the FTP server if the file does not exist
        if not os.path.exists(DirFile):
            try:
                Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename,
                                           lonlim, latlim, yID, xID, TimeStep)
            except Exception:
                print("Was not able to download file with date %s" % Date)

        # Current DOY
        DOY = datetime.datetime.strptime(Datename,
                                         '%Y-%m-%d').timetuple().tm_yday

        # Define the next week; the first weekly file of a new year is DOY 8
        DOY_next = int(DOY + 7)
        if DOY_next >= 366:
            DOY_next = 8
            Year += 1

        DOYnext = str('%s-%s' % (DOY_next, Year))
        DayNext = datetime.datetime.strptime(DOYnext, '%j-%Y')
        Month = '%02d' % DayNext.month
        Day = '%02d' % DayNext.day
        Date = (str(Year) + '-' + str(Month) + '-' + str(Day))

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:',
                                        suffix='Complete', length=50)

        # Check if the last required file has been downloaded
        Date = pd.Timestamp(Date)
        if Date.toordinal() > Stop:
            End_date = 1
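# Example usage -- a minimal sketch; the folder, extent, and TimeStep value
# below are hypothetical and must match what Download_ALEXI_from_WA_FTP expects:
# ALEXI_weekly(pd.Timestamp('2005-01-08'), pd.Timestamp('2005-12-31'),
#              r'C:/data/Evaporation/ALEXI/weekly/', [29.0, 31.0], [30.0, 32.0],
#              2005, Waitbar=1, total_amount=52, TimeStep='weekly')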