def Collect_data(FTPprefix, Years, output_folder, Waitbar):
    '''
    This function downloads all the needed GLEAM files from hydras.ugent.be
    as nc files.

    Keyword arguments:
    FTPprefix -- FTP path to the GLEAM data
    Years -- list of years that must be downloaded
    output_folder -- 'C:/file/to/path/'
    '''
    # Account of the SFTP server (only the password is missing)
    server = 'hydras.ugent.be'
    portnumber = 2225
    username, password = WebAccounts.Accounts(Type='GLEAM')

    # Create Waitbar
    print '\nDownload GLEAM data'
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount2 = len(Years)
        amount2 = 0
        WaitbarConsole.printWaitBar(amount2, total_amount2, prefix='Progress:', suffix='Complete', length=50)

    # Download the yearly netCDF files over SFTP
    for year in Years:
        directory = os.path.join(FTPprefix, '%d' % year)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(server, port=portnumber, username=username, password=password)
        ftp = ssh.open_sftp()
        ftp.chdir(directory)

        filename = 'E_' + str(year) + '_GLEAM_v3.1b.nc'
        local_filename = os.path.join(output_folder, filename)
        if not os.path.exists(local_filename):
            ftp.get(filename, local_filename)

        if Waitbar == 1:
            amount2 += 1
            WaitbarConsole.printWaitBar(amount2, total_amount2, prefix='Progress:', suffix='Complete', length=50)

    ftp.close()
    ssh.close()
    return
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This function downloads JRC water occurrence data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the JRC water occurrence data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'JRC', 'Occurrence')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')

    if not os.path.exists(fileName_out):

        # Create Waitbar
        if Waitbar == 1:
            import wa.Functions.Start.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Define the names of the datasets that need to be collected
        Names_to_download = Tiles_to_download(lonlim, latlim)

        # Pass variables to parallel function and run
        args = [output_folder, Names_to_download, lonlim, latlim]
        RetrieveData(args)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        print 'JRC water occurrence map already exists'

    return
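# Illustrative usage sketch (not part of the original module): collect the JRC
# surface-water occurrence map once. The output directory and the bounding box
# below are hypothetical example values.
if __name__ == '__main__':
    DownloadData(Dir='C:/WA_data', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1)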
def main(Dir, latlim, lonlim, resolution='3s', Waitbar=1):
    """
    Downloads HydroSHED data from http://www.hydrosheds.org/download/

    This data includes a Digital Elevation Model (DEM).
    The spatial resolution is 90m (3s) or 450m (15s).

    The following keyword arguments are needed:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    resolution -- '3s' (Default) or '15s'
    Waitbar -- '1' if you want a waitbar (Default = 1)
    """
    # Create directory if not exists for the output
    output_folder = os.path.join(Dir, 'HydroSHED', 'DEM')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the output map and create this if not exists
    nameEnd = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_HydroShed_m_%s.tif' % resolution)
    parameter = "dem_%s" % resolution

    if not os.path.exists(nameEnd):

        # Create Waitbar
        if Waitbar == 1:
            print '\nDownload HydroSHED altitude map with a resolution of %s' % resolution
            import wa.Functions.Start.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Download and process the data
        DownloadData(output_folder, latlim, lonlim, parameter, resolution)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    else:
        if Waitbar == 1:
            print "\nHydroSHED altitude map (%s) already exists in output folder" % resolution
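# Illustrative usage sketch (not part of the original module): download the
# HydroSHED DEM at 15s (~450 m) resolution; the directory and extent are
# hypothetical example values.
if __name__ == '__main__':
    main('C:/WA_data', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], resolution='15s', Waitbar=1)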
def SetVariables(Dir, Startdate, Enddate, latlim, lonlim, pixel_size, cores, LANDSAF, Waitbar):
    """
    This function starts to calculate ETref (daily) data based on Hydroshed, GLDAS, and (CFSR/LANDSAF)
    in parallel or single core

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    pixel_size -- The output pixel size
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    LANDSAF -- 1 if LANDSAF data must be used
    Waitbar -- 1 (Default) will print the waitbar
    """
    # Make an array of the days of which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar (imported under its own name so it does not shadow the Waitbar flag)
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Pass variables to parallel function and run
    args = [Dir, lonlim, latlim, pixel_size, LANDSAF]
    if not cores:
        for Date in Dates:
            ETref(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(ETref)(Date, args) for Date in Dates)
    return results
def ALEXI_daily(Dates, output_folder, latlim, lonlim, Waitbar, total_amount, TimeStep):

    amount = 0
    for Date in Dates:

        # Date as printed in filename
        DirFile = os.path.join(output_folder, 'ETa_ALEXI_CSFR_mm-day-1_daily_%d.%02d.%02d.tif' % (Date.year, Date.month, Date.day))
        DOY = Date.timetuple().tm_yday

        # Define end filename
        filename = "EDAY_CERES_%d%03d.dat.gz" % (Date.year, DOY)

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, filename)

        # Define IDs on the global 0.05 degree ALEXI grid
        # (3000 rows from 90N down to 60S, 7200 columns from 180W to 180E)
        yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20), np.floor((latlim[0] + 60) * 20)]))
        xID = np.int16(np.array([np.floor((lonlim[0]) * 20), np.ceil((lonlim[1]) * 20)]) + 3600)

        # Download the data from the FTP server if the file does not exist yet
        if not os.path.exists(DirFile):
            try:
                Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID, TimeStep)
            except:
                print "Was not able to download file with date %s" % Date

        # Adjust waitbar
        if Waitbar == 1:
            import wa.Functions.Start.WaitbarConsole as WaitbarConsole
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Remove the leftover unzipped global files
    os.chdir(output_folder)
    re = glob.glob("*.dat")
    for f in re:
        os.remove(os.path.join(output_folder, f))
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):

    # Create an array with the dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define the minimum and maximum lat and lon numbers of the ETensemble tiles
    Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))
    Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))
    Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))
    Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))

    # Create the lat and lon tiles that will be downloaded
    Lat_tiles = [Min_lat_tile, Max_lat_tile]
    Lon_tiles = [Min_lon_tile, Max_lon_tile]

    # Define output folder and create this if it not exists
    output_folder = os.path.join(Dir, 'Evaporation', 'ETensV1_0')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create geotransform of the output files (0.0025 degree pixels, upper-left corner)
    geo_new = tuple([lonlim[0], 0.0025, 0.0, latlim[1], 0.0, -0.0025])

    # Define the parameter for downloading the data
    Downloaded = 0

    # Calculate the ET data date by date
    for Date in Dates:

        # Define the output name and folder
        file_name = 'ET_ETensemble250m_mm-month-1_monthly_%d.%02d.01.tif' % (Date.year, Date.month)
        output_file = os.path.join(output_folder, file_name)

        # If the output file does not exist, create it
        if not os.path.exists(output_file):

            # If not downloaded yet, then download
            if Downloaded == 0:

                # Download the ETens data from the FTP server
                Download_ETens_from_WA_FTP(output_folder, Lat_tiles, Lon_tiles)

                # Unzip the folder
                Unzip_ETens_data(output_folder, Lat_tiles, Lon_tiles)
                Downloaded = 1

            # Create the ET data for the area of interest
            ET_data = Collect_dataset(output_folder, Date, Lat_tiles, Lon_tiles, latlim, lonlim)

            # Save this array as a tiff file
            DC.Save_as_tiff(output_file, ET_data, geo_new, projection='WGS84')

        # Adjust Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    return
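# Illustrative usage sketch (not part of the original module): collect the
# monthly ETensemble V1.0 maps for one year; the directory, period, and
# extent are hypothetical example values.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2010-01-01', '2010-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1)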
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads MSWEP Version 2.1 daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 0 or 1 (1 is waitbar on)
    cores -- 1....8
    TimeCase -- 'daily' or 'monthly'
    """
    # Load the account credentials
    username, password = WebAccounts.Accounts(Type='MSWEP')

    # Set required data for the daily option
    if TimeCase == 'daily':

        # Define output folder and create this one if not exists
        path = os.path.join(Dir, 'Precipitation', 'MSWEP', 'daily')
        if not os.path.exists(path):
            os.makedirs(path)

        # Startdate if not defined
        sd_date = '1979-01-01'

        # Define Time frequency
        TimeFreq = 'D'

        # Define URL by using personal account
        url = 'https://%s:%[email protected]/opendap/MSWEP_V2.1/global_daily_010deg/' % (username, password)

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_daily

    # Set required data for the monthly option
    elif TimeCase == 'monthly':

        # Define output folder and create this one if not exists
        path = os.path.join(Dir, 'Precipitation', 'MSWEP', 'monthly')
        if not os.path.exists(path):
            os.makedirs(path)

        # Startdate if not defined
        sd_date = '1979-01-01'

        # Define Time frequency
        TimeFreq = 'MS'

        # Define URL by using personal account
        url = 'https://%s:%[email protected]:443/opendap/MSWEP_V2.1/global_monthly_010deg.nc' % (username, password)

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_monthly

    # If none of the possible options is chosen
    else:
        raise KeyError("The input time interval is not supported")

    # Define IDs (row/column indices on the global 0.1 degree MSWEP grid)
    yID = np.int16(np.array([np.ceil((latlim[0] + 90) * 10), np.floor((latlim[1] + 90) * 10)]))
    xID = np.int16(np.array([np.floor((lonlim[0] + 180) * 10), np.ceil((lonlim[1] + 180) * 10)]))

    # Check dates. If no dates are given, the maximum period is used.
    if not Startdate:
        Startdate = pd.Timestamp(sd_date)
    if not Enddate:
        Enddate = pd.Timestamp('Now')  # should be later than the last available date

    # Create all dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Create one parameter with all the required arguments
    args = [path, url, TimeCase, xID, yID, lonlim, latlim, username, password]

    # Pass variables to parallel function and run
    if not cores:
        for Date in Dates:
            RetrieveData_fcn(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData_fcn)(Date, args) for Date in Dates)
    return results
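# Illustrative usage sketch (not part of the original module): download
# monthly MSWEP precipitation on a single core. Assumes a valid 'MSWEP'
# entry in WebAccounts.Accounts; all argument values are examples.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2000-01-01', '2000-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, cores=False, TimeCase='monthly')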
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version):
    """
    This scripts downloads SSEBop ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -59.2 and 80)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    version -- 'FTP' or 'V4'
    """
    if version == "FTP":
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print 'Latitude above 80N or below 59.2S is not possible. Value set to maximum'
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print 'Longitude must be between 180E and 180W. Now value is set to maximum'
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude and otherwise set lat or lon on greatest extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print 'Latitude above 80N or below 60S is not possible. Value set to maximum'
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print 'Longitude must be between 180E and 180W. Now value is set to maximum'
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq="MS")

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month

        if version == "FTP":
            # Date as printed in filename
            Filename_out = os.path.join(output_folder, 'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' % (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

            # Define end filename
            Filename_dir = os.path.join("%s" % year, "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

        if version == "V4":
            # Date as printed in filename
            Filename_out = os.path.join(output_folder, 'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' % (Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

            # Define the downloaded zip file
            Filename_only_zip = "m%s%02d.zip" % (str(year), month)
            # The end file name after downloading and unzipping
            Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (str(year), month)

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from the FTP server if the file does not exist yet
        if not os.path.exists(Filename_out):
            try:
                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    Download_SSEBop_from_Web(output_folder, Filename_only_zip)

                # Clip dataset
                RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim, lonlim)
                os.remove(local_filename)

            except:
                print "Was not able to download file with date %s" % Date

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        zipfiles = glob.glob("*.zip")
        for zipfile in zipfiles:
            os.remove(os.path.join(output_folder, zipfile))
        xmlfiles = glob.glob("*.xml")
        for xmlfile in xmlfiles:
            os.remove(os.path.join(output_folder, xmlfile))

    return
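# Illustrative usage sketch (not part of the original module): download the
# monthly SSEBop V4 ET maps; the directory, period, and extent are
# hypothetical example values.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2005-01-01', '2005-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, version='V4')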
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This scripts downloads HiHydroSoil Saturated Theta soil data from the UNESCO-IHE ftp server.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        amount = 0
        WaitbarConsole.printWaitBar(amount, 1, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'HiHydroSoil', 'ThetaSat')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Name of the clipped output file
    Filename_out = os.path.join(output_folder, 'Theta_Saturated_Topsoil_HiHydroSoil.tif')

    # Name of the global file on the FTP server
    Filename_in = "wcsat_topsoil.tif"

    # Temporary filename for the downloaded global file
    local_filename = os.path.join(output_folder, Filename_in)

    # Download the data from the FTP server if the file does not exist yet
    if not os.path.exists(Filename_out):
        try:
            Download_HiHydroSoil_from_WA_FTP(local_filename, Filename_in)

            # Clip dataset
            Clip_Dataset(local_filename, Filename_out, latlim, lonlim)
            os.remove(local_filename)

        except:
            print "Was not able to download file"

    # Adjust waitbar
    if Waitbar == 1:
        amount += 1
        WaitbarConsole.printWaitBar(amount, 1, prefix='Progress:', suffix='Complete', length=50)

    return
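# Illustrative usage sketch (not part of the original module): clip the global
# HiHydroSoil saturated water content map to a hypothetical study area.
if __name__ == '__main__':
    DownloadData(Dir='C:/WA_data', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1)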
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores):
    """
    This function downloads monthly MOD16 ET data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2000-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2014-12-31')

    # Make an array of the months for which the ET is taken
    Dates = pd.date_range(Startdate, Enddate, freq='M')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Make directory for the MODIS ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'MOD16')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Download the list (txt file on the internet) which includes the lat and lon
    # information of the MODIS tiles in the integerized sinusoidal projection
    nameDownloadtext = 'http://modis-land.gsfc.nasa.gov/pdf/sn_gring_10deg.txt'
    file_nametext = os.path.join(output_folder, nameDownloadtext.split('/')[-1])
    urllib.urlretrieve(nameDownloadtext, file_nametext)

    # Open text file with tiles which is downloaded before
    tiletext = np.genfromtxt(file_nametext, skip_header=7, skip_footer=1, usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
    tiletext2 = tiletext[tiletext[:, 2] >= -900, :]

    # Convert the values in the text file into the horizontal and vertical tile
    # numbers that must be downloaded to cover the extent defined by the user
    TilesVertical, TilesHorizontal = Tiles_to_download(tiletext2=tiletext2, lonlim1=lonlim, latlim1=latlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    # Remove all .hdf files
    os.chdir(output_folder)
    files = glob.glob("*.hdf")
    for f in files:
        os.remove(os.path.join(output_folder, f))

    # Remove all .txt files
    files = glob.glob("*.txt")
    for f in files:
        os.remove(os.path.join(output_folder, f))

    return results
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, hdf_library, remove_hdf):
    """
    This function downloads MOD17 yearly NPP data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date to max
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-18')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Make an array of the years for which the NPP is taken
    yearstart = pd.Timestamp(Startdate).year
    yearend = pd.Timestamp(Enddate).year
    Startdate_NPP = '%s-01-01' % yearstart
    Enddate_NPP = '%s-12-31' % yearend
    Dates = pd.date_range(Startdate_NPP, Enddate_NPP, freq='AS')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MODIS NPP data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'NPP', 'MOD17')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = wa.Collect.MOD15.DataAccess.Get_tiles_from_txt(output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        files = glob.glob("*.txt")
        for f in files:
            os.remove(os.path.join(output_folder, f))

    return results
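# Illustrative usage sketch (not part of the original module): collect yearly
# MOD17 NPP on a single core and remove the raw HDF tiles afterwards.
# hdf_library=None is assumed here to mean no local tile archive is used;
# all argument values are examples.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2005-01-01', '2007-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, cores=False, hdf_library=None, remove_hdf=1)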
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads GLDAS CLSM daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- 'wind_f_inst' : (string) For all variable codes: VariablesInfo('day').descriptions.keys()
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    cores -- 1....8
    TimeCase -- 'daily' or 'monthly'
    """
    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    username, password = WebAccounts.Accounts(Type='NASA')

    # Set required data for the daily option
    if TimeCase == 'daily':
        types = ['mean']

        # Define output folder and create this one if not exists
        path = {'mean': os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS_CLSM', TimeCase, Var, 'mean')}
        for i in range(len(types)):
            if not os.path.exists(path[types[i]]):
                os.makedirs(path[types[i]])

        # Startdate if not defined
        sd_date = '1948-01-01'

        # Define Time frequency
        TimeFreq = 'D'

        # Define URL by using personal account
        url = 'https://hydro1.gesdisc.eosdis.nasa.gov/dods/GLDAS_CLSM025_D.2.0'

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_daily

    # Set required data for the monthly option
    elif TimeCase == 'monthly':
        types = ['mean']

        # Define output folder and create this one if not exists
        path = {'mean': os.path.join(Dir, 'Weather_Data', 'Model', 'GLDAS_CLSM', TimeCase, Var, 'mean')}
        for i in range(len(types)):
            if not os.path.exists(path[types[i]]):
                os.makedirs(path[types[i]])

        # Startdate if not defined
        sd_date = '1948-01-01'

        # Define Time frequency
        TimeFreq = 'MS'

        # Define URL by using personal account
        url = 'https://hydro1.gesdisc.eosdis.nasa.gov/dods/GLDAS_CLSM025_D.2.0'

        # Name the definition that will be used to obtain the data
        RetrieveData_fcn = RetrieveData_monthly

    # If none of the possible options is chosen
    else:
        raise KeyError("The input time interval is not supported")

    # Define IDs (row/column indices on the global 0.25 degree GLDAS grid, which starts at 60S)
    yID = np.int16(np.array([np.ceil((latlim[0] + 60) * 4), np.floor((latlim[1] + 60) * 4)]))
    xID = np.int16(np.array([np.floor((lonlim[0] + 180) * 4), np.ceil((lonlim[1] + 180) * 4)]))

    # Check dates. If no dates are given, the maximum period is used.
    if not Startdate:
        Startdate = pd.Timestamp(sd_date)
    if not Enddate:
        Enddate = pd.Timestamp('Now')  # should be later than the last available date

    # Create all dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define the variable string name
    VarStr = VarInfo.names[Var]

    # Create one parameter with all the required arguments
    args = [path, url, Var, VarStr, VarInfo, TimeCase, xID, yID, lonlim, latlim, username, password, types]

    # Pass variables to parallel function and run
    if not cores:
        for Date in Dates:
            RetrieveData_fcn(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData_fcn)(Date, args) for Date in Dates)
    return results
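# Illustrative usage sketch (not part of the original module): download a
# monthly GLDAS CLSM variable. Assumes a valid 'NASA' entry in
# WebAccounts.Accounts; the variable code 'evap' is a hypothetical example,
# see the VariablesInfo class for the supported codes.
if __name__ == '__main__':
    DownloadData('C:/WA_data', 'evap', '2000-01-01', '2000-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, cores=False, TimeCase='monthly')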
def ALEXI_weekly(Date, Enddate, output_folder, latlim, lonlim, Year, Waitbar, total_amount, TimeStep):

    # Define the stop conditions
    Stop = Enddate.toordinal()
    End_date = 0
    amount = 0

    while End_date == 0:

        # Date as printed in filename
        Datesname = Date + pd.DateOffset(days=-7)
        DirFile = os.path.join(output_folder, 'ETa_ALEXI_CSFR_mm-week-1_weekly_%s.%02s.%02s.tif' % (Datesname.strftime('%Y'), Datesname.strftime('%m'), Datesname.strftime('%d')))

        # Define end filename
        filename = "ALEXI_weekly_mm_%s_%s.tif" % (Date.strftime('%j'), Date.strftime('%Y'))

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, filename)

        # Create the new date for the next download
        Datename = (str(Date.strftime('%Y')) + '-' + str(Date.strftime('%m')) + '-' + str(Date.strftime('%d')))

        # Define IDs on the global 0.05 degree ALEXI grid
        yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20), np.floor((latlim[0] + 60) * 20)]))
        xID = np.int16(np.array([np.floor((lonlim[0]) * 20), np.ceil((lonlim[1]) * 20)]) + 3600)

        # Download the data from the FTP server if the file does not exist yet
        if not os.path.exists(DirFile):
            try:
                Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID, TimeStep)
            except:
                print "Was not able to download file with date %s" % Date

        # Current DOY
        DOY = datetime.datetime.strptime(Datename, '%Y-%m-%d').timetuple().tm_yday

        # Define next day
        DOY_next = int(DOY + 7)
        if DOY_next >= 366:
            DOY_next = 8
            Year += 1
        DOYnext = str('%s-%s' % (DOY_next, Year))
        DayNext = datetime.datetime.strptime(DOYnext, '%j-%Y')
        Month = '%02d' % DayNext.month
        Day = '%02d' % DayNext.day
        Date = (str(Year) + '-' + str(Month) + '-' + str(Day))

        # Adjust waitbar
        if Waitbar == 1:
            import wa.Functions.Start.WaitbarConsole as WaitbarConsole
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Check if the next file must be downloaded
        Date = pd.Timestamp(Date)
        if Date.toordinal() > Stop:
            End_date = 1
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, timestep, Waitbar, cores, hdf_library, remove_hdf):
    """
    This function downloads MOD16 ET data for monthly or 8-daily timesteps

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    timestep -- 'monthly' or '8-daily'
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2000-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2014-12-31')

    # Make an array of the dates for which the ET is taken
    if timestep == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='M')
        TIMESTEP = 'Monthly'
    elif timestep == '8-daily':
        Dates = Make_TimeStamps(Startdate, Enddate)
        TIMESTEP = '8_Daily'

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Make directory for the MODIS ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'MOD16', TIMESTEP)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = wa.Collect.MOD15.DataAccess.Get_tiles_from_txt(output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim, timestep, hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        files = glob.glob("*.txt")
        for f in files:
            os.remove(os.path.join(output_folder, f))

    return results
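# Illustrative usage sketch (not part of the original module): collect 8-daily
# MOD16 ET for a hypothetical extent and remove the HDF tiles afterwards;
# all argument values are examples.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2003-01-01', '2003-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], timestep='8-daily', Waitbar=1, cores=False, hdf_library=None, remove_hdf=1)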
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads GLEAM ET data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2003-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2015-12-31')

    # Make an array of the years of which the ET is taken
    Startdate = pd.Timestamp(Startdate)
    Enddate = pd.Timestamp(Enddate)
    Years = range(Startdate.year, Enddate.year + 1)

    # String Parameters
    if TimeCase == 'daily':
        VarCode = 'ET_GLEAM.V3.1b_mm-day-1_daily'
        FTPprefix = 'data/v3.1b/'
        TimeFreq = 'D'
        Folder_name = 'Daily'

    elif TimeCase == 'monthly':
        VarCode = 'ET_GLEAM.V3.1b_mm-month-1_monthly'
        FTPprefix = 'data/v3.1b/'
        TimeFreq = 'M'
        Folder_name = 'Monthly'

        # Set Enddate to the end of its month
        End_month = calendar.monthrange(Enddate.year, Enddate.month)[1]
        Enddate = '%d-%02d-%d' % (Enddate.year, Enddate.month, End_month)
    else:
        raise KeyError("The input time interval is not supported")

    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Make directory for the GLEAM ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'GLEAM', Folder_name)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Check variables
    if latlim[0] < -50 or latlim[1] > 50:
        print ('Latitude above 50N or below 50S is not possible.'
               ' Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print ('Longitude must be between 180E and 180W.'
               ' Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Collect the data from the GLEAM server as yearly netCDF files
    try:
        Collect_data(FTPprefix, Years, output_folder, Waitbar)
    except:
        print "Was not able to download the file"

    # Create Waitbar
    print '\nProcess the GLEAM data'
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Pass variables to parallel function and run
    args = [output_folder, latlim, lonlim, VarCode, TimeCase]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)

    # Remove all downloaded .nc files
    os.chdir(output_folder)
    files = glob.glob("*.nc")
    for f in files:
        os.remove(os.path.join(output_folder, f))

    return results
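# Illustrative usage sketch (not part of the original module): download and
# process monthly GLEAM v3.1b ET. Assumes a valid 'GLEAM' entry in
# WebAccounts.Accounts; all argument values are examples.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2005-01-01', '2005-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, cores=False, TimeCase='monthly')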
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This scripts downloads ALEXI ET data from the UNESCO-IHE ftp server.
    The output files display the total ET in mm for a period of one week.
    The name of the file corresponds to the first day of the week.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and otherwise set lat or lon on greatest extent
    if latlim[0] < -60 or latlim[1] > 70:
        print 'Latitude above 70N or below 60S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -60)
        latlim[1] = np.minimum(latlim[1], 70)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2003-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2015-12-31')

    # Make a panda timestamp of the date
    try:
        Enddate = pd.Timestamp(Enddate)
    except:
        Enddate = Enddate

    # Define the Startdate of ALEXI
    DOY = datetime.datetime.strptime(Startdate, '%Y-%m-%d').timetuple().tm_yday
    Year = datetime.datetime.strptime(Startdate, '%Y-%m-%d').timetuple().tm_year

    # Change the startdate so it includes an ALEXI date
    DOYstart = int(math.ceil(DOY / 7.0) * 7 + 1)
    DOYstart = str('%s-%s' % (DOYstart, Year))
    Day = datetime.datetime.strptime(DOYstart, '%j-%Y')
    Month = '%02d' % Day.month
    Day = '%02d' % Day.day
    Date = (str(Year) + '-' + str(Month) + '-' + str(Day))
    DOY = datetime.datetime.strptime(Date, '%Y-%m-%d').timetuple().tm_yday

    # The new Startdate
    Date = pd.Timestamp(Date)

    # Amount of weekly dates
    Dates_Weekly = pd.date_range(Date, Enddate, freq='7D')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates_Weekly)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'Evaporation', 'ALEXI', 'Weekly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define the stop conditions
    Stop = Enddate.toordinal()
    End_date = 0

    while End_date == 0:

        # Date as printed in filename
        Datesname = Date + pd.DateOffset(days=-7)
        DirFile = os.path.join(output_folder, 'ETa_ALEXI_CSFR_mm-week-1_weekly_%s.%02s.%02s.tif' % (Datesname.strftime('%Y'), Datesname.strftime('%m'), Datesname.strftime('%d')))

        # Define end filename
        filename = "ALEXI_weekly_mm_%s_%s.tif" % (Date.strftime('%j'), Date.strftime('%Y'))

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, filename)

        # Create the new date for the next download
        Date = (str(Date.strftime('%Y')) + '-' + str(Date.strftime('%m')) + '-' + str(Date.strftime('%d')))

        # Define IDs on the global 0.05 degree ALEXI grid
        yID = 3000 - np.int16(np.array([np.ceil((latlim[1] + 60) * 20), np.floor((latlim[0] + 60) * 20)]))
        xID = np.int16(np.array([np.floor((lonlim[0]) * 20), np.ceil((lonlim[1]) * 20)]) + 3600)

        # Download the data from the FTP server if the file does not exist yet
        if not os.path.exists(DirFile):
            try:
                Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID)
            except:
                print "Was not able to download file with date %s" % Date

        # Current DOY
        DOY = datetime.datetime.strptime(Date, '%Y-%m-%d').timetuple().tm_yday

        # Define next day
        DOY_next = int(DOY + 7)
        if DOY_next >= 366:
            DOY_next = 8
            Year += 1
        DOYnext = str('%s-%s' % (DOY_next, Year))
        DayNext = datetime.datetime.strptime(DOYnext, '%j-%Y')
        Month = '%02d' % DayNext.month
        Day = '%02d' % DayNext.day
        Date = (str(Year) + '-' + str(Month) + '-' + str(Day))

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # Check if the next file must be downloaded
        Date = pd.Timestamp(Date)
        if Date.toordinal() > Stop:
            End_date = 1
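# Illustrative usage sketch (not part of the original module): download the
# weekly ALEXI ET maps for one year; the directory, period, and extent are
# hypothetical example values.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2005-01-01', '2005-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1)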
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This scripts downloads ASCAT SWI data from the VITO server.
    The output files display the Surface Water Index.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    """
    # Check the latitude and longitude and otherwise reset lat and lon.
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2007-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2018-12-31')

    # Make a panda timestamp of the date
    try:
        Enddate = pd.Timestamp(Enddate)
    except:
        Enddate = Enddate

    # Amount of daily dates
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if not exists
    output_folder = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    output_folder_temp = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily', 'Temp')
    if not os.path.exists(output_folder_temp):
        os.makedirs(output_folder_temp)

    # Loop over dates
    for Date in Dates:

        # Define end filename
        End_filename = os.path.join(output_folder, 'SWI_ASCAT_V3_Percentage_daily_%d.%02d.%02d.tif' % (Date.year, Date.month, Date.day))

        # Define IDs on the global 0.1 degree ASCAT grid
        xID = 1800 + np.int16(np.array([np.ceil((lonlim[0]) * 10), np.floor((lonlim[1]) * 10)]))
        yID = np.int16(np.array([np.floor((-latlim[1]) * 10), np.ceil((-latlim[0]) * 10)])) + 900

        # Download the data from the server if the file does not exist yet
        if not os.path.exists(End_filename):
            try:
                data = Download_ASCAT_from_VITO(End_filename, output_folder_temp, Date, yID, xID)

                # Make geotiff file
                geo = [lonlim[0], 0.1, 0, latlim[1], 0, -0.1]
                DC.Save_as_tiff(name=End_filename, data=data, geo=geo, projection="WGS84")
            except:
                print "Was not able to download file with date %s" % Date

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Remove the temporary folder
    shutil.rmtree(output_folder_temp)
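# Illustrative usage sketch (not part of the original module): download daily
# ASCAT SWI maps for one month; all argument values are examples.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2010-06-01', '2010-06-30', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1)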
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads CHIRPS daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    TimeCase -- String equal to 'daily' or 'monthly'
    """
    # Define timestep for the timedates
    if TimeCase == 'daily':
        TimeFreq = 'D'
        output_folder = os.path.join(Dir, 'Precipitation', 'CHIRPS', 'Daily')
    elif TimeCase == 'monthly':
        TimeFreq = 'MS'
        output_folder = os.path.join(Dir, 'Precipitation', 'CHIRPS', 'Monthly')
    else:
        raise KeyError("The input time interval is not supported")

    # Make directory if it not exists
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Check time variables
    if not Startdate:
        Startdate = pd.Timestamp('1981-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Create days
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check space variables
    if latlim[0] < -50 or latlim[1] > 50:
        print ('Latitude above 50N or below 50S is not possible.'
               ' Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print ('Longitude must be between 180E and 180W.'
               ' Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Define IDs on the global 0.05 degree CHIRPS grid (50S-50N)
    yID = 2000 - np.int16(np.array([np.ceil((latlim[1] + 50) * 20), np.floor((latlim[0] + 50) * 20)]))
    xID = np.int16(np.array([np.floor((lonlim[0] + 180) * 20), np.ceil((lonlim[1] + 180) * 20)]))

    # Pass variables to parallel function and run
    args = [output_folder, TimeCase, xID, yID, lonlim, latlim]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args) for Date in Dates)
    return results
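# Illustrative usage sketch (not part of the original module): download daily
# CHIRPS precipitation on 4 cores; all argument values are examples. Note that
# the console waitbar is only updated in the single-core path.
if __name__ == '__main__':
    DownloadData('C:/WA_data', '2009-01-01', '2009-12-31', latlim=[8.0, 12.0], lonlim=[35.0, 39.0], Waitbar=1, cores=4, TimeCase='daily')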
def main(Dir, Startdate='', Enddate='', latlim=[-60, 60], lonlim=[-180, 180],
         pixel_size=False, cores=False, LANDSAF=0, SourceLANDSAF='', Waitbar=1):
    """
    This function creates monthly Reference ET (ETref) data by summing the
    daily ETref maps

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print the waitbar
    """
    print 'Create monthly Reference ET data for period %s till %s' % (Startdate, Enddate)

    # An array of monthly dates which will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Calculate the ETref day by day for every month
    for Date in Dates:

        # Collect date data
        Y = Date.year
        M = Date.month
        Mday = calendar.monthrange(Y, M)[1]
        Days = pd.date_range(Date, Date + pd.Timedelta(days=Mday), freq='D')
        StartTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-01'
        EndTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-' + str(Mday)

        # Get ETref on a daily basis
        daily(Dir=Dir, Startdate=StartTime, Enddate=EndTime, latlim=latlim,
              lonlim=lonlim, pixel_size=pixel_size, cores=cores,
              LANDSAF=LANDSAF, SourceLANDSAF=SourceLANDSAF, Waitbar=0)

        # Load DEM
        if not pixel_size:
            nameDEM = 'DEM_HydroShed_m_3s.tif'
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', nameDEM)
        else:
            DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_HydroShed_m_reshaped_for_ETref.tif')

        # Get some geo-data to save results
        geo_ET, proj, size_X, size_Y = RC.Open_array_info(DEMmap)

        # Sum the daily maps of this month
        dataMonth = np.zeros([size_Y, size_X])
        for Day in Days[:-1]:
            DirDay = os.path.join(Dir, 'ETref', 'Daily', 'ETref_mm-day-1_daily_' + Day.strftime('%Y.%m.%d') + '.tif')
            dataDay = gdal.Open(DirDay)
            Dval = dataDay.GetRasterBand(1).ReadAsArray().astype(np.float32)
            Dval[Dval < 0] = 0
            dataMonth = dataMonth + Dval
            dataDay = None

        # Make geotiff file
        output_folder_month = os.path.join(Dir, 'ETref', 'Monthly')
        if not os.path.exists(output_folder_month):
            os.makedirs(output_folder_month)
        DirMonth = os.path.join(output_folder_month, 'ETref_mm-month-1_monthly_' + Date.strftime('%Y.%m.%d') + '.tif')

        # Create the tiff file
        DC.Save_as_tiff(DirMonth, dataMonth, geo_ET, proj)

        # Adjust Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
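# Standalone sketch of the daily-to-monthly summation performed above, with
# random numpy arrays standing in for the daily ETref rasters that the real
# code reads through GDAL; sizes and values are made up for illustration.
import calendar
import numpy as np
import pandas as pd

Date = pd.Timestamp('2010-06-01')
Mday = calendar.monthrange(Date.year, Date.month)[1]

size_Y, size_X = 4, 5                      # toy raster size
dataMonth = np.zeros([size_Y, size_X])
for day in range(Mday):
    Dval = np.random.rand(size_Y, size_X)  # stand-in for one daily ETref map
    Dval[Dval < 0] = 0                     # negative ET is clipped, as above
    dataMonth += Dval
print(dataMonth.round(1))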
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads GLEAM ET data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2003-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2015-12-31')

    # Make an array of the years for which the ET is taken
    # (pd.Timestamp accepts both 'yyyy-mm-dd' strings and Timestamps)
    YearsDownloadstart = pd.Timestamp(Startdate).year
    YearsDownloadend = pd.Timestamp(Enddate).year
    Years = range(int(YearsDownloadstart), int(YearsDownloadend) + 1)

    # String Parameters
    if TimeCase == 'daily':
        VarCode = 'ET_GLEAM.V3.1b_mm-day-1_daily'
        FTPprefix = 'data/v3.1b/'
        TimeFreq = 'D'
        Folder_name = 'Daily'
    elif TimeCase == 'monthly':
        VarCode = 'ET_GLEAM.V3.1b_mm-month-1_monthly'
        FTPprefix = 'data/v3.1b/'
        TimeFreq = 'M'
        Folder_name = 'Monthly'

        # Get end of month for Enddate
        monthDownloadend = pd.Timestamp(Enddate).month
        End_month = calendar.monthrange(int(YearsDownloadend), int(monthDownloadend))[1]
        Enddate = '%d-%02d-%d' % (int(YearsDownloadend), int(monthDownloadend), int(End_month))
    else:
        raise KeyError("The input time interval is not supported")

    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Make directory for the GLEAM ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'GLEAM', Folder_name)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Check variables
    if latlim[0] < -50 or latlim[1] > 50:
        print ('Latitude above 50N or below 50S is not possible.'
               ' Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print ('Longitude must be between 180E and 180W.'
               ' Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Collect the data from the GLEAM webpage; this returns the data and the
    # lat and lon in meters of those tiles
    try:
        Collect_data(FTPprefix, Years, output_folder, Waitbar)
    except:
        print "Was not able to download the file"

    # Create Waitbar
    print '\nProcess the GLEAM data'
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Pass variables to parallel function and run
    args = [output_folder, latlim, lonlim, VarCode, TimeCase]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    # Remove all downloaded .nc files
    os.chdir(output_folder)
    files = glob.glob("*.nc")
    for f in files:
        os.remove(os.path.join(output_folder, f))

    return results
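# Sketch of the year-range handling fixed above: pd.Timestamp accepts both
# the 'yyyy-mm-dd' strings a caller passes in and the Timestamp defaults,
# so the download years can be taken from either form.
import pandas as pd

Enddate = pd.Timestamp('2005-02-01')
for Startdate in ['2003-05-01', pd.Timestamp('2003-05-01')]:
    Years = range(pd.Timestamp(Startdate).year, Enddate.year + 1)
    print(list(Years))  # [2003, 2004, 2005] in both cases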
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, TimeStep, Waitbar):
    """
    This script downloads ALEXI ET data from the UNESCO-IHE ftp server. The
    output files display the total ET in mm for a period of one week. The
    name of the file corresponds to the first day of the week.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    TimeStep -- 'daily' or 'weekly' (by using here monthly, an older
                dataset will be used)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude, and otherwise set to the greatest extent
    if latlim[0] < -60 or latlim[1] > 70:
        print 'Latitude above 70N or below 60S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -60)
        latlim[1] = np.minimum(latlim[1], 70)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        if TimeStep == 'weekly':
            Startdate = pd.Timestamp('2003-01-01')
        if TimeStep == 'daily':
            Startdate = pd.Timestamp('2005-01-01')
    if not Enddate:
        if TimeStep == 'weekly':
            Enddate = pd.Timestamp('2015-12-31')
        if TimeStep == 'daily':
            Enddate = pd.Timestamp('2016-12-31')

    # Make pandas timestamps of the dates (accepts both strings and Timestamps)
    Startdate = pd.Timestamp(Startdate)
    Enddate = pd.Timestamp(Enddate)

    if TimeStep == 'weekly':

        # Define the Startdate of ALEXI
        DOY = Startdate.dayofyear
        Year = Startdate.year

        # Change the startdate so it includes an ALEXI date
        DOYstart = int(math.ceil(DOY / 7.0) * 7 + 1)
        DOYstart = str('%s-%s' % (DOYstart, Year))
        Day = datetime.datetime.strptime(DOYstart, '%j-%Y')
        Month = '%02d' % Day.month
        Day = '%02d' % Day.day
        Date = (str(Year) + '-' + str(Month) + '-' + str(Day))
        DOY = datetime.datetime.strptime(Date, '%Y-%m-%d').timetuple().tm_yday

        # The new Startdate
        Date = pd.Timestamp(Date)

        # Amount of weekly dates
        Dates = pd.date_range(Date, Enddate, freq='7D')

        # Define directory and create it if it does not exist
        output_folder = os.path.join(Dir, 'Evaporation', 'ALEXI', 'Weekly')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    if TimeStep == 'daily':

        # Define Dates
        Dates = pd.date_range(Startdate, Enddate, freq='D')

        # Define directory and create it if it does not exist
        output_folder = os.path.join(Dir, 'Evaporation', 'ALEXI', 'Daily')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # Create Waitbar (total_amount is defined unconditionally because the
    # worker functions below need it even when the waitbar is switched off)
    total_amount = len(Dates)
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if TimeStep == 'weekly':
        ALEXI_weekly(Date, Enddate, output_folder, latlim, lonlim, Year,
                     Waitbar, total_amount, TimeStep)
    if TimeStep == 'daily':
        ALEXI_daily(Dates, output_folder, latlim, lonlim, Waitbar,
                    total_amount, TimeStep)
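# Standalone sketch of the week-snapping above: ALEXI weekly files start on
# day-of-year 8, 15, 22, ..., so an arbitrary start date is moved forward
# to the next DOY of the form 7*k + 1. The input date is made up.
import datetime
import math

Startdate = '2003-03-10'                       # example input
d = datetime.datetime.strptime(Startdate, '%Y-%m-%d')
DOYstart = int(math.ceil(d.timetuple().tm_yday / 7.0) * 7 + 1)
snapped = datetime.datetime.strptime('%d-%d' % (DOYstart, d.year), '%j-%Y')
print(snapped.strftime('%Y-%m-%d'))            # 2003-03-12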
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores, TimeCase):
    """
    This function downloads TRMM daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    TimeCase -- String equal to 'daily' or 'monthly'
    Waitbar -- 1 (Default) will print a waitbar
    """
    # String Parameters
    if TimeCase == 'daily':
        TimeFreq = 'D'
        output_folder = os.path.join(Dir, 'Precipitation', 'TRMM', 'Daily')
    elif TimeCase == 'monthly':
        TimeFreq = 'MS'
        output_folder = os.path.join(Dir, 'Precipitation', 'TRMM', 'Monthly')
    else:
        raise KeyError("The input time interval is not supported")

    # Make directory
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('1998-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq=TimeFreq)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if latlim[0] < -50 or latlim[1] > 50:
        print('Latitude above 50N or below 50S is not possible.'
              ' Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -50)
        latlim[1] = np.minimum(latlim[1], 50)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W.'
              ' Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Define IDs on the 0.25 degree TRMM grid
    yID = np.int16(np.array([np.ceil((latlim[0] + 50) * 4),
                             np.floor((latlim[1] + 50) * 4)]))
    xID = np.int16(np.array([np.floor(lonlim[0] * 4),
                             np.ceil(lonlim[1] * 4)]) + 720)

    # Pass variables to parallel function and run
    args = [output_folder, TimeCase, xID, yID, lonlim, latlim]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)
    return results
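# Minimal check of the TRMM index math above: the grid is 0.25 degrees,
# rows count up from 50S and columns from 180W (column offset 720). The
# extent is made up for illustration.
import numpy as np

latlim = [-5.0, 10.0]
lonlim = [30.0, 42.5]
yID = np.int16(np.array([np.ceil((latlim[0] + 50) * 4),
                         np.floor((latlim[1] + 50) * 4)]))
xID = np.int16(np.array([np.floor(lonlim[0] * 4),
                         np.ceil(lonlim[1] * 4)]) + 720)
print('yID %s xID %s' % (yID, xID))  # yID [180 240] xID [840 890]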
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores):
    """
    This function downloads RFE daily data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -40.05 and 40.05)
    lonlim -- [xmin, xmax] (values must be between -20.05 and 55.05)
    Waitbar -- 1 (Default) will print a waitbar
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    """
    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('2001-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if latlim[0] < -40.05 or latlim[1] > 40.05:
        print ('Latitude above 40.05N or below 40.05S is not possible.'
               ' Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -40.05)
        latlim[1] = np.minimum(latlim[1], 40.05)
    if lonlim[0] < -20.05 or lonlim[1] > 55.05:
        print ('Longitude must be between 20.05W and 55.05E.'
               ' Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -20.05)
        lonlim[1] = np.minimum(lonlim[1], 55.05)

    # Make directory
    output_folder = os.path.join(Dir, 'Precipitation', 'RFE', 'Daily/')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define IDs on the 0.1 degree RFE grid (covering Africa)
    yID = 801 - np.int16(np.array([np.ceil((latlim[1] + 40.05) * 10),
                                   np.floor((latlim[0] + 40.05) * 10) - 1]))
    xID = np.int16(np.array([np.floor((lonlim[0] + 20.05) * 10),
                             np.ceil((lonlim[1] + 20.05) * 10) + 1]))

    # Pass variables to parallel function and run
    args = [output_folder, lonlim, latlim, xID, yID]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)
    return results
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores,
                 hdf_library, remove_hdf):
    """
    This function downloads MOD17 8-daily data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date to max
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-18')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Make an array of the days for which the GPP is taken
    Dates = Make_TimeStamps(Startdate, Enddate)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check the latitude and longitude, and otherwise set to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MODIS GPP data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'GPP', 'MOD17')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = wa.Collect.MOD15.DataAccess.Get_tiles_from_txt(
        output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
            hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        files = glob.glob("*.txt")
        for f in files:
            os.remove(os.path.join(output_folder, f))

    return results
def main(Dir, Startdate='', Enddate='', latlim=[-50, 50], lonlim=[-180, 180],
         cores=False, Waitbar=1):
    """
    This function downloads RFE V2.0 (monthly) data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Download data
    print '\nDownload monthly RFE precipitation data for period %s till %s' % (Startdate, Enddate)

    # Check variables
    if not Startdate:
        Startdate = pd.Timestamp('2001-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('Now')
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Make directory
    output_folder = os.path.join(Dir, 'Precipitation', 'RFE', 'Monthly/')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    for Date in Dates:
        month = Date.month
        year = Date.year
        end_day = calendar.monthrange(year, month)[1]
        Startdate_one_month = '%d-%02d-01' % (year, month)
        Enddate_one_month = '%d-%02d-%02d' % (year, month, end_day)

        DownloadData(Dir, Startdate_one_month, Enddate_one_month, latlim,
                     lonlim, 0, cores)

        Dates_daily = pd.date_range(Startdate_one_month, Enddate_one_month, freq='D')

        # Directory with the daily data
        input_folder_daily = os.path.join(Dir, 'Precipitation', 'RFE', 'Daily/')

        # Sum the daily rasters; i skips over leading dates for which no
        # daily file exists, so the first existing file initialises the sum
        i = 0
        for Date_daily in Dates_daily:
            file_name = 'P_RFE.v2.0_mm-day-1_daily_%s.%s.%s.tif' % (
                Date_daily.strftime('%Y'), Date_daily.strftime('%m'),
                Date_daily.strftime('%d'))
            file_name_daily_path = os.path.join(input_folder_daily, file_name)
            if os.path.exists(file_name_daily_path):
                if Date_daily == Dates_daily[i]:
                    Raster_monthly = RC.Open_tiff_array(file_name_daily_path)
                else:
                    Raster_monthly += RC.Open_tiff_array(file_name_daily_path)
            else:
                if Date_daily == Dates_daily[i]:
                    i += 1

        # Save the monthly sum as a geotiff
        geo_out, proj, size_X, size_Y = RC.Open_array_info(file_name_daily_path)
        file_name = 'P_RFE.v2.0_mm-month-1_monthly_%s.%s.01.tif' % (
            Date.strftime('%Y'), Date.strftime('%m'))
        file_name_output = os.path.join(output_folder, file_name)
        DC.Save_as_tiff(file_name_output, Raster_monthly, geo_out, projection="WGS84")

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
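# The month formatting in the function above matters: '%02d' zero-pads
# integers, while the '%02s' it replaces pads with spaces and yields
# invalid date strings.
year, month = 2001, 3
print('%s-%02s-01' % (year, month))  # '2001- 3-01'  (broken)
print('%d-%02d-01' % (year, month))  # '2001-03-01'  (correct)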
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores,
                 TimeCase, CaseParameters):
    """
    This function downloads ECMWF six-hourly, daily or monthly data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- Variable code name as defined in VariablesInfo
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) will print a waitbar
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    TimeCase -- String equal to 'six_hourly', 'daily' or 'monthly'
    """
    # Correct the latitude and longitude limits to the 0.125 degree grid
    latlim_corr_one = np.floor(latlim[0] / 0.125) * 0.125
    latlim_corr_two = np.ceil(latlim[1] / 0.125) * 0.125
    latlim_corr = [latlim_corr_one, latlim_corr_two]

    lonlim_corr_one = np.floor(lonlim[0] / 0.125) * 0.125
    lonlim_corr_two = np.ceil(lonlim[1] / 0.125) * 0.125
    lonlim_corr = [lonlim_corr_one, lonlim_corr_two]

    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    Varname_dir = VarInfo.file_name[Var]

    # Create output directory
    out_dir = os.path.join(Dir, "Weather_Data", "Model", "ECMWF", TimeCase,
                           Varname_dir, "mean")
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    DownloadType = VarInfo.DownloadType[Var]

    # Set required data for the six-hourly option
    if TimeCase == 'six_hourly':
        string1 = 'oper'
    # Set required data for the daily and monthly options
    elif TimeCase == 'daily':
        Dates = pd.date_range(Startdate, Enddate, freq='D')
    elif TimeCase == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='MS')

    if DownloadType == 1:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'sfc'
        string8 = 'an'
    if DownloadType == 2:
        string1 = 'oper'
        string4 = "12"
        string6 = "00:00:00/12:00:00"
        string2 = 'sfc'
        string8 = 'fc'
    if DownloadType == 3:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'pl'
        string8 = 'an'

    string7 = '%s/to/%s' % (Startdate, Enddate)
    parameter_number = VarInfo.number_para[Var]
    string3 = '%03d.128' % (parameter_number)
    string5 = '0.125/0.125'
    string9 = 'ei'
    string10 = '%s/%s/%s/%s' % (latlim_corr[1], lonlim_corr[0],
                                latlim_corr[0], lonlim_corr[1])  # N, W, S, E

    # Download data by using the ECMWF API
    import wa.Collect.ECMWF.ECMWFdownload as Download
    print 'Use API ECMWF to collect the data, please wait'
    Download.API(Dir, DownloadType, string1, string2, string3, string4,
                 string5, string6, string7, string8, string9, string10)

    # Open the downloaded data
    NC_filename = os.path.join(Dir, 'data_interim.nc')
    fh = Dataset(NC_filename, mode='r')

    # Get the NC variable parameter
    parameter_var = VarInfo.var_name[Var]
    Var_unit = VarInfo.units[Var]
    factors_add = VarInfo.factors_add[Var]
    factors_mul = VarInfo.factors_mul[Var]

    # Open the NC data
    Data = fh.variables[parameter_var][:]
    Data_time = fh.variables['time'][:]
    lons = fh.variables['longitude'][:]
    lats = fh.variables['latitude'][:]

    # Define the georeference information
    Geo_four = np.nanmax(lats)
    Geo_one = np.nanmin(lons)
    Geo_out = tuple([Geo_one, 0.125, 0.0, Geo_four, 0.0, -0.125])

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    for date in Dates:

        # Define the year, month and day
        year = date.year
        month = date.month
        day = date.day

        # Hours since 1900-01-01
        start = datetime.datetime(year=1900, month=1, day=1)
        end = datetime.datetime(year, month, day)
        diff = end - start
        hours_from_start_begin = diff.total_seconds() / 60 / 60

        # Select the time steps that belong to this day or month
        Date_good = np.zeros(len(Data_time))
        if TimeCase == 'daily':
            days_later = 1
        if TimeCase == 'monthly':
            days_later = calendar.monthrange(year, month)[1]
        Date_good[np.logical_and(Data_time >= hours_from_start_begin,
                                 Data_time < (hours_from_start_begin + 24 * days_later))] = 1
        Data_one = Data[np.int_(Date_good) == 1, :, :]

        # Calculate the average in the units defined in VariablesInfo
        # (e.g. the mean temperature in degrees Celsius)
        Data_end = factors_mul * np.nanmean(Data_one, 0) + factors_add

        if VarInfo.types[Var] == 'flux':
            Data_end = Data_end * days_later

        VarOutputname = VarInfo.file_name[Var]

        # Define the output name
        name_out = os.path.join(out_dir, "%s_ECMWF_ERA-Interim_%s_%s_%d.%02d.%02d.tif" % (
            VarOutputname, Var_unit, TimeCase, year, month, day))

        # Create tiff files
        DC.Save_as_tiff(name_out, Data_end, Geo_out, "WGS84")

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    fh.close()

    return
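# Standalone sketch of the time selection above: ERA-Interim stores time as
# hours since 1900-01-01, so one day (or month) is a half-open interval of
# 24 * days_later hours on that axis. The toy 6-hourly axis is made up.
import calendar
import datetime
import numpy as np

year, month, day = 2010, 1, 1
start = datetime.datetime(1900, 1, 1)
hours_begin = (datetime.datetime(year, month, day) - start).total_seconds() / 3600

# Toy 6-hourly time axis spanning 40 days around the target month
Data_time = hours_begin - 48 + np.arange(0, 24 * 40, 6)

days_later = calendar.monthrange(year, month)[1]   # monthly case: 31
mask = np.logical_and(Data_time >= hours_begin,
                      Data_time < hours_begin + 24 * days_later)
print('%d six-hourly steps selected' % mask.sum())  # 124 = 31 days * 4 steps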
def CollectData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar, cores, Version):
    """
    This function collects daily CFSR data in geotiff format

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Var -- 'dlwsfc', 'dswsfc', 'ulwsfc', or 'uswsfc'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be within the CFSR grid, about
              -89.92 to 89.92 for CFSR and -89.95 to 89.95 for CFSRv2)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a wait bar
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Version -- 1 or 2 (1 = CFSR, 2 = CFSRv2)
    """
    # Create an array of the days for which the data is collected
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # For collecting CFSR data
    if Version == 1:
        # Check the latitude and longitude, and otherwise set to the greatest extent
        if latlim[0] < -89.9171038899 or latlim[1] > 89.9171038899:
            print('Latitude above 89.917N or below 89.917S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -89.9171038899)
            latlim[1] = np.minimum(latlim[1], 89.9171038899)
        if lonlim[0] < -180 or lonlim[1] > 179.843249782:
            print('Longitude must be between 180W and 179.84E. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 179.843249782)

        # Make directory for the CFSR data
        output_folder = os.path.join(Dir, 'Radiation', 'CFSR')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # For collecting CFSRv2 data
    if Version == 2:
        # Check the latitude and longitude, and otherwise set to the greatest extent
        if latlim[0] < -89.9462116040955806 or latlim[1] > 89.9462116040955806:
            print('Latitude above 89.946N or below 89.946S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -89.9462116040955806)
            latlim[1] = np.minimum(latlim[1], 89.9462116040955806)
        if lonlim[0] < -180 or lonlim[1] > 179.8977275:
            print('Longitude must be between 180W and 179.90E. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 179.8977275)

        # Make directory for the CFSRv2 data
        output_folder = os.path.join(Dir, 'Radiation', 'CFSRv2')
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)

    # Pass variables to parallel function and run
    args = [output_folder, latlim, lonlim, Var, Version]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    # Remove all .nc, .grb2 and .grib2 files
    for f in os.listdir(output_folder):
        if re.search(".nc", f) or re.search(".grb2", f) or re.search(".grib2", f):
            os.remove(os.path.join(output_folder, f))

    return results
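# Hypothetical invocation of CollectData above (a sketch: the directory,
# date range and extent are made up, and running it needs access to the
# CFSR server plus the rest of the wa toolbox, so it is shown guarded).
if __name__ == '__main__':
    results = CollectData(Dir='C:/data/', Var='dswsfc',
                          Startdate='2005-01-01', Enddate='2005-01-03',
                          latlim=[5.0, 15.0], lonlim=[30.0, 40.0],
                          Waitbar=1, cores=False, Version=2)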
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This script downloads CMRSET ET data from the UNESCO-IHE ftp server. The
    output files display the total ET in mm for a period of one month. The
    name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude, and otherwise set to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Check Startdate and Enddate
    if not Startdate:
        Startdate = pd.Timestamp('2000-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2012-12-31')

    # Create the dates that will be collected
    Dates = pd.date_range(Startdate, Enddate, freq="MS")

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if it does not exist
    output_folder = os.path.join(Dir, 'Evaporation', 'CMRSET', 'Monthly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month

        # Date as printed in the output filename
        Filename_out = os.path.join(output_folder,
                                    'ETa_CMRSET_mm-month-1_monthly_%s.%s.%s.tif' % (
                                        Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

        # Filename of the global file on the server
        Filename_in = "M01CMRSETGlobalY%dM%02d.tif" % (year, month)

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, Filename_in)

        # Download the data from the FTP server if the file does not exist
        if not os.path.exists(Filename_out):
            try:
                Download_CMRSET_from_WA_FTP(local_filename, Filename_in)

                # Clip dataset
                RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim, lonlim)
                os.remove(local_filename)
            except:
                print "Was not able to download file with date %s" % Date

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, timestep, Waitbar,
                 cores, hdf_library, remove_hdf):
    """
    This function downloads MOD16 ET data (8-daily or monthly)

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    timestep -- 'monthly' or '8-daily'
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date
    if not Startdate:
        Startdate = pd.Timestamp('2000-01-01')
    if not Enddate:
        Enddate = pd.Timestamp('2014-12-31')

    # Make an array of the days for which the ET is taken
    if timestep == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='M')
        TIMESTEP = 'Monthly'
    elif timestep == '8-daily':
        Dates = Make_TimeStamps(Startdate, Enddate)
        TIMESTEP = '8_Daily'

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Make directory for the MODIS ET data
    output_folder = os.path.join(Dir, 'Evaporation', 'MOD16', TIMESTEP)
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Define which MODIS tiles are required
    TilesVertical, TilesHorizontal = wa.Collect.MOD15.DataAccess.Get_tiles_from_txt(
        output_folder, hdf_library, latlim, lonlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim,
            timestep, hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        files = glob.glob("*.txt")
        for f in files:
            os.remove(os.path.join(output_folder, f))

    return results
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This function downloads JRC water occurrence data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check the latitude and longitude, and otherwise set to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print('Latitude above 90N or below 90S is not possible. Value set to maximum')
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print('Longitude must be between 180E and 180W. Now value is set to maximum')
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the JRC water occurrence data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'JRC', 'Occurrence')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')

    if not os.path.exists(fileName_out):

        # Create Waitbar
        if Waitbar == 1:
            import wa.Functions.Start.WaitbarConsole as WaitbarConsole
            total_amount = 1
            amount = 0
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

        # This function defines the names of the datasets that need to be collected
        Names_to_download = Tiles_to_download(lonlim, latlim)

        # Pass variables to the function and run
        args = [output_folder, Names_to_download, lonlim, latlim]
        RetrieveData(args)

        if Waitbar == 1:
            amount = 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
    else:
        print('JRC water occurrence map already exists')

    return
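# Sketch of covering a bounding box with 10-degree blocks, the tiling the
# JRC occurrence layer is assumed to use here; the real tile names come
# from Tiles_to_download, which is not shown in this file, and the extent
# below is made up.
import numpy as np

lonlim, latlim = [12.3, 34.8], [-5.2, 8.9]
lons = np.arange(np.floor(lonlim[0] / 10.) * 10, lonlim[1], 10)
lats = np.arange(np.floor(latlim[0] / 10.) * 10, latlim[1], 10)
print([(lon, lat) for lat in lats for lon in lons])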
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version):
    """
    This script downloads SSEBop ET data from the UNESCO-IHE ftp server or
    from the USGS web server. The output files display the total ET in mm
    for a period of one month. The name of the file corresponds to the
    first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -59.2 and 80)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    version -- 'FTP' or 'V4'
    """
    if version == "FTP":
        # Check the latitude and longitude, and otherwise set to the greatest extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print 'Latitude above 80N or below 59.2S is not possible. Value set to maximum'
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print 'Longitude must be between 180E and 180W. Now value is set to maximum'
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude, and otherwise set to the greatest extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print 'Latitude above 80N or below 60S is not possible. Value set to maximum'
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print 'Longitude must be between 180E and 180W. Now value is set to maximum'
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Create the dates that will be collected
    Dates = pd.date_range(Startdate, Enddate, freq="MS")

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define directory and create it if it does not exist
    output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month

        if version == "FTP":
            # Date as printed in the output filename
            Filename_out = os.path.join(output_folder,
                                        'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%s.%s.tif' % (
                                            Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

            # Define the filename on the server
            Filename_dir = os.path.join("%s" % year, "m%s%02d.tif" % (str(year)[2:], month))
            Filename_only = "m%s%02d.tif" % (str(year)[2:], month)

        if version == "V4":
            # Date as printed in the output filename
            Filename_out = os.path.join(output_folder,
                                        'ETa_SSEBop_V4_mm-month-1_monthly_%s.%s.%s.tif' % (
                                            Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

            # Define the downloaded zip file
            Filename_only_zip = "m%s%02d.zip" % (str(year), month)
            # The end file name after downloading and unzipping
            Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" % (str(year), month)

        # Temporary filename for the downloaded global file
        local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from the server if the file does not exist
        if not os.path.exists(Filename_out):
            try:
                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    Download_SSEBop_from_Web(output_folder, Filename_only_zip)

                # Clip dataset
                RC.Clip_Dataset_GDAL(local_filename, Filename_out, latlim, lonlim)
                os.remove(local_filename)
            except:
                print "Was not able to download file with date %s" % Date

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    if version == "V4":
        # Remove the downloaded .zip and .xml files
        import glob
        os.chdir(output_folder)
        zipfiles = glob.glob("*.zip")
        for zipfile in zipfiles:
            os.remove(os.path.join(output_folder, zipfile))
        xmlfiles = glob.glob("*.xml")
        for xmlfile in xmlfiles:
            os.remove(os.path.join(output_folder, xmlfile))

    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, cores,
                 hdf_library, remove_hdf):
    """
    This function downloads MOD10 8-daily data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    cores -- The number of cores used to run the routine. It can be 'False'
             to avoid using parallel computing routines.
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Check start and end date and otherwise set the date to max
    if not Startdate:
        Startdate = pd.Timestamp('2000-02-18')
    if not Enddate:
        Enddate = pd.Timestamp('Now')

    # Make an array of the days for which the MOD10 data is taken
    Dates = Make_TimeStamps(Startdate, Enddate)

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Check the latitude and longitude, and otherwise set to the greatest extent
    if latlim[0] < -90 or latlim[1] > 90:
        print 'Latitude above 90N or below 90S is not possible. Value set to maximum'
        latlim[0] = np.maximum(latlim[0], -90)
        latlim[1] = np.minimum(latlim[1], 90)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print 'Longitude must be between 180E and 180W. Now value is set to maximum'
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # Make directory for the MOD10 data
    Dir = Dir.replace("/", os.sep)
    output_folder = os.path.join(Dir, 'MOD10')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Download list (txt file on the internet) which includes the lat and
    # lon information of the MODIS tiles
    nameDownloadtext = 'https://modis-land.gsfc.nasa.gov/pdf/sn_gring_10deg.txt'
    file_nametext = os.path.join(output_folder, nameDownloadtext.split('/')[-1])
    try:
        try:
            urllib.urlretrieve(nameDownloadtext, file_nametext)
        except:
            data = urllib2.urlopen(nameDownloadtext).read()
            with open(file_nametext, "wb") as fp:
                fp.write(data)
    except:
        from requests.packages.urllib3.exceptions import InsecureRequestWarning
        requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
        with open(file_nametext, "wb") as fp:
            data = requests.get(nameDownloadtext, verify=False)
            fp.write(data.content)

    # Open the text file with tiles which was downloaded before
    tiletext = np.genfromtxt(file_nametext, skip_header=7, skip_footer=1,
                             usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))
    tiletext2 = tiletext[tiletext[:, 2] >= -900, :]

    # This function converts the values in the text file into the horizontal
    # and vertical tile numbers that must be downloaded to cover the extent
    # defined by the user
    TilesVertical, TilesHorizontal = Tiles_to_download(tiletext2=tiletext2,
                                                       lonlim1=lonlim,
                                                       latlim1=latlim)

    # Pass variables to parallel function and run
    args = [output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
            hdf_library]
    if not cores:
        for Date in Dates:
            RetrieveData(Date, args)
            if Waitbar == 1:
                amount += 1
                WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
        results = True
    else:
        results = Parallel(n_jobs=cores)(delayed(RetrieveData)(Date, args)
                                         for Date in Dates)

    if remove_hdf == 1:
        # Remove all .hdf files
        os.chdir(output_folder)
        files = glob.glob("*.hdf")
        for f in files:
            os.remove(os.path.join(output_folder, f))

        # Remove all .txt files
        files = glob.glob("*.txt")
        for f in files:
            os.remove(os.path.join(output_folder, f))

    return results
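# Toy illustration of the tile filtering above: rows of the gring table
# whose coordinate columns are filled with -999 (assumed here to mark tiles
# without valid land data) are dropped by the >= -900 test. The two rows
# below are made up.
import numpy as np

tiletext = np.array([
    [17., 5.,  -10.0,  40.0,   0.0,  50.0,  10.0,  40.0,   0.0,  30.0],
    [18., 5., -999.0, -999., -999., -999., -999., -999., -999., -999.]])
tiletext2 = tiletext[tiletext[:, 2] >= -900, :]
print(tiletext2.shape)  # (1, 10): the -999 row is removed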
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar):
    """
    This function downloads monthly ETensemble (ETensV1.0) data

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 (Default) will print a waitbar
    """
    # Create an array with the dates that will be calculated
    Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    # Define the minimum and maximum lat and lon tiles of the ETensemble grid
    Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))
    Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))
    Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))
    Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))

    # Create the lat and lon tiles that will be downloaded
    Lat_tiles = [Min_lat_tile, Max_lat_tile]
    Lon_tiles = [Min_lon_tile, Max_lon_tile]

    # Define output folder and create it if it does not exist
    output_folder = os.path.join(Dir, 'Evaporation', 'ETensV1_0')
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Create geotransform of the output files (0.0025 degree pixels)
    geo = [lonlim[0], 0.0025, 0.0, latlim[1], 0.0, -0.0025]
    geo_new = tuple(geo)

    # Define the parameter for downloading the data
    Downloaded = 0

    # Calculate the ET data date by date
    for Date in Dates:

        # Define the output name and folder
        file_name = 'ET_ETensemble250m_mm-month-1_monthly_%d.%02d.01.tif' % (Date.year, Date.month)
        output_file = os.path.join(output_folder, file_name)

        # If the output file does not exist, create it
        if not os.path.exists(output_file):

            # If not downloaded yet, download the data
            if Downloaded == 0:

                # Download the ETens data from the FTP server
                Download_ETens_from_WA_FTP(output_folder, Lat_tiles, Lon_tiles)

                # Unzip the folder
                Unzip_ETens_data(output_folder, Lat_tiles, Lon_tiles)
                Downloaded = 1

            # Create the ET data for the area of interest
            ET_data = Collect_dataset(output_folder, Date, Lat_tiles, Lon_tiles, latlim, lonlim)

            # Save this array as a tiff file
            DC.Save_as_tiff(output_file, ET_data, geo_new, projection='WGS84')

        # Adjust Waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)

    '''
    # Remove all the raw dataset
    for v_tile in range(Lat_tiles[0], Lat_tiles[1] + 1):
        for h_tile in range(Lon_tiles[0], Lon_tiles[1] + 1):
            Tilename = "h%sv%s" % (h_tile, v_tile)
            filename = os.path.join(output_folder, Tilename)
            if os.path.exists(filename):
                shutil.rmtree(filename)

    # Remove all .zip files
    for f in os.listdir(output_folder):
        if re.search(".zip", f):
            os.remove(os.path.join(output_folder, f))
    '''
    return
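# Standalone check of the ETensemble tile arithmetic above: tiles are
# 10-degree blocks, with the vertical index derived from a 100-degree
# latitude offset and the horizontal index from a 190-degree longitude
# offset, plus a small 0.00125 margin on the upper edges. The extent is
# made up for illustration.
import numpy as np

latlim, lonlim = [5.0, 18.0], [32.0, 47.0]
Min_lat_tile = int(np.floor((100 - latlim[1]) / 10))
Max_lat_tile = int(np.floor((100 - latlim[0] - 0.00125) / 10))
Min_lon_tile = int(np.floor((190 + lonlim[0]) / 10))
Max_lon_tile = int(np.floor((190 + lonlim[1] - 0.00125) / 10))
print('%d %d %d %d' % (Min_lat_tile, Max_lat_tile, Min_lon_tile, Max_lon_tile))
# 8 9 22 23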