def livestock_feed(output_folder, lu_fh, ndm_fhs, feed_dict, live_feed, cattle_fh, fraction_fhs, ndmdates):
    """
    Calculate natural livestock feed production.

    INPUTS
    ----------
    output_folder : str
        folder in which a "Feed" subfolder is created for the outputs
    lu_fh : str
        filehandle for land use map
    ndm_fhs: nd array
        array of filehandles of NDM maps
    feed_dict: dict
        dictionnary 'pasture class':[list of LULC]
    live_feed: dict
        dictionnary 'pasture class':[fraction available as feed]
    cattle_fh : str
        filehandle for cattle map (currently unused)
    fraction_fhs: nd array
        array of filehandles of yield-fraction maps (same order as ndm_fhs)
    ndmdates: nd array
        array of dates for NDM maps

    RETURNS
    -------
    (feed_fhs_landscape, feed_fhs_incremental) : (list, list)
        filehandles of the saved landscape and incremental feed maps
    """
    out_folder = os.path.join(output_folder, "Feed")
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    # Pixel area in ha (MapPixelAreakm returns km2; 1 km2 = 100 ha)
    area_ha = becgis.MapPixelAreakm(lu_fh) * 100
    LULC = RC.Open_tiff_array(lu_fh)
#    cattle = RC.Open_tiff_array(cattle_fh)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)

    # Per-pixel fraction of NDM that is available as feed, looked up from
    # the pasture class of each LULC value
    f_pct = np.zeros(LULC.shape)
    for lu_type in feed_dict.keys():
        classes = feed_dict[lu_type]
        mask = np.logical_or.reduce([LULC == value for value in classes])
        f_pct[mask] = live_feed[lu_type]

    feed_fhs_landscape = []
    feed_fhs_incremental = []
    for ndm_fh, fraction_fh, date1 in zip(ndm_fhs, fraction_fhs, ndmdates):
        year = '%d' % date1.year
        month = '%02d' % date1.month
        yield_fract = RC.Open_tiff_array(fraction_fh)
        # os.path.join keeps the paths portable (the old '\\' concatenation
        # only worked on Windows)
        out_fh_l = os.path.join(out_folder, 'feed_prod_landscape_%s_%s.tif' % (year, month))
        out_fh_i = os.path.join(out_folder, 'feed_prod_incremental_%s_%s.tif' % (year, month))
        NDM = becgis.OpenAsArray(ndm_fh, nan_values=True)
        NDM_feed = NDM * f_pct
        # Split feed production into the incremental part (scaled by the
        # yield fraction) and the landscape part (the remainder)
        NDM_feed_incremental = NDM_feed * yield_fract * area_ha / 1e6
        NDM_feed_landscape = (NDM_feed * (1 - yield_fract)) * area_ha / 1e6
        DC.Save_as_tiff(out_fh_l, NDM_feed_landscape, geo_out)
        DC.Save_as_tiff(out_fh_i, NDM_feed_incremental, geo_out)
        feed_fhs_landscape.append(out_fh_l)
        feed_fhs_incremental.append(out_fh_i)

    return feed_fhs_landscape, feed_fhs_incremental
def fuel_wood(output_folder, lu_fh, ndm_fhs, fraction_fhs, ndmdates):
    """
    Calculate natural fuel wood production.

    INPUTS
    ----------
    output_folder : str
        folder in which a "Fuel" subfolder is created for the outputs
    lu_fh : str
        filehandle for land use map
    ndm_fhs: nd array
        array of filehandles of NDM maps
    fraction_fhs: nd array
        array of filehandles of yield-fraction maps (same order as ndm_fhs)
    ndmdates: nd array
        array of dates for NDM maps

    RETURNS
    -------
    (fuel_fhs_landscape, fuel_fhs_incremental) : (list, list)
        filehandles of the saved landscape and incremental fuel-wood maps
    """
    out_folder = os.path.join(output_folder, "Fuel")
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    # Pixel area in ha (MapPixelAreakm returns km2; 1 km2 = 100 ha)
    area_ha = becgis.MapPixelAreakm(lu_fh) * 100
    LULC = RC.Open_tiff_array(lu_fh)
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)

    # Mask of the LULC classes that produce fuel wood
    fuel_classes = [1, 8, 9, 10, 11, 12, 13]
    fuel_mask = np.zeros(LULC.shape)
    for fc in fuel_classes:
        fuel_mask[np.where(LULC == fc)] = 1

    # Fraction of NDM counted as fuel wood (was an inline magic number)
    fuel_fraction = .05

    fuel_fhs_landscape = []
    fuel_fhs_incremental = []
    for ndm_fh, fraction_fh, date1 in zip(ndm_fhs, fraction_fhs, ndmdates):
        yield_fract = RC.Open_tiff_array(fraction_fh)
        year = '%d' % date1.year
        month = '%02d' % date1.month
        # os.path.join keeps the paths portable (the old '\\' concatenation
        # only worked on Windows)
        out_fh_l = os.path.join(out_folder, 'fuel_prod_landscape_%s_%s.tif' % (year, month))
        out_fh_i = os.path.join(out_folder, 'fuel_prod_incremental_%s_%s.tif' % (year, month))
        NDM = becgis.OpenAsArray(ndm_fh, nan_values=True)

        # Split fuel-wood production into incremental and landscape parts
        NDM_fuel_incremental = NDM * fuel_fraction * fuel_mask * yield_fract * area_ha / 1e6
        NDM_fuel_landscape = NDM * fuel_fraction * fuel_mask * (1 - yield_fract) * area_ha / 1e6
        DC.Save_as_tiff(out_fh_i, NDM_fuel_incremental, geo_out)
        DC.Save_as_tiff(out_fh_l, NDM_fuel_landscape, geo_out)
        fuel_fhs_landscape.append(out_fh_l)
        fuel_fhs_incremental.append(out_fh_i)

    return fuel_fhs_landscape, fuel_fhs_incremental
def Clip_Dataset(local_filename, Filename_out, latlim, lonlim):
    """Clip the global HiHydroSoil raster to latlim/lonlim and save as GeoTIFF."""
    import wa.General.raster_conversions as RC

    # Load the full global array
    world = RC.Open_tiff_array(local_filename)

    # Column (X) and row (Y) index ranges of the requested window on the
    # 0.00833333-degree global grid: columns count east from 180W, rows
    # count south from 90N
    x_start = int(np.floor((180 + lonlim[0]) / 0.00833333))
    x_end = int(np.ceil((180 + lonlim[1]) / 0.00833333))
    y_start = int(np.ceil((90 - latlim[1]) / 0.00833333))
    y_end = int(np.floor((90 - latlim[0]) / 0.00833333))

    # Georeference of the clipped window (upper-left corner, pixel size)
    geo = (-180 + 0.00833333 * x_start, 0.00833333, 0,
           90 - 0.00833333 * y_start, 0, -0.00833333)

    # Cut the window out of the world array and write it out
    clipped = world[y_start:y_end, x_start:x_end]
    DC.Save_as_tiff(Filename_out, clipped, geo, "WGS84")
def Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID, TimeStep):
    """
    This function retrieves ALEXI data for a given date from the
    ftp.wateraccounting.unesco-ihe.org server.

    Restrictions:
    The data and this python file may not be distributed to others without
    permission of the WA+ team due data restriction of the ALEXI developers.

    Keyword arguments:
    local_filename -- name of the temporary file which contains global ALEXI data
    DirFile -- name of the end file with the weekly ALEXI data
    filename -- name of the end file
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    yID, xID -- [start, end] row / column indices of the region in the
        global raster
    TimeStep -- 'weekly' or 'daily'
    """
    # Collect account and FTP information
    username, password = WebAccounts.Accounts(Type='FTP_WA')
    ftpserver = "ftp.wateraccounting.unesco-ihe.org"

    # Download data from FTP
    ftp = FTP(ftpserver)
    ftp.login(username, password)
    # '==' instead of 'is': identity comparison of strings only works when
    # the interpreter happens to intern both operands
    if TimeStep == "weekly":
        directory = "/WaterAccounting/Data_Satellite/Evaporation/ALEXI/World/"
    if TimeStep == "daily":
        directory = "/WaterAccounting/Data_Satellite/Evaporation/ALEXI/World_05182018/"
    ftp.cwd(directory)
    lf = open(local_filename, "wb")
    ftp.retrbinary("RETR " + filename, lf.write)
    lf.close()

    if TimeStep == "weekly":
        # Open global ALEXI data
        dataset = RC.Open_tiff_array(local_filename)

        # Clip extend out of world data
        data = dataset[yID[0]:yID[1], xID[0]:xID[1]]
        data[data < 0] = -9999

    if TimeStep == "daily":
        # The daily product is a gzipped flat binary of little-endian
        # 32-bit floats on a 3000 x 7200 grid, stored bottom-up
        DC.Extract_Data_gz(local_filename, os.path.splitext(local_filename)[0])
        raw_data = np.fromfile(os.path.splitext(local_filename)[0], dtype="<f4")
        dataset = np.flipud(np.resize(raw_data, [3000, 7200]))
        data = dataset[yID[0]:yID[1], xID[0]:xID[1]]
        data[data < 0] = -9999

    # make geotiff file (0.05-degree pixels, anchored at the north-west corner)
    geo = [lonlim[0], 0.05, 0, latlim[1], 0, -0.05]
    DC.Save_as_tiff(name=DirFile, data=data, geo=geo, projection="WGS84")

    return
def gap_filling(dataset, NoDataValue, method=1):
    """
    This function fills the no data gaps in a numpy array or GeoTIFF.

    Keyword arguments:
    dataset -- 'C:/' path to the source GeoTIFF, or a 2D numpy array
    NoDataValue -- value that marks the gaps (may be np.nan)
    method -- 1 = nearest-neighbour interpolation (default),
              2 = linear interpolation

    Returns the '<name>_GF.tif' output path when a file path was given,
    otherwise the gap-filled numpy array.
    """
    # Accept either a GeoTIFF path or an in-memory array; only a string
    # has .split, so an array raises AttributeError and falls through.
    # (The old bare 'except:' hid every other error as well.)
    try:
        if dataset.split('.')[-1] == 'tif':
            # Open the numpy array
            data = Open_tiff_array(dataset)
            Save_as_tiff = 1
        else:
            data = dataset
            Save_as_tiff = 0
    except AttributeError:
        data = dataset
        Save_as_tiff = 0

    # Build the mask of valid pixels. NaN needs np.isnan: the old
    # 'NoDataValue is np.nan' identity test fails for any other nan object,
    # and 'data == nan' is always False.
    if isinstance(NoDataValue, float) and np.isnan(NoDataValue):
        mask = ~(np.isnan(data))
    else:
        mask = ~(data == NoDataValue)

    # Coordinates and values of the valid pixels
    xx, yy = np.meshgrid(np.arange(data.shape[1]), np.arange(data.shape[0]))
    xym = np.vstack((np.ravel(xx[mask]), np.ravel(yy[mask]))).T
    data0 = np.ravel(data[:, :][mask])

    # Interpolate the full grid from the valid samples
    if method == 1:
        interp0 = scipy.interpolate.NearestNDInterpolator(xym, data0)
        data_end = interp0(np.ravel(xx), np.ravel(yy)).reshape(xx.shape)
    if method == 2:
        interp0 = scipy.interpolate.LinearNDInterpolator(xym, data0)
        data_end = interp0(np.ravel(xx), np.ravel(yy)).reshape(xx.shape)

    if Save_as_tiff == 1:
        # Imported lazily so array-only use does not require the WA+ package
        import wa.General.data_conversions as DC
        EndProduct = dataset[:-4] + '_GF.tif'

        # collect the geoinformation
        geo_out, proj, size_X, size_Y = Open_array_info(dataset)

        # Save the filled array as geotiff
        DC.Save_as_tiff(name=EndProduct, data=data_end, geo=geo_out, projection=proj)
    else:
        EndProduct = data_end

    return (EndProduct)
def Download_ALEXI_from_WA_FTP(local_filename, DirFile, filename, lonlim, latlim, yID, xID):
    """
    This function retrieves ALEXI data for a given date from the
    ftp.wateraccounting.unesco-ihe.org server.

    Restrictions:
    The data and this python file may not be distributed to others without
    permission of the WA+ team due data restriction of the ALEXI developers.

    Keyword arguments:
    local_filename -- name of the temporary file which contains global ALEXI data
    DirFile -- name of the end file with the weekly ALEXI data
    filename -- name of the end file
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    yID, xID -- [start, end] row / column indices of the region within the
        global 0.05-degree raster
    """
    try:
        # Collect account and FTP information
        username, password = WebAccounts.Accounts(Type='FTP_WA')
        ftpserver = "ftp.wateraccounting.unesco-ihe.org"

        # Download data from FTP
        ftp = FTP(ftpserver)
        ftp.login(username, password)
        directory = "/WaterAccounting/Data_Satellite/Evaporation/ALEXI/World/"
        ftp.cwd(directory)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write)
        lf.close()

        # Open global ALEXI data
        dataset = RC.Open_tiff_array(local_filename)

        # Clip extend out of world data
        data = dataset[yID[0]:yID[1], xID[0]:xID[1]]
        # Negative values are flagged with the -9999 no-data value
        data[data < 0] = -9999

        # make geotiff file (0.05-degree pixels, anchored at the north-west corner)
        geo = [lonlim[0], 0.05, 0, latlim[1], 0, -0.05]
        DC.Save_as_tiff(name=DirFile, data=data, geo=geo, projection="WGS84")

        # delete old tif file
        os.remove(local_filename)

    # NOTE(review): the bare except treats every failure (login, network,
    # disk) as a missing file and only prints a message
    except:
        print "file not exists"
    return
def recycle(output_folder, et_bg_fhs, recy_ratio, lu_fh, et_type):
    """
    Scale a series of background-ET maps by a recycling ratio and save them.

    INPUTS
    ----------
    output_folder : str
        folder in which a "temp_et_recycle" subfolder is created
    et_bg_fhs : list
        filehandles of the background ET maps
    recy_ratio : float
        recycling ratio the ET maps are multiplied with
    lu_fh : str
        filehandle for land use map (source of the georeference)
    et_type : str
        tag inserted into the output filenames

    RETURNS
    -------
    recycle_fhs : list
        filehandles of the saved recycled-ET maps
    """
    out_folder = os.path.join(output_folder, "temp_et_recycle")
    geo_out, proj, size_X, size_Y = RC.Open_array_info(lu_fh)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    recycle_fhs = []
    for et_fh in et_bg_fhs:
        # os.path.join keeps the path portable (the old '\\' concatenation
        # only worked on Windows); the [-11:-4] slice carries the date part
        # of the input filename over into the output name
        out_fh = os.path.join(out_folder, "recycled_et_" + et_type + et_fh[-11:-4] + ".tif")
        et = becgis.OpenAsArray(et_fh, nan_values=True)
        et_recy = et * recy_ratio
        DC.Save_as_tiff(out_fh, et_recy, geo_out)
        recycle_fhs.append(out_fh)

    return recycle_fhs
def RetrieveData(Date, args): """ This function retrieves MOD11 LST data for a given date from the https://e4ftl01.cr.usgs.gov/ server. Keyword arguments: Date -- 'yyyy-mm-dd' args -- A list of parameters defined in the DownloadData function. """ # Argument [ output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, TimeStep, hdf_library ] = args # Collect the data from the MODIS webpage and returns the data and lat and long in meters of those tiles try: Collect_data(TilesHorizontal, TilesVertical, Date, output_folder, TimeStep, hdf_library) except: print "Was not able to download the file" # Define the output name of the collect data function name_collect = os.path.join(output_folder, 'Merged.tif') # Reproject the MODIS product to epsg_to epsg_to = '4326' name_reprojected = RC.reproject_MODIS(name_collect, epsg_to) # Clip the data to the users extend data, geo = RC.clip_data(name_reprojected, latlim, lonlim) # Save results as Gtiff if TimeStep == 8: LSTfileName = os.path.join( output_folder, 'LST_MOD11A2_K_8-daily_' + Date.strftime('%Y') + '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif') if TimeStep == 1: LSTfileName = os.path.join( output_folder, 'LST_MOD11A1_K_daily_' + Date.strftime('%Y') + '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif') DC.Save_as_tiff(name=LSTfileName, data=data, geo=geo, projection='WGS84') # remove the side products os.remove(os.path.join(output_folder, name_collect)) os.remove(os.path.join(output_folder, name_reprojected)) return True
def RetrieveData(Date, args): """ This function retrieves MOD16 ET data for a given date from the ftp://ftp.ntsg.umt.edu/ server. Keyword arguments: Date -- 'yyyy-mm-dd' args -- A list of parameters defined in the DownloadData function. """ # Argument [ output_folder, TilesVertical, TilesHorizontal, latlim, lonlim, timestep, hdf_library ] = args # Collect the data from the MODIS webpage and returns the data and lat and long in meters of those tiles try: Collect_data(TilesHorizontal, TilesVertical, Date, output_folder, timestep, hdf_library) except: print "Was not able to download the file" # Define the output name of the collect data function name_collect = os.path.join(output_folder, 'Merged.tif') # Reproject the MODIS product to epsg_to epsg_to = '4326' name_reprojected = RC.reproject_MODIS(name_collect, epsg_to) # Clip the data to the users extend data, geo = RC.clip_data(name_reprojected, latlim, lonlim) if timestep == 'monthly': ETfileName = os.path.join( output_folder, 'ET_MOD16A2_mm-month-1_monthly_' + Date.strftime('%Y') + '.' + Date.strftime('%m') + '.01.tif') elif timestep == '8-daily': ETfileName = os.path.join( output_folder, 'ET_MOD16A2_mm-8days-1_8-daily_' + Date.strftime('%Y') + '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif') DC.Save_as_tiff(name=ETfileName, data=data, geo=geo, projection='WGS84') # remove the side products os.remove(os.path.join(output_folder, name_collect)) os.remove(os.path.join(output_folder, name_reprojected)) return ()
def Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim):
    """
    This function retrieves the static Gray Water Footprint (GWF) map from the
    ftp.wateraccounting.unesco-ihe.org server.

    Keyword arguments:
    output_folder -- folder in which the temporary world download is stored
    filename_Out -- name of the end file
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    """
    try:
        # Collect account and FTP information
        username, password = WebAccounts.Accounts(Type='FTP_WA')
        ftpserver = "ftp.wateraccounting.unesco-ihe.org"

        # Set the file names and directories
        filename = "Gray_Water_Footprint.tif"
        local_filename = os.path.join(output_folder, filename)

        # Download data from FTP
        ftp = FTP(ftpserver)
        ftp.login(username, password)
        directory = "/WaterAccounting_Guest/Static_WA_Datasets/"
        ftp.cwd(directory)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write)
        lf.close()

        # Clip extend out of world data
        dataset, Geo_out = RC.clip_data(local_filename, latlim, lonlim)

        # make geotiff file
        DC.Save_as_tiff(name=filename_Out, data=dataset, geo=Geo_out, projection="WGS84")

        # delete old tif file
        os.remove(local_filename)

    # NOTE(review): the bare except treats every failure (login, network,
    # disk) as a missing file and only prints a message
    except:
        print "file not exists"
    return
def split_yield(output_folder, p_fhs, et_blue_fhs, et_green_fhs, ab=(1.0, 1.0)):
    """
    Calculate the yield-fraction maps from precipitation and blue/green ET.

    INPUTS
    ----------
    output_folder : str
        folder in which a "split_y" subfolder is created for the outputs
    p_fhs : list
        filehandles of the precipitation maps
    et_blue_fhs : list
        filehandles of the blue-ET maps (same order as p_fhs)
    et_green_fhs : list
        filehandles of the green-ET maps (same order as p_fhs)
    ab : tuple
        the two coefficients passed to sh3.split_Yield

    RETURNS
    -------
    sp_yield_fhs : list
        filehandles of the saved yield-fraction maps
    """
    out_folder = os.path.join(output_folder, "split_y")
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)

    sp_yield_fhs = []
    geo_out, proj, size_X, size_Y = RC.Open_array_info(p_fhs[0])
    for m in range(len(p_fhs)):
        # os.path.join keeps the path portable (the old '\\' concatenation
        # only worked on Windows); the [-12:] slice carries the date part
        # of the blue-ET filename over into the output name
        out_fh = os.path.join(out_folder, 'split_yield' + et_blue_fhs[m][-12:])
        P = RC.Open_tiff_array(p_fhs[m])
        ETBLUE = RC.Open_tiff_array(et_blue_fhs[m])
        ETGREEN = RC.Open_tiff_array(et_green_fhs[m])
        # Blue fraction of total ET and precipitation normalized by its maximum
        etbfraction = ETBLUE / (ETBLUE + ETGREEN)
        pfraction = P / np.nanmax(P)
        fraction = sh3.split_Yield(pfraction, etbfraction, ab[0], ab[1])
        DC.Save_as_tiff(out_fh, fraction, geo_out)
        sp_yield_fhs.append(out_fh)

    return sp_yield_fhs
def Clip_Dataset(local_filename, Filename_out, latlim, lonlim):
    """Clip the global SEBS monthly-ET matrix to latlim/lonlim and save as GeoTIFF."""
    # Load the 'ETmon' matrix from the SEBS .mat file
    et_world = spio.loadmat(local_filename)['ETmon']

    # Column (X) and row (Y) index ranges of the requested window on the
    # 0.05-degree global grid: columns count east from 180W, rows count
    # south from 90N
    col0 = int(np.floor((180 + lonlim[0]) / 0.05))
    col1 = int(np.ceil((180 + lonlim[1]) / 0.05))
    row0 = int(np.ceil((90 - latlim[1]) / 0.05))
    row1 = int(np.floor((90 - latlim[0]) / 0.05))

    # Georeference of the clipped window (upper-left corner, pixel size)
    geo = (-180 + 0.05 * col0, 0.05, 0, 90 - 0.05 * row0, 0, -0.05)

    # Cut the window out of the world array and write it out
    DC.Save_as_tiff(Filename_out, et_world[row0:row1, col0:col1], geo, "WGS84")
def dry_season_bf(output_folder, WPixOutFile, dry_months):
    """
    Average the monthly baseflow over the dry-season months and save the
    result as a GeoTIFF.

    Parameters
    ----------
    output_folder : str
        folder in which a "dry_bf" subfolder is created for the output
    WPixOutFile : str
        path to the WaterPix output netCDF file
    dry_months : sequence
        month numbers that make up the dry season

    Returns
    -------
    dry_bf_fh : str
        filehandle of the saved mean dry-season baseflow map
    """
    data_path = "dry_bf"
    out_folder = os.path.join(output_folder, data_path)
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    WPOut = nc.Dataset(WPixOutFile)
    lat = WPOut.variables['latitude']
    lon = WPOut.variables['longitude']
    # Mean grid spacing derived from consecutive coordinate differences
    px_size_lat = np.mean([lat[i+1]-lat[i] for i in range(len(lat)-1)])
    px_size_lon = np.mean([lon[i+1]-lon[i] for i in range(len(lon)-1)])
    # Geotransform built from the coordinate extremes; assumes the latitude
    # axis is ordered so px_size_lat gets the right sign -- TODO confirm
    geo_out = (np.min(lon)-px_size_lon/2, px_size_lon, 0, np.max(lat)-px_size_lat/2, 0, px_size_lat)
    times = WPOut.variables['time_yyyymm'][:]
    years = WPOut.variables['time_yyyy'][:]
    # Month number of every time step (times are yyyymm integers)
    months = np.array(times) % 100
    #might need this later if not a complete year etc.
    # Indices of the time steps that fall in the dry season
    dry_months2 = np.where([months[i] in dry_months for i in range(len(months))])[0]
    bf = WPOut.variables['Baseflow_M'][:].data
    # Select the dry-season slices and average them over time
    dry_bf = bf[:, :, dry_months2]
    dry_bf = np.nanmean(dry_bf, axis=2)
    # NOTE(review): '%d' % years only works when 'years' holds a single
    # element; with several years this raises a TypeError. The '\\'
    # separator is also Windows-only.
    dry_bf_fh = out_folder + '\\dry_bf_%d.tif' %years
    DC.Save_as_tiff(dry_bf_fh, dry_bf, geo_out)
    return dry_bf_fh
def Calc_Rainy_Days(Dir_Basin, Data_Path_P, Startdate, Enddate): """ This functions calculates the amount of rainy days based on daily precipitation data. Parameters ---------- Dir_Basin : str Path to all the output data of the Basin Data_Path_P : str Path from the Dir_Basin to the daily rainfall data Startdate : str Contains the start date of the model 'yyyy-mm-dd' Enddate : str Contains the end date of the model 'yyyy-mm-dd' Returns ------- Data_Path_RD : str Path from the Dir_Basin to the rainy days data """ # import WA+ modules import wa.General.data_conversions as DC import wa.General.raster_conversions as RC # Create an output directory to store the rainy days tiffs Data_Path_RD = 'Rainy_Days' Dir_RD = os.path.join(Dir_Basin, Data_Path_RD) if not os.path.exists(Dir_RD): os.mkdir(Dir_RD) # Define the dates that must be created Dates = pd.date_range(Startdate, Enddate, freq='MS') # Set working directory to the rainfall folder Dir_path_Prec = os.path.join(Dir_Basin, Data_Path_P) os.chdir(Dir_path_Prec) # Open all the daily data and store the data in a 3D array for Date in Dates: # Define the year and month and amount of days in month year = Date.year month = Date.month daysinmonth = calendar.monthrange(year, month)[1] # Set the third (time) dimension of array starting at 0 i = 0 # Find all files of that month files = glob.glob('*daily_%d.%02d.*.tif' % (year, month)) # Check if the amount of files corresponds with the amount of days in month if len(files) is not daysinmonth: print 'ERROR: Not all Rainfall days for month %d and year %d are downloaded' % ( month, year) # Loop over the days and store data in raster for File in files: dir_file = os.path.join(Dir_path_Prec, File) # Get array information and create empty numpy array for daily rainfall when looping the first file if File == files[0]: # Open geolocation info and define projection geo_out, proj, size_X, size_Y = RC.Open_array_info(dir_file) if int(proj.split('"')[-2]) == 4326: proj = "WGS84" # Create 
empty array for the whole month P_Daily = np.zeros([daysinmonth, size_Y, size_X]) # Open data and put the data in 3D array Data = RC.Open_tiff_array(dir_file) # Remove the weird numbers Data[Data < 0] = 0 # Add the precipitation to the monthly cube P_Daily[i, :, :] = Data i += 1 # Define a rainy day P_Daily[P_Daily > 0.201] = 1 P_Daily[P_Daily != 1] = 0 # Sum the amount of rainy days RD_one_month = np.nansum(P_Daily, 0) # Define output name Outname = os.path.join( Dir_RD, 'Rainy_Days_NumOfDays_monthly_%d.%02d.01.tif' % (year, month)) # Save tiff file DC.Save_as_tiff(Outname, RD_one_month, geo_out, proj) return (Data_Path_RD)
def RetrieveData_monthly(Date, args):
    """
    This function retrieves GLDAS monthly data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.

    Returns True; the result is written to disk as
    '<path>/<VarStr>_GLDAS-NOAH_<unit>_monthly_yyyy.mm.dd.tif'.
    """
    # Argument
    [path, url, Var, VarStr, VarInfo,
     TimeCase, xID, yID, lonlim, latlim, CaseParameters, username, password] = args

    # Open variable info parameters (scale factor of this GLDAS variable)
    VarFactor = VarInfo.factors[Var]

    # Check whether the file already exist or the worldfile is downloaded
    BasinDir = path + '/' + VarStr + '_GLDAS-NOAH_' + \
        VarInfo.units[Var] + '_monthly_' + Date.strftime('%Y.%m.%d') + \
        '.tif'

    # Define month and year of current month
    Y = Date.year
    M = Date.month
    Mday = calendar.monthrange(Y, M)[1]

    # Check GLDAS version (the url ends in '2.0' or '2.1')
    version = url[-3:]

    # Check if the outputfile already excists
    if not os.path.isfile(BasinDir):

        # Reset the begin parameters for downloading
        downloaded = 0
        N = 0

        # Create the time dimension: month index since the start of the
        # record (2000 for v2.1, 1948 for v2.0)
        if version == '2.1':
            zID = (Y - 2000) * 12 + (M - 1)
        elif version == '2.0':
            zID = (Y - 1948) * 12 + (M - 1)

        # define total url (OPeNDAP ascii subset request: [time][lat][lon])
        url_GLDAS = url + '.ascii?%s[%s][%s:1:%s][%s:1:%s]' % (
            Var, zID, yID[0], yID[1], xID[0], xID[1])

        # if not downloaded try to download file (up to 10 attempts)
        while downloaded == 0:
            try:
                # open URL; if the first attempt raises, retry with
                # certificate verification disabled
                try:
                    dataset = requests.get(url_GLDAS, allow_redirects=False, stream=True)
                except:
                    from requests.packages.urllib3.exceptions import InsecureRequestWarning
                    requests.packages.urllib3.disable_warnings(
                        InsecureRequestWarning)
                    dataset = requests.get(url_GLDAS, allow_redirects=False, stream=True, verify=False)
                # Follow the redirect manually, now with the credentials
                try:
                    get_dataset = requests.get(dataset.headers['location'], auth=(username, password), stream=True)
                except:
                    from requests.packages.urllib3.exceptions import InsecureRequestWarning
                    requests.packages.urllib3.disable_warnings(
                        InsecureRequestWarning)
                    get_dataset = requests.get(dataset.headers['location'], auth=(username, password), stream=True, verify=False)

                # download data (first save as text file)
                pathtext = os.path.join(path, 'temp%s.txt' % str(zID))
                z = open(pathtext, 'w')
                z.write(get_dataset.content)
                z.close()

                # Open text file and remove header and footer
                data_start = np.genfromtxt(pathtext, dtype=float, skip_header=1, skip_footer=6, delimiter=',')
                data = data_start[:, 1:]

                # Add the VarFactor (negative factors are added as an
                # offset, positive factors are multipliers)
                if VarFactor < 0:
                    data = data + VarFactor
                else:
                    data = data * VarFactor
                # Flux variables are scaled by the number of days in the month
                if VarInfo.types[Var] == 'flux':
                    data = data * Mday

                # Set Nan value for values lower than -9999
                data[data < -9999] = -9999

                # Say that download was succesfull
                downloaded = 1

            # If download was not succesfull
            except:
                data = []

                # Try another time
                N = N + 1

                # Stop trying after 10 times
                if N == 10:
                    print 'Data from ' + Date.strftime(
                        '%Y-%m-%d') + ' is not available'
                    downloaded = 1

        # define geo: west and north edge of the window on the
        # 0.25-degree GLDAS grid
        lonlimGLDAS = xID[0] * 0.25 - 180
        latlimGLDAS = (yID[1] + 1) * 0.25 - 60

        # Save to geotiff file (flipud: source rows presumably run
        # south-to-north -- TODO confirm)
        geo = [lonlimGLDAS, 0.25, 0, latlimGLDAS, 0, -0.25]
        DC.Save_as_tiff(name=BasinDir, data=np.flipud(data[:, :]), geo=geo, projection="WGS84")

        # Delete data and text file
        del data
        os.remove(pathtext)

    return True
def RetrieveData_daily(Date, args):
    """
    This function retrieves GLDAS daily data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.

    Returns True; one GeoTIFF per requested statistic (mean/max/min over
    the day's eight sub-daily slices) is written to disk.
    """
    # Open all the parameters
    [path, url, Var, VarStr, VarInfo,
     TimeCase, xID, yID, lonlim, latlim, CaseParameters, username, password] = args
    [selected, types] = CaseParameters

    # Reset the begin parameters for downloading
    # NOTE(review): 'downloaded' and 'data_end' are not reset inside the
    # 'for T in types' loop, so the download runs once and the later
    # statistics reuse the same 8-slice block -- verify this is intended.
    downloaded = 0
    N = 0
    data_end = []

    # Check GLDAS version (the url ends in '2.0' or '2.1')
    version = url[-3:]

    # Open all variable info
    for T in types:
        if T == 'mean':
            VarStr = VarInfo.names[Var]
        else:
            VarStr = VarInfo.names[Var] + '-' + T

        # Check whether the file already exist or
        # the worldfile is downloaded
        BasinDir = path[T] + '/' + VarStr + '_GLDAS-NOAH_' + \
            VarInfo.units[Var] + '_daily_' + Date.strftime('%Y.%m.%d') + \
            '.tif'

        # Check if the outputfile already excists
        if not os.path.isfile(BasinDir):

            # Create the time dimension: index of the day's first of eight
            # sub-daily slices since the start of the record
            if version == '2.0':
                zID_start = int(((Date - pd.Timestamp("1948-1-1")).days) * 8)
                zID_end = zID_start + 7
            elif version == '2.1':
                zID_start = int(((Date - pd.Timestamp("2000-1-1")).days) * 8)
                zID_end = zID_start + 7

            # define total url (OPeNDAP ascii subset: [time][lat][lon])
            url_GLDAS = url + '.ascii?%s[%s:1:%s][%s:1:%s][%s:1:%s]' % (
                Var, zID_start, zID_end, yID[0], yID[1], xID[0], xID[1])

            # if not downloaded try to download file (up to 10 attempts)
            while downloaded == 0:
                try:
                    # open URL; if the first attempt raises, retry with
                    # certificate verification disabled
                    try:
                        dataset = requests.get(url_GLDAS, allow_redirects=False, stream=True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(
                            InsecureRequestWarning)
                        dataset = requests.get(url_GLDAS, allow_redirects=False, stream=True, verify=False)
                    # Follow the redirect manually, now with the credentials
                    try:
                        get_dataset = requests.get(dataset.headers['location'], auth=(username, password), stream=True)
                    except:
                        from requests.packages.urllib3.exceptions import InsecureRequestWarning
                        requests.packages.urllib3.disable_warnings(
                            InsecureRequestWarning)
                        get_dataset = requests.get(dataset.headers['location'], auth=(username, password), stream=True, verify=False)

                    # download data (first save as text file)
                    pathtext = os.path.join(path[T], 'temp%s.txt' % str(zID_start))
                    z = open(pathtext, 'w')
                    z.write(get_dataset.content)
                    z.close()

                    # Reshape data to (8 slices, rows, cols)
                    datashape = [8, yID[1] - yID[0] + 1, xID[1] - xID[0] + 1]
                    data_start = np.genfromtxt(pathtext, dtype=float, skip_header=1, skip_footer=6, delimiter=',')
                    data_list = np.asarray(data_start[:, 1:])
                    data_end = np.resize(data_list, (8, datashape[1], datashape[2]))
                    os.remove(pathtext)

                    # Add the VarFactor, leaving -9999 no-data pixels
                    # untouched (negative factors are added as an offset,
                    # positive factors are multipliers)
                    if VarInfo.factors[Var] < 0:
                        data_end[data_end != -9999] = data_end[
                            data_end != -9999] + VarInfo.factors[Var]
                    else:
                        data_end[data_end != -9999] = data_end[
                            data_end != -9999] * VarInfo.factors[Var]
                    data_end[data_end < -9999] = -9999

                    # define geo: west and north edge of the window on the
                    # 0.25-degree GLDAS grid
                    lonlimGLDAS = xID[0] * 0.25 - 180
                    latlimGLDAS = (yID[1] + 1) * 0.25 - 60

                    # Download was succesfull
                    downloaded = 1

                # If download was not succesfull
                except:

                    # Try another time
                    N = N + 1

                    # Stop trying after 10 times
                    if N == 10:
                        print 'Data from ' + Date.strftime(
                            '%Y-%m-%d') + ' is not available'
                        downloaded = 1

            try:
                # Save to geotiff file: collapse the 8 slices with the
                # requested statistic (flipud: source rows presumably run
                # south-to-north -- TODO confirm)
                if T == 'mean':
                    data = np.flipud(np.mean(data_end, axis=0))
                if T == 'max':
                    data = np.flipud(np.max(data_end, axis=0))
                if T == 'min':
                    data = np.flipud(np.min(data_end, axis=0))

                geo = [lonlimGLDAS, 0.25, 0, latlimGLDAS, 0, -0.25]
                DC.Save_as_tiff(name=BasinDir, data=data, geo=geo, projection="WGS84")

            except:
                print 'GLDAS map from ' + Date.strftime(
                    '%Y-%m-%d') + ' is not created'

    return True
def NPP_GPP_Based(Dir_Basin, Data_Path_GPP, Data_Path_NPP, Startdate, Enddate): """ This functions calculated monthly NDM based on the yearly NPP and monthly GPP. Parameters ---------- Dir_Basin : str Path to all the output data of the Basin Data_Path_GPP : str Path from the Dir_Basin to the GPP data Data_Path_NPP : str Path from the Dir_Basin to the NPP data Startdate : str Contains the start date of the model 'yyyy-mm-dd' Enddate : str Contains the end date of the model 'yyyy-mm-dd' Simulation : int Defines the simulation Returns ------- Data_Path_NDM : str Path from the Dir_Basin to the normalized dry matter data """ # import WA+ modules import wa.General.data_conversions as DC import wa.General.raster_conversions as RC # Define output folder for Normalized Dry Matter Data_Path_NDM = os.path.join(Dir_Basin, "NDM") if not os.path.exists(Data_Path_NDM): os.mkdir(Data_Path_NDM) # Define monthly time steps that will be created Dates = pd.date_range(Startdate, Enddate, freq = 'MS') # Define the years that will be calculated Year_Start = int(Startdate[0:4]) Year_End = int(Enddate[0:4]) Years = range(Year_Start, Year_End+1) # Loop over the years for year in Years: # Change working directory to the NPP folder os.chdir(Data_Path_NPP) # Open yearly NPP data yearly_NPP_File = glob.glob('*yearly*%d.01.01.tif' %int(year))[0] Yearly_NPP = RC.Open_tiff_array(yearly_NPP_File) # Get the No Data Value of the NPP file dest = gdal.Open(yearly_NPP_File) NDV = dest.GetRasterBand(1).GetNoDataValue() # Set the No Data Value to Nan Yearly_NPP[Yearly_NPP == NDV] = np.nan # Change working directory to the GPP folder os.chdir(Data_Path_GPP) # Find all the monthly files of that year monthly_GPP_Files = glob.glob('*monthly*%d.*.01.tif' %int(year)) # Check if it are 12 files otherwise something is wrong and send the ERROR if not len(monthly_GPP_Files) == 12: print 'ERROR: Some monthly GPP Files are missing' # Get the projection information of the GPP inputs geo_out, proj, size_X, size_Y = 
RC.Open_array_info(monthly_GPP_Files[0]) geo_out_NPP, proj_NPP, size_X_NPP, size_Y_NPP = RC.Open_array_info(os.path.join(Data_Path_NPP,yearly_NPP_File)) if int(proj.split('"')[-2]) == 4326: proj = "WGS84" # Get the No Data Value of the GPP files dest = gdal.Open(monthly_GPP_Files[0]) NDV = dest.GetRasterBand(1).GetNoDataValue() # Create a empty numpy array Yearly_GPP = np.zeros([size_Y, size_X]) # Calculte the total yearly GPP for monthly_GPP_File in monthly_GPP_Files: # Open array Data = RC.Open_tiff_array(monthly_GPP_File) # Remove nan values Data[Data == NDV] = np.nan # Add data to yearly sum Yearly_GPP += Data # Check if size is the same of NPP and GPP otherwise resize if not (size_X_NPP is size_X or size_Y_NPP is size_Y): Yearly_NPP = RC.resize_array_example(Yearly_NPP, Yearly_GPP) # Loop over the monthly dates for Date in Dates: # If the Date is in the same year as the yearly NPP and GPP if Date.year == year: # Create empty GPP array monthly_GPP = np.ones([size_Y, size_X]) * np.nan # Get current month month = Date.month # Get the GPP file of the current year and month monthly_GPP_File = glob.glob('*monthly_%d.%02d.01.tif' %(int(year), int(month)))[0] monthly_GPP = RC.Open_tiff_array(monthly_GPP_File) monthly_GPP[monthly_GPP == NDV] = np.nan # Calculate the NDM based on the monthly and yearly NPP and GPP (fraction of GPP) Monthly_NDM = Yearly_NPP * monthly_GPP / Yearly_GPP * (30./12.) *10000 # kg/ha # Define output name output_name = os.path.join(Data_Path_NDM, 'NDM_MOD17_kg_ha-1_monthly_%d.%02d.01.tif' %(int(year), int(month))) # Save the NDM as tiff file DC.Save_as_tiff(output_name, Monthly_NDM, geo_out, proj) return(Data_Path_NDM)
def main(Dir, Startdate='', Enddate='', latlim=[-60, 60], lonlim=[-180, 180], pixel_size=False, cores=False, LANDSAF=0, SourceLANDSAF='', Waitbar=1): """ This function downloads TRMM3B43 V7 (monthly) data Keyword arguments: Dir -- 'C:/file/to/path/' Startdate -- 'yyyy-mm-dd' Enddate -- 'yyyy-mm-dd' latlim -- [ymin, ymax] (values must be between -50 and 50) lonlim -- [xmin, xmax] (values must be between -180 and 180) cores -- The number of cores used to run the routine. It can be 'False' to avoid using parallel computing routines. Waitbar -- 1 (Default) will print the waitbar """ print 'Create monthly Reference ET data for period %s till %s' % ( Startdate, Enddate) # An array of monthly dates which will be calculated Dates = pd.date_range(Startdate, Enddate, freq='MS') # Create Waitbar if Waitbar == 1: import wa.Functions.Start.WaitbarConsole as WaitbarConsole total_amount = len(Dates) amount = 0 WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50) # Calculate the ETref day by day for every month for Date in Dates: # Collect date data Y = Date.year M = Date.month Mday = calendar.monthrange(Y, M)[1] Days = pd.date_range(Date, Date + pd.Timedelta(days=Mday), freq='D') StartTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-01' EndTime = Date.strftime('%Y') + '-' + Date.strftime('%m') + '-' + str( Mday) # Get ETref on daily basis daily(Dir=Dir, Startdate=StartTime, Enddate=EndTime, latlim=latlim, lonlim=lonlim, pixel_size=pixel_size, cores=cores, LANDSAF=LANDSAF, SourceLANDSAF=SourceLANDSAF, Waitbar=0) # Load DEM if not pixel_size: nameDEM = 'DEM_HydroShed_m_3s.tif' DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', nameDEM) else: DEMmap = os.path.join(Dir, 'HydroSHED', 'DEM', 'DEM_HydroShed_m_reshaped_for_ETref.tif') # Get some geo-data to save results geo_ET, proj, size_X, size_Y = RC.Open_array_info(DEMmap) dataMonth = np.zeros([size_Y, size_X]) for Day in Days[:-1]: DirDay = os.path.join( Dir, 'ETref', 'Daily', 
'ETref_mm-day-1_daily_' + Day.strftime('%Y.%m.%d') + '.tif') dataDay = gdal.Open(DirDay) Dval = dataDay.GetRasterBand(1).ReadAsArray().astype(np.float32) Dval[Dval < 0] = 0 dataMonth = dataMonth + Dval dataDay = None # make geotiff file output_folder_month = os.path.join(Dir, 'ETref', 'Monthly') if os.path.exists(output_folder_month) == False: os.makedirs(output_folder_month) DirMonth = os.path.join( output_folder_month, 'ETref_mm-month-1_monthly_' + Date.strftime('%Y.%m.%d') + '.tif') # Create the tiff file DC.Save_as_tiff(DirMonth, dataMonth, geo_ET, proj) # Create Waitbar if Waitbar == 1: amount += 1 WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50)
def DownloadData(output_folder, latlim, lonlim, parameter, resolution): """ This function downloads DEM data from HydroSHED Keyword arguments: output_folder -- directory of the result latlim -- [ymin, ymax] (values must be between -50 and 50) lonlim -- [xmin, xmax] (values must be between -180 and 180) Resample -- 1 = The data will be resampled to 0.001 degree spatial resolution -- 0 = The data will have the same pixel size as the data obtained from the internet """ # Define parameter depedent variables if parameter == "dir_3s": para_name = "DIR" unit = "-" resolution = '3s' parameter = 'dir' if parameter == "dem_3s": para_name = "DEM" unit = "m" resolution = '3s' parameter = 'dem' if parameter == "dir_15s": para_name = "DIR" unit = "-" resolution = '15s' parameter = 'dir' if parameter == "dem_15s": para_name = "DEM" unit = "m" resolution = '15s' parameter = 'dem' # converts the latlim and lonlim into names of the tiles which must be # downloaded if resolution == '3s': name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim, parameter) # Memory for the map x and y shape (starts with zero) size_X_tot = 0 size_Y_tot = 0 if resolution == '15s': name = Find_Document_names_15s(latlim, lonlim, parameter, resolution) nameResults = [] # Create a temporary folder for processing output_folder_trash = os.path.join(output_folder, "Temp") if not os.path.exists(output_folder_trash): os.makedirs(output_folder_trash) # Download, extract, and converts all the files to tiff files for nameFile in name: try: # Download the data from # http://earlywarning.usgs.gov/hydrodata/ output_file, file_name = Download_Data(nameFile, output_folder_trash, parameter, para_name,resolution) # extract zip data DC.Extract_Data(output_file, output_folder_trash) # Converts the data with a adf extention to a tiff extension. 
# The input is the file name and in which directory the data must be stored file_name_tiff = file_name.split('.')[0] + '_trans_temporary.tif' file_name_extract = file_name.split('_')[0:3] if resolution == '3s': file_name_extract2 = file_name_extract[0]+'_'+file_name_extract[1] if resolution == '15s': file_name_extract2 = file_name_extract[0]+'_'+file_name_extract[1]+'_15s' input_adf = os.path.join(output_folder_trash, file_name_extract2, file_name_extract2, 'hdr.adf') output_tiff = os.path.join(output_folder_trash, file_name_tiff) # convert data from adf to a tiff file output_tiff = DC.Convert_adf_to_tiff(input_adf, output_tiff) geo_out, proj, size_X, size_Y = RC.Open_array_info(output_tiff) if int(size_X) != int(6000) or int(size_Y) != int(6000): data = np.ones((6000, 6000)) * -9999 # Create the latitude bound Vfile = str(nameFile)[1:3] SignV = str(nameFile)[0] SignVer = 1 # If the sign before the filename is a south sign than latitude is negative if SignV is "s": SignVer = -1 Bound2 = int(SignVer)*int(Vfile) # Create the longitude bound Hfile = str(nameFile)[4:7] SignH = str(nameFile)[3] SignHor = 1 # If the sign before the filename is a west sign than longitude is negative if SignH is "w": SignHor = -1 Bound1 = int(SignHor) * int(Hfile) Expected_X_min = Bound1 Expected_Y_max = Bound2 + 5 Xid_start = int(np.round((geo_out[0] - Expected_X_min)/geo_out[1])) Xid_end = int(np.round(((geo_out[0] + size_X * geo_out[1]) - Expected_X_min)/geo_out[1])) Yid_start = int(np.round((Expected_Y_max - geo_out[3])/(-geo_out[5]))) Yid_end = int(np.round((Expected_Y_max - (geo_out[3] + (size_Y * geo_out[5])))/(-geo_out[5]))) data[Yid_start:Yid_end,Xid_start:Xid_end] = RC.Open_tiff_array(output_tiff) if np.max(data)==255: data[data==255] = -9999 data[data<-9999] = -9999 geo_in = [Bound1, 0.00083333333333333, 0.0, int(Bound2 + 5), 0.0, -0.0008333333333333333333] # save chunk as tiff file DC.Save_as_tiff(name=output_tiff, data=data, geo=geo_in, projection="WGS84") except: if 
resolution == '3s': # If tile not exist create a replacing zero tile (sea tiles) output = nameFile.split('.')[0] + "_trans_temporary.tif" output_tiff = os.path.join(output_folder_trash, output) file_name = nameFile data = np.ones((6000, 6000)) * -9999 data = data.astype(np.float32) # Create the latitude bound Vfile = str(file_name)[1:3] SignV = str(file_name)[0] SignVer = 1 # If the sign before the filename is a south sign than latitude is negative if SignV is "s": SignVer = -1 Bound2 = int(SignVer)*int(Vfile) # Create the longitude bound Hfile = str(file_name)[4:7] SignH = str(file_name)[3] SignHor = 1 # If the sign before the filename is a west sign than longitude is negative if SignH is "w": SignHor = -1 Bound1 = int(SignHor) * int(Hfile) # Geospatial data for the tile geo_in = [Bound1, 0.00083333333333333, 0.0, int(Bound2 + 5), 0.0, -0.0008333333333333333333] # save chunk as tiff file DC.Save_as_tiff(name=output_tiff, data=data, geo=geo_in, projection="WGS84") if resolution == '15s': print 'no 15s data is in dataset' if resolution =='3s': # clip data Data, Geo_data = RC.clip_data(output_tiff, latlim, lonlim) size_Y_out = int(np.shape(Data)[0]) size_X_out = int(np.shape(Data)[1]) # Total size of the product so far size_Y_tot = int(size_Y_tot + size_Y_out) size_X_tot = int(size_X_tot + size_X_out) if nameFile is name[0]: Geo_x_end = Geo_data[0] Geo_y_end = Geo_data[3] else: Geo_x_end = np.min([Geo_x_end,Geo_data[0]]) Geo_y_end = np.max([Geo_y_end,Geo_data[3]]) # create name for chunk FileNameEnd = "%s_temporary.tif" % (nameFile) nameForEnd = os.path.join(output_folder_trash, FileNameEnd) nameResults.append(str(nameForEnd)) # save chunk as tiff file DC.Save_as_tiff(name=nameForEnd, data=Data, geo=Geo_data, projection="WGS84") if resolution =='3s': #size_X_end = int(size_X_tot) #! #size_Y_end = int(size_Y_tot) #! size_X_end = int(size_X_tot/len(rangeLat)) + 1 #! size_Y_end = int(size_Y_tot/len(rangeLon)) + 1 #! 
# Define the georeference of the end matrix geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]] latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]] lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end] # merge chunk together resulting in 1 tiff map datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end, size_X_end) datasetTot[datasetTot<-9999] = -9999 if resolution =='15s': output_file_merged = os.path.join(output_folder_trash,'merged.tif') datasetTot, geo_out = Merge_DEM_15s(output_folder_trash, output_file_merged,latlim, lonlim) # name of the end result output_DEM_name = "%s_HydroShed_%s_%s.tif" %(para_name,unit,resolution) Save_name = os.path.join(output_folder, output_DEM_name) # Make geotiff file DC.Save_as_tiff(name=Save_name, data=datasetTot, geo=geo_out, projection="WGS84") os.chdir(output_folder) # Delete the temporary folder shutil.rmtree(output_folder_trash)
# -*- coding: utf-8 -*- """ Created on Mon Jun 19 10:09:38 2017 @author: tih """ Tfile = r"J:\Tyler\Input\Meteo\daily\avgsurft_inst\mean\T_GLDAS-NOAH_C_daily_2016.06.15.tif" Pfile = r"J:\Tyler\Input\Meteo\daily\psurf_f_inst\mean\P_GLDAS-NOAH_kpa_daily_2016.06.15.tif" Hfile = r"J:\Tyler\Input\Meteo\daily\qair_f_inst\mean\Hum_GLDAS-NOAH_kg-kg_daily_2016.06.15.tif" Outfilename = r"J:\Tyler\Input\Meteo\daily\Hum_Calculated\Humidity_percentage_Calculated_daily.tif" import gdal import os import wa.General.raster_conversions as RC import wa.General.data_conversions as DC import numpy as np geo_out, proj, size_X, size_Y = RC.Open_array_info(Tfile) Tdata = RC.Open_tiff_array(Tfile) Tdata[Tdata < -900] = np.nan Pdata = RC.Open_tiff_array(Pfile) Hdata = RC.Open_tiff_array(Hfile) Esdata = 0.6108 * np.exp((17.27 * Tdata) / (Tdata + 237.3)) HumData = np.minimum((1.6077717 * Hdata * Pdata / Esdata), 1) * 100 DC.Save_as_tiff(Outfilename, HumData, geo_out, "WGS84")
def RetrieveData(Date, args):
    """
    This function retrieves TRMM data for a given date from the
    ftp://disc2.nascom.nasa.gov server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Unpack the argument list built by DownloadData
    [output_folder, TimeCase, xID, yID, lonlim, latlim] = args

    year = Date.year
    month = Date.month
    day = Date.day

    # NASA Earthdata credentials for the GES DISC OPeNDAP server
    from wa import WebAccounts
    username, password = WebAccounts.Accounts(Type='NASA')

    # Build the OPeNDAP ASCII-subset URL; xID/yID are pixel index ranges
    # precomputed from lonlim/latlim by the caller
    if TimeCase == 'daily':
        URL = 'https://disc2.gesdisc.eosdis.nasa.gov/opendap/TRMM_L3/TRMM_3B42_Daily.7/%d/%02d/3B42_Daily.%d%02d%02d.7.nc4.ascii?precipitation[%d:1:%d][%d:1:%d]' % (
            year, month, year, month, day, xID[0], xID[1] - 1,
            yID[0], yID[1] - 1)
        DirFile = os.path.join(
            output_folder,
            "P_TRMM3B42.V7_mm-day-1_daily_%d.%02d.%02d.tif" % (year, month,
                                                               day))
        # Daily product is already in mm/day
        Scaling = 1
    if TimeCase == 'monthly':
        # File naming convention on the server changed in October 2010
        # (suffix '.7' vs '.7A')
        if Date >= pd.Timestamp('2010-10-01'):
            URL = 'https://disc2.gesdisc.eosdis.nasa.gov/opendap/TRMM_L3/TRMM_3B43.7/%d/3B43.%d%02d01.7.HDF.ascii?precipitation[%d:1:%d][%d:1:%d]' % (
                year, year, month, xID[0], xID[1] - 1, yID[0], yID[1] - 1)
        else:
            URL = 'https://disc2.gesdisc.eosdis.nasa.gov/opendap/TRMM_L3/TRMM_3B43.7/%d/3B43.%d%02d01.7A.HDF.ascii?precipitation[%d:1:%d][%d:1:%d]' % (
                year, year, month, xID[0], xID[1] - 1, yID[0], yID[1] - 1)
        # Monthly product is mm/hr; convert to mm/month
        Scaling = calendar.monthrange(year, month)[1] * 24
        DirFile = os.path.join(
            output_folder,
            "P_TRMM3B43.V7_mm-month-1_monthly_%d.%02d.01.tif" % (year,
                                                                 month))

    # Skip the download when the output tiff already exists
    if not os.path.isfile(DirFile):
        # First request returns a redirect to the authenticated location
        dataset = requests.get(URL, allow_redirects=False, stream=True)
        try:
            get_dataset = requests.get(dataset.headers['location'],
                                       auth=(username, password),
                                       stream=True)
        except:
            # Fallback: retry with SSL verification disabled (some
            # environments fail the certificate check)
            from requests.packages.urllib3.exceptions import InsecureRequestWarning
            requests.packages.urllib3.disable_warnings(
                InsecureRequestWarning)
            get_dataset = requests.get(dataset.headers['location'],
                                       auth=(username, password),
                                       verify=False)

        # download data (first save as text file)
        pathtext = os.path.join(output_folder, 'temp.txt')
        z = open(pathtext, 'w')
        z.write(get_dataset.content)
        z.close()

        # Open text file and remove header and footer; first column holds
        # the row index and is dropped
        data_start = np.genfromtxt(pathtext, dtype=float, skip_header=1,
                                   delimiter=',')
        data = data_start[:, 1:] * Scaling
        data[data < 0] = -9999
        # Server delivers [lon][lat] south-up; transpose and flip to
        # north-up [lat][lon]
        data = data.transpose()
        data = np.flipud(data)

        # Delete .txt file
        os.remove(pathtext)

        # Make geotiff file (TRMM grid is 0.25 degree)
        geo = [lonlim[0], 0.25, 0, latlim[1], 0, -0.25]
        DC.Save_as_tiff(name=DirFile, data=data, geo=geo,
                        projection="WGS84")

    return True
def RetrieveData(Date, args):
    """
    This function retrieves GLEAM ET data for a given date from the
    www.gleam.eu server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd' (pandas Timestamp)
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, latlim, lonlim, VarCode, TimeCase] = args

    # Adjust latlim to GLEAM dataset (latitude axis is stored inverted)
    latlim1 = [latlim[1] * -1, latlim[0] * -1]

    # select the spatial dataset (0.25 degree grid indices)
    Ystart = int(np.floor((latlim1[0] + 90) / 0.25))
    Yend = int(np.ceil((latlim1[1] + 90) / 0.25))
    Xstart = int(np.floor((lonlim[0] + 180) / 0.25))
    Xend = int(np.ceil((lonlim[1] + 180) / 0.25))

    Year = Date.year
    Month = Date.month

    # One netCDF file per year; open read-only
    filename = 'E_' + str(Year) + '_GLEAM_v3.1b.nc'
    local_filename = os.path.join(output_folder, filename)
    f = Dataset(local_filename, mode='r')

    if TimeCase == 'monthly':

        # Determine the first day of the month directly.
        # (The previous str.replace() approach replaced the FIRST
        # occurrence of the day substring, which could corrupt the year,
        # e.g. '2015-03-15' -> '2001-03-15'.)
        Datestart = Date.replace(day=1)

        # determine the DOY-1 and DOYend (those are used to define the
        # temporal boundaries within the yearly dataset)
        DOY = int(Datestart.strftime('%j'))
        DOYend = int(Date.strftime('%j'))
        DOYDownload = DOY - 1
        Day = 1

        Data = f.variables['E'][DOYDownload:DOYend, Xstart:Xend,
                                Ystart:Yend]
        data = np.array(Data)
        f.close()

        # Sum ET data in time and change the no data value into -999.
        # Use np.sum over the time axis — the previous builtin
        # sum(data, 1) added a spurious start value of 1 to every pixel.
        dataSum = np.sum(data, axis=0)
        dataSum[dataSum < -100] = -999.000
        # Swap to (lat, lon) orientation
        dataCor = np.swapaxes(dataSum, 0, 1)

    if TimeCase == 'daily':
        Day = Date.day

        # Define the DOY, DOY-1 is taken from the yearly dataset
        DOY = int(Date.strftime('%j'))
        DOYDownload = DOY - 1

        Data = f.variables['E'][DOYDownload, Xstart:Xend, Ystart:Yend]
        data = np.array(Data)
        f.close()

        data[data < -100] = -999.000
        dataCor = np.swapaxes(data, 0, 1)

    # The Georeference of the map
    geo_in = [lonlim[0], 0.25, 0.0, latlim[1], 0.0, -0.25]

    # Name of the map
    dataset_name = (VarCode + '_' + str(Year) + '.' + str(Month).zfill(2) +
                    '.' + str(Day).zfill(2) + '.tif')
    output_file = os.path.join(output_folder, dataset_name)

    # save data as tiff file
    DC.Save_as_tiff(name=output_file, data=dataCor, geo=geo_in,
                    projection="WGS84")

    return True
def Nearest_Interpolate(Dir_in, Startdate, Enddate, Dir_out=None): """ This functions calculates monthly tiff files based on the daily tiff files. (will calculate the total sum) Parameters ---------- Dir_in : str Path to the input data Startdate : str Contains the start date of the model 'yyyy-mm-dd' Enddate : str Contains the end date of the model 'yyyy-mm-dd' Dir_out : str Path to the output data, default is same as Dir_in """ # import WA+ modules import wa.General.data_conversions as DC import wa.General.raster_conversions as RC # Change working directory os.chdir(Dir_in) # Define end and start date Dates = pd.date_range(Startdate, Enddate, freq='MS') # Find all monthly files files = glob.glob('*daily*.tif') # Get array information and define projection geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0]) if int(proj.split('"')[-2]) == 4326: proj = "WGS84" # Get the No Data Value dest = gdal.Open(files[0]) NDV = dest.GetRasterBand(1).GetNoDataValue() for date in Dates: Year = date.year Month = date.month files_one_year = glob.glob('*daily*%d.%02d*.tif' % (Year, Month)) # Create empty arrays Month_data = np.zeros([size_Y, size_X]) # Get amount of days in month Amount_days_in_month = int(calendar.monthrange(Year, Month)[1]) if len(files_one_year) is not Amount_days_in_month: print("One day is missing!!!") for file_one_year in files_one_year: file_path = os.path.join(Dir_in, file_one_year) Day_data = RC.Open_tiff_array(file_path) Day_data[np.isnan(Day_data)] = 0.0 Day_data[Day_data == -9999] = 0.0 Month_data += Day_data # Define output directory if Dir_out is None: Dir_out = Dir_in # Define output name output_name = os.path.join(Dir_out, file_one_year .replace('daily', 'monthly') .replace('day', 'month')) output_name = output_name[:-14] + '%d.%02d.01.tif' % (date.year, date.month) # Save tiff file DC.Save_as_tiff(output_name, Month_data, geo_out, proj) return
def DownloadData(Dir, Var, Startdate, Enddate, latlim, lonlim, Waitbar,
                 cores, TimeCase, CaseParameters):
    """
    This function downloads ECMWF six-hourly, daily or monthly data via
    the ECMWF API, then slices the downloaded netCDF per date and saves
    each time step as a GeoTIFF.

    Keyword arguments:
    Dir -- output root directory
    Var -- ECMWF variable key (looked up in VariablesInfo)
    Startdate, Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- 1 prints a progress bar
    cores -- unused here (kept for interface symmetry)
    TimeCase -- 'six_hourly', 'daily' or 'monthly'
    CaseParameters -- extra case parameters (unused in this body)
    """
    # correct latitude and longitude limits (snap outward to the
    # 0.125-degree ECMWF grid)
    latlim_corr_one = np.floor(latlim[0]/0.125) * 0.125
    latlim_corr_two = np.ceil(latlim[1]/0.125) * 0.125
    latlim_corr = [latlim_corr_one, latlim_corr_two]

    # correct latitude and longitude limits
    lonlim_corr_one = np.floor(lonlim[0]/0.125) * 0.125
    lonlim_corr_two = np.ceil(lonlim[1]/0.125) * 0.125
    lonlim_corr = [lonlim_corr_one, lonlim_corr_two]

    # Load factors / unit / type of variables / accounts
    VarInfo = VariablesInfo(TimeCase)
    Varname_dir = VarInfo.file_name[Var]

    # Create Out directory
    out_dir = os.path.join(Dir, "Weather_Data", "Model", "ECMWF",
                           TimeCase, Varname_dir, "mean")
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    DownloadType = VarInfo.DownloadType[Var]

    # Set required data for the three hourly option
    if TimeCase == 'six_hourly':
        string1 = 'oper'

    # Set required data for the daily option
    elif TimeCase == 'daily':
        Dates = pd.date_range(Startdate, Enddate, freq='D')
    elif TimeCase == 'monthly':
        Dates = pd.date_range(Startdate, Enddate, freq='MS')

    # string1..string10 are the positional MARS request parameters
    # (stream, levtype, param, step, grid, time, date range, type,
    # class, area) passed to the download helper below.
    if DownloadType == 1:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'sfc'
        string8 = 'an'

    if DownloadType == 2:
        string1 = 'oper'
        string4 = "12"
        string6 = "00:00:00/12:00:00"
        string2 = 'sfc'
        string8 = 'fc'

    if DownloadType == 3:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'pl'
        string8 = 'an'

    string7 = '%s/to/%s' %(Startdate, Enddate)
    parameter_number = VarInfo.number_para[Var]

    string3 = '%03d.128' %(parameter_number)
    string5 = '0.125/0.125'
    string9 = 'ei'
    string10 = '%s/%s/%s/%s' %(latlim_corr[1], lonlim_corr[0],
                               latlim_corr[0], lonlim_corr[1])   #N, W, S, E

    # Download data by using the ECMWF API
    import wa.Collect.ECMWF.ECMWFdownload as Download
    print 'Use API ECMWF to collect the data, please wait'
    Download.API(Dir, DownloadType, string1, string2, string3, string4,
                 string5, string6, string7, string8, string9, string10)

    # Open the downloaded data
    NC_filename = os.path.join(Dir,'data_interim.nc')
    fh = Dataset(NC_filename, mode='r')

    # Get the NC variable parameter
    parameter_var = VarInfo.var_name[Var]
    Var_unit = VarInfo.units[Var]
    factors_add = VarInfo.factors_add[Var]
    factors_mul = VarInfo.factors_mul[Var]

    # Open the NC data
    Data = fh.variables[parameter_var][:]
    Data_time = fh.variables['time'][:]
    lons = fh.variables['longitude'][:]
    lats = fh.variables['latitude'][:]

    # Define the georeference information (upper-left corner of the
    # downloaded grid, 0.125-degree pixels, north-up)
    Geo_four = np.nanmax(lats)
    Geo_one = np.nanmin(lons)
    Geo_out = tuple([Geo_one, 0.125, 0.0, Geo_four, 0.0, -0.125])

    # Create Waitbar
    if Waitbar == 1:
        import wa.Functions.Start.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount,
                                    prefix = 'Progress:',
                                    suffix = 'Complete', length = 50)

    for date in Dates:

        # Define the year, month and day
        year = date.year
        month = date.month
        day = date.day

        # Hours since 1900-01-01 (the netCDF time axis reference)
        start = datetime.datetime(year=1900, month=1, day=1)
        end = datetime.datetime(year, month, day)
        diff = end - start
        hours_from_start_begin = diff.total_seconds()/60/60

        # Mark the time steps that fall inside this date/month
        Date_good = np.zeros(len(Data_time))
        if TimeCase == 'daily':
            days_later = 1
        if TimeCase == 'monthly':
            days_later = calendar.monthrange(year,month)[1]

        Date_good[np.logical_and(Data_time>=hours_from_start_begin,
                                 Data_time<(hours_from_start_begin +
                                            24 * days_later))] = 1

        # Select those time steps from the full data cube
        Data_one = np.zeros([int(np.sum(Date_good)),
                             int(np.size(Data,1)), int(np.size(Data,2))])
        Data_one = Data[np.int_(Date_good) == 1, :, :]

        # Calculate the average temperature in celcius degrees
        # (apply the per-variable unit conversion factors)
        Data_end = factors_mul * np.nanmean(Data_one,0) + factors_add

        # Flux variables are per-day means and must be scaled back to
        # totals over the period
        if VarInfo.types[Var] == 'flux':
            Data_end = Data_end * days_later

        VarOutputname = VarInfo.file_name[Var]

        # Define the out name
        name_out = os.path.join(out_dir,
                                "%s_ECMWF_ERA-Interim_%s_%s_%d.%02d.%02d.tif"
                                %(VarOutputname, Var_unit, TimeCase,
                                  year,month,day))

        # Create Tiff files
        DC.Save_as_tiff(name_out, Data_end, Geo_out, "WGS84")

        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount,
                                        prefix = 'Progress:',
                                        suffix = 'Complete', length = 50)

    fh.close()

    return()
def Nearest_Interpolate(Dir_in, Startdate, Enddate, Dir_out=None):
    """
    This functions calculates monthly tiff files based on the 8 daily
    tiff files. (will calculate the average)

    Parameters
    ----------
    Dir_in : str
        Path to the input data
    Startdate : str
        Contains the start date of the model 'yyyy-mm-dd'
    Enddate : str
        Contains the end date of the model 'yyyy-mm-dd'
    Dir_out : str
        Path to the output data, default is same as Dir_in
    """
    # import WA+ modules
    import wa.General.data_conversions as DC
    import wa.General.raster_conversions as RC

    # Change working directory
    os.chdir(Dir_in)

    # Find all eight daily files
    files = glob.glob('*8-daily*.tif')

    # Create array with filename and keys (DOY and year) of all the 8
    # daily files: column 0 = index into `files`, 1 = day of year,
    # 2 = year
    i = 0
    DOY_Year = np.zeros([len(files), 3])
    for File in files:

        # Get the time characteristics from the filename
        year = File.split('.')[-4][-4:]
        month = File.split('.')[-3]
        day = File.split('.')[-2]

        # Create pandas Timestamp
        date_file = '%s-%02s-%02s' % (year, month, day)
        Datum = pd.Timestamp(date_file)

        # Get day of year
        DOY = Datum.strftime('%j')

        # Save data in array
        DOY_Year[i, 0] = i
        DOY_Year[i, 1] = DOY
        DOY_Year[i, 2] = year

        # Loop over files
        i += 1

    # Check enddate: extend to the last day of the end month
    Enddate_split = Enddate.split('-')
    month_range = calendar.monthrange(int(Enddate_split[0]),
                                      int(Enddate_split[1]))[1]
    Enddate = '%d-%02d-%02d' % (int(Enddate_split[0]), int(
        Enddate_split[1]), month_range)

    # Check startdate: snap to the first day of the start month
    Startdate_split = Startdate.split('-')
    Startdate = '%d-%02d-01' % (int(Startdate_split[0]), int(
        Startdate_split[1]))

    # Define end and start date (month starts and month ends, pairwise)
    Dates = pd.date_range(Startdate, Enddate, freq='MS')
    DatesEnd = pd.date_range(Startdate, Enddate, freq='M')

    # Get array information and define projection
    geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
    if int(proj.split('"')[-2]) == 4326:
        proj = "WGS84"

    # Get the No Data Value
    dest = gdal.Open(files[0])
    NDV = dest.GetRasterBand(1).GetNoDataValue()

    # Loop over months and create monthly tiff files
    i = 0
    for date in Dates:
        # Get Start and end DOY of the current month
        DOY_month_start = date.strftime('%j')
        DOY_month_end = DatesEnd[i].strftime('%j')

        # Search for the files that are between those DOYs
        year = date.year
        DOYs = DOY_Year[DOY_Year[:, 2] == year]
        # An 8-day composite starting up to 8 days before the month
        # start still overlaps the month
        DOYs_oneMonth = DOYs[np.logical_and(
            (DOYs[:, 1] + 8) >= int(DOY_month_start),
            DOYs[:, 1] <= int(DOY_month_end))]

        # Create empty arrays
        Monthly = np.zeros([size_Y, size_X])
        Weight_tot = np.zeros([size_Y, size_X])
        Data_one_month = np.ones([size_Y, size_X]) * np.nan

        # Loop over the files that are within the DOYs
        for EightDays in DOYs_oneMonth[:, 0]:

            # Calculate the amount of days in this month of each file:
            # partial composites at the month edges get a smaller weight
            Weight = np.ones([size_Y, size_X])

            # For start of month
            if EightDays == DOYs_oneMonth[:, 0][0]:
                Weight = Weight * int(DOYs_oneMonth[:, 1][0] + 8 -
                                      int(DOY_month_start))

            # For end of month
            elif EightDays == DOYs_oneMonth[:, 0][-1]:
                Weight = Weight * (int(DOY_month_end) -
                                   DOYs_oneMonth[:, 1][-1] + 1)

            # For the middle of the month
            else:
                Weight = Weight * 8

            # Open the array of current file
            input_name = os.path.join(Dir_in, files[int(EightDays)])
            Data = RC.Open_tiff_array(input_name)

            # Remove NDV (zero weight means the pixel is excluded from
            # the weighted average)
            Weight[Data == NDV] = 0
            Data[Data == NDV] = np.nan

            # Multiply weight time data
            Data = Data * Weight

            # Calculate the total weight and data
            Weight_tot += Weight
            Monthly[~np.isnan(Data)] += Data[~np.isnan(Data)]

        # Go to next month
        i += 1

        # Calculate the average (pixels with zero total weight stay NaN)
        Data_one_month[Weight_tot != 0.] = Monthly[
            Weight_tot != 0.] / Weight_tot[Weight_tot != 0.]

        # Define output directory
        if Dir_out == None:
            Dir_out = Dir_in

        # Define output name (derived from the last composite filename of
        # this month, date normalized to the 1st)
        output_name = os.path.join(
            Dir_out, files[int(EightDays)].replace('8-daily', 'monthly'))
        output_name = output_name[:-6] + '01.tif'

        # Save tiff file
        DC.Save_as_tiff(output_name, Data_one_month, geo_out, proj)

    return
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar): """ This scripts downloads ASCAT SWI data from the VITO server. The output files display the Surface Water Index. Keyword arguments: Dir -- 'C:/file/to/path/' Startdate -- 'yyyy-mm-dd' Enddate -- 'yyyy-mm-dd' lonlim -- [ymin, ymax] latlim -- [xmin, xmax] """ # Check the latitude and longitude and otherwise reset lat and lon. if latlim[0] < -90 or latlim[1] > 90: print 'Latitude above 90N or below 90S is not possible.\ Value set to maximum' latlim[0] = np.max(latlim[0], -90) latlim[1] = np.min(latlim[1], 90) if lonlim[0] < -180 or lonlim[1] > 180: print 'Longitude must be between 180E and 180W.\ Now value is set to maximum' lonlim[0] = np.max(lonlim[0], -180) lonlim[1] = np.min(lonlim[1], 180) # Check Startdate and Enddate if not Startdate: Startdate = pd.Timestamp('2007-01-01') if not Enddate: Enddate = pd.Timestamp('2018-12-31') # Make a panda timestamp of the date try: Enddate = pd.Timestamp(Enddate) except: Enddate = Enddate # amount of Dates weekly Dates = pd.date_range(Startdate, Enddate, freq='D') # Create Waitbar if Waitbar == 1: import wa.Functions.Start.WaitbarConsole as WaitbarConsole total_amount = len(Dates) amount = 0 WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50) # Define directory and create it if not exists output_folder = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily') if not os.path.exists(output_folder): os.makedirs(output_folder) output_folder_temp = os.path.join(Dir, 'SWI', 'ASCAT', 'Daily', 'Temp') if not os.path.exists(output_folder_temp): os.makedirs(output_folder_temp) # loop over dates for Date in Dates: # Define end filename End_filename = os.path.join( output_folder, 'SWI_ASCAT_V3_Percentage_daily_%d.%02d.%02d.tif' % (Date.year, Date.month, Date.day)) # Define IDs xID = 1800 + np.int16( np.array([np.ceil((lonlim[0]) * 10), np.floor((lonlim[1]) * 10)])) yID = np.int16( np.array([np.floor((-latlim[1]) * 10), 
np.ceil((-latlim[0]) * 10)])) + 900 # Download the data from FTP server if the file not exists if not os.path.exists(End_filename): try: data = Download_ASCAT_from_VITO(End_filename, output_folder_temp, Date, yID, xID) # make geotiff file geo = [lonlim[0], 0.1, 0, latlim[1], 0, -0.1] DC.Save_as_tiff(name=End_filename, data=data, geo=geo, projection="WGS84") except: print "Was not able to download file with date %s" % Date # Adjust waitbar if Waitbar == 1: amount += 1 WaitbarConsole.printWaitBar(amount, total_amount, prefix='Progress:', suffix='Complete', length=50) # remove the temporary folder shutil.rmtree(output_folder_temp)
def Calculate(Basin, P_Product, ET_Product, Inflow_Text_Files,
              Reservoirs_Lakes_Calculations, Startdate, Enddate, Simulation):
    '''
    Run the full Sheet 5 (surface water) pre-processing chain for a basin.

    Intermediate products are cached on disk (netCDF / .npy); every step is
    skipped when its output file already exists, so the function can be
    re-run incrementally.

    This functions consists of the following sections:
    1. Set General Parameters
    2. Download Data
    3. Convert the RAW data to NETCDF files
    4. Create Mask based on LU map
    5. Calculate Runoff based on Budyko
    6. Add inflow in Runoff
    7. Calculate River flow
       7.1 Route Runoff
       7.2 Add Reservoirs
       7.3 Add surface water withdrawals
    '''
    # import General modules
    import os
    import gdal
    import numpy as np
    import pandas as pd
    import copy

    # import WA plus modules
    from wa.General import raster_conversions as RC
    from wa.General import data_conversions as DC
    import wa.Functions.Five as Five
    import wa.Functions.Start as Start

    ######################### 1. Set General Parameters ##############################

    # Get environmental variable for the Home folder
    WA_env_paths = os.environ["WA_HOME"].split(';')
    Dir_Home = WA_env_paths[0]

    # Create the Basin folder
    Dir_Basin = os.path.join(Dir_Home, Basin)
    if not os.path.exists(Dir_Basin):
        os.makedirs(Dir_Basin)

    # Get the boundaries of the basin based on the shapefile of the watershed
    # Boundaries, Shape_file_name_shp = Start.Boundaries.Determine(Basin)
    Boundaries, LU_dataset = Start.Boundaries.Determine_LU_Based(Basin)
    LU_data = RC.Open_tiff_array(LU_dataset)
    geo_out_LU, proj_LU, size_X_LU, size_Y_LU = RC.Open_array_info(LU_dataset)

    # Define resolution of SRTM
    Resolution = '15s'

    # Get the amount of months
    Amount_months = len(pd.date_range(Startdate, Enddate, freq='MS'))
    Amount_months_reservoirs = Amount_months + 1

    # Startdate for moving window Budyko (two months of spin-up data)
    Startdate_2months_Timestamp = pd.Timestamp(Startdate) - pd.DateOffset(
        months=2)
    Startdate_2months = Startdate_2months_Timestamp.strftime('%Y-%m-%d')

    ############################# 2. Download Data ###################################

    # Download data
    Data_Path_P = Start.Download_Data.Precipitation(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']], Startdate_2months,
        Enddate, P_Product)
    Data_Path_ET = Start.Download_Data.Evapotranspiration(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']], Startdate_2months,
        Enddate, ET_Product)
    Data_Path_DEM = Start.Download_Data.DEM(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']], Resolution)
    # NOTE(review): `is not` compares identity, not equality -- works here only
    # by CPython string interning; also this re-downloads the same DEM a
    # second time with identical arguments. Left as-is to preserve behavior.
    if Resolution is not '3s':
        Data_Path_DEM = Start.Download_Data.DEM(
            Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
            [Boundaries['Lonmin'], Boundaries['Lonmax']], Resolution)
    Data_Path_DEM_Dir = Start.Download_Data.DEM_Dir(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']], Resolution)
    Data_Path_ETref = Start.Download_Data.ETreference(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']], Startdate_2months,
        Enddate)
    Data_Path_JRC_occurrence = Start.Download_Data.JRC_occurrence(
        Dir_Basin, [Boundaries['Latmin'], Boundaries['Latmax']],
        [Boundaries['Lonmin'], Boundaries['Lonmax']])
    Data_Path_P_Monthly = os.path.join(Data_Path_P, 'Monthly')

    ###################### 3. Convert the RAW data to NETCDF files ##############################
    # The sequence of converting the data is:
    # DEM
    # DEM flow directions
    # Precipitation
    # Evapotranspiration
    # Reference Evapotranspiration

    #_____________________________________DEM__________________________________
    # Get the data of DEM and save as nc, This dataset is also used as reference for others
    Example_dataset = os.path.join(Dir_Basin, Data_Path_DEM,
                                   'DEM_HydroShed_m_%s.tif' % Resolution)
    DEMdest = gdal.Open(Example_dataset)
    Xsize_CR = int(DEMdest.RasterXSize)
    Ysize_CR = int(DEMdest.RasterYSize)
    DataCube_DEM_CR = DEMdest.GetRasterBand(1).ReadAsArray()

    Name_NC_DEM_CR = DC.Create_NC_name('DEM_CR', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM_CR):
        DC.Save_as_NC(Name_NC_DEM_CR, DataCube_DEM_CR, 'DEM_CR',
                      Example_dataset)
    # release the GDAL dataset handle
    DEMdest = None

    #___________________________________DEM Dir________________________________
    # Get the data of flow direction and save as nc.
    Dir_dataset = os.path.join(Dir_Basin, Data_Path_DEM_Dir,
                               'DIR_HydroShed_-_%s.tif' % Resolution)
    DEMDirdest = gdal.Open(Dir_dataset)
    DataCube_DEM_Dir_CR = DEMDirdest.GetRasterBand(1).ReadAsArray()

    Name_NC_DEM_Dir_CR = DC.Create_NC_name('DEM_Dir_CR', Simulation,
                                           Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM_Dir_CR):
        DC.Save_as_NC(Name_NC_DEM_Dir_CR, DataCube_DEM_Dir_CR, 'DEM_Dir_CR',
                      Example_dataset)
    DEMDirdest = None
    del DataCube_DEM_Dir_CR

    #______________________________ Precipitation______________________________
    # Define info for the nc files
    info = [
        'monthly', 'mm',
        ''.join([Startdate_2months[5:7], Startdate_2months[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]

    # Precipitation data
    Name_NC_Prec_CR = DC.Create_NC_name('Prec_CR', Simulation, Dir_Basin, 5,
                                        info)
    if not os.path.exists(Name_NC_Prec_CR):
        # Get the data of Precipitation and save as nc
        DataCube_Prec_CR = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_P_Monthly, Startdate_2months, Enddate,
            Example_data=Example_dataset)
        DC.Save_as_NC(Name_NC_Prec_CR, DataCube_Prec_CR, 'Prec_CR',
                      Example_dataset, Startdate_2months, Enddate, 'monthly',
                      0.01)
        del DataCube_Prec_CR

    #____________________________ Evapotranspiration___________________________
    # Evapotranspiration data
    info = [
        'monthly', 'mm',
        ''.join([Startdate_2months[5:7], Startdate_2months[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_ET_CR = DC.Create_NC_name('ET_CR', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_ET_CR):
        # Get the data of Evaporation and save as nc
        DataCube_ET_CR = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_ET, Startdate_2months, Enddate,
            Example_data=Example_dataset)
        DC.Save_as_NC(Name_NC_ET_CR, DataCube_ET_CR, 'ET_CR', Example_dataset,
                      Startdate_2months, Enddate, 'monthly', 0.01)
        del DataCube_ET_CR

    #_______________________Reference Evapotranspiration_______________________
    # Reference Evapotranspiration data
    Name_NC_ETref_CR = DC.Create_NC_name('ETref_CR', Simulation, Dir_Basin, 5,
                                         info)
    if not os.path.exists(Name_NC_ETref_CR):
        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_ETref_CR = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_ETref, Startdate_2months, Enddate,
            Example_data=Example_dataset)
        DC.Save_as_NC(Name_NC_ETref_CR, DataCube_ETref_CR, 'ETref_CR',
                      Example_dataset, Startdate_2months, Enddate, 'monthly',
                      0.01)
        del DataCube_ETref_CR

    #_______________________fraction surface water _______________________
    # Map each land-use class to its surface-water supply fraction
    Name_NC_frac_sw_CR = DC.Create_NC_name('Fraction_SW_CR', Simulation,
                                           Dir_Basin, 5)
    if not os.path.exists(Name_NC_frac_sw_CR):
        DataCube_frac_sw = np.ones_like(LU_data) * np.nan

        import wa.Functions.Start.Get_Dictionaries as GD

        # Get dictionaries and keys
        lulc = GD.get_sheet5_classes()
        lulc_dict = GD.get_sheet5_classes().keys()
        consumed_frac_dict = GD.sw_supply_fractions_sheet5()

        for key in lulc_dict:
            Numbers = lulc[key]
            for LU_nmbr in Numbers:
                Mask = np.zeros_like(LU_data)
                Mask[LU_data == LU_nmbr] = 1
                DataCube_frac_sw[Mask == 1] = consumed_frac_dict[key]

        # Reproject the fraction map onto the DEM (example) grid via an
        # in-memory GDAL dataset
        dest_frac_sw = DC.Save_as_MEM(DataCube_frac_sw, geo_out_LU, proj_LU)
        dest_frac_sw_CR = RC.reproject_dataset_example(dest_frac_sw,
                                                       Example_dataset)
        DataCube_frac_sw_CR = dest_frac_sw_CR.ReadAsArray()
        DataCube_frac_sw_CR[DataCube_frac_sw_CR == 0] = np.nan

        DC.Save_as_NC(Name_NC_frac_sw_CR, DataCube_frac_sw_CR,
                      'Fraction_SW_CR', Example_dataset, Scaling_factor=0.01)
        del DataCube_frac_sw_CR

    del DataCube_DEM_CR

    ##################### 4. Create Mask based on LU map ###########################
    # Now a mask will be created to define the area of interest (pixels where there is a landuse defined)

    #_____________________________________LU___________________________________
    destLU = RC.reproject_dataset_example(LU_dataset, Example_dataset,
                                          method=1)
    DataCube_LU_CR = destLU.GetRasterBand(1).ReadAsArray()

    Raster_Basin_CR = np.zeros([Ysize_CR, Xsize_CR])
    Raster_Basin_CR[DataCube_LU_CR > 0] = 1
    Name_NC_Basin_CR = DC.Create_NC_name('Basin_CR', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_Basin_CR):
        DC.Save_as_NC(Name_NC_Basin_CR, Raster_Basin_CR, 'Basin_CR',
                      Example_dataset)
    #del Raster_Basin
    '''
    Name_NC_Basin = DC.Create_NC_name('Basin_CR', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_Basin):

        Raster_Basin = RC.Vector_to_Raster(Dir_Basin, Shape_file_name_shp,
                                           Example_dataset)
        Raster_Basin = np.clip(Raster_Basin, 0, 1)
        DC.Save_as_NC(Name_NC_Basin, Raster_Basin, 'Basin_CR',
                      Example_dataset)
    #del Raster_Basin
    '''
    ###################### 5. Calculate Runoff based on Budyko ###########################

    # Define info for the nc files
    info = [
        'monthly', 'mm', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]

    # Define the output names of section 5 and 6
    Name_NC_Runoff_CR = DC.Create_NC_name('Runoff_CR', Simulation, Dir_Basin,
                                          5, info)
    Name_NC_Runoff_for_Routing_CR = Name_NC_Runoff_CR
    if not os.path.exists(Name_NC_Runoff_CR):
        # Calculate runoff based on Budyko
        DataCube_Runoff_CR = Five.Budyko.Calc_runoff(Name_NC_ETref_CR,
                                                     Name_NC_Prec_CR)
        # Save the runoff as netcdf
        DC.Save_as_NC(Name_NC_Runoff_CR, DataCube_Runoff_CR, 'Runoff_CR',
                      Example_dataset, Startdate, Enddate, 'monthly', 0.01)
        del DataCube_Runoff_CR
    '''
    ###################### Calculate Runoff with P min ET ###########################

    Name_NC_Runoff_CR = DC.Create_NC_name('Runoff_CR', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_Runoff_CR):

        ET = RC.Open_nc_array(Name_NC_ET_CR)
        P = RC.Open_nc_array(Name_NC_Prec_CR)
        DataCube_Runoff_CR = P - ET
        DataCube_Runoff_CR[:,:,:][DataCube_Runoff_CR<=0.1] = 0
        DataCube_Runoff_CR[:,:,:][np.isnan(DataCube_Runoff_CR)] = 0
        DC.Save_as_NC(Name_NC_Runoff_CR, DataCube_Runoff_CR, 'Runoff_CR',
                      Example_dataset, Startdate, Enddate, 'monthly')
        del DataCube_Runoff_CR
    '''
    ############### 6. Add inflow in basin by using textfile #########################

    # add inlets if there are textfiles defined
    if len(Inflow_Text_Files) > 0:
        # Create name of the Runoff with inlets
        Name_NC_Runoff_with_Inlets_CR = DC.Create_NC_name(
            'Runoff_with_Inlets_CR', Simulation, Dir_Basin, 5, info)
        # Use this runoff name for the routing (it will overwrite the runoff without inlets)
        Name_NC_Runoff_for_Routing_CR = Name_NC_Runoff_with_Inlets_CR
        # Create the file if it not exists
        if not os.path.exists(Name_NC_Runoff_with_Inlets_CR):
            # Calculate the runoff that will be routed by including the inlets
            DataCube_Runoff_with_Inlets_CR = Five.Inlets.Add_Inlets(
                Name_NC_Runoff_CR, Inflow_Text_Files)
            # Save this runoff as netcdf
            DC.Save_as_NC(Name_NC_Runoff_with_Inlets_CR,
                          DataCube_Runoff_with_Inlets_CR,
                          'Runoff_with_Inlets_CR', Example_dataset, Startdate,
                          Enddate, 'monthly', 0.01)
            del DataCube_Runoff_with_Inlets_CR

    ######################### 7. Now the surface water is calculated #################

    # Names for dicionaries and nc files
    # CR1 = Natural_flow with only green water
    # CR2 = Natural_flow with only green water and reservoirs
    # CR3 = Flow with green, blue and reservoirs

    ######################### 7.1 Apply Channel Routing ###############################

    # Create the name for the netcdf outputs for section 7.1
    info = [
        'monthly', 'pixels', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_Acc_Pixels_CR = DC.Create_NC_name('Acc_Pixels_CR', Simulation,
                                              Dir_Basin, 5)
    info = [
        'monthly', 'm3', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_Discharge_CR1 = DC.Create_NC_name('Discharge_CR1', Simulation,
                                              Dir_Basin, 5, info)

    # If one of the outputs does not exists, run this part
    if not (os.path.exists(Name_NC_Acc_Pixels_CR)
            and os.path.exists(Name_NC_Discharge_CR1)):

        Accumulated_Pixels_CR, Discharge_CR1 = Five.Channel_Routing.Channel_Routing(
            Name_NC_DEM_Dir_CR, Name_NC_Runoff_for_Routing_CR,
            Name_NC_Basin_CR, Example_dataset, Degrees=1)

        # Save Results
        DC.Save_as_NC(Name_NC_Acc_Pixels_CR, Accumulated_Pixels_CR,
                      'Acc_Pixels_CR', Example_dataset)
        DC.Save_as_NC(Name_NC_Discharge_CR1, Discharge_CR1, 'Discharge_CR1',
                      Example_dataset, Startdate, Enddate, 'monthly')

    ################# Calculate the natural river and river zones #################

    Name_NC_Rivers_CR = DC.Create_NC_name('Rivers_CR', Simulation, Dir_Basin,
                                          5, info)
    if not os.path.exists(Name_NC_Rivers_CR):

        # Open routed discharge array
        Discharge_CR1 = RC.Open_nc_array(Name_NC_Discharge_CR1)
        Raster_Basin = RC.Open_nc_array(Name_NC_Basin_CR)

        # Calculate mean average over the period
        if len(np.shape(Discharge_CR1)) > 2:
            Routed_Discharge_Ave = np.nanmean(Discharge_CR1, axis=0)
        else:
            Routed_Discharge_Ave = Discharge_CR1

        # Define the 2% highest pixels as rivers
        Rivers = np.zeros([
            np.size(Routed_Discharge_Ave, 0),
            np.size(Routed_Discharge_Ave, 1)
        ])
        Routed_Discharge_Ave[Raster_Basin != 1] = np.nan
        Routed_Discharge_Ave_number = np.nanpercentile(Routed_Discharge_Ave,
                                                       98)
        Rivers[
            Routed_Discharge_Ave >
            Routed_Discharge_Ave_number] = 1  # if yearly average is larger than 5000km3/month that it is a river

        # Save the river file as netcdf file
        DC.Save_as_NC(Name_NC_Rivers_CR, Rivers, 'Rivers_CR', Example_dataset)

    ########################## Create river directories ###########################

    Name_py_River_dict_CR1 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'River_dict_CR1_simulation%d.npy' % (Simulation))
    Name_py_DEM_dict_CR1 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'DEM_dict_CR1_simulation%d.npy' % (Simulation))
    Name_py_Distance_dict_CR1 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Distance_dict_CR1_simulation%d.npy' % (Simulation))

    if not (os.path.exists(Name_py_River_dict_CR1)
            and os.path.exists(Name_py_DEM_dict_CR1)
            and os.path.exists(Name_py_Distance_dict_CR1)):

        # Get river and DEM dict
        River_dict_CR1, DEM_dict_CR1, Distance_dict_CR1 = Five.Create_Dict.Rivers_General(
            Name_NC_DEM_CR, Name_NC_DEM_Dir_CR, Name_NC_Acc_Pixels_CR,
            Name_NC_Rivers_CR, Example_dataset)
        np.save(Name_py_River_dict_CR1, River_dict_CR1)
        np.save(Name_py_DEM_dict_CR1, DEM_dict_CR1)
        np.save(Name_py_Distance_dict_CR1, Distance_dict_CR1)
    else:
        # Load
        River_dict_CR1 = np.load(Name_py_River_dict_CR1).item()
        DEM_dict_CR1 = np.load(Name_py_DEM_dict_CR1).item()
        Distance_dict_CR1 = np.load(Name_py_Distance_dict_CR1).item()

    Name_py_Discharge_dict_CR1 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Discharge_dict_CR1_simulation%d.npy' % (Simulation))

    if not os.path.exists(Name_py_Discharge_dict_CR1):
        # Get discharge dict
        Discharge_dict_CR1 = Five.Create_Dict.Discharge(
            Name_NC_Discharge_CR1, River_dict_CR1, Amount_months,
            Example_dataset)
        np.save(Name_py_Discharge_dict_CR1, Discharge_dict_CR1)
    else:
        # Load
        Discharge_dict_CR1 = np.load(Name_py_Discharge_dict_CR1).item()

    ###################### 7.2 Calculate surface water storage characteristics ######################

    Name_py_Discharge_dict_CR2 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Discharge_dict_CR2_simulation%d.npy' % (Simulation))
    Name_py_River_dict_CR2 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'River_dict_CR2_simulation%d.npy' % (Simulation))
    Name_py_DEM_dict_CR2 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'DEM_dict_CR2_simulation%d.npy' % (Simulation))
    Name_py_Distance_dict_CR2 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Distance_dict_CR2_simulation%d.npy' % (Simulation))
    Name_py_Diff_Water_Volume = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Diff_Water_Volume_CR2_simulation%d.npy' % (Simulation))
    Name_py_Regions = os.path.join(Dir_Basin, 'Simulations',
                                   'Simulation_%d' % Simulation, 'Sheet_5',
                                   'Regions_simulation%d.npy' % (Simulation))

    if not (os.path.exists(Name_py_Discharge_dict_CR2)
            and os.path.exists(Name_py_River_dict_CR2)
            and os.path.exists(Name_py_DEM_dict_CR2)
            and os.path.exists(Name_py_Distance_dict_CR2)):

        # Copy dicts as starting adding reservoir
        Discharge_dict_CR2 = copy.deepcopy(Discharge_dict_CR1)
        River_dict_CR2 = copy.deepcopy(River_dict_CR1)
        DEM_dict_CR2 = copy.deepcopy(DEM_dict_CR1)
        Distance_dict_CR2 = copy.deepcopy(Distance_dict_CR1)

        if Reservoirs_Lakes_Calculations == 1:

            # define input tiffs for surface water calculations
            input_JRC = os.path.join(Dir_Basin, Data_Path_JRC_occurrence,
                                     'JRC_Occurrence_percent.tif')
            DEM_dataset = os.path.join(Dir_Basin, Data_Path_DEM,
                                       'DEM_HydroShed_m_3s.tif')

            sensitivity = 700  # 900 is less sensitive 1 is very sensitive
            Regions = Five.Reservoirs.Calc_Regions(Name_NC_Basin_CR,
                                                   input_JRC, sensitivity,
                                                   Boundaries)

            Diff_Water_Volume = np.zeros(
                [len(Regions), Amount_months_reservoirs - 1, 3])
            reservoir = 0

            for region in Regions:
                # Fit the area/volume relation of this reservoir region and
                # derive the monthly storage change from GEE water areas
                popt = Five.Reservoirs.Find_Area_Volume_Relation(
                    region, input_JRC, DEM_dataset)

                Area_Reservoir_Values = Five.Reservoirs.GEE_calc_reservoir_area(
                    region, Startdate, Enddate)

                Diff_Water_Volume[
                    reservoir, :, :] = Five.Reservoirs.Calc_Diff_Storage(
                        Area_Reservoir_Values, popt)
                reservoir += 1

            ################# 7.3 Add storage reservoirs and change outflows ##################

            Discharge_dict_CR2, River_dict_CR2, DEM_dict_CR2, Distance_dict_CR2 = Five.Reservoirs.Add_Reservoirs(
                Name_NC_Rivers_CR, Name_NC_Acc_Pixels_CR, Diff_Water_Volume,
                River_dict_CR2, Discharge_dict_CR2, DEM_dict_CR2,
                Distance_dict_CR2, Regions, Example_dataset)

            np.save(Name_py_Regions, Regions)
            np.save(Name_py_Diff_Water_Volume, Diff_Water_Volume)

        np.save(Name_py_Discharge_dict_CR2, Discharge_dict_CR2)
        np.save(Name_py_River_dict_CR2, River_dict_CR2)
        np.save(Name_py_DEM_dict_CR2, DEM_dict_CR2)
        np.save(Name_py_Distance_dict_CR2, Distance_dict_CR2)
    else:
        # Load
        Discharge_dict_CR2 = np.load(Name_py_Discharge_dict_CR2).item()
        River_dict_CR2 = np.load(Name_py_River_dict_CR2).item()
        DEM_dict_CR2 = np.load(Name_py_DEM_dict_CR2).item()
        Distance_dict_CR2 = np.load(Name_py_Distance_dict_CR2).item()

    ####################### 7.3 Add surface water withdrawals #############################

    Name_py_Discharge_dict_CR3 = os.path.join(
        Dir_Basin, 'Simulations', 'Simulation_%d' % Simulation, 'Sheet_5',
        'Discharge_dict_CR3_simulation%d.npy' % (Simulation))

    if not os.path.exists(Name_py_Discharge_dict_CR3):
        Discharge_dict_CR3, DataCube_ETblue_m3 = Five.Irrigation.Add_irrigation(
            Discharge_dict_CR2, River_dict_CR2, Name_NC_Rivers_CR,
            Name_NC_ET_CR, Name_NC_ETref_CR, Name_NC_Prec_CR,
            Name_NC_Basin_CR, Name_NC_frac_sw_CR, Startdate, Enddate,
            Example_dataset)
        np.save(Name_py_Discharge_dict_CR3, Discharge_dict_CR3)

        # save ETblue as nc
        info = [
            'monthly', 'm3-month-1', ''.join([Startdate[5:7],
                                              Startdate[0:4]]),
            ''.join([Enddate[5:7], Enddate[0:4]])
        ]
        Name_NC_ETblue = DC.Create_NC_name('ETblue', Simulation, Dir_Basin, 5,
                                           info)
        DC.Save_as_NC(Name_NC_ETblue, DataCube_ETblue_m3, 'ETblue',
                      Example_dataset, Startdate, Enddate, 'monthly')

    else:
        Discharge_dict_CR3 = np.load(Name_py_Discharge_dict_CR3).item()

    ################################# Plot graph ##################################

    # Draw graph
    Five.Channel_Routing.Graph_DEM_Distance_Discharge(
        Discharge_dict_CR3, Distance_dict_CR2, DEM_dict_CR2, River_dict_CR2,
        Startdate, Enddate, Example_dataset)

    ######################## Change data to fit the LU data #######################

    # Discharge
    # Define info for the nc files
    info = [
        'monthly', 'm3-month-1', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_Discharge = DC.Create_NC_name('Discharge', Simulation, Dir_Basin,
                                          5, info)
    if not os.path.exists(Name_NC_Discharge):
        # Convert the discharge dictionaries to a 3D array and save as nc
        DataCube_Discharge_CR = DC.Convert_dict_to_array(
            River_dict_CR2, Discharge_dict_CR3, Example_dataset)
        DC.Save_as_NC(Name_NC_Discharge, DataCube_Discharge_CR, 'Discharge',
                      Example_dataset, Startdate, Enddate, 'monthly')
        del DataCube_Discharge_CR

    # DEM
    Name_NC_DEM = DC.Create_NC_name('DEM', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM):
        # Resample the DEM onto the land-use grid and save as nc
        DataCube_DEM_CR = RC.Open_nc_array(Name_NC_DEM_CR)
        DataCube_DEM = RC.resize_array_example(DataCube_DEM_CR, LU_data,
                                               method=1)
        DC.Save_as_NC(Name_NC_DEM, DataCube_DEM, 'DEM', LU_dataset)
        del DataCube_DEM

    # flow direction
    Name_NC_DEM_Dir = DC.Create_NC_name('DEM_Dir', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_DEM_Dir):
        # Resample the flow directions onto the land-use grid and save as nc
        DataCube_DEM_Dir_CR = RC.Open_nc_array(Name_NC_DEM_Dir_CR)
        DataCube_DEM_Dir = RC.resize_array_example(DataCube_DEM_Dir_CR,
                                                   LU_data, method=1)
        DC.Save_as_NC(Name_NC_DEM_Dir, DataCube_DEM_Dir, 'DEM_Dir',
                      LU_dataset)
        del DataCube_DEM_Dir

    # Precipitation
    # Define info for the nc files
    info = [
        'monthly', 'mm', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_Prec = DC.Create_NC_name('Prec', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_Prec):
        # Get the data of Precipitation on the land-use grid and save as nc
        DataCube_Prec = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_P_Monthly, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_Prec, DataCube_Prec, 'Prec', LU_dataset,
                      Startdate, Enddate, 'monthly', 0.01)
        del DataCube_Prec

    # Evapotranspiration
    Name_NC_ET = DC.Create_NC_name('ET', Simulation, Dir_Basin, 5)
    if not os.path.exists(Name_NC_ET):
        # Get the data of Evapotranspiration on the land-use grid and save as nc
        DataCube_ET = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_ET, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_ET, DataCube_ET, 'ET', LU_dataset, Startdate,
                      Enddate, 'monthly', 0.01)
        del DataCube_ET

    # Reference Evapotranspiration data
    Name_NC_ETref = DC.Create_NC_name('ETref', Simulation, Dir_Basin, 5, info)
    if not os.path.exists(Name_NC_ETref):
        # Get the data of Reference Evapotranspiration and save as nc
        DataCube_ETref = RC.Get3Darray_time_series_monthly(
            Dir_Basin, Data_Path_ETref, Startdate, Enddate, LU_dataset)
        DC.Save_as_NC(Name_NC_ETref, DataCube_ETref, 'ETref', LU_dataset,
                      Startdate, Enddate, 'monthly', 0.01)
        del DataCube_ETref

    # Rivers
    Name_NC_Rivers = DC.Create_NC_name('Rivers', Simulation, Dir_Basin, 5,
                                       info)
    if not os.path.exists(Name_NC_Rivers):
        # Resample the river mask onto the land-use grid and save as nc
        Rivers_CR = RC.Open_nc_array(Name_NC_Rivers_CR)
        DataCube_Rivers = RC.resize_array_example(Rivers_CR, LU_data)
        DC.Save_as_NC(Name_NC_Rivers, DataCube_Rivers, 'Rivers', LU_dataset)
        del DataCube_Rivers, Rivers_CR

    # Discharge
    # Define info for the nc files
    info = [
        'monthly', 'm3', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_NC_Routed_Discharge = DC.Create_NC_name('Routed_Discharge',
                                                 Simulation, Dir_Basin, 5,
                                                 info)
    if not os.path.exists(Name_NC_Routed_Discharge):
        # Resample the routed discharge onto the land-use grid and save as nc
        Routed_Discharge_CR = RC.Open_nc_array(Name_NC_Discharge)
        DataCube_Routed_Discharge = RC.resize_array_example(
            Routed_Discharge_CR, LU_data)
        DC.Save_as_NC(Name_NC_Routed_Discharge, DataCube_Routed_Discharge,
                      'Routed_Discharge', LU_dataset, Startdate, Enddate,
                      'monthly')
        del DataCube_Routed_Discharge, Routed_Discharge_CR

    # Get raster information
    geo_out, proj, size_X, size_Y = RC.Open_array_info(Example_dataset)

    Rivers = RC.Open_nc_array(Name_NC_Rivers_CR)

    # Create ID Matrix (1-based flat pixel index per cell, matching the
    # pixel IDs stored in the river dictionaries)
    y, x = np.indices((size_Y, size_X))
    ID_Matrix = np.int32(
        np.ravel_multi_index(np.vstack((y.ravel(), x.ravel())),
                             (size_Y, size_X),
                             mode='clip').reshape(x.shape)) + 1

    # Get tiff array time dimension:
    time_dimension = int(np.shape(Discharge_dict_CR3[0])[0])

    # create an empty array
    Result = np.zeros([time_dimension, size_Y, size_X])

    # Scatter the per-river-segment discharge time series back onto the grid
    for river_part in range(0, len(River_dict_CR2)):
        for river_pixel in range(1, len(River_dict_CR2[river_part])):
            river_pixel_ID = River_dict_CR2[river_part][river_pixel]
            if len(np.argwhere(ID_Matrix == river_pixel_ID)) > 0:
                row, col = np.argwhere(ID_Matrix == river_pixel_ID)[0][:]
                Result[:, row, col] = Discharge_dict_CR3[river_part][:,
                                                                     river_pixel]
        print(river_part)

    # NOTE(review): Outflow is computed but never used afterwards.
    Outflow = Discharge_dict_CR3[0][:, 1]

    # NOTE(review): hard-coded debug output path -- writes one tiff per month
    # to C:/testmap; looks like leftover development code.
    for i in range(0, time_dimension):
        output_name = r'C:/testmap/rtest_%s.tif' % i
        Result_one = Result[i, :, :]
        DC.Save_as_tiff(output_name, Result_one, geo_out, "WGS84")

    # NOTE(review): redundant re-import and re-computation of Dir_Basin --
    # identical to the values derived at the top of this function.
    import os

    # Get environmental variable for the Home folder
    WA_env_paths = os.environ["WA_HOME"].split(';')
    Dir_Home = WA_env_paths[0]

    # Create the Basin folder
    Dir_Basin = os.path.join(Dir_Home, Basin)

    info = [
        'monthly', 'm3-month-1', ''.join([Startdate[5:7], Startdate[0:4]]),
        ''.join([Enddate[5:7], Enddate[0:4]])
    ]
    Name_Result = DC.Create_NC_name('DischargeEnd', Simulation, Dir_Basin, 5,
                                    info)
    # Mask out zero-discharge pixels that are not part of the river network
    Result[np.logical_and(Result == 0.0, Rivers == 0.0)] = np.nan

    DC.Save_as_NC(Name_Result, Result, 'DischargeEnd', Example_dataset,
                  Startdate, Enddate, 'monthly')

    return ()
def CollectLANDSAF(SourceLANDSAF, Dir, Startdate, Enddate, latlim, lonlim):
    """
    Collect and clip LANDSAF shortwave radiation data, then write daily
    net and clear-sky shortwave rasters on the DEM grid.

    Keyword arguments:
    SourceLANDSAF -- 'C:/'  path to the LANDSAF source data (The directory includes SIS and SID)
    Dir -- 'C:/' path to the WA map
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Daily timestamps for the requested period
    Dates = pd.date_range(Startdate, Enddate, freq='D')

    # Ensure the clipped SIS and SID output directories exist
    SISdir = os.path.join(Dir, 'Landsaf_Clipped', 'SIS')
    SIDdir = os.path.join(Dir, 'Landsaf_Clipped', 'SID')
    for clipped_dir in (SISdir, SIDdir):
        if not os.path.exists(clipped_dir):
            os.makedirs(clipped_dir)

    # Clip the raw LANDSAF shortwave data to the basin extent
    ShortwaveBasin(SourceLANDSAF, Dir, latlim, lonlim,
                   Dates=[Startdate, Enddate])

    # The DEM raster is the reference grid for all reprojections below
    DEMmap_str = os.path.join(Dir, 'HydroSHED', 'DEM',
                              'DEM_HydroShed_m_3s.tif')
    geo_out, proj, size_X, size_Y = RC.Open_array_info(DEMmap_str)

    # Open DEM map and clamp negative elevations to zero
    demmap = RC.Open_tiff_array(DEMmap_str)
    demmap[demmap < 0] = 0

    # Pixel-centre latitude/longitude arrays from the geo-transform
    dlat = geo_out[5]
    dlon = geo_out[1]
    lat = geo_out[3] + (np.arange(size_Y) + 0.5) * dlat
    lon = geo_out[0] + (np.arange(size_X) + 0.5) * dlon

    for date in Dates:
        # day of year
        day = date.dayofyear

        # Terrain (slope/aspect) influence on radiation for this day
        Horizontal, Sloping, sinb, sinb_hor, fi, slope, ID = SlopeInfluence(
            demmap, lat, lon, day)

        datestamp = date.strftime('%Y-%m-%d')
        SIDname = os.path.join(SIDdir,
                               'SAF_SID_Daily_W-m2_' + datestamp + '.tif')
        SISname = os.path.join(SISdir,
                               'SAF_SIS_Daily_W-m2_' + datestamp + '.tif')

        # PREPARE SID MAPS
        SIDdest = RC.reproject_dataset_example(SIDname, DEMmap_str, method=3)
        SIDdata = SIDdest.GetRasterBand(1).ReadAsArray()

        # PREPARE SIS MAPS
        SISdest = RC.reproject_dataset_example(SISname, DEMmap_str, method=3)
        SISdata = SISdest.GetRasterBand(1).ReadAsArray()

        # Calculate ShortWave net
        Short_Wave_Net = SIDdata * (Sloping /
                                    Horizontal) + SISdata * 86400 / 1e6

        # Calculate ShortWave Clear
        Short_Wave = Sloping
        Short_Wave_Clear = Short_Wave * (0.75 + demmap * 2 * 10**-5)

        # make directories
        PathClear = os.path.join(Dir, 'Landsaf_Clipped',
                                 'Shortwave_Clear_Sky')
        if not os.path.exists(PathClear):
            os.makedirs(PathClear)
        PathNet = os.path.join(Dir, 'Landsaf_Clipped', 'Shortwave_Net')
        if not os.path.exists(PathNet):
            os.makedirs(PathNet)

        # Output names for the net and clear-sky shortwave rasters
        nameNet = os.path.join(
            PathNet, 'ShortWave_Net_Daily_W-m2_' + datestamp + '.tif')
        nameClear = os.path.join(
            PathClear, 'ShortWave_Clear_Daily_W-m2_' + datestamp + '.tif')

        # Save net and clear short wave radiation
        DC.Save_as_tiff(nameNet, Short_Wave_Net, geo_out, proj)
        DC.Save_as_tiff(nameClear, Short_Wave_Clear, geo_out, proj)
    return
def RetrieveData_monthly(Date, args):
    """This function retrieves MSWEP precipitation monthly data for a given date.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [path, url, TimeCase, xID, yID, lonlim, latlim, username, password] = args

    # Check whether the file already exist or the worldfile is downloaded
    BasinDir = path + '/P_MSWEP_mm-month_monthly_' + Date.strftime(
        '%Y.%m.%d') + '.tif'

    # Define month and year of current month
    Y = Date.year
    M = Date.month

    # Check if the outputfile already excists
    if not os.path.isfile(BasinDir):

        # Reset the begin parameters for downloading
        downloaded = 0
        N = 0

        # Create the time dimension: months since January 1979
        zID = (Y - 1979) * 12 + (M - 1)

        # define total url
        url_MSWEP = url + '.ascii?precipitation[%s][%s:1:%s][%s:1:%s]' % (
            zID, yID[0], yID[1], xID[0], xID[1])

        # BUGFIX: define the temp-file name before the retry loop; previously
        # it was assigned inside the try block, so os.remove() below raised a
        # NameError when every download attempt failed early.
        pathtext = os.path.join(path, 'temp%s.txt' % str(zID))

        # if not downloaded try to download file
        while downloaded == 0:
            try:
                # open URL; on SSL failure retry with certificate
                # verification disabled
                try:
                    dataset = requests.get(url_MSWEP,
                                           allow_redirects=False,
                                           stream=True)
                except Exception:
                    from requests.packages.urllib3.exceptions import InsecureRequestWarning
                    requests.packages.urllib3.disable_warnings(
                        InsecureRequestWarning)
                    dataset = requests.get(url_MSWEP,
                                           allow_redirects=False,
                                           stream=True,
                                           verify=False)

                # download data (first save as text file); binary mode so the
                # raw response bytes are written unchanged, and the context
                # manager closes the file even when a later step fails
                with open(pathtext, 'wb') as z:
                    z.write(dataset.content)

                # Open text file and remove header and footer
                data_start = np.genfromtxt(pathtext,
                                           dtype=float,
                                           skip_header=1,
                                           skip_footer=6,
                                           delimiter=',')
                data = data_start[1:, 1:]

                # Set Nan value for values lower than -9999
                data[data < -9998] = np.nan

                # Say that download was succesfull
                downloaded = 1

            # If download was not succesfull
            except Exception:
                data = []

                # Try another time
                N = N + 1

                # Stop trying after 10 times
                if N == 10:
                    print('Data from ' + Date.strftime('%Y-%m-%d') +
                          ' is not available')
                    downloaded = 1

        # define geo of the requested tile on the 0.1 degree MSWEP grid
        lonlimMSWEP = xID[0] * 0.10 - 180
        latlimMSWEP = yID[1] * 0.10 - 90

        # Save to geotiff file
        geo = [lonlimMSWEP, 0.1, 0, latlimMSWEP, 0, -0.1]
        DC.Save_as_tiff(name=BasinDir, data=data, geo=geo,
                        projection="WGS84")

        # Delete data and text file
        del data
        os.remove(pathtext)

    return True
def RetrieveData(args):
    """
    This function retrieves JRC data for a given date from the
    http://storage.googleapis.com/global-surface-water/downloads/
    server.

    Keyword arguments:
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, Names_to_download, lonlim, latlim] = args

    # Collect the data from the JRC webpage and returns the data and lat and long in meters of those tiles
    # best-effort: continue with whatever tiles are already on disk
    try:
        Collect_data(Names_to_download, output_folder)
    except Exception:
        print("Was not able to download the file")

    # Downloaded tiles are stored in the "Trash" subfolder (hoisted out of
    # both branches; it was computed redundantly per tile before)
    trash_folder = os.path.join(output_folder, "Trash")

    # Clip the data to the users extend
    if len(Names_to_download) == 1:
        data_in = os.path.join(trash_folder, Names_to_download[0])
        data_end, geo_end = RC.clip_data(data_in, latlim, lonlim)
    else:
        # Merge the tiles into one array covering the requested extent
        # (JRC tiles have a pixel size of 0.00025 degree)
        data_end = np.zeros([
            int((latlim[1] - latlim[0]) / 0.00025),
            int((lonlim[1] - lonlim[0]) / 0.00025)
        ])

        for Name_to_merge in Names_to_download:
            data_in = os.path.join(trash_folder, Name_to_merge)
            geo_out, proj, size_X, size_Y = RC.Open_array_info(data_in)

            # Overlap of this tile with the requested extent
            lat_min_merge = np.maximum(latlim[0],
                                       geo_out[3] + size_Y * geo_out[5])
            lat_max_merge = np.minimum(latlim[1], geo_out[3])
            lon_min_merge = np.maximum(lonlim[0], geo_out[0])
            lon_max_merge = np.minimum(lonlim[1],
                                       geo_out[0] + size_X * geo_out[1])

            lonmerge = [lon_min_merge, lon_max_merge]
            latmerge = [lat_min_merge, lat_max_merge]
            data_one, geo_one = RC.clip_data(data_in, latmerge, lonmerge)

            # Position of the clipped tile inside the output array
            Ystart = int((geo_one[3] - latlim[1]) / geo_one[5])
            Yend = int(Ystart + np.shape(data_one)[0])
            Xstart = int((geo_one[0] - lonlim[0]) / geo_one[1])
            Xend = int(Xstart + np.shape(data_one)[1])

            data_end[Ystart:Yend, Xstart:Xend] = data_one

        geo_end = tuple(
            [lonlim[0], geo_one[1], 0, latlim[1], 0, geo_one[5]])

    # Save results as Gtiff
    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')
    DC.Save_as_tiff(name=fileName_out,
                    data=data_end,
                    geo=geo_end,
                    projection='WGS84')
    shutil.rmtree(trash_folder)
    return True