def PrepareInputs(Rasteri, InputFolder, FolderName):
    """
    ================================================================
    PrepareInputs(Rasteri, InputFolder, FolderName)
    ================================================================
    this function prepares downloaded raster data so that they have the same
    alignment and NoDataValue as a GIS raster (DEM, flow accumulation, or
    flow direction raster) and returns a folder, named after the FolderName
    input, containing the output rasters.

    Inputs:
        1-Rasteri:
            [String] path to the source raster used to get the spatial
            information (coordinate system, number of rows & columns).
            The path should include the name of the raster and the
            extension, like "data/dem.tif"
        2-InputFolder:
            [String] path of the folder containing the rasters whose number
            of rows, columns and resolution (alignment) you want to adjust
            to match the source raster. The folder should not contain any
            files except the rasters.
        3-FolderName:
            [String] name of the folder to be created to store the
            resulting rasters.

    Example:
        Ex1:
            dem_path = "01GIS/inputs/4000/acc4000.tif"
            prec_in_path = "02Precipitation/CHIRPS/Daily/"
            Inputs.PrepareInputs(dem_path, prec_in_path, "prec")
        Ex2:
            dem_path = "01GIS/inputs/4000/acc4000.tif"
            outputpath = "00inputs/meteodata/4000/"
            evap_in_path = "03Weather_Data/evap/"
            Inputs.PrepareInputs(dem_path, evap_in_path, outputpath + "evap")
    """
    # input data validation
    # data type
    assert type(FolderName) == str, "FolderName input should be string type"

    # create a new folder for the aligned rasters in temp
    # check if the folder can be created
    try:
        os.makedirs(os.path.join(os.environ['TEMP'], "AllignedRasters"))
    except OSError:
        # if the folder already exists, delete it and create an empty one
        shutil.rmtree(os.path.join(os.environ['TEMP'], "AllignedRasters"))
        os.makedirs(os.path.join(os.environ['TEMP'], "AllignedRasters"))

    temp = os.environ['TEMP'] + "/AllignedRasters/"

    # match alignment
    print("First, aligned files will be created in a folder 'AllignedRasters' "
          "in the Temp folder of your environment variables")
    raster.MatchDataAlignment(Rasteri, InputFolder, temp)

    # create a new folder in the current directory for the rasters with
    # matched alignment and NoDataValue
    try:
        os.makedirs(os.path.join(os.getcwd(), FolderName))
    except OSError:
        assert False, ("The function is trying to create a folder named '"
                       + str(FolderName) + "' to complete the process; a folder "
                       "with the same name already exists, please rename it")

    # match NoDataValue
    print("Second, the NoDataValue of the DEM raster is matched to all rasters; "
          "the results will be created in the output path")
    raster.MatchDataNoValuecells(Rasteri, temp, FolderName + "/")

    # delete the processing folder from temp
    shutil.rmtree(temp)
def function(args):
    # argument: a list of two components
    # first component is the raster object [gdal object]
    A = args[0]
    # second component is the path to save the resulting raster
    path = args[1]
    # function to apply to every cell (here: absolute value)
    func = np.abs
    B = Raster.MapAlgebra(A, func)
    Raster.SaveRaster(B, path)
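"""
a minimal usage sketch: this callback is written to be passed to
Raster.FolderCalculator (as in the commented example further below), which
applies it to every raster in a folder; the folder paths here are
assumptions for illustration.
"""
import numpy as np
from Hapi.raster import Raster

folder_path = "data/meteodata_prepared/evap/"          # hypothetical input folder
new_folder_path = "data/meteodata_prepared/new_evap/"  # hypothetical output folder
Raster.FolderCalculator(folder_path, new_folder_path, function)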
def ExtractParametersBoundaries(Basin):
    """
    =====================================================
    ExtractParametersBoundaries(Basin)
    =====================================================

    Parameters
    ----------
    Basin : [Geodataframe]
        geodataframe of the catchment polygon, make sure that the
        geodataframe contains one row only; if not, merge all the polygons
        in the shapefile first.

    Returns
    -------
    UB : [list]
        list of the upper bounds of the parameters.
    LB : [list]
        list of the lower bounds of the parameters.

    the parameters are ["tt", "sfcf", "cfmax", "cwh", "cfr", "fc", "beta",
                        "lp", "k0", "k1", "k2", "uzl", "perc", "maxbas"]
    """
    ParametersPath = os.path.dirname(Hapi.__file__)
    ParametersPath = ParametersPath + "/Parameters"
    ParamList = ["tt", "sfcf", "cfmax", "cwh", "cfr", "fc", "beta",  # "rfcf", "e_corr",
                 "lp", "k0", "k1", "k2", "uzl", "perc", "maxbas"]  # , "c_flux"

    raster = rasterio.open(ParametersPath + "/max/" + ParamList[0] + "-Max.tif")
    # project the basin to the coordinate system of the parameter rasters
    Basin = Basin.to_crs(crs=raster.crs)

    # max values
    UB = list()
    for i in range(len(ParamList)):
        raster = rasterio.open(ParametersPath + "/max/" + ParamList[i] + "-Max.tif")
        array = raster.read(1)
        affine = raster.transform
        UB.append(zonal_stats(Basin, array, affine=affine, stats=['max'])[0]['max'])
        # stats=['min', 'max', 'mean', 'median', 'majority']

    # min values
    LB = list()
    for i in range(len(ParamList)):
        raster = rasterio.open(ParametersPath + "/min/" + ParamList[i] + "-Min.tif")
        array = raster.read(1)
        affine = raster.transform
        LB.append(zonal_stats(Basin, array, affine=affine, stats=['min'])[0]['min'])

    # plot the given basin over the (last read) parameter raster
    ax = show((raster, 1), with_bounds=True)
    Basin.plot(facecolor='None', edgecolor='blue', linewidth=2, ax=ax)
    # ax.set_xbound([Basin.bounds.loc[0,'minx']-10, Basin.bounds.loc[0,'maxx']+10])
    # ax.set_ybound([Basin.bounds.loc[0,'miny']-1, Basin.bounds.loc[0,'maxy']+1])

    return UB, LB
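"""
a minimal usage sketch; the shapefile path is an assumption for
illustration. The docstring requires a single-row geodataframe, so a
multi-polygon shapefile is dissolved into one row first (assuming a
geopandas version where dissolve() with no arguments merges all rows).
"""
import geopandas as gpd

Basin = gpd.read_file("data/GIS/basin.shp")  # hypothetical path
if len(Basin) > 1:
    Basin = Basin.dissolve()  # merge all polygons into a single row
UB, LB = ExtractParametersBoundaries(Basin)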
def CreateLumpedInputs(Path):
    """
    =========================================================
    CreateLumpedInputs(Path)
    =========================================================
    CreateLumpedInputs method generates lumped parameters from distributed
    parameters by taking their spatial average.

    Parameters
    ----------
    Path : [str]
        path to the folder that contains the parameter rasters.

    Returns
    -------
    data : [array]
        array containing the average values of the distributed parameters.
    """
    # data type
    assert type(Path) == str, "Path input should be string type"
    # check whether the path exists or not
    assert os.path.exists(Path), Path + " you have provided does not exist"
    # check whether the folder contains rasters or not
    assert len(os.listdir(Path)) > 0, Path + " folder you have provided is empty"

    # read data
    data = raster.ReadRastersFolder(Path)
    # average over the two spatial dimensions (rows, then columns),
    # leaving one value per parameter raster
    data = data.mean(axis=0)
    data = data.mean(axis=0)
    return data
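"""
a minimal usage sketch; the folder path is an assumption for illustration
and should contain one raster per distributed parameter.
"""
lumped_par = CreateLumpedInputs("data/parameters/avg/")  # hypothetical folder
print(lumped_par)  # one spatially averaged value per parameter raster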
def OverlayMaps(self, Path, BaseMapF, ExcludedValue, OccupiedCellsOnly, SavePath):
    """
    ==================================================================
    OverlayMaps(self, Path, BaseMapF, ExcludedValue, OccupiedCellsOnly, SavePath)
    ==================================================================
    OverlayMaps method reads all the maps in the folder given by the Path
    input and overlays them with the basemap; for each value in the basemap
    it creates a dictionary with the intersected values from all maps.

    Inputs:
        1-Path:
            [String] path to the folder including the maps.
        2-BaseMapF:
            [String] path including the name of the ASCII file and the
            extension, like path="data/cropped.asc"
        3-ExcludedValue:
            [Numeric] values you want to exclude from the extracted values.
        4-OccupiedCellsOnly:
            [Bool] if you want to count only the cells that do not equal
            the ExcludedValue.
        5-SavePath:
            [String] path to the folder where a text file is saved for each
            value in the basemap, including all the intersected values from
            the other maps.

    Outputs:
        1-ExtractedValues:
            [Dict] dictionary with the values in the basemap as keys and,
            for each key, a list of all the intersected values in the maps
            from the path.
        2-NonZeroCells:
            [dataframe] dataframe with the first column as the "file" name
            and the second column as the number of cells in each map.
    """
    self.DepthValues, NonZeroCells = Raster.OverlayMaps(Path, BaseMapF,
                                                        self.DepthPrefix,
                                                        ExcludedValue,
                                                        self.Compressed,
                                                        OccupiedCellsOnly)
    # NonZeroCells is a dataframe with the first column as the "file" name
    # and the second column as the number of cells in each map
    NonZeroCells['days'] = [int(i[len(self.DepthPrefix):-4])
                            for i in NonZeroCells['files'].tolist()]
    # get the number of inundated cells in the EventIndex dataframe
    self.EventIndex['cells'] = 0

    for i in range(len(NonZeroCells)):
        # get the location in the EventIndex dataframe
        try:
            loc = np.where(NonZeroCells.loc[i, 'days'] ==
                           self.EventIndex.loc[:, "id"])[0][0]
        except IndexError:
            # if the event is not found in the EventIndex table, ignore it
            continue
        # store the number of cells
        self.EventIndex.loc[loc, 'cells'] = NonZeroCells.loc[i, 'cells']

    # save the depths of each sub-basin
    inundatedSubs = list(self.DepthValues.keys())
    for i in range(len(inundatedSubs)):
        np.savetxt(SavePath + "/" + str(inundatedSubs[i]) + ".txt",
                   self.DepthValues[inundatedSubs[i]], fmt="%4.2f")
def ExtractParameters(src, scenario, AsRaster=False, SaveTo=''):
    """
    =====================================================
    ExtractParameters(src, scenario, AsRaster=False, SaveTo='')
    =====================================================

    Parameters
    ----------
    src : [Geodataframe]
        geodataframe of the catchment polygon, make sure that the
        geodataframe contains one row only; if not, merge all the polygons
        in the shapefile first.
    scenario : [str]
        name of the parameter set.

    Returns
    -------
    Parameters : [list]
        list of the parameter values extracted at the given catchment.
        the parameters are ["tt", "rfcf", "sfcf", "cfmax", "cwh", "cfr",
        "fc", "beta", "etf", "lp", "k0", "k1", "k2", "uzl", "perc",
        "maxbas", "K_muskingum", "x_muskingum"]
    """
    ParametersPath = os.path.dirname(Hapi.__file__)
    ParametersPath = ParametersPath + "/Parameters/" + scenario
    ParamList = ["tt", "rfcf", "sfcf", "cfmax", "cwh", "cfr", "fc", "beta",
                 "etf", "lp", "k0", "k1", "k2", "uzl", "perc", "maxbas",
                 "K_muskingum", "x_muskingum"]

    if not AsRaster:
        raster = rasterio.open(ParametersPath + "/" + ParamList[0] + ".tif")
        # project the basin to the coordinate system of the parameter rasters
        src = src.to_crs(crs=raster.crs)

        Par = list()
        for i in range(len(ParamList)):
            raster = rasterio.open(ParametersPath + "/" + ParamList[i] + ".tif")
            array = raster.read(1)
            affine = raster.transform
            Par.append(zonal_stats(src, array, affine=affine,
                                   stats=['max'])[0]['max'])
            # stats=['min', 'max', 'mean', 'median', 'majority']

        # plot the given basin over the (last read) parameter raster
        ax = show((raster, 1), with_bounds=True)
        src.plot(facecolor='None', edgecolor='blue', linewidth=2, ax=ax)
        # ax.set_xbound([Basin.bounds.loc[0,'minx']-10, Basin.bounds.loc[0,'maxx']+10])
        # ax.set_ybound([Basin.bounds.loc[0,'miny']-1, Basin.bounds.loc[0,'maxy']+1])

        return Par
    else:
        Inputs.PrepareInputs(src, ParametersPath + "/", SaveTo)
new_folder_path = "data/meteodata_prepared/new_evap/" # Raster.FolderCalculator(folder_path,new_folder_path,function) """ in order to run the model all inputs have to have the same number of rows and columns for this purpose MatchRasterAlignment function was made to resample, change the coordinate system of the second raster and give it the same alignment like a source raster (DEM raster) """ soil_path = "Data/GIS/soil/4000/soil_raster.tif" DEM = gdal.Open(dem_path) dem_A = DEM.ReadAsArray() soil = gdal.Open(soil_path) soil_A = soil.ReadAsArray() # align aligned_soil = Raster.MatchRasterAlignment(DEM, soil) # to check alignment of DEM raster compared to aligned_soil_A raster aligned_soil_A = aligned_soil.ReadAsArray() # nodatavalue is still different and some cells are no data value in the soil type raster but it is not in the dem raster # to match use Match MatchNoDataValue # match dst_Aligned_M = Raster.MatchNoDataValue(DEM, aligned_soil) dst_Aligned_M_A = dst_Aligned_M.ReadAsArray() # save the new raster Raster.SaveRaster(dst_Aligned_M, "Data/GIS/soil/4000/soil_type.tif") #Raster.SaveRaster(dst_Aligned_M,"00inputs/GIS/4000/soil_typeِِA.tif")
def ExtractParameters(src, scenario, AsRaster=False, SaveTo=''):
    """
    =====================================================
    ExtractParameters(src, scenario, AsRaster=False, SaveTo='')
    =====================================================
    ExtractParameters method extracts the parameter rasters at the location
    of the source raster. There are 13 parameter sets: 10 sets from
    Beck et al. (2016) plus the max, min and average of all sets.

    Beck, H. E., van Dijk, A. I. J. M., de Roo, A., Miralles, D. G.,
    McVicar, T. R., Schellekens, J., & Bruijnzeel, L. A. (2016).
    Global-scale regionalization of hydrologic model parameters.
    Water Resources Research, 52, 3599-3622. doi:10.1002/2015WR018247

    Parameters
    ----------
    src : [Geodataframe]
        geodataframe of the catchment polygon, make sure that the
        geodataframe contains one row only; if not, merge all the polygons
        in the shapefile first.
    scenario : [str]
        name of the parameter set, one of
        ["1","2","3","4","5","6","7","8","9","10","avg","max","min"]

    Returns
    -------
    Parameters : [list]
        list of the parameter values extracted at the given catchment.
        the parameters are ["tt", "rfcf", "sfcf", "cfmax", "cwh", "cfr",
        "fc", "beta", "etf", "lp", "k0", "k1", "k2", "uzl", "perc",
        "maxbas", "K_muskingum", "x_muskingum"]
    """
    ParametersPath = os.path.dirname(Hapi.__file__)
    ParametersPath = ParametersPath + "/Parameters/" + scenario
    ParamList = ["01_tt", "02_rfcf", "03_sfcf", "04_cfmax", "05_cwh",
                 "06_cfr", "07_fc", "08_beta", "09_etf", "10_lp", "11_k0",
                 "12_k1", "13_k2", "14_uzl", "15_perc", "16_maxbas",
                 "17_K_muskingum", "18_x_muskingum"]

    if not AsRaster:
        raster = rasterio.open(ParametersPath + "/" + ParamList[0] + ".tif")
        # project the basin to the coordinate system of the parameter rasters
        src = src.to_crs(crs=raster.crs)

        Par = list()
        for i in range(len(ParamList)):
            raster = rasterio.open(ParametersPath + "/" + ParamList[i] + ".tif")
            array = raster.read(1)
            affine = raster.transform
            Par.append(zonal_stats(src, array, affine=affine,
                                   stats=['max'])[0]['max'])
            # stats=['min', 'max', 'mean', 'median', 'majority']

        # plot the given basin over the (last read) parameter raster
        ax = show((raster, 1), with_bounds=True)
        src.plot(facecolor='None', edgecolor='blue', linewidth=2, ax=ax)
        # ax.set_xbound([Basin.bounds.loc[0,'minx']-10, Basin.bounds.loc[0,'maxx']+10])
        # ax.set_ybound([Basin.bounds.loc[0,'miny']-1, Basin.bounds.loc[0,'maxy']+1])

        return Par
    else:
        Inputs.PrepareInputs(src, ParametersPath + "/", SaveTo)
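"""
a minimal usage sketch; the paths are assumptions for illustration. With
AsRaster=False the function returns a list of zonal values for a basin
polygon; with AsRaster=True it hands src to Inputs.PrepareInputs, which
expects a raster path, and writes the aligned parameter rasters to SaveTo.
"""
import geopandas as gpd

src = gpd.read_file("data/GIS/basin.shp")  # hypothetical path
Par = ExtractParameters(src, "avg")
# as rasters instead, aligned to a source raster (hypothetical paths):
# ExtractParameters("data/GIS/acc4000.tif", "avg", AsRaster=True,
#                   SaveTo="data/parameters/avg")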
def ExtractParametersBoundaries(Basin):
    """
    =====================================================
    ExtractParametersBoundaries(Basin)
    =====================================================

    Parameters
    ----------
    Basin : [Geodataframe]
        geodataframe of the catchment polygon, make sure that the
        geodataframe contains one row only; if not, merge all the polygons
        in the shapefile first.

    Returns
    -------
    Par : [DataFrame]
        dataframe indexed by parameter name with two columns, "UB" (upper
        bound) and "LB" (lower bound).

    the parameters are ["tt", "rfcf", "sfcf", "cfmax", "cwh", "cfr", "fc",
                        "beta", "etf", "lp", "k0", "k1", "k2", "uzl",
                        "perc", "maxbas", "K_muskingum", "x_muskingum"]
    """
    ParametersPath = os.path.dirname(Hapi.__file__)
    ParametersPath = ParametersPath + "/Parameters"
    ParamList = ["01_tt", "02_rfcf", "03_sfcf", "04_cfmax", "05_cwh",
                 "06_cfr", "07_fc", "08_beta", "09_etf", "10_lp", "11_k0",
                 "12_k1", "13_k2", "14_uzl", "15_perc", "16_maxbas",
                 "17_K_muskingum", "18_x_muskingum"]

    raster = rasterio.open(ParametersPath + "/max/" + ParamList[0] + ".tif")
    # project the basin to the coordinate system of the parameter rasters
    Basin = Basin.to_crs(crs=raster.crs)

    # max values
    UB = list()
    for i in range(len(ParamList)):
        raster = rasterio.open(ParametersPath + "/max/" + ParamList[i] + ".tif")
        array = raster.read(1)
        affine = raster.transform
        UB.append(zonal_stats(Basin, array, affine=affine, stats=['max'])[0]['max'])
        # stats=['min', 'max', 'mean', 'median', 'majority']

    # min values
    LB = list()
    for i in range(len(ParamList)):
        raster = rasterio.open(ParametersPath + "/min/" + ParamList[i] + ".tif")
        array = raster.read(1)
        affine = raster.transform
        LB.append(zonal_stats(Basin, array, affine=affine, stats=['min'])[0]['min'])

    Par = pd.DataFrame(index=ParamList)
    Par['UB'] = UB
    Par['LB'] = LB

    # plot the given basin over the (last read) parameter raster
    ax = show((raster, 1), with_bounds=True)
    Basin.plot(facecolor='None', edgecolor='blue', linewidth=2, ax=ax)
    # ax.set_xbound([Basin.bounds.loc[0,'minx']-10, Basin.bounds.loc[0,'maxx']+10])
    # ax.set_ybound([Basin.bounds.loc[0,'miny']-1, Basin.bounds.loc[0,'maxy']+1])

    return Par
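"""
a minimal usage sketch; the shapefile path is an assumption for
illustration. This version returns a dataframe, so the bounds can be
looked up by parameter name.
"""
import geopandas as gpd

Basin = gpd.read_file("data/GIS/basin.shp")  # hypothetical path
Par = ExtractParametersBoundaries(Basin)
print(Par.loc["07_fc", "UB"], Par.loc["07_fc", "LB"])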
make sure to change the directory to the Examples folder in the repo
"""
from Hapi.raster import Raster
import rasterio
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import os

ParentPath = "F:/Users/mofarrag/Documents/01Algorithms/HAPI/Examples/"
#%% Netcdf file that contains only one layer
FileName = ParentPath + "/data/GIS/MSWEP_1979010100.nc"
SaveTo = ParentPath + "/data/GIS/"
VarName = None
Raster.NCtoTiff(FileName, SaveTo, Separator="_")

#%% plot the converted GeoTiff
src = rasterio.open(SaveTo + "MSWEP_1979010100.tif")
fig = plt.figure(figsize=(12, 8))
im = plt.imshow(src.read(1) / 100.0, cmap="gist_rainbow")
plt.title("MSWEP precipitation 1979-01-01")
divider = make_axes_locatable(plt.gca())
cax = divider.append_axes("right", "5%", pad="3%")
plt.colorbar(im, cax=cax)
plt.tight_layout()
plt.show()

#%% Netcdf file that contains multiple layers
FileName = ParentPath + "/data/GIS/precip.1979.nc"
SaveTo = ParentPath + "/data/GIS/Save_prec_netcdf_multiple/"
"15_perc", "16_maxbas", "17_K_muskingum", "18_x_muskingum" ] SaveTo = Comp + "01Algorithms/HAPI/Hapi/Parameters/" # par = "UZL" for i in range(len(ParamList)): Path = list() for j in range(0, 10): if j < 9: folder = "0" + str(j + 1) else: folder = str(j + 1) Path.append(Comp + "01Algorithms/HAPI/Hapi/Parameters/" + folder + "/" + ParamList[i] + ".tif") parameters = R.ReadRastersFolder(Path, WithOrder=False) MaxValue = parameters.max(axis=2) MinValue = parameters.min(axis=2) MeanValue = parameters.mean(axis=2) # Path1 = path + "/" + ParamList[i] + "-1.tif" src = gdal.Open(Path[0]) Saveto1 = SaveTo + "/max/" + ParamList[i] + ".tif" Saveto2 = SaveTo + "/min/" + ParamList[i] + ".tif" Saveto3 = SaveTo + "/avg/" + ParamList[i] + ".tif" R.RasterLike(src, MaxValue, Saveto1, pixel_type=1) R.RasterLike(src, MinValue, Saveto2, pixel_type=1) R.RasterLike(src, MeanValue, Saveto3, pixel_type=1)
def Dist_HBV2(ConceptualModel, lakecell, q_lake, DEM, flow_acc, flow_acc_plan,
              sp_prec, sp_et, sp_temp, sp_pars, p2, init_st=None,
              ll_temp=None, q_0=None):
    """ original function """
    n_steps = sp_prec.shape[2] + 1  # no of time steps = length of time series + 1
    # initialise vector of nans to fill states
    dummy_states = np.empty([n_steps, 5])  # [sp, sm, uz, lz, wc]
    dummy_states[:] = np.nan

    # Get the mask
    mask, no_val = raster.get_mask(DEM)
    # shape of the fpl raster (rows, columns) -- rows are x and columns are y
    x_ext, y_ext = mask.shape
    # y_ext, x_ext = mask.shape  # should change: rows are y and columns are x

    # Get deltas of pixel
    # get the coordinates of the top left corner and cell size [x, dx, y, dy]
    geo_trans = DEM.GetGeoTransform()
    dx = np.abs(geo_trans[1]) / 1000.0  # dx in km
    dy = np.abs(geo_trans[-1]) / 1000.0  # dy in km
    px_area = dx * dy  # area of the cell

    # Enumerate the total number of pixels in the catchment
    # go row by row and count the cells inside the domain
    tot_elem = np.sum(np.sum([[1 for elem in mask_i if elem != no_val]
                              for mask_i in mask]))
    # total pixel area
    px_tot_area = tot_elem * px_area

    st = []   # spatially distributed states
    qlz = []
    quz = []
    # --------------------------------------------------------------------------
    for x in range(x_ext):  # no of rows
        st_i = []
        q_lzi = []
        q_uzi = []
        # q_out_i = []
        # run all cells in one row ---------------------------------------------
        for y in range(y_ext):  # no of columns
            if mask[x, y] != no_val:  # only for cells in the domain
                # Calculate the states per cell
                # TODO optimise these loops for multiprocessing
                # _, _st, _uzg, _lzg = ConceptualModel.simulate_new_model(avg_prec=sp_prec[x, y, :], ...)
                _, _st, _uzg, _lzg = ConceptualModel.Simulate(
                    prec=sp_prec[x, y, :],
                    temp=sp_temp[x, y, :],
                    et=sp_et[x, y, :],
                    par=sp_pars[x, y, :],
                    p2=p2,
                    init_st=init_st,
                    ll_temp=None,
                    q_0=q_0,
                    snow=0)  # extra_out=True
                # append column after column in the same row
                st_i.append(np.array(_st))
                # calculate lower zone discharge Q = K1 * LZ
                q_lz_temp = np.array(sp_pars[x, y, 6]) * _lzg
                q_lzi.append(q_lz_temp)
                # calculate upper zone discharge Q = k * UZ ** (1 + alpha)
                q_uz_temp = np.array(sp_pars[x, y, 5]) * (
                    np.power(_uzg, (1.0 + sp_pars[x, y, 7])))
                q_uzi.append(q_uz_temp)
                # print("total = " + str(fff) + "/" + str(tot_elem) + " cell, row= "
                #       + str(x + 1) + " column= " + str(y + 1))
            else:  # if the cell is no-value -----------------------------------
                # fill the empty cells with a nan vector
                st_i.append(dummy_states)  # all 5 states for all time steps = nan
                q_lzi.append(dummy_states[:, 0])  # lower zone q for all time steps = nan
                q_uzi.append(dummy_states[:, 0])  # upper zone q for all time steps = nan
        # store row by row ------------------------------------------------------
        st.append(st_i)  # state variables
        qlz.append(np.array(q_lzi))  # lower zone discharge mm/timestep
        quz.append(np.array(q_uzi))  # upper zone discharge mm/timestep
    # ----------------------------------------------------------------------------
    # convert to arrays
    st = np.array(st)
    qlz = np.array(qlz)
    quz = np.array(quz)

    #%% convert quz from mm/timestep to m3/sec
    area_coef = p2[1] / px_tot_area
    quz = quz * px_area * area_coef / (p2[0] * 3.6)

    no_cells = list(set([flow_acc_plan[i, j]
                         for i in range(x_ext) for j in range(y_ext)
                         if not np.isnan(flow_acc_plan[i, j])]))
    # no_cells = list(set([int(flow_acc_plan[i, j]) for i in range(x_ext)
    #                      for j in range(y_ext) if flow_acc_plan[i, j] != no_val]))
    no_cells.sort()

    #%% route the lake discharge with the DS cell k & x and add it to the cell Q
    q_lake = routing.Muskingum_V(q_lake, q_lake[0],
                                 sp_pars[lakecell[0], lakecell[1], 10],
                                 sp_pars[lakecell[0], lakecell[1], 11], p2[0])
    q_lake = np.append(q_lake, q_lake[-1])
    # both lake & quz are in m3/s
    quz[lakecell[0], lakecell[1], :] = quz[lakecell[0], lakecell[1], :] + q_lake

    #%% cells at the divide
    quz_routed = np.zeros_like(quz) * np.nan
    # for all cells with flow accumulation 0, keep quz as it is
    for x in range(x_ext):  # no of rows
        for y in range(y_ext):  # no of columns
            if mask[x, y] != no_val and flow_acc_plan[x, y] == 0:
                quz_routed[x, y, :] = quz[x, y, :]
    #%% route the cells in increasing order of flow accumulation
    for j in range(1, len(no_cells)):
        for x in range(x_ext):  # no of rows
            for y in range(y_ext):  # no of columns
                # check the total flow accumulation
                if mask[x, y] != no_val and flow_acc_plan[x, y] == no_cells[j]:
                    q_r = np.zeros(n_steps)
                    for i in range(len(flow_acc[str(x) + "," + str(y)])):
                        # bring the indexes of the US cell
                        x_ind = flow_acc[str(x) + "," + str(y)][i][0]
                        y_ind = flow_acc[str(x) + "," + str(y)][i][1]
                        # sum the Q of the US cells (already routed for their own cells)
                        # route first with their own k & x, then sum
                        q_r = q_r + routing.Muskingum_V(
                            quz_routed[x_ind, y_ind, :],
                            quz_routed[x_ind, y_ind, 0],
                            sp_pars[x_ind, y_ind, 10],
                            sp_pars[x_ind, y_ind, 11], p2[0])
                    # q = q_r
                    # add the routed upstream flows to the current quz of the cell
                    quz_routed[x, y, :] = quz[x, y, :] + q_r
    #%% check that the max flow_acc is at the outlet
    # if tot_elem != np.nanmax(flow_acc_plan):
    #     raise ValueError("flow accumulation plan is not correct")
    # the outlet is the cell that has the max flow_acc
    outlet = np.where(flow_acc_plan == np.nanmax(flow_acc_plan))
    outletx = outlet[0][0]
    outlety = outlet[1][0]
    #%%
    # average of all cells (not routed, mm/timestep)
    qlz = np.array([np.nanmean(qlz[:, :, i]) for i in range(n_steps)])
    # convert qlz to m3/sec
    qlz = qlz * p2[1] / (p2[0] * 3.6)

    qout = qlz + quz_routed[outletx, outlety, :]

    return qout, st, quz_routed, qlz, quz
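"""
a minimal sketch of the Muskingum routing call used above, with made-up
inflow, k, x and time-step values; the import path `from Hapi import routing`
and the argument order Muskingum_V(inflow, initial value, k, x, dt) are
assumed from the calls inside Dist_HBV2, not from separate documentation.
"""
import numpy as np
from Hapi import routing

inflow = np.array([1.0, 5.0, 12.0, 8.0, 4.0, 2.0])  # hypothetical hydrograph [m3/s]
k = 2.0   # hypothetical storage constant [hr]
x = 0.2   # hypothetical weighting factor [-]
dt = 1.0  # hypothetical time step [hr]
outflow = routing.Muskingum_V(inflow, inflow[0], k, x, dt)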
def Histogram(self, Day, ExcludeValue, OccupiedCellsOnly, Map=1,
              filter1=0.2, filter2=15):
    """
    ==================================================================
    Histogram(Day, ExcludeValue, OccupiedCellsOnly, Map = 1)
    ==================================================================
    Histogram method extracts values from the event MaxDepth map and plots
    the histogram; the method first checks whether the values have already
    been extracted and, if so, plots the histogram directly.

    Parameters
    ----------
    1-Day : [Integer]
        event day to plot the histogram for.
    2-ExcludeValue : [Integer]
        value to exclude from the extracted values.
    3-OccupiedCellsOnly : [Bool]
        if True, count only the cells that do not equal the ExcludeValue.
    4-Map : [integer], optional
        1 for the max depth maps, 2 for the duration map, 3 for the
        return period maps. The default is 1.
    5-filter1 : [float], optional
        lower bound to filter the extracted values. The default is 0.2.
    6-filter2 : [float], optional
        upper bound to filter the extracted values. The default is 15.

    Returns
    -------
    n, bins, patches as returned by matplotlib's hist.
    """
    # check if the object has the attribute ExtractedValues
    if hasattr(self, 'ExtractedValues'):
        # get the list of events for which the object has extracted values
        if Day not in list(self.ExtractedValues.keys()):
            # depth map
            if Map == 1:
                Path = self.TwoDResultPath + self.DepthPrefix + str(Day) + ".zip"
            elif Map == 2:
                Path = self.TwoDResultPath + self.DurationPrefix + str(Day) + ".zip"
            else:
                Path = self.TwoDResultPath + self.ReturnPeriodPrefix + str(Day) + ".zip"

            ExtractedValues, NonZeroCells = Raster.ExtractValues(
                Path, ExcludeValue, self.Compressed, OccupiedCellsOnly)
            self.ExtractedValues[Day] = ExtractedValues

        ExtractedValues = self.ExtractedValues[Day]
        # filter values
        ExtractedValues = [j for j in ExtractedValues if j > filter1]
        ExtractedValues = [j for j in ExtractedValues if j < filter2]
        # plot
        # fig, ax1 = plt.subplots(figsize=(10, 8))
        # ax1.hist(ExtractedValues, bins=15, alpha=0.4)  # width=0.2,
        n, bins, patches = plt.hist(x=ExtractedValues, bins=15,
                                    color="#0504aa", alpha=0.7, rwidth=0.85)
        plt.grid(axis='y', alpha=0.75)
        plt.xlabel('Value', fontsize=15)
        plt.ylabel('Frequency', fontsize=15)
        plt.xticks(fontsize=15)
        plt.yticks(fontsize=15)
        plt.tight_layout()
        # plt.title('Normal Distribution Histogram matplotlib', fontsize=15)
        plt.show()
        return n, bins, patches
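"""
a minimal usage sketch, assuming `Event` is an instance of the class this
method belongs to, with TwoDResultPath, DepthPrefix and Compressed already
set; the day number is an assumption for illustration.
"""
n, bins, patches = Event.Histogram(Day=35, ExcludeValue=0,
                                   OccupiedCellsOnly=False, Map=1)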
def DeleteBasins(basins, pathout):
    """
    ===========================================================
    DeleteBasins(basins, pathout)
    ===========================================================
    this function deletes all the basins in a basin raster created when
    delineating a catchment and leaves only the first basin, which is the
    biggest basin in the raster.

    Inputs:
    ----------
        1-basins:
            [gdal.Dataset] raster you create during the delineation of a
            catchment; the values of its cells are the numbers of the
            basins they belong to
        2-pathout:
            [String] path you want to save the resulting raster to; it
            should include the extension ".tif"

    Outputs:
    ----------
        1-raster with only one basin (the basin whose number is 1)

    Example:
    ----------
        basins = gdal.Open("Data/basins.tif")
        pathout = "mask.tif"
        DeleteBasins(basins, pathout)
    """
    # input data validation
    # data type
    assert type(pathout) == str, "A_path input should be string type"
    assert type(basins) == gdal.Dataset, ("basins raster should be read using "
                                          "gdal (gdal dataset; please read it "
                                          "using the gdal library)")

    # input values
    # check whether the user wrote the extension of the raster or not
    ext = pathout[-4:]
    assert ext == ".tif", "please add the extension at the end of the path input"

    # get number of rows
    rows = basins.RasterYSize
    # get number of columns
    cols = basins.RasterXSize
    # array
    basins_A = basins.ReadAsArray()
    # no data value
    no_val = np.float32(basins.GetRasterBand(1).GetNoDataValue())
    # get the number of basins and their names
    basins_val = list(set([int(basins_A[i, j]) for i in range(rows)
                           for j in range(cols) if basins_A[i, j] != no_val]))

    # keep the first basin and delete the others by filling their cells
    # with the nodata value
    for i in range(rows):
        for j in range(cols):
            if basins_A[i, j] != no_val and basins_A[i, j] != basins_val[0]:
                basins_A[i, j] = no_val

    raster.RasterLike(basins, basins_A, pathout)
""" # import os import numpy as np from Hapi.raster import Raster as R Path = "data/GIS/ZonalStatistics/" SavePath = Path BaseMapF = Path + "Polygons.tif" ExcludedValue = 0 Compressed = True OccupiedCellsOnly = False # one map ExtractedValues, NonZeroCells = R.OverlayMap(Path + "data/Map1.zip", BaseMapF, ExcludedValue, Compressed, OccupiedCellsOnly) MapPrefix = "Map" # several maps ExtractedValues, NonZeroCells = R.OverlayMaps(Path + "data", BaseMapF, MapPrefix, ExcludedValue, Compressed, OccupiedCellsOnly) # save extracted values in different files Polygons = list(ExtractedValues.keys()) for i in range(len(Polygons)): np.savetxt(SavePath + "/" + str(Polygons[i]) + ".txt", ExtractedValues[Polygons[i]], fmt="%4.2f")
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 27 19:09:20 2021

@author: mofarrag

Make sure the working directory is set to the examples folder in the Hapi repo
current_work_directory = Hapi/Example
"""
from Hapi.raster import Raster
import gdal

dem_path = "Data/GIS/Hapi_GIS_Data/acc4000.tif"
SaveTo = "data/parameters/"
#%% create a raster identical to the DEM and fill it with 1
K = 1
src = gdal.Open(dem_path)
Raster.RasterFill(src, K, SaveTo + '11_K_muskingum.tif')
#%%
X = 0.2
Raster.RasterFill(src, X, SaveTo + '12_X_muskingum.tif')
def DownloadData(self, Var, Waitbar):
    """
    This function downloads ECMWF six-hourly, daily or monthly data.

    Keyword arguments:
    Var -- name of the variable to download
    Waitbar -- 1 to print a progress bar during the download
    """
    # Load factors / unit / type of variables / accounts
    VarInfo = Variables(self.Time)
    Varname_dir = VarInfo.file_name[Var]

    # Create output directory
    out_dir = os.path.join(self.Path, self.Time, Varname_dir)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    DownloadType = VarInfo.DownloadType[Var]

    if DownloadType == 1:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'sfc'
        string8 = 'an'

    if DownloadType == 2:
        string1 = 'oper'
        string4 = "12"
        string6 = "00:00:00/12:00:00"
        string2 = 'sfc'
        string8 = 'fc'

    if DownloadType == 3:
        string1 = 'oper'
        string4 = "0"
        string6 = "00:00:00/06:00:00/12:00:00/18:00:00"
        string2 = 'pl'
        string8 = 'an'

    parameter_number = VarInfo.number_para[Var]

    string3 = '%03d.128' % (parameter_number)
    string5 = '0.125/0.125'
    string9 = 'ei'
    string10 = '%s/%s/%s/%s' % (self.latlim_corr[1], self.lonlim_corr[0],
                                self.latlim_corr[0], self.lonlim_corr[1])  # N, W, S, E

    # Download data by using the ECMWF API
    print('Use API ECMWF to collect the data, please wait')
    RemoteSensing.API(self.Path, DownloadType, string1, string2, string3,
                      string4, string5, string6, self.string7, string8,
                      string9, string10)

    # Open the downloaded data
    NC_filename = os.path.join(self.Path, 'data_interim.nc')
    fh = Dataset(NC_filename, mode='r')

    # Get the NC variable parameter
    parameter_var = VarInfo.var_name[Var]
    Var_unit = VarInfo.units[Var]
    factors_add = VarInfo.factors_add[Var]
    factors_mul = VarInfo.factors_mul[Var]

    # Open the NC data
    Data = fh.variables[parameter_var][:]
    Data_time = fh.variables['time'][:]
    lons = fh.variables['longitude'][:]
    lats = fh.variables['latitude'][:]

    # Define the georeference information
    Geo_four = np.nanmax(lats)
    Geo_one = np.nanmin(lons)
    Geo_out = tuple([Geo_one, 0.125, 0.0, Geo_four, 0.0, -0.125])

    # Create Waitbar
    if Waitbar == 1:
        total_amount = len(self.Dates)
        amount = 0
        weirdFn.printWaitBar(amount, total_amount, prefix='Progress:',
                             suffix='Complete', length=50)

    for date in self.Dates:
        # Define the year, month and day
        year = date.year
        month = date.month
        day = date.day

        # Hours since 1900-01-01
        start = dt.datetime(year=1900, month=1, day=1)
        end = dt.datetime(year, month, day)
        diff = end - start
        hours_from_start_begin = diff.total_seconds() / 60 / 60

        Date_good = np.zeros(len(Data_time))

        if self.Time == 'daily':
            days_later = 1
        if self.Time == 'monthly':
            days_later = calendar.monthrange(year, month)[1]

        # mark the time steps that fall within the current day/month
        Date_good[np.logical_and(
            Data_time >= hours_from_start_begin,
            Data_time < (hours_from_start_begin + 24 * days_later))] = 1

        Data_one = np.zeros([int(np.sum(Date_good)), int(np.size(Data, 1)),
                             int(np.size(Data, 2))])
        Data_one = Data[np.int_(Date_good) == 1, :, :]

        # apply the multiplicative/additive unit factors and average over
        # the selected time steps
        Data_end = factors_mul * np.nanmean(Data_one, 0) + factors_add

        if VarInfo.types[Var] == 'flux':
            Data_end = Data_end * days_later

        VarOutputname = VarInfo.file_name[Var]

        # Define the output name
        name_out = os.path.join(out_dir,
                                "%s_ECMWF_ERA-Interim_%s_%s_%d.%02d.%02d.tif"
                                % (VarOutputname, Var_unit, self.Time,
                                   year, month, day))

        # Create Tiff files
        # Raster.Save_as_tiff(name_out, Data_end, Geo_out, "WGS84")
        Raster.CreateRaster(Path=name_out, data=Data_end, geo=Geo_out,
                            EPSG="WGS84")

        if Waitbar == 1:
            amount = amount + 1
            weirdFn.printWaitBar(amount, total_amount, prefix='Progress:',
                                 suffix='Complete', length=50)

    fh.close()

    return ()
#%% ### Read the input rasters
# the source raster is of the ASCII format
src = gdal.Open(RasterAPath)
src_Array = src.ReadAsArray()
print("Shape of source raster = " + str(src_Array.shape))

# read the destination raster
dst = gdal.Open(RasterBPath)
Dst_Array = dst.ReadAsArray()
print("Shape of destination raster before matching = " + str(Dst_Array.shape))

### Match the alignment of both rasters
NewRasterB = Raster.MatchRasterAlignment(src, dst)
NewRasterB_array = NewRasterB.ReadAsArray()
print("Shape of destination raster after matching = " + str(NewRasterB_array.shape))

message = "Error: the shape of the resulting raster does not match the source raster"
assert (NewRasterB_array.shape[0] == src_Array.shape[0]
        and NewRasterB_array.shape[1] == src_Array.shape[1]), message

### Match the NoDataValue
NewRasterB_ND = Raster.MatchNoDataValue(src, NewRasterB)
NoDataValue = NewRasterB_ND.GetRasterBand(1).GetNoDataValue()
def RetrieveData(Date, args):
    """
    This function retrieves CHIRPS data for a given date from the
    https://data.chc.ucsb.edu/ server (formerly ftp://chg-ftpout.geog.ucsb.edu).

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, TimeCase, xID, yID, lonlim, latlim] = args

    # open ftp server
    # ftp = FTP("chg-ftpout.geog.ucsb.edu", "", "")
    ftp = FTP("data.chc.ucsb.edu")
    ftp.login()

    # Define FTP path to directory
    if TimeCase == 'daily':
        pathFTP = ('pub/org/chg/products/CHIRPS-2.0/global_daily/tifs/p05/%s/'
                   % Date.strftime('%Y'))
    elif TimeCase == 'monthly':
        pathFTP = 'pub/org/chg/products/CHIRPS-2.0/global_monthly/tifs/'
    else:
        raise KeyError("The input time interval is not supported")

    # find the document name in this directory
    ftp.cwd(pathFTP)
    listing = []

    # read all the file names in the directory
    ftp.retrlines("LIST", listing.append)

    # create the input name (filename) and output names (outfilename, DirFileEnd)
    if TimeCase == 'daily':
        filename = 'chirps-v2.0.%s.%02s.%02s.tif.gz' % (
            Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d'))
        outfilename = os.path.join(output_folder, 'chirps-v2.0.%s.%02s.%02s.tif' % (
            Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
        DirFileEnd = os.path.join(
            output_folder, 'P_CHIRPS.v2.0_mm-day-1_daily_%s.%02s.%02s.tif' % (
                Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
    elif TimeCase == 'monthly':
        filename = 'chirps-v2.0.%s.%02s.tif.gz' % (
            Date.strftime('%Y'), Date.strftime('%m'))
        outfilename = os.path.join(output_folder, 'chirps-v2.0.%s.%02s.tif' % (
            Date.strftime('%Y'), Date.strftime('%m')))
        DirFileEnd = os.path.join(
            output_folder, 'P_CHIRPS.v2.0_mm-month-1_monthly_%s.%02s.%02s.tif' % (
                Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
    else:
        raise KeyError("The input time interval is not supported")

    # download the global rainfall file
    try:
        local_filename = os.path.join(output_folder, filename)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write, 8192)
        lf.close()

        # unzip the file
        zip_filename = os.path.join(output_folder, filename)
        Raster.ExtractFromGZ(zip_filename, outfilename, delete=True)

        # open tiff file
        dataset, NoDataValue = Raster.GetRasterData(outfilename)

        # clip dataset to the given extent
        data = dataset[yID[0]:yID[1], xID[0]:xID[1]]
        # replace negative values with -9999
        data[data < 0] = -9999

        # save dataset as geotiff file
        geo = [lonlim[0], 0.05, 0, latlim[1], 0, -0.05]
        Raster.CreateRaster(Path=DirFileEnd, data=data, geo=geo, EPSG="WGS84",
                            NoDataValue=NoDataValue)

        # delete old tif file
        os.remove(outfilename)
    except Exception:
        print("file does not exist")
    return True
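"""
a minimal usage sketch with made-up clipping indices and extents; the args
list follows the order documented above:
[output_folder, TimeCase, xID, yID, lonlim, latlim]
"""
import datetime as dt

args = ["data/CHIRPS/daily",  # hypothetical output folder
        "daily",              # TimeCase
        [100, 200],           # hypothetical xID clipping indices
        [300, 400],           # hypothetical yID clipping indices
        [-75.0, -70.0],       # lonlim
        [0.0, 5.0]]           # latlim
RetrieveData(dt.date(2000, 1, 1), args)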
                PlotNumbers=False,
                TicksSpacing=1,
                Interval=10,
                Gauges=False,
                cmap='inferno',
                Textloc=[0.6, 0.8],
                Gaugecolor='red',
                ColorScale=2,
                IDcolor='blue',
                IDsize=25,
                gamma=0.08)
#%%
Path = SaveTo + "anim.mov"
Jiboa.SaveAnimation(VideoFormat="mov", Path=Path, SaveFrames=3)
#%% store the results into rasters
# create a list of names
src = gdal.Open(FlowAccPath)
# index = pd.date_range(Jiboa.StartDate, Jiboa.EndDate, freq="1H")
resultspath = "results/upper_zone_discharge/4000/"
names = [resultspath + str(i)[:-6] for i in Jiboa.Index]
names = [i.replace("-", "_") for i in names]
names = [i.replace(" ", "_") for i in names]
names = [i + ".tif" for i in names]
"""
the next line saves the distributed upper zone discharge in raster form
"""
Raster.RastersLike(src, q_uz_routed[:, :, :-1], names)
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 26 21:44:12 2021

@author: mofarrag
"""
import os

Comp = "F:/Users/mofarrag/"
os.chdir(Comp + "/Coello/HAPI/Data")
from osgeo import gdal
# from gdalconst import GA_ReadOnly
import osr
from osgeo import gdalconst
from Hapi.raster import Raster

SourceRasterPath = "00inputs/GIS/4000/acc4000.tif"
RasterTobeClippedPath = Comp + "/Documents/01Algorithms/HAPI/Hapi/Parameters/01/Par_BETA.tif"
output = 'F:/Users/mofarrag/coello/Hapi/Data/output.tif'
#%%
Raster.ClipRasterWithRaster(RasterTobeClippedPath, SourceRasterPath, output, Save=True)