Example #1
import os
import shutil

# the Raster helper class is assumed to come from the Hapi package; the alias
# avoids shadowing by the `Raster` parameter below
from Hapi.raster import Raster as raster


def PrepareInputs(Raster, InputFolder, FolderName):
    """
    ================================================================
        PrepareInputs(Raster,InputFolder,FolderName)
    ================================================================
    this function prepares downloaded raster data to have the same alignment and
    nodata value as a source GIS raster (DEM, flow accumulation, or flow direction
    raster), and stores the output rasters in a new folder named FolderName
    
    Inputs:
        1-Raster:
            [String] path to the source raster that provides the spatial information
            (coordinate system, number of rows and columns); the path should include
            the raster name and extension, e.g. "data/dem.tif"
        2-InputFolder:
            [String] path to the folder of rasters whose number of rows, columns,
            and resolution (alignment) should be adjusted to match the source raster;
            the folder should not contain any files other than the rasters
        3-FolderName:
            [String] name of the folder to create for storing the resulting rasters
    Example:
        Ex1:
            dem_path="01GIS/inputs/4000/acc4000.tif"
            prec_in_path="02Precipitation/CHIRPS/Daily/"
            Inputs.PrepareInputs(dem_path,prec_in_path,"prec")
        Ex2:
            dem_path="01GIS/inputs/4000/acc4000.tif"
            outputpath="00inputs/meteodata/4000/"
            evap_in_path="03Weather_Data/evap/"
            Inputs.PrepareInputs(dem_path,evap_in_path,outputpath+"evap")
    """
    # input data validation
    # data type
    assert type(FolderName) == str, "FolderName input should be string type"
    # create a new folder in temp for the newly aligned rasters
    try:
        os.makedirs(os.path.join(os.environ['TEMP'], "AllignedRasters"))
    except OSError:
        # if the folder already exists, delete it and create an empty one
        shutil.rmtree(os.path.join(os.environ['TEMP'], "AllignedRasters"))
        os.makedirs(os.path.join(os.environ['TEMP'], "AllignedRasters"))

    temp = os.environ['TEMP'] + "/AllignedRasters/"
    
    # match alignment
    raster.MatchDataAlignment(Raster, InputFolder, temp)
    # create a new folder in the current directory for the aligned, nodata-matched rasters
    try:
        os.makedirs(os.path.join(os.getcwd(), FolderName))
    except OSError:
        print("the function is trying to create a folder named '" + FolderName +
              "' to complete the process; if a folder with the same name already exists, please rename it")
    # match nodata value
    raster.MatchDataNoValuecells(Raster, temp, FolderName + "/")
    # delete the processing folder from temp
    shutil.rmtree(temp)
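
After the call, every raster written to FolderName should share the source
raster's grid; a minimal verification sketch using GDAL (the file names here
are hypothetical):

from osgeo import gdal

src = gdal.Open("data/dem.tif")             # the spatial-information source
out = gdal.Open("New_Rasters/prec_1.tif")   # one of the prepared rasters
assert src.RasterXSize == out.RasterXSize
assert src.RasterYSize == out.RasterYSize
assert src.GetGeoTransform() == out.GetGeoTransform()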
Example #2
import numpy as np
from osgeo import gdal

# the Raster helper class is assumed to come from the Hapi package
from Hapi.raster import Raster


def DeleteBasins(basins, pathout):
    """
    ===========================================================
         DeleteBasins(basins,pathout)
    ===========================================================
    this function deletes all the basins in a basin raster created when delineating
    a catchment and leaves only the first basin, which is the biggest basin in the raster
    
    Inputs:
    ----------
        1- basins:
            [gdal.dataset] raster you create during delineation of a catchment 
            values of its cells are the number of the basin it belongs to
        2- pathout:
            [String] path you want to save the resulted raster to it should include
            the extension ".tif"
    Outputs:
    ----------
        1- raster with only one basin (the basin with the smallest ID, normally 1)
    
    Example:
    ----------
        basins=gdal.Open("Data/basins.tif")    
        pathout="mask.tif"
        DeleteBasins(basins,pathout)
    """
    # input data validation
    # data type
    assert type(pathout) == str, "pathout input should be string type"
    assert type(basins) == gdal.Dataset, "basins raster should be read using gdal (gdal dataset please read it using gdal library) "

    # input values
    # check whether the user wrote the extension of the raster or not
    ext = pathout[-4:]
    assert ext == ".tif", "please add the extension at the end of the path input"

    # get number of rows
    rows = basins.RasterYSize
    # get number of columns
    cols = basins.RasterXSize
    # array
    basins_A = basins.ReadAsArray()
    # no data value
    no_val = np.float32(basins.GetRasterBand(1).GetNoDataValue())
    # get the basin IDs (sorted, so that "the first basin" is deterministic)
    basins_val = list(
        set([
            int(basins_A[i, j]) for i in range(rows) for j in range(cols)
            if basins_A[i, j] != no_val
        ]))
    basins_val.sort()

    # keep the first basin and delete the others by filling their cells with the nodata value
    for i in range(rows):
        for j in range(cols):
            if basins_A[i, j] != no_val and basins_A[i, j] != basins_val[0]:
                basins_A[i, j] = no_val

    Raster.RasterLike(basins, basins_A, pathout)
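
The cell-by-cell loop above can be collapsed into one vectorized numpy
operation; a minimal sketch, assuming basins_A, basins_val and no_val as
defined inside the function:

import numpy as np

keep = basins_val[0]
# overwrite every cell that is inside the raster but not in the kept basin
basins_A[(basins_A != no_val) & (basins_A != keep)] = no_val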
Example #3
import pandas as pd
import matplotlib.pyplot as plt
from osgeo import gdal

# the Raster helper class is assumed to come from the Hapi package; Sim,
# lakeCalib, ModelMetrics, FlowAccPath, start, calib_end and q_uz_routed are
# assumed to be defined earlier in the script
from Hapi.raster import Raster

Sim.Q.plot(color=[(0,0.3,0.7)], linewidth=2.5, label="Observed data", zorder=10)
ax1=lakeCalib['Q'].plot(color='#DC143C',linewidth=2.8,label='Simulated Calibration data')
ax1.annotate("Model performance" ,xy=('2012-12-01 00:00:00',20),fontsize=15)
ax1.annotate("RMSE = " + str(round(ModelMetrics['CalibRMSE'],3)),xy=('2012-12-01 00:00:00',20-1.5),fontsize=15)
ax1.annotate("NSE = " + str(round(ModelMetrics['CalibNSEHf'],2)),xy=('2012-12-01 00:00:00',20-3),fontsize=15)
plt.legend()
#ax1.annotate("RMSELF = " + str(round(committee['c_rmself'],3)),xy=('2013-01-01 00:00:00',max(calib['Q'])-3),fontsize=15)

#ax2=single_valid['Q'].plot(color='orange',linewidth=2.8,label='Simulated Validation')
#ax2.annotate("Model performance" ,xy=('2014-01-01 00:00:00',20),fontsize=15)
#ax2.annotate("RMSE = " +str(round(single['v_rmse'],3)),xy=('2014-01-01 00:00:00',20-1.5),fontsize=15)
#ax1.annotate("NSE = " + str(round(single['v_nsehf'],2)),xy=('2014-01-01 00:00:00',20-3),fontsize=15)
#ax2.annotate("RMSELF = " +str(round(committee['v_rmself'],3)),xy=('2014-12-01 00:00:00',max(calib['Q'])-3),fontsize=15)
#%% store the result into rasters
# create list of names
src=gdal.Open(FlowAccPath)

index=pd.date_range(start,calib_end,freq="1H")

resultspath="results/upper_zone_discharge/4000/"
names=[resultspath+str(i)[:-6] for i in index]
names=[i.replace("-","_") for i in names]
names=[i.replace(" ","_") for i in names]
names=[i+".tif" for i in names]

"""
to save the upper zone discharge distributerd discharge in a raster forms
uncomment the next line
"""
Raster.RastersLike(src,q_uz_routed[:,:,:-1],names)
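
A quick check of the naming scheme built above: str(Timestamp)[:-6] drops the
":MM:SS" tail, leaving "YYYY-MM-DD HH", which the replacements turn into
"YYYY_MM_DD_HH.tif":

ts = pd.Timestamp("2012-12-01 05:00:00")
name = str(ts)[:-6].replace("-", "_").replace(" ", "_") + ".tif"
print(name)  # 2012_12_01_05.tif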
Example #4
import os

from osgeo import gdal

# the Raster and Wrapper helpers are assumed to come from the Hapi package
from Hapi.raster import Raster
from Hapi.wrapper import Wrapper


def RunFW1withLake(ConceptualModel,
                   Paths,
                   ParPath,
                   p2,
                   init_st,
                   snow,
                   lakeCalibArray,
                   StageDischargeCurve,
                   LakeParameters,
                   lakecell,
                   Lake_init_st,
                   LumpedPar=True):
    """
    =======================================================================
        RunFW1withLake(ConceptualModel, Paths, ParPath, p2, init_st, snow,
                       lakeCalibArray, StageDischargeCurve, LakeParameters,
                       lakecell, Lake_init_st, LumpedPar=True)
    =======================================================================
    this function runs the conceptual distributed hydrological model with a lake
    
    Inputs:
    ----------
        1-Paths:
            [List] list of four paths:
            1-PrecPath:
                [String] path to the folder containing the precipitation rasters
            2-Evap_Path:
                [String] path to the folder containing the evapotranspiration rasters
            3-TempPath:
                [String] path to the folder containing the temperature rasters
            4-FlowPathLengthPath:
                [String] path to the flow path length raster of the catchment (it
                should include the raster name and extension)
        2-ParPath:
            [String] path to the folder containing the parameter rasters of the catchment
        3-p2:
            [List] list of unoptimized parameters
            p2[0] = tfac, 1 for hourly, 0.25 for 15 min time step and 24 for daily time step
            p2[1] = catchment area in km2
    
    Outputs:
    ----------
        1- st:
            [4D array] state variables
        2- q_out:
            [1D array] calculated Discharge at the outlet of the catchment
        3- q_uz:
            [3D array] Distributed discharge for each cell
    
    Example:
    ----------
        PrecPath = "meteodata/4000/calib/prec"
        Evap_Path = "meteodata/4000/calib/evap"
        TempPath = "meteodata/4000/calib/temp"
        FlowPathLengthPath = "GIS/4000/fpl4000.tif"
        Paths = [PrecPath, Evap_Path, TempPath, FlowPathLengthPath]
        ParPath = "meteodata/4000/parameters"
        p2 = [1, 227.31]
        st, q_out, q_uz, q_lz = RunFW1withLake(ConceptualModel, Paths, ParPath,
                                               p2, init_st, snow, lakeCalibArray,
                                               StageDischargeCurve, LakeParameters,
                                               lakecell, Lake_init_st)
    """
    # input data validation
    assert len(Paths) == 4, "Paths should include 4 paths; " + str(len(Paths)) + " paths were provided"

    PrecPath = Paths[0]
    Evap_Path = Paths[1]
    TempPath = Paths[2]
    FlowPathLengthPath = Paths[3]

    # data type
    assert type(PrecPath) == str, "PrecPath input should be string type"
    assert type(Evap_Path) == str, "Evap_Path input should be string type"
    assert type(TempPath) == str, "TempPath input should be string type"
    assert type(FlowPathLengthPath) == str, "FlowPathLengthPath input should be string type"
    assert type(ParPath) == str, "ParPath input should be string type"

    # input values
    FPL_ext = FlowPathLengthPath[-4:]
    assert FPL_ext == ".tif", "please add the extension at the end of the flow path length raster path input"
    # check whether the paths exist
    assert os.path.exists(PrecPath), PrecPath + " you have provided does not exist"
    assert os.path.exists(Evap_Path), Evap_Path + " path you have provided does not exist"
    assert os.path.exists(TempPath), TempPath + " path you have provided does not exist"
    assert os.path.exists(FlowPathLengthPath), FlowPathLengthPath + " you have provided does not exist"
    # check whether the folders contain rasters
    assert len(os.listdir(PrecPath)) > 0, PrecPath + " folder you have provided is empty"
    assert len(os.listdir(Evap_Path)) > 0, Evap_Path + " folder you have provided is empty"
    assert len(os.listdir(TempPath)) > 0, TempPath + " folder you have provided is empty"

    # read data
    ### meteorological data
    prec = Raster.ReadRastersFolder(PrecPath)
    evap = Raster.ReadRastersFolder(Evap_Path)
    temp = Raster.ReadRastersFolder(TempPath)
    print("meteorological data are read successfully")

    #### GIS data
    #    dem= gdal.Open(DemPath)
    FPL = gdal.Open(FlowPathLengthPath)
    print("GIS data are read successfully")

    # parameters
    #    if LumpedPar == True:
    #        parameters = np.loadtxt(ParPath)#.tolist()
    #    else:
    parameters = Raster.ReadRastersFolder(ParPath)

    print("Parameters are read successfully")

    #run the model
    st, q_out, q_uz, q_lz = Wrapper.FW1Withlake(ConceptualModel, FPL, prec,
                                                evap, temp, parameters, p2,
                                                snow, init_st, lakeCalibArray,
                                                StageDischargeCurve,
                                                LakeParameters, lakecell,
                                                Lake_init_st)

    return st, q_out, q_uz, q_lz
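
Raster.ReadRastersFolder is assumed to return a 3D numpy array of shape
(rows, cols, time), and the wrapped model needs all three forcings on one grid
and time axis, so a cheap guard right after reading catches mismatched folders
early (a sketch using the arrays read above):

assert prec.shape == evap.shape == temp.shape, \
    "precipitation, evaporation and temperature must share one (rows, cols, time) shape"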
Example #5
import numpy as np

# the Raster and Routing helper classes are assumed to come from the Hapi package
from Hapi.raster import Raster
from Hapi.routing import Routing


def Dist_HBV2(ConceptualModel,
              lakecell,
              q_lake,
              DEM,
              flow_acc,
              flow_acc_plan,
              sp_prec,
              sp_et,
              sp_temp,
              sp_pars,
              p2,
              init_st=None,
              ll_temp=None,
              q_0=None):
    '''
    Run a spatially distributed HBV model cell by cell over the domain,
    with interacting cells in the SM and UZ layers.
    '''

    n_steps = sp_prec.shape[2] + 1  # no of time steps = length of the time series + 1
    # initialise a vector of nans to fill the states
    dummy_states = np.empty([n_steps, 5])  # [sp, sm, uz, lz, wc]
    dummy_states[:] = np.nan

    # Get the mask
    mask, no_val = Raster.get_mask(DEM)
    x_ext, y_ext = mask.shape  # shape of the mask (rows, columns); x indexes rows, y indexes columns

    # Get the cell size
    # geotransform: [x0, dx, x-rotation, y0, y-rotation, dy]
    geo_trans = DEM.GetGeoTransform()
    dx = np.abs(geo_trans[1]) / 1000.0  # dx in km
    dy = np.abs(geo_trans[-1]) / 1000.0  # dy in km
    px_area = dx * dy  # area of the cell in km2

    # count the cells inside the catchment (cells whose mask value is not no_val)
    tot_elem = np.sum(np.sum([[1 for elem in mask_i if elem != no_val] for mask_i in mask]))

    # total area of the catchment cells
    px_tot_area = tot_elem * px_area

    st = []  # Spatially distributed states
    q_lz = []
    q_uz = []
    #------------------------------------------------------------------------------
    for x in range(x_ext):  # no of rows
        st_i = []
        q_lzi = []
        q_uzi = []
        #        q_out_i = []
        # run all cells in one row ----------------------------------------------------
        for y in range(y_ext):  # no of columns
            if mask[x, y] != no_val:  # only for cells in the domain
                # Calculate the states per cell
                # TODO optimise for multiprocessing these loops
                #                _, _st, _uzg, _lzg = ConceptualModel.simulate_new_model(avg_prec = sp_prec[x, y,:],
                _, _st, _uzg, _lzg = ConceptualModel.Simulate(
                    prec=sp_prec[x, y, :],
                    temp=sp_temp[x, y, :],
                    et=sp_et[x, y, :],
                    par=sp_pars[x, y, :],
                    p2=p2,
                    init_st=init_st,
                    ll_temp=None,
                    q_0=q_0,
                    snow=0)  #extra_out = True
                # append column after column in the same row -----------------
                st_i.append(np.array(_st))
                # calculate lower zone Q = K1 * LZ
                q_lz_temp = np.array(sp_pars[x, y, 6]) * _lzg
                q_lzi.append(q_lz_temp)
                # calculate upper zone Q = K * UZ**(1 + alpha)
                q_uz_temp = np.array(sp_pars[x, y, 5]) * (np.power(_uzg, (1.0 + sp_pars[x, y, 7])))
                q_uzi.append(q_uz_temp)

                #print("total = "+str(fff)+"/"+str(tot_elem)+" cell, row= "+str(x+1)+" column= "+str(y+1) )
            else:  # the cell is nodata
                # fill the empty cell with nan vectors
                st_i.append(dummy_states)          # all 5 states = nan for all time steps
                q_lzi.append(dummy_states[:, 0])   # lower zone q = nan for all time steps
                q_uzi.append(dummy_states[:, 0])   # upper zone q = nan for all time steps

        # store row by row
        st.append(st_i)  # state variables
        q_lz.append(np.array(q_lzi))  # lower zone discharge mm/timestep
        q_uz.append(np.array(q_uzi))  # upper zone discharge mm/timestep

    # convert to arrays
    st = np.array(st)
    q_lz = np.array(q_lz)
    q_uz = np.array(q_uz)
    #%% convert quz from mm/time step to m3/sec
    area_coef = p2[1] / px_tot_area
    q_uz = q_uz * px_area * area_coef / (p2[0] * 3.6)

    no_cells = list(
        set([
            flow_acc_plan[i, j] for i in range(x_ext) for j in range(y_ext)
            if not np.isnan(flow_acc_plan[i, j])
        ]))
    #    no_cells=list(set([int(flow_acc_plan[i,j]) for i in range(x_ext) for j in range(y_ext) if flow_acc_plan[i,j] != no_val]))
    no_cells.sort()

    #%% route the lake discharge to the downstream cell with its own k & x, then add it to that cell's Q
    q_lake = Routing.muskingum(q_lake, q_lake[0],
                               sp_pars[lakecell[0], lakecell[1], 10],
                               sp_pars[lakecell[0], lakecell[1], 11], p2[0])
    q_lake = np.append(q_lake, q_lake[-1])
    # both the lake discharge and q_uz are in m3/s
    q_uz[lakecell[0], lakecell[1], :] = q_uz[lakecell[0], lakecell[1], :] + q_lake
    #%% cells at the divide: cells with flow accumulation 0 just carry their own q_uz
    q_uz_routed = np.zeros_like(q_uz) * np.nan
    for x in range(x_ext):  # no of rows
        for y in range(y_ext):  # no of columns
            if mask[x, y] != no_val and flow_acc_plan[x, y] == 0:
                q_uz_routed[x, y, :] = q_uz[x, y, :]
    #%% accumulate downstream: route each upstream cell's Q with its own k & x, then sum
    for j in range(1, len(no_cells)):
        for x in range(x_ext):  # rows
            for y in range(y_ext):  # columns
                # check against the total flow accumulation
                if mask[x, y] != no_val and flow_acc_plan[x, y] == no_cells[j]:
                    q_r = np.zeros(n_steps)
                    for i in range(len(flow_acc[str(x) + "," + str(y)])):
                        # indices of the i-th upstream cell
                        x_ind = flow_acc[str(x) + "," + str(y)][i][0]
                        y_ind = flow_acc[str(x) + "," + str(y)][i][1]
                        # sum the Q of the upstream cells (each already routed for its own cell)
                        q_r = q_r + Routing.muskingum(q_uz_routed[x_ind, y_ind, :],
                                                      q_uz_routed[x_ind, y_ind, 0],
                                                      sp_pars[x_ind, y_ind, 10],
                                                      sp_pars[x_ind, y_ind, 11], p2[0])
                    # add the routed upstream flows to the current cell's q_uz
                    q_uz_routed[x, y, :] = q_uz[x, y, :] + q_r
    #%% the outlet is the cell with the maximum flow accumulation
    #    if tot_elem != np.nanmax(flow_acc_plan):
    #        raise ("flow accumulation plan is not correct")
    outlet = np.where(flow_acc_plan == np.nanmax(flow_acc_plan))
    outletx = outlet[0][0]
    outlety = outlet[1][0]
    #%% average q_lz over all cells (not routed, mm/timestep) and convert to m3/sec
    q_lz = np.array([np.nanmean(q_lz[:, :, i]) for i in range(n_steps)])
    q_lz = q_lz * p2[1] / (p2[0] * 3.6)  # generation

    q_out = q_lz + q_uz_routed[outletx, outlety, :]

    return q_out, st, q_uz_routed, q_lz, q_uz
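
For reference, the routing calls above are assumed to follow the classic
Muskingum storage equation; a self-contained sketch with the textbook
coefficients (Hapi's Routing.muskingum internals may differ) looks like this:

import numpy as np

def muskingum_sketch(inflow, q_init, k, x, dt):
    # O[t] = c0*I[t] + c1*I[t-1] + c2*O[t-1], with the textbook coefficients
    denom = 2.0 * k * (1.0 - x) + dt
    c0 = (dt - 2.0 * k * x) / denom
    c1 = (dt + 2.0 * k * x) / denom
    c2 = (2.0 * k * (1.0 - x) - dt) / denom
    out = np.zeros(len(inflow))
    out[0] = q_init
    for t in range(1, len(inflow)):
        out[t] = c0 * inflow[t] + c1 * inflow[t - 1] + c2 * out[t - 1]
    return out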
Example #6
from types import ModuleType

import numpy as np
from osgeo import gdal


def RunLumpedRRP(ConceptualModel,
                 Raster,
                 sp_prec,
                 sp_et,
                 sp_temp,
                 sp_pars,
                 p2,
                 snow,
                 init_st=None,
                 ll_temp=None,
                 q_init=None):
    """
    ========================================================================
      RunLumpedRRP(ConceptualModel, Raster, sp_prec, sp_et, sp_temp, sp_pars,
                   p2, snow, init_st, ll_temp, q_init)
    ========================================================================

    this function runs a lumped rainfall-runoff model (HBV, GR4, ...) separately
    for each cell and returns a time series of arrays

    Inputs:
    ----------
        1-ConceptualModel:
            [function] conceptual model function
        2-Raster:
            [gdal.dataset] raster to get the spatial information (nodata cells)
            raster input could be dem, flow accumulation or flow direction raster of the catchment
            but the nodata value stored in the raster should be far from the
            range of values that could result from the calculation
        3-sp_prec:
            [numpy array] 3d array of the precipitation data, sp_prec should
            have the same 2d dimension of raster input
        4-sp_et:
            [numpy array] 3d array of the evapotranspiration data, sp_et should
            have the same 2d dimension of raster input
        5-sp_temp:
            [numpy array] 3d array of the temperature data, sp_temp should
            have the same 2d dimension of raster input
        6-sp_pars:
            [numpy array] number of 2d arrays of the catchment properties spatially
            distributed in 2d and the third dimension is the number of parameters,
            sp_pars should have the same 2d dimension of raster input
        7-p2:
            [List] list of unoptimized parameters
            p2[0] = tfac, 1 for hourly, 0.25 for 15 min time step and 24 for daily time step
            p2[1] = catchment area in km2
        8-init_st:
            [list] initial state variables values [sp, sm, uz, lz, wc]. default=None
        9-ll_temp:
            [numpy array] 3d array of the long term average temperature data
        10-q_init:
            [float] initial discharge m3/s
    Outputs:
    ----------
        1-st:
            [numpy ndarray] 4D array (rows, cols, time, states); states are [sp, wc, sm, uz, lz]
        2-q_lz:
            [numpy ndarray] 3D array of the lower zone discharge
        3-q_uz:
            [numpy ndarray] 3D array of the upper zone discharge
    Example:
    ----------
        ### meteorological data
        prec=GIS.ReadRastersFolder(PrecPath)
        evap=GIS.ReadRastersFolder(Evap_Path)
        temp=GIS.ReadRastersFolder(TempPath)
        sp_pars=GIS.ReadRastersFolder(parPath)

        #### GIS data
        dem= gdal.Open(DemPath)

        p2=[1, 227.31]
        init_st=[0,5,5,5,0]

        st, q_lz, q_uz = DistRRM.RunLumpedRRP(ConceptualModel, DEM, sp_prec=sp_prec,
                               sp_et=sp_et, sp_temp=sp_temp, sp_pars=sp_pars,
                               p2=p2, snow=0, init_st=init_st)
    """
    ### input data validation
    # data type
    assert isinstance(
        ConceptualModel, ModuleType
    ), "ConceptualModel should be a module or a python file contains functions "
    assert type(
        Raster
    ) == gdal.Dataset, "Raster should be read using gdal (gdal dataset please read it using gdal library) "
    assert type(sp_prec) == np.ndarray, "array should be of type numpy array"
    assert type(sp_et) == np.ndarray, "array should be of type numpy array"
    assert type(sp_temp) == np.ndarray, "array should be of type numpy array"
    assert type(sp_pars) == np.ndarray, "array should be of type numpy array"
    assert type(p2) == list, "p2 should be of type list"

    if init_st is not None:
        assert type(init_st) == list, "init_st should be of type list"
    if ll_temp is not None:
        assert type(ll_temp) == np.ndarray, "ll_temp should be of type numpy array"
    if q_init is not None:
        assert type(q_init) == float, "q_init should be of type float"

    # input dimensions
    [rows, cols] = Raster.ReadAsArray().shape
    assert np.shape(sp_prec)[0] == rows and np.shape(
        sp_et)[0] == rows and np.shape(sp_temp)[0] == rows and np.shape(
            sp_pars
        )[0] == rows, "all input data should have the same number of rows"
    assert np.shape(sp_prec)[1] == cols and np.shape(
        sp_et)[1] == cols and np.shape(sp_temp)[1] == cols and np.shape(
            sp_pars
        )[1] == cols, "all input data should have the same number of columns"
    assert np.shape(sp_prec)[2] == np.shape(sp_et)[2] and np.shape(
        sp_temp)[2] == np.shape(sp_prec)[
            2], "all meteorological input data should have the same length"

    n_steps = sp_prec.shape[2] + 1  # no of time steps =length of time series +1
    # initialise a vector of nans to fill the states
    dummy_states = np.empty([n_steps, 5])  # [sp,sm,uz,lz,wc]
    dummy_states[:] = np.nan
    dummy_states = np.float32(dummy_states)

    # Get the mask
    no_val = np.float32(Raster.GetRasterBand(1).GetNoDataValue())
    raster = Raster.ReadAsArray()

    # calculate area covered by cells
    # geotransform: [x0, dx, x-rotation, y0, y-rotation, dy]
    geo_trans = Raster.GetGeoTransform()
    dx = np.abs(geo_trans[1]) / 1000.0  # dx in Km
    dy = np.abs(geo_trans[-1]) / 1000.0  # dy in Km
    px_area = dx * dy  # area of the cell
    # number of cells inside the domain (note: this assumes the nodata value is nonzero)
    no_cells = np.size(raster[:, :]) - np.count_nonzero(raster[raster == no_val])
    px_tot_area = no_cells * px_area  # total area of the domain cells

    st = []  # Spatially distributed states
    q_lz = []
    q_uz = []

    for x in range(rows):  # no of rows
        st_row = []
        q_lz_row = []
        q_uz_row = []

        for y in range(cols):  # no of columns
            if raster[x, y] != no_val:  # only for cells in the domain
                # Calculate the states per cell
                try:
                    uzg, lzg, stvar = ConceptualModel.Simulate(
                        prec=sp_prec[x, y, :],
                        temp=sp_temp[x, y, :],
                        et=sp_et[x, y, :],
                        par=sp_pars[x, y, :],
                        p2=p2,
                        init_st=init_st,
                        ll_temp=None,
                        q_init=q_init,
                        snow=0)
                except Exception:
                    print("conceptual model arguments are not correct")
                    raise

                # append column after column in the same row
                st_row.append(stvar)
                # upper zone discharge for the cell: q_uz = K * UZ**(1 + alpha)
                q_uz_row.append(uzg)
                # lower zone discharge for the cell: q_lz = K1 * LZ
                q_lz_row.append(lzg)

            else:  # the cell is nodata
                # fill the empty cell with nan vectors
                st_row.append(dummy_states)          # all 5 states = nan for all time steps
                q_lz_row.append(dummy_states[:, 0])  # lower zone q = nan for all time steps
                q_uz_row.append(dummy_states[:, 0])  # upper zone q = nan for all time steps

        # store row by row
        st.append(st_row)  # state variables
        q_lz.append(q_lz_row)  # lower zone discharge mm/timestep
        q_uz.append(q_uz_row)  # upper zone discharge mm/timestep
    #------------------------------------------------------------------------------
    # convert to arrays
    st = np.array(st)
    q_lz = np.array(q_lz)
    q_uz = np.array(q_uz)
    # convert quz from mm/time step to m3/sec
    area_coef = p2[1] / px_tot_area
    q_uz = q_uz * px_area * area_coef / (p2[0] * 3.6)

    #    # convert QLZ to 1D time series
    #    q_lz = np.array([np.nanmean(q_lz[:,:,i]) for i in range(n_steps)]) # average of all cells (not routed mm/timestep)
    #    # convert Qlz to m3/sec
    #    q_lz = q_lz* p2[1]/ (p2[0]*3.6) # generation

    q_lz = q_lz * px_area * area_coef / (p2[0] * 3.6)

    # convert all to float32 to save storage
    q_lz = np.float32(q_lz)
    q_uz = np.float32(q_uz)
    st = np.float32(st)
    return st, q_lz, q_uz
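
The factor 1/(p2[0]*3.6) in the two conversions above follows from 1 mm of
runoff over 1 km2 being 1000 m3 of water, released over tfac hours
(tfac * 3600 seconds); a quick worked check:

depth_mm, area_km2, tfac = 1.0, 1.0, 1.0
q_m3s = depth_mm * area_km2 / (tfac * 3.6)
print(q_m3s)  # 0.2777... = 1000 m3 / 3600 s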
Example #7
import numpy as np
import pandas as pd
from osgeo import gdal


def NearestCell(Raster, StCoord):
    """
    ======================================================
       NearestCell(Raster,StCoord)
    ======================================================
    this function calculates the indices (row, col) of the nearest cell in a
    given raster to each station; the coordinate system of the raster has to
    be projected to be able to calculate the distance
    
    Inputs:
    ----------
        1-Raster:
            [gdal.dataset] raster to get the spatial information (coordinates of each cell)
        2-StCoord:
            [Dataframe] dataframe with two columns "x", "y" contains the coordinates
            of each station
    
    Output:
    ----------
        1-StCoord: the same input dataframe with two extra columns "cell_row", "cell_col"
    
    Examples:
        Raster = gdal.Open("DEM.tif")
        StCoord = stations[['id','x','y']][:]
        StCoord.loc[:, ["cell_row", "cell_col"]] = NearestCell(Raster, StCoord)
    """
    # input data validation
    # data type
    assert type(
        Raster
    ) == gdal.Dataset, "raster should be read using gdal (gdal dataset please read it using gdal library) "
    assert type(
        StCoord
    ) == pd.core.frame.DataFrame, "please check StCoord input it should be pandas dataframe "

    # check if the user has stored the coordinates in the dataframe with the right names or not
    assert "x" in StCoord.columns, "please check the StCoord x coordinates of the stations should be stored in a column name 'x'"
    assert "y" in StCoord.columns, "please check the StCoord y coordinates of the stations should be stored in a column name 'y'"

    StCoord['cell_row'] = np.nan
    StCoord['cell_col'] = np.nan

    rows = Raster.RasterYSize
    cols = Raster.RasterXSize
    # geotransform: [x0, dx, x-rotation, y0, y-rotation, dy]
    geo_trans = Raster.GetGeoTransform()
    # cell-centre coordinate = upper left corner + half a cell + index * cell size
    coox = np.ones((rows, cols))
    cooy = np.ones((rows, cols))
    for i in range(rows):  # iteration by row
        for j in range(cols):  # iteration by column
            coox[i, j] = geo_trans[0] + geo_trans[1] / 2 + j * geo_trans[1]  # x of the cell centre
            cooy[i, j] = geo_trans[3] + geo_trans[5] / 2 + i * geo_trans[5]  # y of the cell centre

    Dist = np.ones((rows, cols))
    for no in range(len(StCoord['x'])):
        # calculate the distance from the station to all cells
        for i in range(rows):  # iteration by row
            for j in range(cols):  # iteration by column
                Dist[i, j] = np.sqrt(
                    np.power((StCoord.loc[StCoord.index[no], 'x'] - coox[i, j]), 2) +
                    np.power((StCoord.loc[StCoord.index[no], 'y'] - cooy[i, j]), 2))

        StCoord.loc[no, 'cell_row'], StCoord.loc[no, 'cell_col'] = np.where(
            Dist == np.min(Dist))

    return StCoord.loc[:, ["cell_row", "cell_col"]]
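
For a projected, north-up raster (no rotation terms in the geotransform) the
lookup needs no distance grid at all: the containing cell falls straight out
of the affine transform. A sketch with hypothetical values:

geo_trans = (432000.0, 4000.0, 0.0, 520000.0, 0.0, -4000.0)  # [x0, dx, 0, y0, 0, dy]
x_station, y_station = 450000.0, 508000.0
col = int((x_station - geo_trans[0]) / geo_trans[1])
row = int((y_station - geo_trans[3]) / geo_trans[5])
print(row, col)  # 3 4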
Example #8
import numbers
import os

import numpy as np
from osgeo import gdal

# the Raster helper class is assumed to come from the Hapi package; the alias
# avoids shadowing by the `Raster` parameter below
from Hapi.raster import Raster as raster


def SaveParameters(DistParFn,
                   Raster,
                   Par,
                   No_parameters,
                   no_lumped_par,
                   lumped_par_pos,
                   snow,
                   kub,
                   klb,
                   Path=None):
    """
    ============================================================
        SaveParameters(DistParFn, Raster, Par, No_parameters, no_lumped_par,
                       lumped_par_pos, snow, kub, klb, Path=None)
    ============================================================
    this function takes the parameters generated by the calibration algorithm,
    distributes them with a given function, and saves them as rasters
    
    Inputs:
    ----------
        1-DistParFn:
            [function] function to distribute the parameters (all functions are
            in Hapi.DistParameters)
        2-Raster:
            [gdal.dataset] raster to get the spatial information
        3-Par:
            [list or numpy ndarray] parameters as a 1D array or list
        4-No_parameters:
            [int] number of the parameters in the conceptual model
        5-no_lumped_par:
            [int] number of lumped parameters
        6-lumped_par_pos:
            [list] position(s) of the lumped parameters in the parameter vector
        7-snow:
            [integer] number to define whether to take the parameters of the
            conceptual model with the snow subroutine or without
        8-kub:
            [numeric] upper bound for the k parameter in the muskingum function
        9-klb:
            [numeric] lower bound for the k parameter in the muskingum function
        10-Path:
            [string] path to the folder where you want to save the parameters;
            the default is None (parameters are saved in the current directory)
     
    Outputs:
    ----------
        Rasters for the parameters of the distributed model

    Examples:
    ----------
        DemPath = path+"GIS/4000/dem4000.tif"
        Raster=gdal.Open(DemPath)
        ParPath = "par15_7_2018.txt"
        par=np.loadtxt(ParPath)
        klb=0.5
        kub=1
        no_parameters=12
        DistParFn=DP.par3dLumped
        Path="parameters/"
        snow=0
        
        SaveParameters(DistParFn, Raster, par, no_parameters,snow ,kub, klb,Path)
    """
    assert callable(
        DistParFn), " please check the function to distribute your parameters"
    assert type(
        Raster
    ) == gdal.Dataset, "raster should be read using gdal (gdal dataset please read it using gdal library) "
    assert type(Par) == np.ndarray or type(
        Par) == list, "par_g should be of type 1d array or list"
    assert type(No_parameters) == int, "No of parameters should be integer"
    assert isinstance(kub, numbers.Number), " kub should be a number"
    assert isinstance(klb, numbers.Number), " klb should be a number"
    if Path is not None:
        assert type(Path) == str, "path should be of type string"
        assert os.path.exists(Path), Path + " you have provided does not exist"

    par2d = DistParFn(Par, Raster, No_parameters, no_lumped_par,
                      lumped_par_pos, kub, klb)

    # save
    if snow == 0:  # no snow subroutine
        pnme = [
            "01_rfcf.tif", "02_FC.tif", "03_BETA.tif", "04_ETF.tif",
            "05_LP.tif", "06_CFLUX.tif", "07_K.tif", "08_K1.tif",
            "09_ALPHA.tif", "10_PERC.tif", "11_Kmuskingum.tif",
            "12_Xmuskingum.tif"
        ]
    else:  # there is a snow subroutine
        pnme = [
            "01_ltt.tif", "02_utt.tif", "03_rfcf.tif", "04_sfcf.tif",
            "05_ttm.tif", "06_cfmax.tif", "07_cwh.tif", "08_cfr.tif",
            "09_fc.tif", "10_fc.tif", "11_beta.tif", "12_etf.tif", "13_lp.tif",
            "14_cflux.tif", "15_k.tif", "16_k1.tif", "17_alpha.tif",
            "18_perc.tif"
        ]

    if Path is not None:
        pnme = [Path + i for i in pnme]

    for i in range(np.shape(par2d)[2]):
        raster.RasterLike(Raster, par2d[:, :, i], pnme[i])
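
Since the function asserts that Path already exists, create the output folder
before the call; a minimal usage sketch (the folder name is hypothetical):

import os

Path = "parameters/"
os.makedirs(Path, exist_ok=True)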
Example #9
    def Histogram(self,
                  Day,
                  ExcludeValue,
                  OccupiedCellsOnly,
                  Map=1,
                  filter1=0.2,
                  filter2=15):
        """
        ==================================================================
           Histogram(Day, ExcludeValue, OccupiedCellsOnly, Map = 1)
        ==================================================================
        Histogram method extract values fro the event MaxDepth map and plot the histogram
        th emethod check first if you already extracted the values before then
        plot the histogram
        Parameters
        ----------
            1-Day : [Integer]
                DESCRIPTION.
            2-ExcludeValue : [Integer]
                DESCRIPTION.
            3-OccupiedCellsOnly : TYPE
                DESCRIPTION.
            4-Map : TYPE, optional
                DESCRIPTION. The default is 1.

        Returns
        -------
        None.

        """
        # check if the object has the attribute ExtractedValues
        if hasattr(self, 'ExtractedValues'):
            # extract the values only if they were not extracted for this day before
            if Day not in list(self.ExtractedValues.keys()):
                # depth map
                if Map == 1:
                    Path = self.TwoDResultPath + self.DepthPrefix + str(
                        Day) + ".zip"
                elif Map == 2:
                    Path = self.TwoDResultPath + self.DurationPrefix + str(
                        Day) + ".zip"
                else:
                    Path = self.TwoDResultPath + self.ReturnPeriodPrefix + str(
                        Day) + ".zip"

                ExtractedValues, NonZeroCells = Raster.ExtractValues(
                    Path, ExcludeValue, self.Compressed, OccupiedCellsOnly)
                self.ExtractedValues[Day] = ExtractedValues

        ExtractedValues = self.ExtractedValues[Day]
        # filter values
        ExtractedValues = [j for j in ExtractedValues if j > filter1]
        ExtractedValues = [j for j in ExtractedValues if j < filter2]
        #plot
        # fig, ax1 = plt.subplots(figsize=(10,8))
        # ax1.hist(ExtractedValues, bins=15, alpha = 0.4) #width = 0.2,

        n, bins, patches = plt.hist(x=ExtractedValues,
                                    bins=15,
                                    color="#0504aa",
                                    alpha=0.7,
                                    rwidth=0.85)
        plt.grid(axis='y', alpha=0.75)
        plt.xlabel('Value', fontsize=15)
        plt.ylabel('Frequency', fontsize=15)
        plt.xticks(fontsize=15)
        plt.yticks(fontsize=15)

        plt.tight_layout()
        # plt.title('Normal Distribution Histogram matplotlib',fontsize=15)
        plt.show()
        return n, bins, patches
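
A hedged usage sketch; the event object and the attributes the method relies on
(TwoDResultPath, DepthPrefix, Compressed, ExtractedValues, ...) are assumed to
have been set up elsewhere in the package, and the argument values are
placeholders:

n, bins, patches = event.Histogram(Day=35, ExcludeValue=0,
                                   OccupiedCellsOnly=True, Map=1,
                                   filter1=0.2, filter2=15)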
Example #10
    def OverlayMaps(self, Path, BaseMapF, ExcludedValue, OccupiedCellsOnly,
                    SavePath):
        """
        ==================================================================
          OverlayMaps(Path, BaseMapF, ExcludedValue, OccupiedCellsOnly,
                      SavePath)
        ==================================================================
        The OverlayMaps method reads all the maps in the folder given by the Path
        input and overlays them with the basemap; for each value in the basemap
        it creates a dictionary with the intersected values from all maps

        Inputs:
            1-Path:
                [String] path to the folder containing the maps.
            2-BaseMapF:
                [String] path including the name and extension of the ASCII
                base map, e.g. path="data/cropped.asc"
            3-ExcludedValue:
                [Numeric] value you want to exclude from the extracted values.
            4-OccupiedCellsOnly:
                [Bool] True to count only cells that are not zero.
            5-SavePath:
                [String] path to the folder where a text file is saved for each
                value in the base map, containing all the intersected values
                from the other maps.
            (the file prefix used to filter the maps and the compression flag
            are taken from the object's DepthPrefix and Compressed attributes)
        Outputs:
            1- ExtractedValues:
                [Dict] dictionary with the values in the basemap as keys and,
                for each key, a list of all the intersected values in the
                maps under Path
            2- NonZeroCells:
                [dataframe] dataframe with the first column as the "file" name
                and the second column as the number of cells in each map
        """

        self.DepthValues, NonZeroCells = Raster.OverlayMaps(
            Path, BaseMapF, self.DepthPrefix, ExcludedValue, self.Compressed,
            OccupiedCellsOnly)

        # NonZeroCells dataframe with the first column as the "file" name and the second column
        # is the number of cells in each map

        NonZeroCells['days'] = [
            int(i[len(self.DepthPrefix):-4])
            for i in NonZeroCells['files'].tolist()
        ]
        # get the number of inundated cells into the EventIndex dataframe
        self.EventIndex['cells'] = 0
        for i in range(len(NonZeroCells)):
            # get the location in the EventIndex dataframe
            loc = np.where(
                NonZeroCells.loc[i, 'days'] == self.EventIndex.loc[:,
                                                                   "ID"])[0][0]
            # store number of cells
            self.EventIndex.loc[loc, 'cells'] = NonZeroCells.loc[i, 'cells']

        # save depths of each sub-basin
        inundatedSubs = list(self.DepthValues.keys())
        for i in range(len(inundatedSubs)):
            np.savetxt(SavePath + "/" + str(inundatedSubs[i]) + ".txt",
                       self.DepthValues[inundatedSubs[i]],
                       fmt="%4.2f")
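
A hedged usage sketch; the paths are placeholders and the object is assumed to
carry the DepthPrefix, Compressed and EventIndex attributes from earlier setup:

event.OverlayMaps(Path="results/depth_maps/",
                  BaseMapF="data/cropped.asc",
                  ExcludedValue=0,
                  OccupiedCellsOnly=False,
                  SavePath="results/subbasin_depths")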