Example #1
Fm = Fmat(Nx, -1, 1, tseq.dt)


def step(x, t, dt):
    assert dt == tseq.dt
    return x @ Fm.T


Dyn = {
    'M': Nx,
    'model': lambda x, t, dt: damp * step(x, t, dt),
    'linear': lambda x, t, dt: damp * Fm,
    'noise': modelling.GaussRV(C=modelling.CovMat(L, 'Left')),
}

HMM = modelling.HiddenMarkovModel(Dyn, Obs, tseq, X0, LP=LPs(jj))

####################
# Suggested tuning
####################

# Expected rmse.a = 0.3
# xp = EnKF('PertObs', N=30, infl=3.2)
# Note that infl=1 may yield approximately optimal rmse, even though rmv << rmse in that case.
# Why is rmse so insensitive to inflation, especially for PertObs?

# Reproduce Raanes (2015), "extending sqrt method to model noise":
# xp = EnKF('Sqrt', fnoise_treatm='XXX', N=30, infl=1.0)
# where XXX is one of:
# - Stoch
# - Mult-1
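# A minimal sketch of how the suggested experiment might be run against this
# HMM. It assumes a DAPPER version exposing dapper.da_methods, HMM.simulate()
# and xp.assimilate() as in DAPPER's basic examples; names and signatures may
# differ between versions.
import dapper.da_methods as da

xx, yy = HMM.simulate()                        # synthetic truth and observations
xp = da.EnKF('PertObs', N=30, infl=3.2)        # the configuration suggested above
xp.assimilate(HMM, xx, yy)                     # run the filter over the whole chronology
xp.stats.average_in_time()                     # compute time-averaged statistics
print(xp.avrgs.tabulate(['rmse.a', 'rmv.a']))  # compare rmse.a (expected ~0.3) with rmv.a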
Example #2
                   dkObs=2,
                   KObs=2 * 10**4,
                   BurnIn=2 * 10**3,
                   Tplot=Tplot)

Dyn = {'M': Nx, 'model': KS.step, 'linear': KS.dstep_dx, 'noise': 0}

X0 = dpr.GaussRV(mu=KS.x0, C=0.001)

Obs = Id_Obs(Nx)
Obs['noise'] = 1
Obs['localizer'] = nd_Id_localization((Nx, ), (4, ))

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0)

HMM.liveplotters = LPs(np.arange(Nx))

####################
# Suggested tuning
####################

# Reproduce the top-right panel of Fig. 4 of bocquet2019consistency   # Expected rmse.a:
# --------------------------------------------------------------------------------
# xps += LETKF(N=4 , loc_rad=15/1.82, infl=1.11,rot=True,taper='GC') # 0.18
# xps += LETKF(N=6,  loc_rad=25/1.82, infl=1.06,rot=True,taper='GC') # 0.14
# xps += LETKF(N=16, loc_rad=51/1.82, infl=1.02,rot=True,taper='GC') # 0.11
#
# Other:
# xps += Climatology()                                               # 1.3
# xps += OptInterp()                                                 # 0.5
# xps += EnKF('Sqrt', N=13,           infl=1.60,rot=True)            # 0.5
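# A sketch of how the benchmark table above might be reproduced with an
# experiment list. It assumes dpr.xpList, xps.launch and xps.tabulate_avrgs
# work as in DAPPER's basic examples; the exact API may differ between versions.
import dapper.da_methods as da

xps = dpr.xpList()
xps += da.Climatology()
xps += da.OptInterp()
xps += da.LETKF(N=16, loc_rad=51/1.82, infl=1.02, rot=True, taper='GC')

xps.launch(HMM, liveplots=False)  # run every configuration on this HMM
print(xps.tabulate_avrgs())       # tabulate rmse.a etc. for comparison with the values above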
Example #3
Nx = 100

# Alternative formulation of the step as a one-cell periodic shift:
# def step(x, t, dt):
#     return np.roll(x, 1, axis=x.ndim - 1)
Fm = Fmat(Nx, -1, 1, tseq.dt)


def step(x, t, dt):
    assert dt == tseq.dt
    return x @ Fm.T


Dyn = {'M': Nx, 'model': step, 'linear': lambda x, t, dt: Fm, 'noise': 0}

X0 = dpr.GaussRV(mu=np.zeros(Nx),
                 C=homogeneous_1D_cov(Nx, Nx / 8, kind='Gauss'))

Ny = 4
jj = dpr.linspace_int(Nx, Ny)
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 0.01

HMM = dpr.HiddenMarkovModel(Dyn, Obs, tseq, X0, LP=LPs(jj))

####################
# Suggested tuning
####################
# xps += EnKF('PertObs',N=16 ,infl=1.02)
# xps += EnKF('Sqrt'   ,N=16 ,infl=1.0)
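# The commented-out np.roll step near the top of this example suggests that the
# matrix-based step acts as a one-cell periodic shift. A small sanity check of
# that reading, using only numpy and names already defined in this script:
import numpy as np

x_test = np.random.randn(5, Nx)        # a small batch of test states
shifted = np.roll(x_test, 1, axis=-1)  # the commented-out formulation
stepped = step(x_test, 0, tseq.dt)     # the Fm-based formulation
print("max |difference|:", np.abs(stepped - shifted).max())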
Example #4
Dyn = {
    'M': Nx,
    'model': step,
    'linear': dstep_dx,
    'noise': 0,
}

X0 = modelling.GaussRV(mu=x0, C=0.001)

jj = np.arange(Nx)  # obs_inds
Obs = modelling.partial_Id_Obs(Nx, jj)
Obs['noise'] = 1
Obs['localizer'] = nd_Id_localization((Nx,), (2,))

HMM = modelling.HiddenMarkovModel(Dyn, Obs, tseq, X0)

HMM.liveplotters = LPs(jj)


####################
# Suggested tuning
####################

# Reproduce Table 1 of sakov2008deterministic       # Expected rmse.a:
# --------------------------------------------------------------------------------
# xps += EnKF('PertObs'        ,N=40, infl=1.06)               # 0.22
# xps += EnKF('DEnKF'          ,N=40, infl=1.01)               # 0.18
# xps += EnKF('PertObs'        ,N=28, infl=1.08)               # 0.24
# xps += EnKF('Sqrt'           ,N=24, infl=1.013,rot=True)     # 0.18
#
# Other analysis schemes:
# xps += EnKF('Serial'         ,N=28, infl=1.02,rot=True)      # 0.18
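# The 'linear' entry of Dyn is, by the convention used in these examples, the
# Jacobian of step. A rough finite-difference cross-check, using only numpy and
# names already defined in this script; if dstep_dx is an approximate tangent
# linear model, the difference need not be tiny.
import numpy as np

dt = tseq.dt
x = x0 + 0.01 * np.random.randn(Nx)  # a state near the reference x0
J = dstep_dx(x, 0, dt)               # Jacobian as provided to Dyn['linear']
Jfd = np.zeros((Nx, Nx))
eps = 1e-6
for i in range(Nx):
    e = np.zeros(Nx)
    e[i] = eps
    Jfd[:, i] = (step(x + e, 0, dt) - step(x - e, 0, dt)) / (2 * eps)
print("max |J - Jfd|:", np.abs(J - Jfd).max())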