Nx = 100

# def step(x,t,dt):
#     return np.roll(x,1,axis=x.ndim-1)
Fm = Fmat(Nx, -1, 1, tseq.dt)

def step(x, t, dt):
    assert dt == tseq.dt
    return x @ Fm.T

Dyn = {'M': Nx, 'model': step, 'linear': lambda x, t, dt: Fm, 'noise': 0}

X0 = dpr.GaussRV(mu=np.zeros(Nx), C=homogeneous_1D_cov(Nx, Nx / 8, kind='Gauss'))

Ny = 4
jj = dpr.linspace_int(Nx, Ny)
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 0.01

HMM = dpr.HiddenMarkovModel(Dyn, Obs, tseq, X0, LP=LPs(jj))

####################
# Suggested tuning
####################
# xps += EnKF('PertObs', N=16, infl=1.02)
# xps += EnKF('Sqrt',    N=16, infl=1.0)
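# For context: a minimal sketch of how these HMM setups are typically run as
# a twin experiment with the suggested configurations. The driver calls
# (simulate / assimilate / average_in_time) follow DAPPER's usual workflow,
# but the exact API may differ between versions; treat this as an assumed
# recipe, not the canonical one.
#
# import dapper.da_methods as da
#
# xx, yy = HMM.simulate()               # generate truth and observations
# xp = da.EnKF('Sqrt', N=16, infl=1.0)  # one of the suggested configurations
# xp.assimilate(HMM, xx, yy)
# xp.stats.average_in_time()
# print(xp.avrgs.rmse.a)                # time-averaged analysis rmse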
from dapper.tools.localization import nd_Id_localization
from dapper.tools.math import Id_Obs

KS = Model(dt=0.5)
Nx = KS.Nx

# nRepeat=10
t = dpr.Chronology(KS.dt, dkObs=2, KObs=2 * 10**4, BurnIn=2 * 10**3, Tplot=Tplot)

Dyn = {'M': Nx, 'model': KS.step, 'linear': KS.dstep_dx, 'noise': 0}

X0 = dpr.GaussRV(mu=KS.x0, C=0.001)

Obs = Id_Obs(Nx)
Obs['noise'] = 1
Obs['localizer'] = nd_Id_localization((Nx,), (4,))

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0)
HMM.liveplotters = LPs(np.arange(Nx))

####################
# Suggested tuning
####################
# Reproduce the top-right panel of Fig. 4 of bocquet2019consistency.
# Expected rmse.a:
# --------------------------------------------------------------------------------
# - FREI, M. & KUNSCH, H. R. (2013).
#   "Mixture ensemble Kalman filters."
#   Comp. Statist. Data Anal. 58, 127–38.

import numpy as np

import dapper as dpr
from dapper.mods.Lorenz96 import dstep_dx, step
from dapper.tools.localization import nd_Id_localization

t = dpr.Chronology(0.05, dtObs=0.4, T=4**5, BurnIn=20)

Nx = 40
Dyn = {'M': Nx, 'model': step, 'linear': dstep_dx, 'noise': 0}

X0 = dpr.GaussRV(M=Nx, C=0.001)

jj = 1 + np.arange(0, Nx, 2)
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 0.5
Obs['localizer'] = nd_Id_localization((Nx,), (2,), jj)

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0)

####################
# Suggested tuning
####################
# Compare to Tables 1 and 3 of frei2013bridging. Note:
# - N is too large to be very interesting.
# - We obtain better EnKF scores than they report,
#   and use inflation and sqrt updating,
# Settings not taken from anywhere

import dapper as dpr
from dapper.mods.LotkaVolterra import step, dstep_dx, x0, LP_setup, Tplot

# dt was chosen after noting that values up to 0.7 do not change the chaotic
# properties much, as judged by eye and by Lyapunov measures.

t = dpr.Chronology(0.5, dtObs=10, T=1000, BurnIn=Tplot, Tplot=Tplot)

Nx = len(x0)

Dyn = {'M': Nx, 'model': step, 'linear': dstep_dx, 'noise': 0}

X0 = dpr.GaussRV(mu=x0, C=0.01**2)

jj = [1, 3]
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 0.04**2

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0, LP=LP_setup(jj))

####################
# Suggested tuning
####################
# Not carefully tuned:
# xps += EnKF_N(N=6)
# xps += ExtKF(infl=1.02)
"""Setup parameters for twin experiments.""" import numpy as np import dapper as dpr from dapper.mods.Ikeda import step, x0, Tplot, LPs t = dpr.Chronology(1, dkObs=1, KObs=1000, Tplot=Tplot, BurnIn=4 * Tplot) Nx = len(x0) Dyn = {'M': Nx, 'model': step, 'noise': 0} X0 = dpr.GaussRV(C=.1, mu=x0) jj = np.arange(Nx) # obs_inds Obs = dpr.partial_Id_Obs(Nx, jj) Obs['noise'] = .1 # dpr.GaussRV(C=CovMat(1*eye(Nx))) HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0) HMM.liveplotters = LPs(jj) #################### # Suggested tuning ####################
Nx = len(x0)
Ny = Nx

day = 0.05 / 6 * 24  # because dt=0.05 <--> 6h in "model time scale"
t = dpr.Chronology(0.05, dkObs=1, T=200 * day, BurnIn=10 * day)

Dyn = {'M': Nx, 'model': step, 'linear': dstep_dx, 'noise': 0}

# X0 = dpr.GaussRV(C=0.01, M=Nx)  # Decreased from Pajonk's C=1.
X0 = dpr.GaussRV(C=0.01, mu=x0)

jj = np.arange(Nx)
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 0.1

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0, LP=LPs(jj))

####################
# Suggested tuning
####################
# xps += ExtKF(infl=2)
# xps += EnKF('Sqrt', N=3, infl=1.01)
# xps += PartFilt(reg=1.0, N=100,  NER=0.4)  # add reg!
# xps += PartFilt(reg=1.0, N=1000, NER=0.1)  # add reg!
################
# Full
################

t = dpr.Chronology(dt=0.005, dtObs=0.05, T=4**3, BurnIn=6)

Dyn = {
    'M': LUV.M,
    'model': dpr.with_rk4(LUV.dxdt, autonom=True),
    'noise': 0,
    'linear': LUV.dstep_dx,
}

X0 = dpr.GaussRV(mu=LUV.x0, C=0.01)

R = 1.0
jj = np.arange(nU)
Obs = dpr.partial_Id_Obs(LUV.M, jj)
Obs['noise'] = R

other = {'name': utils.rel2mods(__file__) + '_full'}
HMM_full = dpr.HiddenMarkovModel(Dyn, Obs, t, X0, **other)

################
# Truncated
################

# Just change dt from 0.005 to 0.05 (see the sketch below).
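# Sketch (assumption): how the truncated setup might begin. Only the coarser
# dt is given by the comment above; `dxdt_trunc` is a hypothetical name for a
# truncated tendency over the nU resolved variables, not defined in this file.
#
# t = dpr.Chronology(dt=0.05, dtObs=0.05, T=4**3, BurnIn=6)
# Dyn_trunc = {
#     'M': nU,
#     'model': dpr.with_rk4(LUV.dxdt_trunc, autonom=True),
#     'noise': 0,
# }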
# Localization.
batch_shape = [2, 2]  # width (in grid points) of each state batch.
# Increasing the width
# => quicker analysis (but less relative speed-up from parallelization,
#    depending on NPROC)
# => worse (increased) rmse (but width 4 is only slightly worse than 1);
#    if inflation is applied locally, then rmse might actually improve.
localizer = nd_Id_localization(shape[::-1], batch_shape[::-1], obs_inds, periodic=False)

Obs = {
    'M': Ny,
    'model': hmod,
    'noise': dpr.GaussRV(C=4 * np.eye(Ny)),
    'localizer': localizer,
}

# Moving localization mask for smoothers:
# (no movement: suboptimal, but easy; a moving alternative is sketched
# at the end of this file)
Obs['loc_shift'] = lambda ii, dt: ii

# Jacobian left unspecified because it is (usually) only employed by methods
# that compute the full covariance, which in this case is too big.

############################
# Other
############################
HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0, LP=LP_setup(obs_inds))

####################
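# Sketch (assumption, not from this experiment): a moving localization mask
# for a flow that advects structures by roughly c grid points per unit time.
# Both c and the periodic wrap are illustrative; this domain is non-periodic,
# so a real implementation would clip rather than wrap.
#
# c = 5.0  # mean advection speed (grid points per unit time); illustrative
# Obs['loc_shift'] = lambda ii, dt: (np.asarray(ii) + int(round(c * dt))) % np.prod(shape)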
from dapper.tools.localization import nd_Id_localization

# Sakov uses K=300000, BurnIn=1000*0.05
t = dpr.Chronology(0.05, dkObs=1, KObs=1000, Tplot=Tplot, BurnIn=2 * Tplot)

Nx = 40
x0 = x0(Nx)

Dyn = {'M': Nx, 'model': step, 'linear': dstep_dx, 'noise': 0}

X0 = dpr.GaussRV(mu=x0, C=0.001)

jj = np.arange(Nx)  # obs_inds
Obs = dpr.partial_Id_Obs(Nx, jj)
Obs['noise'] = 1
Obs['localizer'] = nd_Id_localization((Nx,), (2,))

HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0)
HMM.liveplotters = LPs(jj)

####################
# Suggested tuning
####################
"""Reproduce results from Table 1 of Sakov, Oliver, Bertino (2012): 'An Iterative EnKF for Strongly Nonlinear Systems'""" import numpy as np import dapper as dpr from dapper.mods.Lorenz63 import LPs, Tplot, dstep_dx, step, x0 t = dpr.Chronology(0.01, dkObs=25, KObs=1000, Tplot=Tplot, BurnIn=4 * Tplot) Nx = len(x0) Dyn = {'M': Nx, 'model': step, 'linear': dstep_dx, 'noise': 0} X0 = dpr.GaussRV(C=2, mu=x0) jj = np.arange(Nx) # obs_inds Obs = dpr.partial_Id_Obs(Nx, jj) Obs['noise'] = 2 # dpr.GaussRV(C=CovMat(2*eye(Nx))) HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0) HMM.liveplotters = LPs(jj) #################### # Suggested tuning #################### # from dapper.mods.Lorenz63.sakov2012 import HMM # rmse.a: # xps += Climatology() # 7.6 # xps += OptInterp() # 1.25 # xps += Var3D(xB=0.1) # 1.04
# Furthermore, experiments do not seem to indicate that I can push Ny much
# lower than for the case H = Identity, even though the rmse is a lot lower
# with spectral H. Am I missing something?

import numpy as np

import dapper as dpr
from dapper.mods.Lorenz96.sakov2008 import Dyn, Nx, t

# The (Nx-Ny) highest-frequency observation modes are left out of H below.
# If Ny > Nx, then H no longer has independent (let alone orthogonal) columns,
# yet more information is gained, since the observations are noisy.
Ny = 12

X0 = dpr.GaussRV(M=Nx, C=0.001)

def make_H(Ny, Nx):
    """Construct a (Ny, Nx) observation matrix of sinusoidal modes."""
    xx = np.linspace(-1, 1, Nx + 1)[1:]
    H = np.zeros((Ny, Nx))
    H[0] = 1 / np.sqrt(2)
    for k in range(-(Ny // 2), (Ny + 1) // 2):
        ind = 2 * abs(k) - (k < 0)
        H[ind] = np.sin(np.pi * k * xx + np.pi / 4)
    H /= np.sqrt(Nx / 2)
    return H

H = make_H(Ny, Nx)
# plt.figure(1).gca().matshow(H)
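# Sketch (assumption): wiring the spectral H into an observation dict,
# following the same Obs-dict conventions as the other setups here. The
# noise level 0.01 is an illustrative placeholder, not taken from the
# experiment.
#
# Obs = {
#     'M': Ny,
#     'model': lambda x, t: x @ H.T,  # observe spectral coefficients
#     'linear': lambda x, t: H,       # constant Jacobian
#     'noise': 0.01,
# }
# HMM = dpr.HiddenMarkovModel(Dyn, Obs, t, X0)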
try:
    # Load pre-generated
    L = np.load(sample_filename)['Left']
except FileNotFoundError:
    # First-time use
    print('Did not find sample file', sample_filename,
          'for experiment initialization. Generating...')
    NQ = 20000  # Must have NQ > (2*wnumQ+1)
    A = sinusoidal_sample(Nx, wnumQ, NQ)
    A = 1 / 10 * center(A)[0] / np.sqrt(NQ)
    Q = A.T @ A
    U, s, _ = tsvd(Q)
    L = U * np.sqrt(s)
    np.savez(sample_filename, Left=L)

X0 = dpr.GaussRV(C=dpr.CovMat(np.sqrt(5) * L, 'Left'))

###################
#  Forward model  #
###################
damp = 0.98
Fm = Fmat(Nx, -1, 1, tseq.dt)

def step(x, t, dt):
    assert dt == tseq.dt
    return x @ Fm.T

Dyn = {
    'M': Nx,
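# Optional sanity check (not in the source), belonging right after the
# sample-generation block above: L is a truncated left square root of Q,
# so L @ L.T should reconstruct Q up to the discarded singular values.
# Note that Q only exists in the generating (except) branch.
#
# rel_err = np.linalg.norm(L @ L.T - Q) / np.linalg.norm(Q)
# assert rel_err < 1e-6  # tolerance is an arbitrary illustration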
"""Like todter2015 but with Gaussian likelihood.""" import dapper as dpr from dapper.mods.Lorenz96.todter2015 import HMM HMM.Obs.noise = dpr.GaussRV(C=HMM.Obs.noise.C) #################### # Suggested tuning #################### # rmse.a # xps += LETKF(N=40,rot=True,infl=1.04 ,loc_rad=5) # 0.42 # xps += LETKF(N=80,rot=True,infl=1.04 ,loc_rad=5) # 0.42 # xps += LNETF(N=40,rot=True,infl=1.10,Rs=1.9,loc_rad=5) # 0.54 # xps += LNETF(N=80,rot=True,infl=1.06,Rs=1.4,loc_rad=5) # 0.47
def Q(dkObs):
    return dpr.GaussRV(M=HMM.Nx, C=0.01 / (dkObs * HMM.t.dt))
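# Illustrative use (assumption): attach this model noise to the dynamics,
# with variance scaled to the chosen observation interval. The dkObs value
# is an arbitrary example, and assigning HMM.Dyn.noise mirrors the
# HMM.Obs.noise override in the todter2015 variant above.
#
# dkObs = 5
# HMM.Dyn.noise = Q(dkObs)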