def test_cholesky_update(spdmat1, spdmat2):
    """cholesky_update() of two Cholesky factors equals the factor of the sum.

    For SPD matrices A and B with Cholesky factors S_A and S_B, the updated
    factor must satisfy L L^T = A + B with L = np.linalg.cholesky(A + B).
    """
    chol_of_sum = np.linalg.cholesky(spdmat1 + spdmat2)
    chol_a = np.linalg.cholesky(spdmat1)
    chol_b = np.linalg.cholesky(spdmat2)
    updated = utlin.cholesky_update(chol_a, chol_b)
    np.testing.assert_allclose(chol_of_sum, updated)
def _forward_rv_sqrt(
    self, rv, t, compute_gain=False, _diffusion=1.0
) -> Tuple[randvars.RandomVariable, typing.Dict]:
    """Propagate a Gaussian through the transition in square-root form.

    The predicted covariance is never assembled as H P H^T + Q directly;
    its Cholesky factor is obtained from the stacked square roots via
    cholesky_update(), which keeps the result well-conditioned in
    floating point.
    """
    # Square-root arithmetic needs dense triangular factors.
    if config.matrix_free:
        raise NotImplementedError(
            "Sqrt-implementation does not work with linops for now."
        )

    trans_mat = self.transition_matrix_fun(t)
    noise = self.noise_fun(t)
    drift, noise_chol = noise.mean, noise.cov_cholesky

    mean_pred = trans_mat @ rv.mean + drift
    chol_pred = cholesky_update(
        trans_mat @ rv.cov_cholesky, np.sqrt(_diffusion) * noise_chol
    )
    cov_pred = chol_pred @ chol_pred.T

    crosscov = rv.cov @ trans_mat.T
    info = {"crosscov": crosscov}
    if compute_gain:
        # Solve with the freshly computed Cholesky factor; no explicit inverse.
        info["gain"] = scipy.linalg.cho_solve((chol_pred, True), crosscov.T).T

    out_rv = randvars.Normal(mean_pred, cov=cov_pred, cov_cholesky=chol_pred)
    return out_rv, info
def _forward_rv_sqrt(
    self, rv, t, compute_gain=False, _diffusion=1.0
) -> typing.Tuple[randvars.RandomVariable, typing.Dict]:
    """Forward a Gaussian through the discrete transition in square-root form.

    Instead of forming H P H^T + Q explicitly, the Cholesky factor of the
    predicted covariance is computed directly from the stacked square
    roots via cholesky_update(), which is numerically more robust.

    Parameters
    ----------
    rv :
        Incoming Normal random variable (must expose mean, cov, cov_cholesky).
    t :
        Time at which the transition matrices are evaluated.
    compute_gain :
        If True, also return the Kalman gain under key ``"gain"`` in ``info``.
    _diffusion :
        Scalar scaling of the process-noise covariance (its square root
        scales the noise Cholesky factor).

    Returns
    -------
    Tuple of the predicted Normal random variable and an ``info`` dict
    containing ``"crosscov"`` (and optionally ``"gain"``).
    """
    # Fix: the return annotation was the tuple literal
    # ``(randvars.RandomVariable, typing.Dict)``, which is not a valid type
    # expression; ``typing.Tuple[...]`` matches the sibling implementation.
    H = self.state_trans_mat_fun(t)
    SR = self.proc_noise_cov_cholesky_fun(t)
    shift = self.shift_vec_fun(t)

    new_mean = H @ rv.mean + shift
    new_cov_cholesky = cholesky_update(
        H @ rv.cov_cholesky, np.sqrt(_diffusion) * SR
    )
    new_cov = new_cov_cholesky @ new_cov_cholesky.T
    crosscov = rv.cov @ H.T
    info = {"crosscov": crosscov}
    if compute_gain:
        # Gain via the available Cholesky factor -- avoids an explicit inverse.
        info["gain"] = scipy.linalg.cho_solve(
            (new_cov_cholesky, True), crosscov.T
        ).T
    return (
        randvars.Normal(new_mean, cov=new_cov, cov_cholesky=new_cov_cholesky),
        info,
    )
def test_cholesky_optional(spdmat1, even_ndim):
    """cholesky_update() with a single argument turns a non-square matrix
    square-root into a proper (square, lower-triangular) Cholesky factor."""
    proj = np.random.rand(even_ndim // 2, even_ndim)
    reference = np.linalg.cholesky(proj @ spdmat1 @ proj.T)
    sqrt_factor = np.linalg.cholesky(spdmat1)
    computed = utlin.cholesky_update(proj @ sqrt_factor)
    np.testing.assert_allclose(reference, computed)
def _project_rv(projmat, rv):
    """Project a Normal random variable through ``projmat``.

    NOTE: there is no way to tell here whether ``rv`` already carries a
    computed Cholesky factor.  Since square-root filtering requires an
    updated factor, it is recomputed unconditionally -- extra cost for
    non-square-root algorithms.  See Issues #319 and #329; once they are
    resolved this helper should become superfluous.
    """
    projected_mean = projmat @ rv.mean
    projected_cov = projmat @ rv.cov @ projmat.T
    projected_chol = cholesky_update(projmat @ rv.cov_cholesky)
    return randvars.Normal(
        projected_mean, projected_cov, cov_cholesky=projected_chol
    )
def _forward_rv_sqrt(
    self, rv, t, compute_gain=False, _diffusion=1.0
) -> typing.Tuple[pnrv.RandomVariable, typing.Dict]:
    """Forward a Gaussian through the transition in square-root form.

    The Cholesky factor of the predicted covariance is obtained directly
    from the stacked square roots via cholesky_update() rather than by
    factorizing H P H^T + Q.

    Parameters
    ----------
    rv :
        Incoming Normal random variable (must expose mean, cov, cov_cholesky).
    t :
        Time at which the transition matrices are evaluated.
    compute_gain :
        If True, also return the Kalman gain under key ``"gain"`` in ``info``.
    _diffusion :
        Scalar scaling of the process-noise covariance.

    Returns
    -------
    Tuple of the predicted Normal random variable and an ``info`` dict
    containing ``"crosscov"`` (and optionally ``"gain"``).
    """
    # Local import keeps this fix self-contained; only needed for the
    # triangular solve below.
    import scipy.linalg

    H = self.state_trans_mat_fun(t)
    SR = self.proc_noise_cov_cholesky_fun(t)
    shift = self.shift_vec_fun(t)

    new_mean = H @ rv.mean + shift
    new_cov_cholesky = cholesky_update(
        H @ rv.cov_cholesky, np.sqrt(_diffusion) * SR
    )
    new_cov = new_cov_cholesky @ new_cov_cholesky.T
    crosscov = rv.cov @ H.T
    info = {"crosscov": crosscov}
    if compute_gain:
        # Fix: was ``crosscov @ np.linalg.inv(new_cov)``.  Solving with the
        # already-available Cholesky factor is cheaper and numerically more
        # stable than forming an explicit inverse, and matches the sibling
        # square-root implementations.
        info["gain"] = scipy.linalg.cho_solve(
            (new_cov_cholesky, True), crosscov.T
        ).T
    return pnrv.Normal(new_mean, cov=new_cov, cov_cholesky=new_cov_cholesky), info