def test_regularize_cov(lorenz_dataset):
    """Smoke test: Kronecker-regularized cross-covariance estimation runs.

    Uses only the first three columns (the Lorenz-driven channels) of the
    fixture data; the remaining fixture outputs are not needed here.
    """
    _, _, X, _, _ = lorenz_dataset
    lorenz_channels = X[:, :3]
    calc_cross_cov_mats_from_data(lorenz_channels, 5,
                                  regularization='kron',
                                  reg_ops={'num_folds': 3})
def test_T_too_large():
    """Test that an error is raised when T is too large.

    With 100 samples, T = 99 is the largest admissible window; T = 100
    must raise ValueError.
    """
    X = np.random.randn(100, 5)
    # Largest valid T: one fewer than the number of samples.
    calc_cross_cov_mats_from_data(X, 99)
    # T equal to the sample count is out of range.
    with pytest.raises(ValueError):
        calc_cross_cov_mats_from_data(X, 100)
def test_cross_cov_mats_from_data_chunks_2d():
    """Chunked and unchunked lagged-matrix estimation agree on 2d input.

    Draws a large Gaussian sample with a random SPD covariance, then
    compares the cross-covariance matrices computed with and without
    chunking to loose (statistical) tolerance.
    """
    np.random.seed(0)
    A = np.random.randn(10, 10)
    spd_cov = A.T.dot(A) + np.eye(10)  # symmetric positive definite
    X = np.random.multivariate_normal(np.zeros(10), spd_cov, size=10000)
    chunked = calc_cross_cov_mats_from_data(X, 3, chunks=10)
    unchunked = calc_cross_cov_mats_from_data(X, 3)
    assert_allclose(chunked, unchunked, rtol=1e-2)
def test_projected_cov_calc(lorenz_dataset):
    """Test the project_cross_cov_mats function by also directly projecting the data.

    Computes the projected covariance two ways and checks they agree:
    (1) project the cross-covariance matrices with `project_cross_cov_mats`
    and assemble the full covariance; (2) lag-embed the data, apply the
    equivalent block-diagonal projection directly, and Toeplitzify the
    empirical covariance. Also checks the torch path matches numpy.
    """
    rng = np.random.RandomState(20200226)
    # Only X is needed from the fixture; the fixture's own T and d are
    # deliberately discarded — this test uses its own T, d, N below.
    _, _, X, _, _ = lorenz_dataset
    X = X[:, :3]
    N = 3  # ambient dimension (Lorenz channels)
    d = 2  # projected dimension
    T = 6  # window length
    V = init_coef(N, d, rng, 'random_ortho')
    tV = torch.tensor(V)

    # Path 1: project the cross-covariance matrices, then build the
    # full T*d x T*d covariance.
    ccms = calc_cross_cov_mats_from_data(X, T)
    tccms = torch.tensor(ccms)
    pccms = project_cross_cov_mats(ccms, V)
    cov = calc_cov_from_cross_cov_mats(pccms)

    # Path 2: lag-embed the data and apply the block-diagonal projection
    # diag(V, ..., V) directly, then Toeplitzify the empirical covariance.
    XL = form_lag_matrix(X, T)
    big_V = np.zeros((T * N, T * d))
    for ii in range(T):
        big_V[ii * N:(ii + 1) * N, ii * d:(ii + 1) * d] = V
    Xp = XL.dot(big_V)
    cov2 = np.cov(Xp, rowvar=False)
    cov2 = toeplitzify(cov2, T, d)
    assert_allclose(cov, cov2)

    # Torch implementation must agree with the numpy result.
    tpccms = project_cross_cov_mats(tccms, tV)
    tcov = calc_cov_from_cross_cov_mats(tpccms)
    assert torch.allclose(tcov, torch.tensor(cov2))
    assert_allclose(tcov.numpy(), cov2)
def lorenz_dataset():
    """Build a noisy dataset whose first 3 channels carry Lorenz dynamics.

    Returns a tuple ``(T, d, X, ccms, ccov)``: the window length, ambient
    dimension, the (10000, d) data matrix, its cross-covariance matrices,
    and the assembled full covariance.

    NOTE(review): this is consumed as a pytest fixture by the tests above —
    confirm the ``@pytest.fixture`` decorator is present upstream of this def.
    """
    rng = np.random.RandomState(20200129)
    T, d = 20, 31
    # Gaussian background noise in all d channels; Lorenz signal added
    # to the first three.
    X = rng.randn(10000, d)
    X[:, :3] += gen_lorenz_data(10000)
    ccms = calc_cross_cov_mats_from_data(X, T=T)
    ccov = calc_cov_from_cross_cov_mats(ccms)
    return T, d, X, ccms, ccov