Example #1
def symmetric_matrixvariate_normal(
    shape: ShapeArgType, precompute_cov_cholesky: bool, rng: np.random.Generator
) -> randvars.Normal:
    rv = randvars.Normal(
        mean=random_spd_matrix(dim=shape[0], rng=rng),
        cov=linops.SymmetricKronecker(A=random_spd_matrix(dim=shape[0], rng=rng)),
    )
    if precompute_cov_cholesky:
        rv.precompute_cov_cholesky()
    return rv
Example #2
def matrixvariate_normal(shape: ShapeLike, precompute_cov_cholesky: bool,
                         rng: np.random.Generator) -> randvars.Normal:
    rv = randvars.Normal(
        mean=rng.normal(size=shape),
        cov=linops.Kronecker(
            A=random_spd_matrix(dim=shape[0], rng=rng),
            B=random_spd_matrix(dim=shape[1], rng=rng),
        ),
    )
    if precompute_cov_cholesky:
        rv.precompute_cov_cholesky()
    return rv
Example #3
    def test_cov_cholesky_cov_cholesky_passed(self):
        """A value for cov_cholesky is passed in init.

        In this case, the "is_precomputed" flag is True, the cov_cholesky returns the
        argument that has been passed, but (p)recomputing overwrites the argument with a
        new factor.
        """
        # This is purposely not the correct Cholesky factor for test reasons
        cov_cholesky = np.random.rand(4, 4)

        rv = randvars.Normal(
            mean=np.random.uniform(size=(2, 2)),
            cov=random_spd_matrix(rng=self.rng, dim=4),
            cov_cholesky=cov_cholesky,
        )

        with self.subTest("Cholesky precomputed"):
            self.assertTrue(rv.cov_cholesky_is_precomputed)

        with self.subTest("Returns correct cov_cholesky"):
            self.assertAllClose(rv.cov_cholesky, cov_cholesky)

        with self.subTest("self.precompute raises exception"):
            with self.assertRaises(Exception):
                rv.precompute_cov_cholesky()
Example #4
def multivariate_normal(shape: ShapeLike, precompute_cov_cholesky: bool,
                        rng: np.random.Generator) -> randvars.Normal:
    rv = randvars.Normal(
        mean=rng.normal(size=shape),
        cov=random_spd_matrix(rng=rng, dim=shape[0]),
    )
    if precompute_cov_cholesky:
        rv.precompute_cov_cholesky()
    return rv
Example #5
    def setUp(self):

        self.seed = 42
        self.rng = np.random.default_rng(self.seed)

        self.params = (
            self.rng.uniform(size=10),
            random_spd_matrix(rng=self.rng, dim=10),
        )
Example #6
    def setUp(self):
        """Resources for tests."""
        # Seed
        np.random.seed(seed=42)

        # Parameters
        m = 7
        n = 3
        self.constants = [-1, -2.4, 0, 200, np.pi]
        sparsemat = scipy.sparse.rand(m=m, n=n, density=0.1, random_state=1)
        self.normal_params = [
            # Univariate
            (-1.0, 3.0),
            (1, 3),
            # Multivariate
            (np.random.uniform(size=10), np.eye(10)),
            (np.random.uniform(size=10), random_spd_matrix(10)),
            # Matrixvariate
            (
                np.random.uniform(size=(2, 2)),
                linops.SymmetricKronecker(
                    A=np.array([[1.0, 2.0], [2.0, 1.0]]),
                    B=np.array([[5.0, -1.0], [-1.0, 10.0]]),
                ).todense(),
            ),
            # Operatorvariate
            (
                np.array([1.0, -5.0]),
                linops.Matrix(A=np.array([[2.0, 1.0], [1.0, -0.1]])),
            ),
            (
                linops.Matrix(A=np.array([[0.0, -5.0]])),
                linops.Identity(shape=(2, 2)),
            ),
            (
                np.array([[1.0, 2.0], [-3.0, -0.4], [4.0, 1.0]]),
                linops.Kronecker(A=np.eye(3), B=5 * np.eye(2)),
            ),
            (
                linops.Matrix(A=sparsemat.todense()),
                linops.Kronecker(0.1 * linops.Identity(m), linops.Identity(n)),
            ),
            (
                linops.Matrix(A=np.random.uniform(size=(2, 2))),
                linops.SymmetricKronecker(
                    A=np.array([[1.0, 2.0], [2.0, 1.0]]),
                    B=np.array([[5.0, -1.0], [-1.0, 10.0]]),
                ),
            ),
            # Symmetric Kronecker Identical Factors
            (
                linops.Identity(shape=25),
                linops.SymmetricKronecker(A=linops.Identity(25)),
            ),
        ]
Example #7
def test_spectrum_matches_given(rng: np.random.Generator):
    """Test whether the spectrum of the test problem matches the provided spectrum."""
    dim = 10
    spectrum = np.sort(rng.uniform(0.1, 1, size=dim))
    spdmat = random_spd_matrix(rng=rng, dim=dim, spectrum=spectrum)
    eigvals = np.sort(np.linalg.eigvals(spdmat))
    np.testing.assert_allclose(
        spectrum,
        eigvals,
        err_msg="Provided spectrum doesn't match actual.",
    )
Example #8
def test_induced_norm_array(array0: np.ndarray, axis: int):
    inprod_mat = random_spd_matrix(
        rng=np.random.default_rng(254), dim=array0.shape[axis]
    )
    array0_moved_axis = np.moveaxis(array0, axis, -1)
    A_array_0_moved_axis = np.squeeze(
        inprod_mat @ array0_moved_axis[..., :, None], axis=-1
    )

    assert np.sqrt(
        np.sum(array0_moved_axis * A_array_0_moved_axis, axis=-1)
    ) == pytest.approx(induced_norm(v=array0, A=inprod_mat, axis=axis))
Example #9
    def test_reshape(self):
        rv = randvars.Normal(
            mean=np.random.uniform(size=(4, 3)),
            cov=linops.Kronecker(A=random_spd_matrix(4),
                                 B=random_spd_matrix(3)).todense(),
        )

        newshape = (2, 6)
        reshaped_rv = rv.reshape(newshape)

        self.assertArrayEqual(reshaped_rv.mean, rv.mean.reshape(newshape))
        self.assertArrayEqual(reshaped_rv.cov, rv.cov)

        # Test sampling
        rv.random_state = 42
        dist_sample = rv.sample(size=5)

        reshaped_rv.random_state = 42
        dist_reshape_sample = reshaped_rv.sample(size=5)

        self.assertArrayEqual(dist_reshape_sample,
                              dist_sample.reshape((-1, ) + newshape))
Example #10
    def test_spectrum_matches_given(self):
        """Test whether the spectrum of the test problem matches the provided
        spectrum."""
        dim = 10
        spectrum = np.sort(self.rng.uniform(0.1, 1, size=dim))
        spdmat = random_spd_matrix(dim=dim,
                                   spectrum=spectrum,
                                   random_state=self.rng)
        eigvals = np.sort(np.linalg.eigvals(spdmat))
        self.assertAllClose(
            spectrum,
            eigvals,
            msg="Provided spectrum doesn't match actual.",
        )
Example #11
    def test_transpose(self):
        rv = randvars.Normal(mean=np.random.uniform(size=(2, 2)),
                             cov=random_spd_matrix(4))
        transposed_rv = rv.transpose()

        self.assertArrayEqual(transposed_rv.mean, rv.mean.T)

        # Test covariance
        for ii, ij in itertools.product(range(2), range(2)):
            for ji, jj in itertools.product(range(2), range(2)):
                idx = (2 * ii + ij, 2 * ji + jj)
                idx_t = (2 * ij + ii, 2 * jj + ji)

                self.assertEqual(transposed_rv.cov[idx_t], rv.cov[idx])
Example #12
    def test_reshape(self):
        rv = randvars.Normal(
            mean=np.random.uniform(size=(4, 3)),
            cov=linops.Kronecker(
                A=random_spd_matrix(rng=self.rng, dim=4),
                B=random_spd_matrix(rng=self.rng, dim=3),
            ).todense(),
        )

        newshape = (2, 6)
        reshaped_rv = rv.reshape(newshape)

        self.assertArrayEqual(reshaped_rv.mean, rv.mean.reshape(newshape))
        self.assertArrayEqual(reshaped_rv.cov, rv.cov)

        # Test sampling
        fixed_rng = np.random.default_rng(seed=self.seed)
        dist_sample = rv.sample(rng=fixed_rng, size=5)
        fixed_rng = np.random.default_rng(seed=self.seed)
        dist_reshape_sample = reshaped_rv.sample(rng=fixed_rng, size=5)

        self.assertArrayEqual(dist_reshape_sample,
                              dist_sample.reshape((-1, ) + newshape))
Example #13
def test_induced_solution_belief(rng: np.random.Generator):
    """Test whether a consistent belief over the solution is inferred from a belief over
    the inverse."""
    n = 5
    A = randvars.Constant(random_spd_matrix(dim=n, rng=rng))
    Ainv = randvars.Normal(
        mean=linops.Scaling(factors=1 / np.diag(A.mean)),
        cov=linops.SymmetricKronecker(linops.Identity(n)),
    )
    b = randvars.Constant(rng.normal(size=(n, 1)))
    prior = LinearSystemBelief(A=A, Ainv=Ainv, x=None, b=b)

    x_infer = Ainv @ b
    np.testing.assert_allclose(prior.x.mean, x_infer.mean)
    np.testing.assert_allclose(prior.x.cov.todense(), x_infer.cov.todense())
Example #14
    def test_precompute_cov_cholesky(self):
        rv = randvars.Normal(mean=np.random.uniform(size=(2, 2)),
                             cov=random_spd_matrix(4))

        with self.subTest("No Cholesky precomputed"):
            self.assertFalse(rv.cov_cholesky_is_precomputed)

        with self.subTest("Damping factor check"):
            rv.precompute_cov_cholesky(damping_factor=10.0)
            self.assertAllClose(
                rv.cov_cholesky,
                np.linalg.cholesky(rv.cov + 10.0 * np.eye(len(rv.cov))))

        with self.subTest("Cholesky is precomputed"):
            self.assertTrue(rv.cov_cholesky_is_precomputed)
Example #15
    def setUp(self) -> None:
        """Define parameters and define test problems."""
        self.rng = np.random.default_rng(42)
        self.dim_list = [1, 2, 25, 100, 250]
        self.spd_matrices = [
            random_spd_matrix(dim=n, random_state=self.rng)
            for n in self.dim_list
        ]
        self.density = 0.01
        self.sparse_spd_matrices = [
            random_sparse_spd_matrix(dim=n,
                                     density=self.density,
                                     random_state=self.rng)
            for n in self.dim_list
        ]
        self.matrices = self.spd_matrices + self.sparse_spd_matrices
Example #16
def test_same_backward_outputs(both_transitions, diffusion):
    trans1, trans2 = both_transitions
    real = 1 + 0.1 * np.random.rand(trans1.dimension)
    real2 = 1 + 0.1 * np.random.rand(trans1.dimension)
    cov = random_spd_matrix(trans1.dimension)
    rv = randvars.Normal(real2, cov)
    out_1, info1 = trans1.backward_realization(
        real, rv, t=0.0, dt=0.5, compute_gain=True, _diffusion=diffusion
    )
    out_2, info2 = trans2.backward_realization(
        real, rv, t=0.0, dt=0.5, compute_gain=True, _diffusion=diffusion
    )
    np.testing.assert_allclose(out_1.mean, out_2.mean)
    np.testing.assert_allclose(out_1.cov, out_2.cov)

    # Both dicts are empty?
    assert not info1
    assert not info2
Example #17
    def test_cov_cholesky_cov_cholesky_not_passed(self):
        """No cov_cholesky is passed in init.

        In this case, the "is_precomputed" flag is False, a cov_cholesky
        is computed on demand, but can also be computed manually with
        any damping factor.
        """
        rv = randvars.Normal(mean=np.random.uniform(size=(2, 2)),
                             cov=random_spd_matrix(4))

        with self.subTest("No Cholesky precomputed"):
            self.assertFalse(rv.cov_cholesky_is_precomputed)

        with self.subTest("Cholesky factor is computed correctly"):
            # The default damping factor 1e-12 does not mess up this test
            self.assertAllClose(rv.cov_cholesky, np.linalg.cholesky(rv.cov))

        with self.subTest("Cholesky is precomputed"):
            self.assertTrue(rv.cov_cholesky_is_precomputed)
Example #18
def get_linear_system(name: str, dim: int):
    rng = np.random.default_rng(0)

    if name == "dense":
        if dim > 1000:
            raise NotImplementedError()
        A = random_spd_matrix(rng=rng, dim=dim)
    elif name == "sparse":
        A = random_sparse_spd_matrix(rng=rng,
                                     dim=dim,
                                     density=np.minimum(1.0, 1000 / dim**2))
    elif name == "linop":
        if dim > 100:
            raise NotImplementedError()
            # TODO: Larger benchmarks currently fail. Remove once PLS refactor (https://github.com/probabilistic-numerics/probnum/issues/51) is resolved
        A = linops.Scaling(factors=rng.normal(size=(dim, )))
    else:
        raise NotImplementedError()

    solution = rng.normal(size=(dim, ))
    b = A @ solution
    return problems.LinearSystem(A=A, b=b, solution=solution)
Example #19
def spdmat3x3(rng):
    return linalg_zoo.random_spd_matrix(rng, dim=3)
Example #20
def spdmat3x3():
    return random_spd_matrix(3)
Example #21
    orthogonal_basis = orthogonal_basis.T

    # Orthogonalize vector
    ortho_vector = orthogonalization_fn(v=vector,
                                        orthogonal_basis=orthogonal_basis,
                                        normalize=True)

    assert np.inner(ortho_vector, ortho_vector) == pytest.approx(1.0)


@pytest.mark.parametrize(
    "inner_product_matrix",
    [
        np.diag(np.random.default_rng(123).standard_gamma(1.0, size=(n, ))),
        5 * np.eye(n),
        random_spd_matrix(rng=np.random.default_rng(46), dim=n),
    ],
)
def test_noneuclidean_innerprod(
    vector: np.ndarray,
    basis_size: int,
    inner_product_matrix: np.ndarray,
    orthogonalization_fn: Callable[[np.ndarray, np.ndarray], np.ndarray],
):
    evals, evecs = np.linalg.eigh(inner_product_matrix)
    orthogonal_basis = evecs * 1 / np.sqrt(evals)
    orthogonal_basis = orthogonal_basis[:, 0:basis_size].T

    # Orthogonalize vector
    ortho_vector = orthogonalization_fn(
        v=vector,
Example #22
def spdmat4(test_ndim, rng):
    return random_spd_matrix(rng, dim=test_ndim)
Example #23
    def setUp(self):
        self.params = (np.random.uniform(size=10), random_spd_matrix(10))
Example #24
def case_random_spd_matrix(n: int, rng: np.random.Generator) -> np.ndarray:
    return random_spd_matrix(dim=n, rng=rng)
Example #25
def rand_spd_mat(rng):
    return Matrix(random_spd_matrix(rng, dim=4))
Example #26
def rnd_dense_spd_mat(n_cols: int, rng: np.random.Generator) -> np.ndarray:
    """Random spd matrix generated from :meth:`random_spd_matrix`."""
    return random_spd_matrix(rng=rng, dim=n_cols)
Example #27
def test_negative_eigenvalues_throws_error(rng: np.random.Generator):
    """Test whether a non-positive spectrum throws an error."""
    with pytest.raises(ValueError):
        random_spd_matrix(rng=rng, dim=3, spectrum=[-1, 1, 2])
Example #28
def spdmat2(even_ndim):
    return random_spd_matrix(even_ndim)
Example #29
def spdmat4(test_ndim):
    return random_spd_matrix(test_ndim)
Example #30
def case_random_spd_linsys(ncols: int) -> problems.LinearSystem:
    rng = np.random.default_rng(1)
    A = random_spd_matrix(rng=rng, dim=ncols)
    x = rng.normal(size=(ncols, ))
    b = A @ x
    return problems.LinearSystem(A=A, b=b, solution=x)
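All of the snippets above share one pattern: seed a NumPy Generator, draw a symmetric positive-definite matrix with random_spd_matrix, and use it (directly or through a Kronecker-type linear operator) as the covariance of a randvars.Normal or as the system matrix of a LinearSystem. The following minimal, self-contained sketch shows that pattern end to end. It assumes a recent probnum layout in which random_spd_matrix is importable from probnum.problems.zoo.linalg and accepts rng/dim keywords (as in Examples #7 and #24), and in which Normal.sample takes an rng argument (as in Example #12); the older snippets above pass random_state or positional arguments instead.

import numpy as np

from probnum import randvars
from probnum.problems.zoo.linalg import random_spd_matrix

rng = np.random.default_rng(42)
dim = 5

# Random symmetric positive-definite matrix, used here as a covariance.
cov = random_spd_matrix(rng=rng, dim=dim)
assert np.allclose(cov, cov.T)              # symmetric by construction
assert np.all(np.linalg.eigvalsh(cov) > 0)  # strictly positive spectrum

# Multivariate normal random variable with that covariance.
rv = randvars.Normal(mean=rng.normal(size=dim), cov=cov)
samples = rv.sample(rng=rng, size=3)        # array of shape (3, dim)
print(samples.shape)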