def make_data_noisy(self, eigenspace, eigenvalues, var, var_eigenvalues):
    """Generate noisy Gaussian data from mean matrix and variance.

    Parameters
    ----------
    eigenspace : array-like, shape=[n, n]
        Data eigenvectors.
    eigenvalues : array-like, shape=[n, n]
        Eigenvalues matrix (diagonal matrix).
    var : float
        Variance of the wanted distribution.
    var_eigenvalues : float
        Noise within the distribution.

    Returns
    -------
    spd_data : array-like, shape=[n, n]
        Output data.
    """
    spd = SPDMatrices(n=self.n_features)
    eigensummary = EigenSummary(eigenspace, eigenvalues)
    spd_data = spd.random_gaussian_rotation_orbit_noisy(
        eigensummary=eigensummary,
        var_rotations=var,
        var_eigenvalues=var_eigenvalues,
        n_samples=self.n_samples)
    return spd_data
def setUp(self): """Set up the test""" self.n = 3 self.spd_cov_n = (self.n * (self.n + 1)) // 2 self.samples = 5 self.SPDManifold = SPDMatrices(self.n) self.Euclidean = Euclidean(self.n)
def setUp(self):
    gs.random.seed(0)
    n = 3
    self.base = SPDMatrices(n)
    self.base_metric = SPDMetricBuresWasserstein(n)
    self.group = SpecialOrthogonal(n)
    self.bundle = FiberBundle(
        GeneralLinear(n), base=self.base, group=self.group)
    self.quotient_metric = QuotientMetric(
        self.bundle, ambient_metric=MatricesMetric(n, n))

    def submersion(point):
        # Projection pi(X) = X X^T from GL(n) onto SPD(n).
        return GeneralLinear.mul(point, GeneralLinear.transpose(point))

    def tangent_submersion(tangent_vec, base_point):
        # Differential of the submersion: d pi_X(V) = 2 sym(X V^T).
        product = GeneralLinear.mul(
            base_point, GeneralLinear.transpose(tangent_vec))
        return 2 * GeneralLinear.to_symmetric(product)

    def horizontal_lift(tangent_vec, point, base_point=None):
        # Horizontal lift obtained by solving the Sylvester equation
        # base_point * S + S * base_point = tangent_vec.
        if base_point is None:
            base_point = submersion(point)
        sylvester = gs.linalg.solve_sylvester(
            base_point, base_point, tangent_vec)
        return GeneralLinear.mul(sylvester, point)

    self.bundle.submersion = submersion
    self.bundle.tangent_submersion = tangent_submersion
    self.bundle.horizontal_lift = horizontal_lift
    self.bundle.lift = gs.linalg.cholesky
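A minimal sanity sketch (not part of the original test) of the bundle maps defined above, assuming only the geomstats objects already used there: lifting an SPD matrix by its Cholesky factor and applying the submersion pi(X) = X X^T should recover the original matrix.

import geomstats.backend as gs
from geomstats.geometry.general_linear import GeneralLinear
from geomstats.geometry.spd_matrices import SPDMatrices

# Lift an SPD matrix to GL(3) via its lower-triangular Cholesky factor L.
spd_point = SPDMatrices(3).random_point()
lifted = gs.linalg.cholesky(spd_point)

# The submersion sends the lift back down: L L^T equals the original matrix.
recovered = GeneralLinear.mul(lifted, GeneralLinear.transpose(lifted))
print(gs.allclose(recovered, spd_point))  # True, up to numerical tolerance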
def test_estimate_spd_two_samples(self):
    space = SPDMatrices(3)
    metric = SPDMetricAffine(3)
    point = space.random_point(2)
    mean = FrechetMean(metric)
    mean.fit(point)
    result = mean.estimate_
    expected = metric.exp(metric.log(point[0], point[1]) / 2, point[1])
    self.assertAllClose(expected, result)
def setup_method(self): """Set up the test""" self.n = 3 self.spd_cov_n = (self.n * (self.n + 1)) // 2 self.samples = 5 self.spd = SPDMatrices(self.n) self.log_euclidean = SPDMetricLogEuclidean(self.n) self.affine_invariant = SPDMetricAffine(self.n) self.euclidean = Euclidean(self.n)
def setUp(self):
    self.so3 = SpecialOrthogonal(n=3)
    self.spd = SPDMatrices(3)
    self.spd_metric = SPDMetricAffine(3)
    self.n_samples = 10
    self.X = self.so3.random_uniform(n_samples=self.n_samples)
    self.metric = self.so3.bi_invariant_metric
    self.n_components = 2
def setUp(self):
    warnings.simplefilter('ignore', category=ImportWarning)
    gs.random.seed(1234)
    self.n = 3
    self.space = SPDMatrices(n=self.n)
    self.metric_affine = SPDMetricAffine(n=self.n)
    self.metric_procrustes = SPDMetricProcrustes(n=self.n)
    self.metric_euclidean = SPDMetricEuclidean(n=self.n)
    self.metric_logeuclidean = SPDMetricLogEuclidean(n=self.n)
    self.n_samples = 4
def test_ifm_affine_invariant_belongs(self):
    mean = 2 * gs.eye(self.n)
    cov = gs.eye(self.spd_cov_n)
    spd = SPDMatrices(self.n)
    log_normal_sampler = LogNormal(self.spd, mean, cov)
    data = log_normal_sampler.sample(20)
    ifm = IncrementalFrechetMean(self.affine_invariant).fit(data)
    ifm_mean = ifm.estimate_
    result = gs.all(spd.belongs(ifm_mean))
    expected = gs.array(True)
    self.assertAllClose(result, expected)
def setUp(self): """Set up the test.""" warnings.simplefilter('ignore', category=ImportWarning) gs.random.seed(1234) self.n = 3 self.space = SPDMatrices(n=self.n) self.metric_affine = SPDMetricAffine(n=self.n) self.metric_bureswasserstein = SPDMetricBuresWasserstein(n=self.n) self.metric_euclidean = SPDMetricEuclidean(n=self.n) self.metric_logeuclidean = SPDMetricLogEuclidean(n=self.n) self.n_samples = 4
def setUp_alt(self, n=3, n_samples=4):
    """Set up the test with flexible parameters."""
    warnings.simplefilter('ignore', category=ImportWarning)
    gs.random.seed(1234)
    self.n = n
    self.space = SPDMatrices(n=self.n)
    self.metric_affine = SPDMetricAffine(n=self.n)
    self.metric_procrustes = SPDMetricProcrustes(n=self.n)
    self.metric_euclidean = SPDMetricEuclidean(n=self.n)
    self.metric_logeuclidean = SPDMetricLogEuclidean(n=self.n)
    self.n_samples = n_samples
def test_load_connectomes(self):
    """Test that the connectomes belong to SPD."""
    spd = SPDMatrices(28)
    data, _, _ = data_utils.load_connectomes(as_vectors=True)
    result = data.shape
    expected = (86, 27 * 14)
    self.assertAllClose(result, expected)

    data, _, labels = data_utils.load_connectomes()
    result = spd.belongs(data)
    self.assertTrue(gs.all(result))

    result = gs.logical_and(labels >= 0, labels <= 1)
    self.assertTrue(gs.all(result))
def test_differential_cholesky_factor(self, n, tangent_vec, base_point, expected):
    result = SPDMatrices.differential_cholesky_factor(
        gs.array(tangent_vec), gs.array(base_point))
    self.assertAllClose(result, gs.array(expected))
    self.assertAllClose(
        gs.all(LowerTriangularMatrices(n).belongs(result)), gs.array(True))
def test_inverse_transform_spd(self):
    point = SPDMatrices(3).random_uniform(10)
    mean = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    X = mean.fit_transform(X=point)
    result = mean.inverse_transform(X)
    expected = point
    self.assertAllClose(expected, result)
def test_belongs(self): """Test of belongs method.""" mats = gs.array( [[1., 1.], [1., 1.]]) result = SPDMatrices.belongs(mats) expected = False self.assertAllClose(result, expected)
def test_inverse_transform_spd(self):
    point = SPDMatrices(3).random_uniform(10)
    transformer = ToTangentSpace(geometry=SPDMetricAffine(3))
    X = transformer.fit_transform(X=point)
    result = transformer.inverse_transform(X)
    expected = point
    self.assertAllClose(expected, result, atol=1e-4)
def cholesky_factor_belongs_test_data(self):
    list_n = random.sample(range(1, 100), 10)
    n_samples = 10
    random_data = [
        dict(n=n, mat=SPDMatrices(n).random_point(n_samples))
        for n in list_n
    ]
    return self.generate_tests([], random_data)
def test_belongs(self): """Test of belongs method.""" mats = gs.array([[3., -1.], [-1., 3.]]) result = SPDMatrices(2).belongs(mats) expected = True self.assertAllClose(result, expected) mats = gs.array([[-1., -1.], [-1., 3.]]) result = SPDMatrices(2).belongs(mats) expected = False self.assertAllClose(result, expected) mats = gs.eye(3) result = SPDMatrices(2).belongs(mats) expected = False self.assertAllClose(result, expected)
def test_belongs_vectorization(self):
    """Test of belongs method."""
    mats = gs.array([
        [[1., 0.], [0., 1.]],
        [[1., 2.], [2., 1.]],
        [[1., 0.], [1., 1.]]])
    result = SPDMatrices(2).belongs(mats)
    expected = gs.array([True, False, False])
    self.assertAllClose(result, expected)
def __init__(self, n):
    super(BuresWassersteinBundle, self).__init__(
        n=n,
        base=SPDMatrices(n),
        group=SpecialOrthogonal(n),
        ambient_metric=MatricesMetric(n, n),
    )
def test_batched(self):
    space = SPDMatrices(3)
    metric = SPDMetricAffine(3)
    point = space.random_point(4)
    mean_batch = FrechetMean(metric, method="batch", verbose=True)
    data = gs.stack([point[:2], point[2:]], axis=1)
    mean_batch.fit(data)
    result = mean_batch.estimate_

    mean = FrechetMean(metric)
    mean.fit(data[:, 0])
    expected_1 = mean.estimate_
    mean.fit(data[:, 1])
    expected_2 = mean.estimate_
    expected = gs.stack([expected_1, expected_2])
    self.assertAllClose(expected, result)
def test_belongs(self):
    mats = gs.array([
        [[1., 1.], [1., 1.]],   # symmetric but only positive semi-definite
        [[1., 2.], [2., 1.]],   # symmetric but indefinite (eigenvalues 3 and -1)
        [[1., 0.], [1., 1.]]])  # not symmetric
    result = SPDMatrices(2).belongs(mats)
    expected = gs.array([False, False, False])
    self.assertAllClose(result, expected)
def test_estimate_spd(self):
    point = SPDMatrices(3).random_point()
    points = gs.array([point, point])
    mean = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    mean.fit(X=points)
    result = mean.estimate_
    expected = point
    self.assertAllClose(expected, result)
def test_estimate_transform_spd(self):
    point = SPDMatrices(3).random_uniform()
    points = gs.stack([point, point])
    transformer = ToTangentSpace(geometry=SPDMetricAffine(3))
    transformer.fit(X=points)
    result = transformer.transform(points)
    expected = gs.zeros((2, 6))
    self.assertAllClose(expected, result, atol=1e-5)
def setup_method(self):
    self.n_samples = 10
    self.SO3_GROUP = SpecialOrthogonal(n=3, point_type="vector")
    self.SE3_GROUP = SpecialEuclidean(n=3, point_type="vector")
    self.S1 = Hypersphere(dim=1)
    self.S2 = Hypersphere(dim=2)
    self.H2 = Hyperbolic(dim=2)
    self.H2_half_plane = PoincareHalfSpace(dim=2)
    self.M32 = Matrices(m=3, n=2)
    self.S32 = PreShapeSpace(k_landmarks=3, m_ambient=2)
    self.KS = visualization.KendallSphere()
    self.M33 = Matrices(m=3, n=3)
    self.S33 = PreShapeSpace(k_landmarks=3, m_ambient=3)
    self.KD = visualization.KendallDisk()
    self.spd = SPDMatrices(n=2)
    plt.figure()
def test_estimate_transform_spd(self):
    point = SPDMatrices(3).random_uniform()
    points = gs.array([point, point])
    mean = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    mean.fit(X=points)
    result = mean.transform(points)
    expected = gs.zeros((2, 6))
    self.assertAllClose(expected, result)
def test_cholesky_factor(self, n, spd_mat, cf):
    result = SPDMatrices.cholesky_factor(gs.array(spd_mat))
    self.assertAllClose(result, gs.array(cf))
    self.assertAllClose(
        gs.all(PositiveLowerTriangularMatrices(n).belongs(result)),
        gs.array(True),
    )
def __init__(self, n):
    super(FullRankCorrelationMatrices, self).__init__(
        dim=int(n * (n - 1) / 2),
        embedding_space=SPDMatrices(n=n),
        submersion=Matrices.diagonal,
        value=gs.ones(n),
        tangent_submersion=lambda v, x: Matrices.diagonal(v),
    )
    self.n = n
def unary_op_like_np_test_data(self):
    smoke_data = [
        dict(func_name="trace", a=rand(2, 2)),
        dict(func_name="trace", a=rand(3, 3)),
        dict(func_name="linalg.cholesky", a=SPDMatrices(3).random_point()),
        dict(func_name="linalg.eigvalsh", a=SymmetricMatrices(3).random_point()),
    ]
    return self.generate_tests(smoke_data)
def __init__(self, n, **kwargs):
    kwargs.setdefault("metric", FullRankCorrelationAffineQuotientMetric(n))
    super(FullRankCorrelationMatrices, self).__init__(
        dim=int(n * (n - 1) / 2),
        embedding_space=SPDMatrices(n=n),
        submersion=Matrices.diagonal,
        value=gs.ones(n),
        tangent_submersion=lambda v, x: Matrices.diagonal(v),
        **kwargs)
    self.n = n
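For reference, a small hedged illustration of the level-set constraint used by both constructors above: a full-rank correlation matrix is an SPD matrix whose diagonal equals the vector of ones, i.e. it satisfies submersion(x) == value. The example below only uses SPDMatrices and Matrices.diagonal, which already appear in these snippets.

import geomstats.backend as gs
from geomstats.geometry.matrices import Matrices
from geomstats.geometry.spd_matrices import SPDMatrices

# A 2 x 2 correlation matrix: symmetric positive definite with unit diagonal.
corr = gs.array([[1., 0.3], [0.3, 1.]])

print(SPDMatrices(2).belongs(corr))                      # True: lies in the embedding space
print(gs.allclose(Matrices.diagonal(corr), gs.ones(2)))  # True: submersion(x) equals value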
def __new__(cls, n, k, **kwargs):
    if n > k:
        return RankKPSDMatrices(n, k, **kwargs)
    if n == k:
        return SPDMatrices(n, **kwargs)
    raise NotImplementedError("The case k > n is not implemented.")
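An illustrative usage sketch of the dispatch above. The enclosing class is not shown in this snippet; the name PSDMatrices below is assumed for illustration, so the lines are kept as comments rather than runnable code.

# space = PSDMatrices(n=3, k=2)   # rank-deficient case: returns a RankKPSDMatrices instance
# space = PSDMatrices(n=3, k=3)   # full-rank case: returns an SPDMatrices instance
# space = PSDMatrices(n=3, k=4)   # k > n: raises NotImplementedError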