def test_inner_product(self, n, power_affine, tangent_vec_a, tangent_vec_b, base_point, expected):
    """Check SPDMetricAffine.inner_product against a precomputed value."""
    vec_a = gs.array(tangent_vec_a)
    vec_b = gs.array(tangent_vec_b)
    point = gs.array(base_point)
    metric = SPDMetricAffine(n, power_affine)
    self.assertAllClose(metric.inner_product(vec_a, vec_b, point), expected)
def test_power_affine_inner_product(self):
    """Smoke-check the affine inner product with power_affine=0.5."""
    base_point = gs.array([[1.0, 0.0, 0.0], [0.0, 2.5, 1.5], [0.0, 1.5, 2.5]])
    tangent_vec = gs.array([[2.0, 1.0, 1.0], [1.0, 0.5, 0.5], [1.0, 0.5, 0.5]])
    metric = SPDMetricAffine(3, power_affine=0.5)
    sq_norm = metric.inner_product(tangent_vec, tangent_vec, base_point)
    # Reference value computed by hand for this smoke example.
    self.assertAllClose(sq_norm, [[713 / 144]])
def test_log_and_exp_power_affine(self):
    """Check that exp inverts log for the affine metric with power 0.5."""
    metric = SPDMetricAffine(3, power_affine=0.5)
    base_point = gs.array([[5.0, 0.0, 0.0], [0.0, 7.0, 2.0], [0.0, 2.0, 8.0]])
    point = gs.array([[9.0, 0.0, 0.0], [0.0, 5.0, 0.0], [0.0, 0.0, 1.0]])
    tangent_vec = metric.log(point, base_point)
    round_trip = metric.exp(tangent_vec, base_point)
    self.assertAllClose(round_trip, point)
def test_log_and_exp_power_affine(self):
    """Test of SPDMetricAffine.log and exp methods with power != 1."""
    metric = SPDMetricAffine(3, power_affine=0.5)
    base_point = gs.array([[5.0, 0.0, 0.0], [0.0, 7.0, 2.0], [0.0, 2.0, 8.0]])
    point = gs.array([[9.0, 0.0, 0.0], [0.0, 5.0, 0.0], [0.0, 0.0, 1.0]])
    # exp(log(point)) at the same base point must recover the point.
    recovered = metric.exp(metric.log(point, base_point), base_point)
    self.assertAllClose(recovered, point)
def test_power_affine_inner_product(self):
    """Test of SPDMetricAffine.inner_product method."""
    base_point = gs.array([[1.0, 0.0, 0.0], [0.0, 2.5, 1.5], [0.0, 1.5, 2.5]])
    tangent_vec = gs.array([[2.0, 1.0, 1.0], [1.0, 0.5, 0.5], [1.0, 0.5, 0.5]])
    metric = SPDMetricAffine(3, power_affine=0.5)
    sq_norm = metric.inner_product(tangent_vec, tangent_vec, base_point)
    # Reference value computed by hand for this smoke example.
    self.assertAllClose(sq_norm, 713 / 144)
def test_estimate_spd_two_samples(self):
    """Frechet mean of two SPD points equals the geodesic midpoint."""
    metric = SPDMetricAffine(3)
    points = SPDMatrices(3).random_point(2)
    estimator = FrechetMean(metric)
    estimator.fit(points)
    # Midpoint along the geodesic from points[1] towards points[0].
    midpoint = metric.exp(metric.log(points[0], points[1]) / 2, points[1])
    self.assertAllClose(midpoint, estimator.estimate_)
def __init__(self, n):
    """Initialize the bundle over correlation matrices.

    Delegates to the parent constructor with the affine metric on SPD(n)
    as ambient metric and the diagonal group action of full-rank
    correlation matrices.

    Parameters
    ----------
    n : int
        Size of the matrices.
    """
    super(CorrelationMatricesBundle, self).__init__(
        n=n,
        ambient_metric=SPDMetricAffine(n),
        group_dim=n,
        group_action=FullRankCorrelationMatrices.diag_action,
    )
def test_inverse_transform_spd(self):
    """fit_transform then inverse_transform recovers the SPD points."""
    points = SPDMatrices(3).random_uniform(10)
    estimator = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    transformed = estimator.fit_transform(X=points)
    recovered = estimator.inverse_transform(transformed)
    self.assertAllClose(points, recovered)
def test_inverse_transform_spd(self):
    """ToTangentSpace round-trip recovers the original SPD points."""
    points = SPDMatrices(3).random_uniform(10)
    to_tangent = ToTangentSpace(geometry=SPDMetricAffine(3))
    tangent_vecs = to_tangent.fit_transform(X=points)
    recovered = to_tangent.inverse_transform(tangent_vecs)
    self.assertAllClose(points, recovered, atol=1e-4)
def test_estimate_transform_spd(self):
    """Identical points transform to zero tangent vectors at their mean."""
    point = SPDMatrices(3).random_uniform()
    data = gs.stack([point, point])
    to_tangent = ToTangentSpace(geometry=SPDMetricAffine(3))
    to_tangent.fit(X=data)
    transformed = to_tangent.transform(data)
    self.assertAllClose(gs.zeros((2, 6)), transformed, atol=1e-5)
def test_estimate_spd(self):
    """Frechet mean of a repeated SPD point is the point itself."""
    point = SPDMatrices(3).random_point()
    data = gs.array([point, point])
    estimator = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    estimator.fit(X=data)
    self.assertAllClose(point, estimator.estimate_)
def test_estimate_transform_spd(self):
    """FrechetMean.transform maps points at the mean to zero vectors."""
    point = SPDMatrices(3).random_uniform()
    data = gs.array([point, point])
    estimator = FrechetMean(metric=SPDMetricAffine(3), point_type='matrix')
    estimator.fit(X=data)
    transformed = estimator.transform(data)
    self.assertAllClose(gs.zeros((2, 6)), transformed)
def fit_test_data(self):
    """Generate smoke data for GeometricMedian.fit on SPD(2)."""
    smoke_data = [
        dict(
            estimator=GeometricMedian(SPDMetricAffine(n=2)),
            # Two copies of the identity: the median is the identity.
            X=gs.array([[[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [0.0, 1.0]]]),
            expected=gs.array([[1.0, 0.0], [0.0, 1.0]]),
        )
    ]
    return self.generate_tests(smoke_data)
def setup_method(self):
    """Set up the test."""
    self.n = 3
    self.samples = 5
    # Dimension of the vectorized symmetric part: n(n+1)/2.
    self.spd_cov_n = self.n * (self.n + 1) // 2
    self.spd = SPDMatrices(self.n)
    self.log_euclidean = SPDMetricLogEuclidean(self.n)
    self.affine_invariant = SPDMetricAffine(self.n)
    self.euclidean = Euclidean(self.n)
def setUp(self):
    """Set up fixtures: SO(3) samples and SPD space with affine metric."""
    self.n_samples = 10
    self.n_components = 2
    self.so3 = SpecialOrthogonal(n=3)
    self.metric = self.so3.bi_invariant_metric
    self.X = self.so3.random_uniform(n_samples=self.n_samples)
    self.spd = SPDMatrices(3)
    self.spd_metric = SPDMetricAffine(3)
def setUp(self):
    """Set up the test: fix the seed and build SPD metrics."""
    gs.random.seed(1234)
    warnings.simplefilter('ignore', category=ImportWarning)
    self.n = 3
    self.n_samples = 4
    self.space = SPDMatrices(n=self.n)
    self.metric_affine = SPDMetricAffine(n=self.n)
    self.metric_procrustes = SPDMetricProcrustes(n=self.n)
    self.metric_euclidean = SPDMetricEuclidean(n=self.n)
    self.metric_logeuclidean = SPDMetricLogEuclidean(n=self.n)
def setUp_alt(self, n=3, n_samples=4):
    """Set up the test, flexible parameters."""
    gs.random.seed(1234)
    warnings.simplefilter('ignore', category=ImportWarning)
    self.n = n
    self.n_samples = n_samples
    self.space = SPDMatrices(n=n)
    self.metric_affine = SPDMetricAffine(n=n)
    self.metric_procrustes = SPDMetricProcrustes(n=n)
    self.metric_euclidean = SPDMetricEuclidean(n=n)
    self.metric_logeuclidean = SPDMetricLogEuclidean(n=n)
def setUp(self):
    """Set up the test: fix the seed and build SPD metrics."""
    gs.random.seed(1234)
    warnings.simplefilter('ignore', category=ImportWarning)
    self.n = 3
    self.n_samples = 4
    self.space = SPDMatrices(n=self.n)
    self.metric_affine = SPDMetricAffine(n=self.n)
    self.metric_bureswasserstein = SPDMetricBuresWasserstein(n=self.n)
    self.metric_euclidean = SPDMetricEuclidean(n=self.n)
    self.metric_logeuclidean = SPDMetricLogEuclidean(n=self.n)
def main():
    """Execute illustration of MDM supervised classifier.

    Generates a toy dataset of 2D SPD matrices, fits a Riemannian
    minimum-distance-to-mean classifier, predicts labels for random test
    samples and plots the data, class barycenters and assignments.

    Fixes: user-facing title typo "assignement" -> "assignment".
    """
    n_samples = 100
    n_features = 2
    n_classes = 3

    # Generate a toy dataset of 2D SPD matrices.
    dataset_generator = geomstats.datasets.sample_sdp_2d.DatasetSPD2D(
        n_samples, n_features, n_classes)
    data, labels = dataset_generator.generate_sample_dataset()

    # Plot the dataset as ellipses, one color per class.
    ellipsis = visualization.Ellipsis2D()
    for i in range(n_samples):
        x = data[i]
        y = geomstats.datasets.sample_sdp_2d.get_label_at_index(i, labels)
        ellipsis.draw(x, color=ellipsis.colors[y], alpha=0.1)

    # Define and fit the MDM classifier to the data.
    metric = SPDMetricAffine(n=n_features)
    mdm_estimator = RiemannianMinimumDistanceToMeanClassifier(
        metric, n_classes, point_type='matrix')
    mdm_estimator.fit(data, labels)

    # Plot the Frechet means computed in the MDM.
    for i in range(n_classes):
        ellipsis.draw(
            mdm_estimator.mean_estimates_[i],
            color=ellipsis.colors_alt[i],
            linewidth=5,
            label='Barycenter of class ' + str(i))

    # Generate random test samples, and predict with the MDM classifier.
    data_test = SPDMatrices(n=n_features).random_uniform(n_samples=3)
    predictions = mdm_estimator.predict(data_test)

    for i in range(data_test.shape[0]):
        # NOTE(review): assumes predictions[i] is a one-hot row; the index
        # of the single 1 entry is the predicted class -- confirm with the
        # classifier's predict contract.
        c = list(predictions[i] == 1).index(True)
        x_from, y_from = ellipsis.draw(
            data_test[i], color=ellipsis.colors[c], linewidth=5)
        _, _, x_to, y_to = ellipsis.compute_coordinates(
            mdm_estimator.mean_estimates_[c])
        # Draw an arrow from the test sample to its assigned barycenter.
        arrow = visualization.DataArrow(ellipsis.fig)
        arrow.draw(x_from, y_from, x_to, y_to)

    ellipsis.fig.axes[0].set_title(
        'Example plot of the MDM classifier in dimension 2\n'
        '3-class fit and 3 test sample prediction\n'
        '(black arrows denote assignment)')
    ellipsis.plot()
def fit_sanity_test_data(self):
    """Generate sanity-check data: GeometricMedian on SPD(4) and the sphere."""
    n = 4
    spd_space = SPDMatrices(n)
    sphere = Hypersphere(2)
    smoke_data = [
        dict(estimator=GeometricMedian(SPDMetricAffine(n)), space=spd_space),
        dict(estimator=GeometricMedian(sphere.metric), space=sphere),
    ]
    return self.generate_tests(smoke_data)
def test_batched(self):
    """Batched Frechet mean agrees with two separate single-batch means."""
    metric = SPDMetricAffine(3)
    points = SPDMatrices(3).random_point(4)
    # Shape the 4 points into 2 batches of 2 along axis 1.
    data = gs.stack([points[:2], points[2:]], axis=1)

    batch_estimator = FrechetMean(metric, method="batch", verbose=True)
    batch_estimator.fit(data)
    result = batch_estimator.estimate_

    single_estimator = FrechetMean(metric)
    single_estimator.fit(data[:, 0])
    first_mean = single_estimator.estimate_
    single_estimator.fit(data[:, 1])
    second_mean = single_estimator.estimate_

    self.assertAllClose(gs.stack([first_mean, second_mean]), result)
def test_predict(self):
    """Test the predict method."""
    n_clusters = 2
    mean_a = gs.array([[EULER, 0], [0, 1]])
    mean_b = gs.array([[EULER**4, 0], [0, 1]])
    classifier = RiemannianMinimumDistanceToMeanClassifier(
        SPDMetricAffine(n=2), n_clusters, point_type='matrix')
    classifier.mean_estimates_ = gs.concatenate(
        [mean_a[None, ...], mean_b[None, ...]])
    # A sample with top-left entry EULER**3 should fall in class b.
    X = gs.array([[EULER**3, 0], [0, 1]])[None, ...]
    y_result = classifier.predict(X)
    self.assertAllClose(y_result, gs.array([[0, 1]]))
def test_fit(self):
    """Test the fit method."""
    n_clusters = 2
    classifier = RiemannianMinimumDistanceToMeanClassifier(
        SPDMetricAffine(n=2), n_clusters, point_type='matrix')

    points_a = gs.array([[[EULER**2, 0], [0, 1]], [[1, 0], [0, 1]]])
    labels_a = gs.array([[1, 0], [1, 0]])
    expected_mean_a = gs.array([[EULER, 0], [0, 1]])

    points_b = gs.array([[[EULER**8, 0], [0, 1]], [[1, 0], [0, 1]]])
    labels_b = gs.array([[0, 1], [0, 1]])
    expected_mean_b = gs.array([[EULER**4, 0], [0, 1]])

    classifier.fit(
        gs.concatenate([points_a, points_b]),
        gs.concatenate([labels_a, labels_b]))

    self.assertAllClose(classifier.mean_estimates_[0], expected_mean_a)
    self.assertAllClose(classifier.mean_estimates_[1], expected_mean_b)
def test_log(self, n, power_affine, point, base_point, expected):
    """Check SPDMetricAffine.log against a precomputed value."""
    metric = SPDMetricAffine(n, power_affine)
    result = metric.log(gs.array(point), gs.array(base_point))
    self.assertAllClose(result, gs.array(expected))
def test_exp(self, n, power_affine, tangent_vec, base_point, expected):
    """Check SPDMetricAffine.exp against a precomputed value."""
    metric = SPDMetricAffine(n, power_affine)
    result = metric.exp(gs.array(tangent_vec), gs.array(base_point))
    self.assertAllClose(result, gs.array(expected))