    def test_left_exp_coincides(self):
        vector_group = SpecialEuclidean(n=2, point_type='vector')
        theta = gs.pi / 3
        initial_vec = gs.array([theta, 2., 2.])
        initial_matrix_vec = self.group.lie_algebra.matrix_representation(
            initial_vec)
        vector_exp = vector_group.left_canonical_metric.exp(initial_vec)
        result = self.group.left_canonical_metric.exp(initial_matrix_vec)
        expected = vector_group.matrix_from_vector(vector_exp)
        self.assertAllClose(result, expected)
    def test_left_metric_wrong_group(self):
        group = self.group.rotations
        self.assertRaises(
            ValueError,
            lambda: SpecialEuclideanMatrixCannonicalLeftMetric(group))

        group = SpecialEuclidean(3, point_type='vector')
        self.assertRaises(
            ValueError,
            lambda: SpecialEuclideanMatrixCannonicalLeftMetric(group))
Example #3
    def setup_method(self):
        gs.random.seed(1234)
        self.n_samples = 20

        # Set up for hypersphere
        self.dim_sphere = 4
        self.shape_sphere = (self.dim_sphere + 1, )
        self.sphere = Hypersphere(dim=self.dim_sphere)
        X = gs.random.rand(self.n_samples)
        self.X_sphere = X - gs.mean(X)
        self.intercept_sphere_true = self.sphere.random_point()
        self.coef_sphere_true = self.sphere.projection(
            gs.random.rand(self.dim_sphere + 1))

        self.y_sphere = self.sphere.metric.exp(
            self.X_sphere[:, None] * self.coef_sphere_true,
            base_point=self.intercept_sphere_true,
        )

        self.param_sphere_true = gs.vstack(
            [self.intercept_sphere_true, self.coef_sphere_true])
        self.param_sphere_guess = gs.vstack([
            self.y_sphere[0],
            self.sphere.to_tangent(gs.random.normal(size=self.shape_sphere),
                                   self.y_sphere[0]),
        ])

        # Set up for special euclidean
        self.se2 = SpecialEuclidean(n=2)
        self.metric_se2 = self.se2.left_canonical_metric
        self.metric_se2.default_point_type = "matrix"

        self.shape_se2 = (3, 3)
        X = gs.random.rand(self.n_samples)
        self.X_se2 = X - gs.mean(X)

        self.intercept_se2_true = self.se2.random_point()
        self.coef_se2_true = self.se2.to_tangent(
            5.0 * gs.random.rand(*self.shape_se2), self.intercept_se2_true)

        self.y_se2 = self.metric_se2.exp(
            self.X_se2[:, None, None] * self.coef_se2_true[None],
            self.intercept_se2_true,
        )

        self.param_se2_true = gs.vstack([
            gs.flatten(self.intercept_se2_true),
            gs.flatten(self.coef_se2_true),
        ])
        self.param_se2_guess = gs.vstack([
            gs.flatten(self.y_se2[0]),
            gs.flatten(
                self.se2.to_tangent(gs.random.normal(size=self.shape_se2),
                                    self.y_se2[0])),
        ])
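
The sphere block above generates synthetic geodesic-regression data of the form y_i = Exp_intercept(X_i * coef): centered scalar inputs, a base point as intercept and a tangent vector as slope. A small self-contained sketch of that construction follows (it uses to_tangent for the coefficient, a slight variation on the projection call above):

import geomstats.backend as gs
from geomstats.geometry.hypersphere import Hypersphere

gs.random.seed(1234)
n_samples, dim = 20, 4
sphere = Hypersphere(dim=dim)

X = gs.random.rand(n_samples)
X = X - gs.mean(X)  # centered scalar inputs

intercept = sphere.random_point()  # base point on the sphere
coef = sphere.to_tangent(gs.random.rand(dim + 1), intercept)  # tangent "slope"

# One point on the sphere per sample: y_i = Exp_intercept(X_i * coef).
y = sphere.metric.exp(X[:, None] * coef, base_point=intercept)
print(y.shape)  # (20, 5)
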
Example #4
    def inner_product_mat_at_identity_shape_test_data(self):
        group = SpecialEuclidean(n=3, point_type="vector")
        sym_mat_at_identity = gs.eye(group.dim)
        smoke_data = [
            dict(
                group=group,
                metric_mat_at_identity=sym_mat_at_identity,
                left_or_right="left",
            )
        ]
        return self.generate_tests(smoke_data)
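
The smoke data above feeds a shape test for the metric matrix at the identity. A hedged standalone sketch of what that test exercises; the metric_mat_at_identity attribute name mirrors the constructor keyword and is an assumption:

import geomstats.backend as gs
from geomstats.geometry.invariant_metric import InvariantMetric
from geomstats.geometry.special_euclidean import SpecialEuclidean

group = SpecialEuclidean(n=3, point_type="vector")
metric = InvariantMetric(
    group=group,
    metric_mat_at_identity=gs.eye(group.dim),
    left_or_right="left",
)
# Assumed attribute, matching the keyword above; expected shape (6, 6).
print(metric.metric_mat_at_identity.shape)
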
Example #5
    def test_fit_matrix_se(self):
        se_mat = SpecialEuclidean(n=3, default_point_type='matrix')
        X = se_mat.random_uniform(self.n_samples)
        estimator = ExponentialBarycenter(se_mat)
        estimator.fit(X)
        mean = estimator.estimate_
        tpca = TangentPCA(metric=se_mat, point_type='matrix')
        tangent_projected_data = tpca.fit_transform(X, base_point=mean)
        result = tpca.inverse_transform(tangent_projected_data)
        expected = X
        self.assertAllClose(result, expected)

    def inverse_shape_test_data(self):
        n_list = random.sample(range(2, 50), 10)
        n_samples = 10
        random_data = [
            dict(
                n=n,
                points=SpecialEuclidean(n).random_point(n_samples),
                expected=(n_samples, n + 1, n + 1),
            ) for n in n_list
        ]
        return self.generate_tests([], random_data)
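
inverse_shape_test_data relies on the matrix group's vectorized inverse: inverting a batch of SE(n) matrices should preserve the batch shape. A quick standalone sketch of that check, assuming the matrix SE(n) API used above:

from geomstats.geometry.special_euclidean import SpecialEuclidean

n, n_samples = 3, 10
group = SpecialEuclidean(n)
points = group.random_point(n_samples)  # shape (10, 4, 4)
inverses = group.inverse(points)
print(inverses.shape == (n_samples, n + 1, n + 1))
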
Example #7
    def test_value_and_grad_dist(self):
        space = SpecialEuclidean(3)
        metric = space.metric
        point = space.random_point()
        id = space.identity
        result_loss, result_grad = gs.autodiff.value_and_grad(
            lambda v: metric.squared_dist(v, id)
        )(point)

        expected_loss = metric.squared_dist(point, id)
        expected_grad = -2 * metric.log(id, point)

        self.assertAllClose(result_loss, expected_loss)
        self.assertAllClose(result_grad, expected_grad)
    def exp_after_log_right_with_angles_close_to_pi_test_data(self):
        smoke_data = []
        for metric in list(
                self.metrics.values()) + [SpecialEuclidean(3, "vector")]:
            for base_point in self.elements.values():
                for element_type in self.angles_close_to_pi:
                    point = self.elements[element_type]
                    smoke_data.append(
                        dict(
                            metric=metric,
                            point=point,
                            base_point=base_point,
                        ))
        return self.generate_tests(smoke_data)

    def test_exp_after_log(self, metric, point, base_point):
        """
        Test that the Riemannian right exponential and the
        Riemannian right logarithm are inverse of each other.
        Expect their composition to give the identity function.
        """
        group = SpecialEuclidean(3, "vector")
        result = metric.exp(metric.log(point, base_point), base_point)
        expected = group.regularize(point)
        expected = gs.cast(expected, gs.float64)
        norm = gs.linalg.norm(expected)
        atol = ATOL
        if norm != 0:
            atol = ATOL * norm
        self.assertAllClose(result, expected, atol=atol)
    def test_exp_after_log_right_with_angles_close_to_pi(
            self, metric, point, base_point):
        group = SpecialEuclidean(3, "vector")
        result = metric.exp(metric.log(point, base_point), base_point)
        expected = group.regularize(point)

        inv_expected = gs.concatenate([-expected[:3], expected[3:6]])

        norm = gs.linalg.norm(expected)
        atol = ATOL
        if norm != 0:
            atol = ATOL * norm

        self.assertTrue(
            gs.allclose(result, expected, atol=atol)
            or gs.allclose(result, inv_expected, atol=atol))
    def setUp(self):
        self.n_samples = 10
        self.SO3_GROUP = SpecialOrthogonal(n=3, point_type='vector')
        self.SE3_GROUP = SpecialEuclidean(n=3, point_type='vector')
        self.S1 = Hypersphere(dim=1)
        self.S2 = Hypersphere(dim=2)
        self.H2 = Hyperbolic(dim=2)
        self.H2_half_plane = PoincareHalfSpace(dim=2)
        self.M32 = Matrices(m=3, n=2)
        self.S32 = PreShapeSpace(k_landmarks=3, m_ambient=2)
        self.KS = visualization.KendallSphere()
        self.M33 = Matrices(m=3, n=3)
        self.S33 = PreShapeSpace(k_landmarks=3, m_ambient=3)
        self.KD = visualization.KendallDisk()

        plt.figure()
    def test_log_after_exp(self, metric, tangent_vec, base_point):
        """
        Test that the Riemannian left exponential and the
        Riemannian left logarithm are inverse of each other.
        Expect their composition to give the identity function.
        """
        group = SpecialEuclidean(3, "vector")
        result = metric.log(metric.exp(tangent_vec, base_point), base_point)

        expected = group.regularize_tangent_vec(
            tangent_vec=tangent_vec, base_point=base_point, metric=metric
        )

        norm = gs.linalg.norm(expected)
        atol = ATOL
        if norm != 0:
            atol = ATOL * norm
        self.assertAllClose(result, expected, atol=atol)
Example #13
    def test_custom_gradient_in_action(self):
        space = SpecialEuclidean(n=2)
        const_metric = space.left_canonical_metric
        const_point_b = space.random_point()

        def func(x):
            return const_metric.squared_dist(x, const_point_b)

        arg_point_a = space.random_point()
        func_with_grad = gs.autodiff.value_and_grad(func)
        result_value, result_grad = func_with_grad(arg_point_a)
        expected_value = const_metric.squared_dist(arg_point_a, const_point_b)
        expected_grad = -2 * const_metric.log(const_point_b, arg_point_a)

        self.assertAllClose(result_value, expected_value)
        self.assertAllClose(result_grad, expected_grad)

        loss, grad = func_with_grad(const_point_b)
        self.assertAllClose(loss, 0.0)
        self.assertAllClose(grad, gs.zeros_like(grad))
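
Note that gs.autodiff.value_and_grad, used above, only returns nontrivial gradients when geomstats runs on an automatic-differentiation backend; the backend is selected from the GEOMSTATS_BACKEND environment variable before the first geomstats import. A minimal sketch, where "autograd" is just one valid choice alongside "pytorch" and "tensorflow":

import os

# Must be set before geomstats is imported for the first time.
os.environ["GEOMSTATS_BACKEND"] = "autograd"

import geomstats.backend as gs  # noqa: E402  imported after setting the variable
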
Example #14
    def test_orthonormal_basis_se3(self):
        group = SpecialEuclidean(3)
        lie_algebra = group.lie_algebra
        metric = InvariantMetric(group=group)
        basis = metric.normal_basis(lie_algebra.basis)
        for i, x in enumerate(basis):
            for y in basis[i:]:
                result = metric.inner_product_at_identity(x, y)
                expected = 0.0 if gs.any(x != y) else 1.0
                self.assertAllClose(result, expected)

        metric_mat = from_vector_to_diagonal_matrix(
            gs.cast(gs.arange(1, group.dim + 1), gs.float32)
        )
        metric = InvariantMetric(group=group, metric_mat_at_identity=metric_mat)
        basis = metric.normal_basis(lie_algebra.basis)
        for i, x in enumerate(basis):
            for y in basis[i:]:
                result = metric.inner_product_at_identity(x, y)
                expected = 0.0 if gs.any(x != y) else 1.0
                self.assertAllClose(result, expected)
Example #15
    def setUp(self):
        warnings.simplefilter('ignore', category=ImportWarning)
        gs.random.seed(1234)

        group = SpecialEuclidean(n=2, point_type='vector')

        point_1 = gs.array([0.1, 0.2, 0.3])
        point_2 = gs.array([0.5, 5., 60.])

        translation_large = gs.array([0., 5., 6.])
        translation_small = gs.array([0., 0.6, 0.7])

        elements_all = {
            'translation_large': translation_large,
            'translation_small': translation_small,
            'point_1': point_1,
            'point_2': point_2
        }
        elements = elements_all
        if geomstats.tests.tf_backend():
            # Tf is extremely slow
            elements = {'point_1': point_1, 'point_2': point_2}

        elements_matrices_all = {
            key: group.matrix_from_vector(elements_all[key])
            for key in elements_all
        }
        elements_matrices = elements_matrices_all

        self.group = group
        self.elements_all = elements_all
        self.elements = elements
        self.elements_matrices_all = elements_matrices_all
        self.elements_matrices = elements_matrices

        self.n_samples = 4
    def test_log_after_exp_with_angles_close_to_pi(self, metric, tangent_vec,
                                                   base_point):
        """
        Test that the Riemannian left exponential and the
        Riemannian left logarithm are inverse of each other.
        Expect their composition to give the identity function.
        """
        group = SpecialEuclidean(3, "vector")
        result = metric.log(metric.exp(tangent_vec, base_point), base_point)

        expected = group.regularize_tangent_vec(tangent_vec=tangent_vec,
                                                base_point=base_point,
                                                metric=metric)

        inv_expected = gs.concatenate([-expected[:3], expected[3:6]])

        norm = gs.linalg.norm(expected)
        atol = ATOL
        if norm != 0:
            atol = ATOL * norm

        self.assertTrue(
            gs.allclose(result, expected, atol=atol)
            or gs.allclose(result, inv_expected, atol=atol))
Example #17
    def setUp(self):
        warnings.simplefilter("ignore", category=ImportWarning)
        gs.random.seed(1234)

        group = SpecialEuclidean(n=2, point_type="vector")

        point_1 = gs.array([0.1, 0.2, 0.3])
        point_2 = gs.array([0.5, 5.0, 60.0])

        translation_large = gs.array([0.0, 5.0, 6.0])
        translation_small = gs.array([0.0, 0.6, 0.7])

        elements_all = {
            "translation_large": translation_large,
            "translation_small": translation_small,
            "point_1": point_1,
            "point_2": point_2,
        }
        elements = elements_all
        if geomstats.tests.tf_backend():
            # Tf is extremely slow
            elements = {"point_1": point_1, "point_2": point_2}

        elements_matrices_all = {
            key: group.matrix_from_vector(elements_all[key])
            for key in elements_all
        }
        elements_matrices = elements_matrices_all

        self.group = group
        self.elements_all = elements_all
        self.elements = elements
        self.elements_matrices_all = elements_matrices_all
        self.elements_matrices = elements_matrices

        self.n_samples = 4
Example #18
import logging

import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # NOQA

import geomstats.backend as gs
from geomstats.geometry.hyperboloid import Hyperboloid
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.matrices import Matrices
from geomstats.geometry.poincare_half_space import PoincareHalfSpace
from geomstats.geometry.pre_shape import KendallShapeMetric, PreShapeSpace
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal

SE3_GROUP = SpecialEuclidean(n=3, point_type="vector")
SE2_GROUP = SpecialEuclidean(n=2, point_type="matrix")
SE2_VECT = SpecialEuclidean(n=2, point_type="vector")
SO3_GROUP = SpecialOrthogonal(n=3, point_type="vector")
S1 = Hypersphere(dim=1)
S2 = Hypersphere(dim=2)
H2 = Hyperboloid(dim=2)
POINCARE_HALF_PLANE = PoincareHalfSpace(dim=2)
M32 = Matrices(m=3, n=2)
S32 = PreShapeSpace(k_landmarks=3, m_ambient=2)
METRIC_S32 = KendallShapeMetric(k_landmarks=3, m_ambient=2)
M33 = Matrices(m=3, n=3)
S33 = PreShapeSpace(k_landmarks=3, m_ambient=3)
METRIC_S33 = KendallShapeMetric(k_landmarks=3, m_ambient=3)

AX_SCALE = 1.2
Example #19
"""Visualization for Geometric Statistics."""

import matplotlib
import matplotlib.pyplot as plt

import geomstats.backend as gs
from geomstats.geometry.hyperboloid import Hyperboloid
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.poincare_half_space import PoincareHalfSpace
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
from mpl_toolkits.mplot3d import Axes3D  # NOQA

SE3_GROUP = SpecialEuclidean(n=3, point_type='vector')
SO3_GROUP = SpecialOrthogonal(n=3, point_type='vector')
S1 = Hypersphere(dim=1)
S2 = Hypersphere(dim=2)
H2 = Hyperboloid(dim=2)
POINCARE_HALF_PLANE = PoincareHalfSpace(dim=2)

AX_SCALE = 1.2

IMPLEMENTED = ['SO3_GROUP', 'SE3_GROUP', 'S1', 'S2',
               'H2_poincare_disk', 'H2_poincare_half_plane', 'H2_klein_disk',
               'poincare_polydisk']


def tutorial_matplotlib():
    fontsize = 12
    matplotlib.rc('font', size=fontsize)
    matplotlib.rc('text')
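
The IMPLEMENTED list above names the spaces this visualization module can draw. A short hedged usage sketch of the top-level plot helper, assuming geomstats.visualization.plot accepts a space keyword naming one of those entries:

import matplotlib.pyplot as plt

import geomstats.visualization as visualization
from geomstats.geometry.special_euclidean import SpecialEuclidean

# Plot a few random SE(3) poses in the vector representation.
points = SpecialEuclidean(n=3, point_type='vector').random_point(n_samples=4)
visualization.plot(points, space='SE3_GROUP')
plt.show()
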
Example #20
"""
Predict on SE3: losses.
"""

import geomstats.backend as gs
import geomstats.geometry.lie_group as lie_group
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal


SE3 = SpecialEuclidean(n=3)
SO3 = SpecialOrthogonal(n=3)


def loss(y_pred, y_true,
         metric=SE3.left_canonical_metric,
         representation='vector'):
    """
    Loss function given by a riemannian metric on a Lie group,
    by default the left-invariant canonical metric.
    """
    if gs.ndim(y_pred) == 1:
        y_pred = gs.expand_dims(y_pred, axis=0)
    if gs.ndim(y_true) == 1:
        y_true = gs.expand_dims(y_true, axis=0)

    if representation == 'quaternion':
        y_pred_rot_vec = SO3.rotation_vector_from_quaternion(y_pred[:, :4])
        y_pred = gs.hstack([y_pred_rot_vec, y_pred[:, 4:]])
        y_true_rot_vec = SO3.rotation_vector_from_quaternion(y_true[:, :4])
        y_true = gs.hstack([y_true_rot_vec, y_true[:, 4:]])
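
The snippet above is cut off before the actual loss computation; what remains is only the representation handling. A hedged standalone sketch of the quantity the docstring describes, the squared Riemannian distance between predicted and ground-truth poses in the vector representation (this is not the example's own missing ending):

from geomstats.geometry.special_euclidean import SpecialEuclidean

SE3 = SpecialEuclidean(n=3, point_type='vector')
metric = SE3.left_canonical_metric

y_pred = SE3.random_point()
y_true = SE3.random_point()

# Squared geodesic distance between the two poses under the left-invariant metric.
print(metric.squared_dist(y_pred, y_true))
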
Example #21
    def setup_method(self):
        gs.random.seed(1234)
        self.n_samples = 20

        # Set up for euclidean
        self.dim_eucl = 3
        self.shape_eucl = (self.dim_eucl, )
        self.eucl = Euclidean(dim=self.dim_eucl)
        X = gs.random.rand(self.n_samples)
        self.X_eucl = X - gs.mean(X)
        self.intercept_eucl_true = self.eucl.random_point()
        self.coef_eucl_true = self.eucl.random_point()

        self.y_eucl = (self.intercept_eucl_true +
                       self.X_eucl[:, None] * self.coef_eucl_true)
        self.param_eucl_true = gs.vstack(
            [self.intercept_eucl_true, self.coef_eucl_true])
        self.param_eucl_guess = gs.vstack([
            self.y_eucl[0],
            self.y_eucl[0] + gs.random.normal(size=self.shape_eucl)
        ])

        # Set up for hypersphere
        self.dim_sphere = 4
        self.shape_sphere = (self.dim_sphere + 1, )
        self.sphere = Hypersphere(dim=self.dim_sphere)
        X = gs.random.rand(self.n_samples)
        self.X_sphere = X - gs.mean(X)
        self.intercept_sphere_true = self.sphere.random_point()
        self.coef_sphere_true = self.sphere.projection(
            gs.random.rand(self.dim_sphere + 1))

        self.y_sphere = self.sphere.metric.exp(
            self.X_sphere[:, None] * self.coef_sphere_true,
            base_point=self.intercept_sphere_true,
        )

        self.param_sphere_true = gs.vstack(
            [self.intercept_sphere_true, self.coef_sphere_true])
        self.param_sphere_guess = gs.vstack([
            self.y_sphere[0],
            self.sphere.to_tangent(gs.random.normal(size=self.shape_sphere),
                                   self.y_sphere[0]),
        ])

        # Set up for special euclidean
        self.se2 = SpecialEuclidean(n=2)
        self.metric_se2 = self.se2.left_canonical_metric
        self.metric_se2.default_point_type = "matrix"

        self.shape_se2 = (3, 3)
        X = gs.random.rand(self.n_samples)
        self.X_se2 = X - gs.mean(X)

        self.intercept_se2_true = self.se2.random_point()
        self.coef_se2_true = self.se2.to_tangent(
            5.0 * gs.random.rand(*self.shape_se2), self.intercept_se2_true)

        self.y_se2 = self.metric_se2.exp(
            self.X_se2[:, None, None] * self.coef_se2_true[None],
            self.intercept_se2_true,
        )

        self.param_se2_true = gs.vstack([
            gs.flatten(self.intercept_se2_true),
            gs.flatten(self.coef_se2_true),
        ])
        self.param_se2_guess = gs.vstack([
            gs.flatten(self.y_se2[0]),
            gs.flatten(
                self.se2.to_tangent(gs.random.normal(size=self.shape_se2),
                                    self.y_se2[0])),
        ])

        # Set up for discrete curves
        n_sampling_points = 8
        self.curves_2d = DiscreteCurves(R2)
        self.metric_curves_2d = self.curves_2d.srv_metric
        self.metric_curves_2d.default_point_type = "matrix"

        self.shape_curves_2d = (n_sampling_points, 2)
        X = gs.random.rand(self.n_samples)
        self.X_curves_2d = X - gs.mean(X)

        self.intercept_curves_2d_true = self.curves_2d.random_point(
            n_sampling_points=n_sampling_points)
        self.coef_curves_2d_true = self.curves_2d.to_tangent(
            5.0 * gs.random.rand(*self.shape_curves_2d),
            self.intercept_curves_2d_true)

        # Added because of GitHub issue #1575
        intercept_curves_2d_true_repeated = gs.tile(
            gs.expand_dims(self.intercept_curves_2d_true, axis=0),
            (self.n_samples, 1, 1),
        )
        self.y_curves_2d = self.metric_curves_2d.exp(
            self.X_curves_2d[:, None, None] * self.coef_curves_2d_true[None],
            intercept_curves_2d_true_repeated,
        )

        self.param_curves_2d_true = gs.vstack([
            gs.flatten(self.intercept_curves_2d_true),
            gs.flatten(self.coef_curves_2d_true),
        ])
        self.param_curves_2d_guess = gs.vstack([
            gs.flatten(self.y_curves_2d[0]),
            gs.flatten(
                self.curves_2d.to_tangent(
                    gs.random.normal(size=self.shape_curves_2d),
                    self.y_curves_2d[0])),
        ])
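
The discrete-curves block above uses a name R2 that is defined outside the snippet; in the geomstats test suite it denotes the Euclidean plane used as the curves' ambient space. A hedged sketch of the assumed definition (defining it directly is equivalent; the exact import location inside geomstats is an assumption):

from geomstats.geometry.discrete_curves import DiscreteCurves
from geomstats.geometry.euclidean import Euclidean

R2 = Euclidean(dim=2)  # assumed meaning of the R2 symbol above
curves_2d = DiscreteCurves(R2)
print(curves_2d.random_point(n_sampling_points=8).shape)  # (8, 2)
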
Example #22
import logging

import matplotlib
import matplotlib.pyplot as plt

import geomstats.backend as gs
from geomstats.geometry.hyperboloid import Hyperboloid
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.matrices import Matrices
from geomstats.geometry.poincare_half_space import PoincareHalfSpace
from geomstats.geometry.pre_shape import KendallShapeMetric, PreShapeSpace
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
from mpl_toolkits.mplot3d import Axes3D  # NOQA

SE3_GROUP = SpecialEuclidean(n=3, point_type='vector')
SE2_GROUP = SpecialEuclidean(n=2, point_type='matrix')
SE2_VECT = SpecialEuclidean(n=2, point_type='vector')
SO3_GROUP = SpecialOrthogonal(n=3, point_type='vector')
S1 = Hypersphere(dim=1)
S2 = Hypersphere(dim=2)
H2 = Hyperboloid(dim=2)
POINCARE_HALF_PLANE = PoincareHalfSpace(dim=2)
M32 = Matrices(m=3, n=2)
S32 = PreShapeSpace(k_landmarks=3, m_ambient=2)
METRIC_S32 = KendallShapeMetric(k_landmarks=3, m_ambient=2)
M33 = Matrices(m=3, n=3)
S33 = PreShapeSpace(k_landmarks=3, m_ambient=3)
METRIC_S33 = KendallShapeMetric(k_landmarks=3, m_ambient=3)

AX_SCALE = 1.2

Example #23
translation_small = gs.array([0.0, 0.6, 0.7])

elements_all = {
    "translation_large": translation_large,
    "translation_small": translation_small,
    "point_1": point_1,
    "point_2": point_2,
}
elements = elements_all
if tf_backend():
    # Tf is extremely slow
    elements = {"point_1": point_1, "point_2": point_2}

elements_matrices_all = {
    key:
    SpecialEuclidean(2,
                     point_type="vector").matrix_from_vector(elements_all[key])
    for key in elements_all
}
elements_matrices = elements_matrices_all


class SpecialEuclideanTestData(_LieGroupTestData):
    n_list = random.sample(range(2, 4), 2)
    space_args_list = [(n, ) for n in n_list] + [(2, "vector"), (3, "vector")]
    shape_list = [(n + 1, n + 1) for n in n_list] + [(3, )] + [(6, )]
    n_tangent_vecs_list = [2, 3] * 2
    n_points_list = [2, 3] * 2
    n_vecs_list = [2, 3] * 2

    def belongs_test_data(self):
        smoke_data = [
Example #24
"""Predict on SE3: losses."""

import logging

import geomstats.backend as gs
import geomstats.geometry.lie_group as lie_group
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal

SE3 = SpecialEuclidean(n=3, point_type="vector")
SO3 = SpecialOrthogonal(n=3, point_type="vector")


def loss(y_pred,
         y_true,
         metric=SE3.left_canonical_metric,
         representation="vector"):
    """Loss function given by a Riemannian metric on a Lie group.

    Parameters
    ----------
    y_pred : array-like
        Prediction on SE(3).
    y_true : array-like
        Ground-truth on SE(3).
    metric : RiemannianMetric
        Metric used to compute the loss and gradient.
    representation : str, {'vector', 'matrix'}
        Representation chosen for points in SE(3).

    Returns
    def setUp(self):
        logger = logging.getLogger()
        logger.disabled = True
        warnings.simplefilter('ignore', category=ImportWarning)

        gs.random.seed(1234)

        n = 3
        group = SpecialEuclidean(n=n, point_type='vector')
        matrix_so3 = SpecialOrthogonal(n=n)
        vector_so3 = SpecialOrthogonal(n=n, point_type='vector')

        # Diagonal left and right invariant metrics
        diag_mat_at_identity = gs.eye(group.dim)

        left_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=None,
            left_or_right='left')
        right_diag_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=diag_mat_at_identity,
            left_or_right='right')

        # General left and right invariant metrics
        # FIXME (nina): This is valid only for bi-invariant metrics
        sym_mat_at_identity = gs.eye(group.dim)

        left_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='left')

        right_metric = InvariantMetric(
            group=group,
            inner_product_mat_at_identity=sym_mat_at_identity,
            left_or_right='right')

        matrix_left_metric = InvariantMetric(group=matrix_so3)

        matrix_right_metric = InvariantMetric(
            group=matrix_so3,
            left_or_right='right')

        # General case for the point
        point_1 = gs.array([[-0.2, 0.9, 0.5, 5., 5., 5.]])
        point_2 = gs.array([[0., 2., -0.1, 30., 400., 2.]])
        point_1_matrix = vector_so3.matrix_from_rotation_vector(
            point_1[:, :3])
        point_2_matrix = vector_so3.matrix_from_rotation_vector(
            point_2[:, :3])
        # Edge case for the point, angle < epsilon,
        point_small = gs.array([[-1e-7, 0., -7 * 1e-8, 6., 5., 9.]])

        self.group = group
        self.matrix_so3 = matrix_so3

        self.left_diag_metric = left_diag_metric
        self.right_diag_metric = right_diag_metric
        self.left_metric = left_metric
        self.right_metric = right_metric
        self.matrix_left_metric = matrix_left_metric
        self.matrix_right_metric = matrix_right_metric
        self.point_1 = point_1
        self.point_2 = point_2
        self.point_1_matrix = point_1_matrix
        self.point_2_matrix = point_2_matrix
        self.point_small = point_small
class SpecialEuclideanMatrixCanonicalLeftMetricTestData(
        _InvariantMetricTestData):
    n_list = random.sample(range(2, 5), 2)
    metric_args_list = [(SpecialEuclidean(n), ) for n in n_list]
    shape_list = [(n + 1, n + 1) for n in n_list]
    space_list = [SpecialEuclidean(n) for n in n_list]
    n_points_list = [2, 3]
    n_tangent_vecs_list = [2, 3]
    n_points_a_list = [2, 3]
    n_points_b_list = [1]
    alpha_list = [1] * 2
    n_rungs_list = [1] * 2
    scheme_list = ["pole"] * 2

    def left_metric_wrong_group_test_data(self):
        smoke_data = [
            dict(group=SpecialEuclidean(2), expected=does_not_raise()),
            dict(group=SpecialEuclidean(3), expected=does_not_raise()),
            dict(
                group=SpecialEuclidean(2, point_type="vector"),
                expected=pytest.raises(ValueError),
            ),
            dict(group=SpecialOrthogonal(3),
                 expected=pytest.raises(ValueError)),
        ]
        return self.generate_tests(smoke_data)

    def exp_shape_test_data(self):
        return self._exp_shape_test_data(self.metric_args_list,
                                         self.space_list, self.shape_list)

    def log_shape_test_data(self):
        return self._log_shape_test_data(
            self.metric_args_list,
            self.space_list,
        )

    def squared_dist_is_symmetric_test_data(self):
        return self._squared_dist_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
            atol=gs.atol * 1000,
        )

    def exp_belongs_test_data(self):
        return self._exp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            belongs_atol=gs.atol * 1000,
        )

    def log_is_tangent_test_data(self):
        return self._log_is_tangent_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
        )

    def geodesic_ivp_belongs_test_data(self):
        return self._geodesic_ivp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            belongs_atol=gs.atol * 100,
        )

    def geodesic_bvp_belongs_test_data(self):
        return self._geodesic_bvp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            belongs_atol=gs.atol * 100,
        )

    def exp_after_log_test_data(self):
        return self._exp_after_log_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            rtol=gs.rtol * 100,
            atol=gs.atol * 100,
        )

    def log_after_exp_test_data(self):
        return self._log_after_exp_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            amplitude=10,
            rtol=gs.rtol * 100,
            atol=gs.atol * 100,
        )

    def exp_ladder_parallel_transport_test_data(self):
        return self._exp_ladder_parallel_transport_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            self.n_rungs_list,
            self.alpha_list,
            self.scheme_list,
        )

    def exp_geodesic_ivp_test_data(self):
        return self._exp_geodesic_ivp_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            self.n_points_list,
            rtol=gs.rtol * 100,
            atol=gs.atol * 100,
        )

    def parallel_transport_ivp_is_isometry_test_data(self):
        return self._parallel_transport_ivp_is_isometry_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
            atol=gs.atol * 1000,
        )

    def parallel_transport_bvp_is_isometry_test_data(self):
        return self._parallel_transport_bvp_is_isometry_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
            atol=gs.atol * 1000,
        )

    def dist_is_symmetric_test_data(self):
        return self._dist_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_is_positive_test_data(self):
        return self._dist_is_positive_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def squared_dist_is_positive_test_data(self):
        return self._squared_dist_is_positive_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_is_norm_of_log_test_data(self):
        return self._dist_is_norm_of_log_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_point_to_itself_is_zero_test_data(self):
        return self._dist_point_to_itself_is_zero_test_data(
            self.metric_args_list, self.space_list, self.n_points_list)

    def inner_product_is_symmetric_test_data(self):
        return self._inner_product_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
        )

    def triangle_inequality_of_dist_test_data(self):
        return self._triangle_inequality_of_dist_test_data(
            self.metric_args_list, self.space_list, self.n_points_list)

    def exp_at_identity_of_lie_algebra_belongs_test_data(self):
        return self._exp_at_identity_of_lie_algebra_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_tangent_vecs_list,
            belongs_atol=gs.atol * 1000,
        )

    def log_at_identity_belongs_to_lie_algebra_test_data(self):
        return self._log_at_identity_belongs_to_lie_algebra_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            belongs_atol=gs.atol * 1000,
        )

    def exp_after_log_at_identity_test_data(self):
        return self._exp_after_log_at_identity_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            atol=gs.atol * 100,
        )

    def log_after_exp_at_identity_test_data(self):
        return self._log_after_exp_at_identity_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            amplitude=10.0,
            atol=gs.atol * 100,
        )

def test_plot_points_se2():
    points = SpecialEuclidean(n=2, point_type='vector').random_point(4)
    visu = visualization.SpecialEuclidean2(points, point_type='vector')
    ax = visu.set_ax()
    visu.draw(ax)
class SpecialEuclideanMatrixCanonicalRightMetricTestData(
        _InvariantMetricTestData):
    n_list = [2]
    metric_args_list = [(SpecialEuclidean(n), gs.eye(SpecialEuclidean(n).dim),
                         "right") for n in n_list]
    shape_list = [(n + 1, n + 1) for n in n_list]
    space_list = [SpecialEuclidean(n) for n in n_list]
    n_points_list = random.sample(range(1, 3), 1)
    n_tangent_vecs_list = random.sample(range(1, 3), 1)
    n_points_a_list = random.sample(range(1, 3), 1)
    n_points_b_list = [1]
    alpha_list = [1] * 1
    n_rungs_list = [1] * 1
    scheme_list = ["pole"] * 1

    def exp_shape_test_data(self):
        return self._exp_shape_test_data(self.metric_args_list,
                                         self.space_list, self.shape_list)

    def log_shape_test_data(self):
        return self._log_shape_test_data(
            self.metric_args_list,
            self.space_list,
        )

    def squared_dist_is_symmetric_test_data(self):
        return self._squared_dist_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
            atol=gs.atol * 1000,
        )

    def exp_belongs_test_data(self):
        return self._exp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            belongs_atol=1e-3,
        )

    def log_is_tangent_test_data(self):
        return self._log_is_tangent_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
        )

    def geodesic_ivp_belongs_test_data(self):
        return self._geodesic_ivp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            belongs_atol=1e-3,
        )

    def geodesic_bvp_belongs_test_data(self):
        return self._geodesic_bvp_belongs_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            belongs_atol=1e-3,
        )

    def exp_after_log_test_data(self):
        return self._exp_after_log_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            rtol=gs.rtol * 100000,
            atol=gs.atol * 100000,
        )

    def log_after_exp_test_data(self):
        return self._log_after_exp_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            amplitude=100.0,
            rtol=gs.rtol * 10000,
            atol=gs.atol * 100000,
        )

    def exp_ladder_parallel_transport_test_data(self):
        return self._exp_ladder_parallel_transport_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            self.n_rungs_list,
            self.alpha_list,
            self.scheme_list,
        )

    def exp_geodesic_ivp_test_data(self):
        return self._exp_geodesic_ivp_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            self.n_points_list,
            rtol=gs.rtol * 100,
            atol=gs.atol * 100,
        )

    def parallel_transport_ivp_is_isometry_test_data(self):
        return self._parallel_transport_ivp_is_isometry_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
            atol=gs.atol * 1000,
        )

    def parallel_transport_bvp_is_isometry_test_data(self):
        return self._parallel_transport_bvp_is_isometry_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_points_list,
            is_tangent_atol=gs.atol * 1000,
            atol=gs.atol * 1000,
        )

    def dist_is_symmetric_test_data(self):
        return self._dist_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_is_positive_test_data(self):
        return self._dist_is_positive_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def squared_dist_is_positive_test_data(self):
        return self._squared_dist_is_positive_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_is_norm_of_log_test_data(self):
        return self._dist_is_norm_of_log_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_a_list,
            self.n_points_b_list,
        )

    def dist_point_to_itself_is_zero_test_data(self):
        return self._dist_point_to_itself_is_zero_test_data(
            self.metric_args_list, self.space_list, self.n_points_list)

    def inner_product_is_symmetric_test_data(self):
        return self._inner_product_is_symmetric_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
        )

    def triangle_inequality_of_dist_test_data(self):
        return self._triangle_inequality_of_dist_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            atol=gs.atol * 1000,
        )

    def exp_at_identity_of_lie_algebra_belongs_test_data(self):
        return self._exp_at_identity_of_lie_algebra_belongs_test_data(
            self.metric_args_list, self.space_list, self.n_tangent_vecs_list)

    def log_at_identity_belongs_to_lie_algebra_test_data(self):
        return self._log_at_identity_belongs_to_lie_algebra_test_data(
            self.metric_args_list, self.space_list, self.n_points_list)

    def exp_after_log_at_identity_test_data(self):
        return self._exp_after_log_at_identity_test_data(
            self.metric_args_list,
            self.space_list,
            self.n_points_list,
            atol=1e-3,
        )

    def log_after_exp_at_identity_test_data(self):
        return self._log_after_exp_at_identity_test_data(
            self.metric_args_list,
            self.space_list,
            self.shape_list,
            self.n_tangent_vecs_list,
            amplitude=100.0,
            atol=1e-1,
        )

    def right_exp_coincides_test_data(self):
        smoke_data = [
            dict(
                n=2,
                initial_vec=gs.array([gs.pi / 2, 1.0, 1.0]),
            )
        ]
        return self.generate_tests(smoke_data)
Example #29
"""Visualization for Geometric Statistics."""

import matplotlib.pyplot as plt

import geomstats.backend as gs
from geomstats.geometry.hyperbolic import Hyperbolic
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.geometry.special_orthogonal import SpecialOrthogonal
from mpl_toolkits.mplot3d import Axes3D  # NOQA

SE3_GROUP = SpecialEuclidean(n=3)
SO3_GROUP = SpecialOrthogonal(n=3)
S1 = Hypersphere(dimension=1)
S2 = Hypersphere(dimension=2)
H2 = Hyperbolic(dimension=2)

AX_SCALE = 1.2

IMPLEMENTED = [
    'SO3_GROUP', 'SE3_GROUP', 'S1', 'S2', 'H2_poincare_disk',
    'H2_poincare_half_plane', 'H2_klein_disk', 'poincare_polydisk'
]


class Arrow3D():
    """An arrow in 3d, i.e. a point and a vector."""
    def __init__(self, point, vector):
        self.point = point
        self.vector = vector
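
The Arrow3D snippet breaks off after the constructor. A hedged standalone sketch, not the library's exact code, of how such an arrow could be rendered with matplotlib's 3D quiver (the class name Arrow3DSketch is hypothetical):

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # NOQA


class Arrow3DSketch:
    """An arrow in 3d, i.e. a point and a vector, drawable on a 3D axis."""

    def __init__(self, point, vector):
        self.point = point
        self.vector = vector

    def draw(self, ax, **quiver_kwargs):
        """Draw the arrow with ax.quiver."""
        ax.quiver(self.point[0], self.point[1], self.point[2],
                  self.vector[0], self.vector[1], self.vector[2],
                  **quiver_kwargs)


# Usage sketch:
# ax = plt.figure().add_subplot(projection="3d")
# Arrow3DSketch([0., 0., 0.], [1., 0., 0.]).draw(ax, color="black")
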
class SpecialEuclidean3VectorsTestData(TestData):
    group = SpecialEuclidean(n=3, point_type="vector")
    angle_0 = gs.zeros(6)
    angle_close_0 = 1e-10 * gs.array([1.0, -1.0, 1.0, 0.0, 0.0, 0.0
                                      ]) + gs.array(
                                          [0.0, 0.0, 0.0, 1.0, 5.0, 2])
    angle_close_pi_low = (gs.pi - 1e-9) / gs.sqrt(2.0) * gs.array(
        [0.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, -100.0, 0.0, 2.0])
    angle_pi = gs.pi / gs.sqrt(3.0) * gs.array(
        [1.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, -10.2, 0.0, 2.6])
    angle_close_pi_high = (gs.pi + 1e-9) / gs.sqrt(3.0) * gs.array(
        [-1.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, -100.0, 0.0, 2.0])
    angle_in_pi_2pi = (gs.pi + 0.3) / gs.sqrt(5.0) * gs.array(
        [-2.0, 1.0, 0.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, -100.0, 0.0, 2.0])
    angle_close_2pi_low = (2 * gs.pi - 1e-9) / gs.sqrt(6.0) * gs.array(
        [2.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, 8.0, 555.0, -2.0])
    angle_2pi = 2.0 * gs.pi / gs.sqrt(3.0) * gs.array(
        [1.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, 1.0, 8.0, -10.0])
    angle_close_2pi_high = (2.0 * gs.pi + 1e-9) / gs.sqrt(2.0) * gs.array(
        [1.0, 0.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
            [0.0, 0.0, 0.0, 1.0, 8.0, -10.0])

    point_1 = gs.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
    point_2 = gs.array([0.5, 0.0, -0.3, 0.4, 5.0, 60.0])

    translation_large = gs.array([0.0, 0.0, 0.0, 0.4, 0.5, 0.6])
    translation_small = gs.array([0.0, 0.0, 0.0, 0.5, 0.6, 0.7])
    rot_with_parallel_trans = gs.array([gs.pi / 3.0, 0.0, 0.0, 1.0, 0.0, 0.0])

    elements_all = {
        "angle_0": angle_0,
        "angle_close_0": angle_close_0,
        "angle_close_pi_low": angle_close_pi_low,
        "angle_pi": angle_pi,
        "angle_close_pi_high": angle_close_pi_high,
        "angle_in_pi_2pi": angle_in_pi_2pi,
        "angle_close_2pi_low": angle_close_2pi_low,
        "angle_2pi": angle_2pi,
        "angle_close_2pi_high": angle_close_2pi_high,
        "translation_large": translation_large,
        "translation_small": translation_small,
        "point_1": point_1,
        "point_2": point_2,
        "rot_with_parallel_trans": rot_with_parallel_trans,
    }
    elements = elements_all
    if geomstats.tests.tf_backend():
        # Tf is extremely slow
        elements = {
            "point_1": point_1,
            "point_2": point_2,
            "angle_close_pi_low": angle_close_pi_low,
        }

    # Metrics - only diagonals
    diag_mat_at_identity = gs.eye(6) * gs.array([2.0, 2.0, 2.0, 3.0, 3.0, 3.0])

    left_diag_metric = InvariantMetric(
        group=group,
        metric_mat_at_identity=diag_mat_at_identity,
        left_or_right="left",
    )
    right_diag_metric = InvariantMetric(
        group=group,
        metric_mat_at_identity=diag_mat_at_identity,
        left_or_right="right",
    )

    metrics_all = {
        "left_canonical": group.left_canonical_metric,
        "right_canonical": group.right_canonical_metric,
        "left_diag": left_diag_metric,
        "right_diag": right_diag_metric,
    }
    # FIXME:
    # 'left': left_metric,
    # 'right': right_metric}
    metrics = metrics_all
    if geomstats.tests.tf_backend():
        metrics = {"left_diag": left_diag_metric}

    angles_close_to_pi_all = [
        "angle_close_pi_low",
        "angle_pi",
        "angle_close_pi_high",
    ]
    angles_close_to_pi = angles_close_to_pi_all
    if geomstats.tests.tf_backend():
        angles_close_to_pi = ["angle_close_pi_low"]

    def exp_after_log_right_with_angles_close_to_pi_test_data(self):
        smoke_data = []
        for metric in list(
                self.metrics.values()) + [SpecialEuclidean(3, "vector")]:
            for base_point in self.elements.values():
                for element_type in self.angles_close_to_pi:
                    point = self.elements[element_type]
                    smoke_data.append(
                        dict(
                            metric=metric,
                            point=point,
                            base_point=base_point,
                        ))
        return self.generate_tests(smoke_data)

    def exp_after_log_test_data(self):
        smoke_data = []
        for metric in list(
                self.metrics.values()) + [SpecialEuclidean(3, "vector")]:
            for base_point in self.elements.values():
                for element_type in self.elements:
                    if element_type in self.angles_close_to_pi:
                        continue
                    point = self.elements[element_type]
                    smoke_data.append(
                        dict(
                            metric=metric,
                            point=point,
                            base_point=base_point,
                        ))
        return self.generate_tests(smoke_data)

    def log_after_exp_with_angles_close_to_pi_test_data(self):
        smoke_data = []
        for metric in self.metrics_all.values():
            for base_point in self.elements.values():
                for element_type in self.angles_close_to_pi:
                    tangent_vec = self.elements_all[element_type]
                    smoke_data.append(
                        dict(
                            metric=metric,
                            tangent_vec=tangent_vec,
                            base_point=base_point,
                        ))
        return self.generate_tests(smoke_data)

    def log_after_exp_test_data(self):
        smoke_data = []
        for metric in [
                self.metrics_all["left_canonical"],
                self.metrics_all["left_diag"],
        ]:
            for base_point in self.elements.values():
                for element_type in self.elements:
                    if element_type in self.angles_close_to_pi:
                        continue
                    tangent_vec = self.elements[element_type]
                    smoke_data.append(
                        dict(
                            metric=metric,
                            tangent_vec=tangent_vec,
                            base_point=base_point,
                        ))
        return self.generate_tests(smoke_data)

    def exp_test_data(self):
        rot_vec_base_point = gs.array([0.0, 0.0, 0.0])
        translation_base_point = gs.array([4.0, -1.0, 10000.0])
        transfo_base_point = gs.concatenate(
            [rot_vec_base_point, translation_base_point], axis=0)

        # Tangent vector is a translation (no infinitesimal rotational part)
        # Expect the sum of the translation
        # with the translation of the reference point
        rot_vec = gs.array([0.0, 0.0, 0.0])
        translation = gs.array([1.0, 0.0, -3.0])
        tangent_vec = gs.concatenate([rot_vec, translation], axis=0)
        expected = gs.concatenate(
            [gs.array([0.0, 0.0, 0.0]),
             gs.array([5.0, -1.0, 9997.0])], axis=0)
        smoke_data = [
            dict(
                metric=self.metrics_all["left_canonical"],
                tangent_vec=tangent_vec,
                base_point=transfo_base_point,
                expected=expected,
            ),
            dict(
                metric=self.group,
                tangent_vec=self.elements_all["translation_small"],
                base_point=self.elements_all["translation_large"],
                expected=self.elements_all["translation_large"] +
                self.elements_all["translation_small"],
            ),
        ]
        return self.generate_tests(smoke_data)

    def log_test_data(self):
        rot_vec_base_point = gs.array([0.0, 0.0, 0.0])
        translation_base_point = gs.array([4.0, 0.0, 0.0])
        transfo_base_point = gs.concatenate(
            [rot_vec_base_point, translation_base_point], axis=0)

        # Point is a translation (no rotational part)
        # Expect the difference of the translation
        # by the translation of the reference point
        rot_vec = gs.array([0.0, 0.0, 0.0])
        translation = gs.array([-1.0, -1.0, -1.2])
        point = gs.concatenate([rot_vec, translation], axis=0)

        expected = gs.concatenate(
            [gs.array([0.0, 0.0, 0.0]),
             gs.array([-5.0, -1.0, -1.2])], axis=0)
        smoke_data = [
            dict(
                metric=self.metrics_all["left_canonical"],
                point=point,
                base_point=transfo_base_point,
                expected=expected,
            ),
            dict(
                metric=self.group,
                point=self.elements_all["translation_large"],
                base_point=self.elements_all["translation_small"],
                expected=self.elements_all["translation_large"] -
                self.elements_all["translation_small"],
            ),
        ]
        return self.generate_tests(smoke_data)

    def regularize_extreme_cases_test_data(self):
        smoke_data = []
        for angle_type in ["angle_close_0", "angle_close_pi_low", "angle_0"]:
            point = self.elements_all[angle_type]
            smoke_data += [dict(point=point, expected=point)]

        if not geomstats.tests.tf_backend():
            angle_type = "angle_pi"
            point = self.elements_all[angle_type]
            smoke_data += [dict(point=point, expected=point)]

            angle_type = "angle_close_pi_high"
            point = self.elements_all[angle_type]

            norm = gs.linalg.norm(point[:3])
            expected_rot = gs.concatenate(
                [point[:3] / norm * (norm - 2 * gs.pi),
                 gs.zeros(3)], axis=0)
            expected_trans = gs.concatenate([gs.zeros(3), point[3:6]], axis=0)
            expected = expected_rot + expected_trans
            smoke_data += [dict(point=point, expected=expected)]

            in_pi_2pi = ["angle_in_pi_2pi", "angle_close_2pi_low"]

            for angle_type in in_pi_2pi:
                point = self.elements_all[angle_type]
                angle = gs.linalg.norm(point[:3])
                new_angle = gs.pi - (angle - gs.pi)

                expected_rot = gs.concatenate(
                    [-new_angle * (point[:3] / angle),
                     gs.zeros(3)], axis=0)
                expected_trans = gs.concatenate([gs.zeros(3), point[3:6]],
                                                axis=0)
                expected = expected_rot + expected_trans
                smoke_data += [dict(point=point, expected=expected)]

            angle_type = "angle_2pi"
            point = self.elements_all[angle_type]

            expected = gs.concatenate([gs.zeros(3), point[3:6]], axis=0)
            smoke_data += [dict(point=point, expected=expected)]

            angle_type = "angle_close_2pi_high"
            point = self.elements_all[angle_type]
            angle = gs.linalg.norm(point[:3])
            new_angle = angle - 2 * gs.pi

            expected_rot = gs.concatenate(
                [new_angle * point[:3] / angle,
                 gs.zeros(3)], axis=0)
            expected_trans = gs.concatenate([gs.zeros(3), point[3:6]], axis=0)
            expected = expected_rot + expected_trans
            smoke_data += [dict(point=point, expected=expected)]
        return self.generate_tests(smoke_data)
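
The expectations above encode how regularization folds the rotation part of an SE(3) vector back into a canonical range while leaving the translation untouched. A short check sketch for the full-turn case (angle_2pi), whose rotation part should regularize to zero; the values are copied from the test data above:

import geomstats.backend as gs
from geomstats.geometry.special_euclidean import SpecialEuclidean

group = SpecialEuclidean(3, "vector")
angle_2pi = 2.0 * gs.pi / gs.sqrt(3.0) * gs.array(
    [1.0, 1.0, -1.0, 0.0, 0.0, 0.0]) + gs.array(
        [0.0, 0.0, 0.0, 1.0, 8.0, -10.0])

# A rotation by a full turn is the identity rotation; the translation is unchanged.
regularized = group.regularize(angle_2pi)
expected = gs.concatenate([gs.zeros(3), angle_2pi[3:6]], axis=0)
print(gs.allclose(regularized, expected))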