Example #1
    def test_estimate_shape_default_gradient_descent_sphere(self):
        dim = 5
        point_a = gs.array([1., 0., 0., 0., 0.])
        point_b = gs.array([0., 1., 0., 0., 0.])
        points = gs.array([point_a, point_b])

        mean = FrechetMean(metric=self.sphere.metric, method='default')
        mean.fit(points)
        result = mean.estimate_

        self.assertAllClose(gs.shape(result), (dim, ))
Example #2
    def test_estimate_hyperbolic(self):
        point = gs.array([2., 1., 1., 1.])
        points = gs.array([point, point])

        mean = FrechetMean(metric=self.hyperbolic.metric)
        mean.fit(X=points)
        expected = point

        result = mean.estimate_

        self.assertAllClose(result, expected)
Example #3
    def test_mean_euclidean_shape(self):
        dim = 2
        point = gs.array([1., 4.])

        mean = FrechetMean(metric=self.euclidean.metric)
        points = [point, point, point]
        mean.fit(points)

        result = mean.estimate_

        self.assertAllClose(gs.shape(result), (dim, ))
Example #4
    def test_estimate_and_belongs_adaptive_gradient_descent_sphere(self):
        point_a = gs.array([1., 0., 0., 0., 0.])
        point_b = gs.array([0., 1., 0., 0., 0.])
        points = gs.array([point_a, point_b])

        mean = FrechetMean(metric=self.sphere.metric, method='adaptive')
        mean.fit(points)

        result = self.sphere.belongs(mean.estimate_)
        expected = True
        self.assertAllClose(result, expected)
Example #5
    def test_estimate_adaptive_gradient_descent_sphere(self):
        point = gs.array([0., 0., 0., 0., 1.])
        points = gs.array([point, point])

        mean = FrechetMean(metric=self.sphere.metric, method='adaptive')
        mean.fit(X=points)

        result = mean.estimate_
        expected = point

        self.assertAllClose(expected, result)
Example #6
    def test_weighted_frechet_mean(self):
        """Test for weighted mean."""
        data = gs.array([[0.1, 0.2], [0.25, 0.35]])
        weights = gs.array([3.0, 1.0])
        mean_o = FrechetMean(metric=self.metric, point_type="vector", lr=1.0)
        mean_o.fit(data, weights=weights)
        result = mean_o.estimate_
        expected = self.metric.exp(
            weights[1] / gs.sum(weights) * self.metric.log(data[1], data[0]),
            data[0])
        self.assertAllClose(result, expected)
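The expected value in this test uses the closed form for the weighted Fréchet mean of two points: it lies on the geodesic from data[0] toward data[1], at parameter weights[1] / (weights[0] + weights[1]). Below is a minimal standalone sketch of the same check on the 2-sphere; it assumes only the geomstats objects already used throughout these examples, and the import paths are the standard geomstats ones rather than anything defined in this test class.

import geomstats.backend as gs
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import FrechetMean

sphere = Hypersphere(dim=2)
point_a, point_b = sphere.random_uniform(n_samples=2)
weights = gs.array([3., 1.])

mean = FrechetMean(metric=sphere.metric)
mean.fit(gs.stack([point_a, point_b]), weights=weights)

# Closed form: geodesic from point_a toward point_b, evaluated at
# t = weights[1] / (weights[0] + weights[1]).
t = weights[1] / gs.sum(weights)
expected = sphere.metric.exp(t * sphere.metric.log(point_b, point_a), point_a)

# Should be close to zero, up to the estimator's convergence tolerance.
print(sphere.metric.dist(mean.estimate_, expected))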
Example #7
    def test_estimate_default_gradient_descent_so3(self):
        points = self.so3.random_uniform(2)

        mean_vec = FrechetMean(metric=self.so3.bi_invariant_metric,
                               method='default')
        mean_vec.fit(points)

        logs = self.so3.bi_invariant_metric.log(points, mean_vec.estimate_)
        result = gs.sum(logs, axis=0)
        expected = gs.zeros_like(points[0])
        self.assertAllClose(result, expected)
Example #8
    def test_estimate_and_belongs_adaptive_gradient_descent_so_matrix(self):
        point = self.so_matrix.random_uniform(10)

        mean = FrechetMean(metric=self.so_matrix.bi_invariant_metric,
                           method='adaptive',
                           verbose=True,
                           lr=.5)
        mean.fit(point)

        result = self.so_matrix.belongs(mean.estimate_)
        self.assertTrue(result)
Example #9
    def test_stiefel_n_samples(self):
        space = Stiefel(3, 2)
        metric = space.metric
        point = space.random_point(2)
        mean = FrechetMean(metric,
                           method="default",
                           init_step_size=0.5,
                           verbose=True)
        mean.fit(point)
        result = space.belongs(mean.estimate_)
        self.assertTrue(result)
Example #10
    def test_coincides_with_frechet_so(self):
        gs.random.seed(0)
        point = self.so.random_uniform(self.n_samples)
        estimator = ExponentialBarycenter(self.so, max_iter=40, epsilon=1e-10)
        estimator.fit(point)
        result = estimator.estimate_
        frechet_estimator = FrechetMean(self.so.bi_invariant_metric,
                                        max_iter=40,
                                        epsilon=1e-10)
        frechet_estimator.fit(point)
        expected = frechet_estimator.estimate_
        self.assertAllClose(result, expected)
Example #11
    def test_mean_matrices_shape(self):
        m, n = (2, 2)
        point = gs.array([[1., 4.], [2., 3.]])

        metric = MatricesMetric(m, n)
        mean = FrechetMean(metric=metric, point_type='matrix')
        points = [point, point, point]
        mean.fit(points)

        result = mean.estimate_

        self.assertAllClose(gs.shape(result), (m, n))
Example #12
    def test_mean_minkowski(self):
        point = gs.array([2.0, -math.sqrt(3)])
        points = [point, point, point]

        mean = FrechetMean(metric=self.minkowski.metric)
        mean.fit(points)
        result = mean.estimate_

        expected = point

        self.assertAllClose(result, expected)

        points = gs.array([[1.0, 0.0], [2.0, math.sqrt(3)],
                           [3.0, math.sqrt(8)], [4.0, math.sqrt(24)]])
        weights = gs.array([1.0, 2.0, 1.0, 2.0])

        mean = FrechetMean(metric=self.minkowski.metric)
        mean.fit(points, weights=weights)
        result = self.minkowski.belongs(mean.estimate_)

        self.assertTrue(result)
Example #13
    def test_fit(self):
        X = self.data
        clustering = OnlineKMeans(
            metric=self.metric, n_clusters=1, n_repetitions=10)
        clustering.fit(X)

        center = clustering.cluster_centers_
        mean = FrechetMean(metric=self.metric, lr=1.)
        mean.fit(X)

        result = self.metric.dist(center, mean.estimate_)
        expected = 0.
        self.assertAllClose(expected, result, atol=TOLERANCE)
Example #14
    def __init__(self,
                 manifold,
                 metric,
                 bandwidth,
                 tol=1e-2,
                 **FrechetMean_kwargs):

        self.manifold = manifold
        self.metric = metric
        self.bandwidth = bandwidth
        self.tol = tol
        self.mean = FrechetMean(self.metric, **FrechetMean_kwargs)
        self.centers = None
Example #15
    def test_estimate_and_belongs_hyperbolic(self):
        point_a = self.hyperbolic.random_point()
        point_b = self.hyperbolic.random_point()
        point_c = self.hyperbolic.random_point()
        points = gs.stack([point_a, point_b, point_c], axis=0)

        mean = FrechetMean(metric=self.hyperbolic.metric)
        mean.fit(X=points)

        result = self.hyperbolic.belongs(mean.estimate_)
        expected = True

        self.assertAllClose(result, expected)
Example #16
    def test_estimate_and_belongs_sphere(self):
        point_a = gs.array([1., 0., 0., 0., 0.])
        point_b = gs.array([0., 1., 0., 0., 0.])
        points = gs.array([point_a, point_b])

        mean = FrechetMean(metric=self.sphere.metric)
        mean.fit(points)

        result = self.sphere.belongs(mean.estimate_)
        expected = gs.array([[True]])
        self.assertAllClose(result, expected)
Example #17
    def test_estimate_sphere(self):
        point = gs.array([0., 0., 0., 0., 1.])
        points = gs.array([point, point])

        mean = FrechetMean(metric=self.sphere.metric)
        mean.fit(X=points)

        result = mean.estimate_
        expected = helper.to_vector(point)

        self.assertAllClose(expected, result)
Example #18
    def test_estimate_and_belongs_hyperbolic(self):
        point_a = self.hyperbolic.random_uniform()
        point_b = self.hyperbolic.random_uniform()
        point_c = self.hyperbolic.random_uniform()
        points = gs.concatenate([point_a, point_b, point_c], axis=0)

        mean = FrechetMean(metric=self.hyperbolic.metric)
        mean.fit(X=points)

        result = self.hyperbolic.belongs(mean.estimate_)
        expected = gs.array([[True]])

        self.assertAllClose(result, expected)
Example #19
    def test_mean_matrices(self):
        m, n = (2, 2)
        point = gs.array([[1.0, 4.0], [2.0, 3.0]])

        metric = MatricesMetric(m, n)
        mean = FrechetMean(metric=metric, point_type="matrix")
        points = [point, point, point]
        mean.fit(points)

        result = mean.estimate_
        expected = point

        self.assertAllClose(result, expected)
Example #20
    def test_mean_euclidean(self):
        point = gs.array([1., 4.])

        mean = FrechetMean(metric=self.euclidean.metric)
        points = [point, point, point]
        mean.fit(points)

        result = mean.estimate_
        expected = point

        self.assertAllClose(result, expected)

        points = gs.array([[1., 2.], [2., 3.], [3., 4.], [4., 5.]])
        weights = gs.array([1., 2., 1., 2.])

        mean = FrechetMean(metric=self.euclidean.metric)
        mean.fit(points, weights=weights)

        result = mean.estimate_
        expected = gs.array([16. / 6., 22. / 6.])

        self.assertAllClose(result, expected)
Example #21
import logging

import matplotlib.pyplot as plt
import numpy as np

import geomstats.visualization as visualization
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import FrechetMean
from geomstats.learning.pca import TangentPCA


def main():
    """Perform tangent PCA at the mean on the sphere."""
    fig = plt.figure(figsize=(15, 5))

    sphere = Hypersphere(dim=2)

    data = sphere.random_von_mises_fisher(kappa=15, n_samples=140)

    mean = FrechetMean(metric=sphere.metric)
    mean.fit(data)

    mean_estimate = mean.estimate_

    tpca = TangentPCA(metric=sphere.metric, n_components=2)
    tpca = tpca.fit(data, base_point=mean_estimate)
    tangent_projected_data = tpca.transform(data)

    geodesic_0 = sphere.metric.geodesic(
        initial_point=mean_estimate, initial_tangent_vec=tpca.components_[0]
    )
    geodesic_1 = sphere.metric.geodesic(
        initial_point=mean_estimate, initial_tangent_vec=tpca.components_[1]
    )

    n_steps = 100
    t = np.linspace(-1, 1, n_steps)
    geodesic_points_0 = geodesic_0(t)
    geodesic_points_1 = geodesic_1(t)

    logging.info(
        "Coordinates of the Log of the first 5 data points at the mean, "
        "projected on the principal components:"
    )
    logging.info("\n{}".format(tangent_projected_data[:5]))

    ax_var = fig.add_subplot(121)
    xticks = np.arange(1, 2 + 1, 1)
    ax_var.xaxis.set_ticks(xticks)
    ax_var.set_title("Explained variance")
    ax_var.set_xlabel("Number of Principal Components")
    ax_var.set_ylim((0, 1))
    ax_var.plot(xticks, tpca.explained_variance_ratio_)

    ax = fig.add_subplot(122, projection="3d")

    visualization.plot(mean_estimate, ax, space="S2", color="darkgreen", s=10)
    visualization.plot(geodesic_points_0, ax, space="S2", linewidth=2)
    visualization.plot(geodesic_points_1, ax, space="S2", linewidth=2)
    visualization.plot(data, ax, space="S2", color="black", alpha=0.7)

    plt.show()
Example #22
    def test_estimate_default_gradient_descent_so_matrix(self):
        points = self.so_matrix.random_uniform(2)
        mean_vec = FrechetMean(
            metric=self.so_matrix.bi_invariant_metric,
            method="default",
            init_step_size=1.0,
        )
        mean_vec.fit(points)
        logs = self.so_matrix.bi_invariant_metric.log(points,
                                                      mean_vec.estimate_)
        result = gs.sum(logs, axis=0)
        expected = gs.zeros_like(points[0])

        self.assertAllClose(result, expected, atol=1e-5)
Example #23
    def test_mean_minkowski(self):
        point = gs.array([[2., -math.sqrt(3)]])
        points = [point, point, point]

        mean = FrechetMean(metric=self.minkowski.metric)
        mean.fit(points)
        result = mean.estimate_

        expected = point
        expected = helper.to_vector(expected)

        self.assertAllClose(result, expected)

        points = gs.array([[1., 0.], [2., math.sqrt(3)], [3., math.sqrt(8)],
                           [4., math.sqrt(24)]])
        weights = gs.array([1., 2., 1., 2.])

        mean = FrechetMean(metric=self.minkowski.metric)
        mean.fit(points, weights=weights)
        result = mean.estimate_
        result = self.minkowski.belongs(result)
        expected = gs.array([[True]])

        self.assertAllClose(result, expected)
Example #24
    def test_spd_kmeans_fit(self):
        gs.random.seed(0)
        dim = 3
        n_points = 2
        space = spd_matrices.SPDMatrices(dim)
        data = space.random_point(n_samples=n_points)
        metric = spd_matrices.SPDMetricAffine(dim)

        kmeans = RiemannianKMeans(metric, n_clusters=1, lr=1.0)
        kmeans.fit(data)
        result = kmeans.centroids

        mean = FrechetMean(metric=metric, point_type="matrix", max_iter=100)
        mean.fit(data)
        expected = mean.estimate_
        self.assertAllClose(result, expected)
Example #25
    def update_means(self, data, posterior_probabilities):
        """Update the mean of each mixture component as a weighted Frechet mean."""
        n_gaussians = posterior_probabilities.shape[-1]

        mean = FrechetMean(metric=self.metric,
                           method=self.mean_method,
                           lr=self.lr_mean,
                           epsilon=self.tol_mean,
                           max_iter=self.max_iter_mean,
                           point_type=self.point_type)

        data_expand = gs.expand_dims(data, 1)
        data_expand = gs.repeat(data_expand, n_gaussians, axis=1)

        mean.fit(data_expand, weights=posterior_probabilities)
        self.means = gs.squeeze(mean.estimate_)
Example #26
    def test_logs_at_mean_adaptive_gradient_descent_sphere(self):
        n_tests = 100
        estimator = FrechetMean(metric=self.sphere.metric, method='adaptive')

        result = []
        for _ in range(n_tests):
            # take 2 random points, compute their mean, and verify that
            # log of each at the mean is opposite
            points = self.sphere.random_uniform(n_samples=2)
            estimator.fit(points)
            mean = estimator.estimate_

            logs = self.sphere.metric.log(point=points, base_point=mean)
            result.append(gs.linalg.norm(logs[1, :] + logs[0, :]))
        result = gs.stack(result)

        expected = gs.zeros(n_tests)
        self.assertAllClose(expected, result, rtol=1e-10, atol=1e-10)
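The comment in the loop above states the property being checked: at the Fréchet mean of two points, their logs at the mean are opposite, so they sum to zero. The same stationarity condition holds with weights, where the weighted sum of logs at the mean vanishes. Here is a short sketch of the weighted check on the 2-sphere, again assuming the standard geomstats import paths rather than anything defined in this test class.

import geomstats.backend as gs
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import FrechetMean

sphere = Hypersphere(dim=2)
points = sphere.random_uniform(n_samples=4)
weights = gs.array([1., 2., 1., 2.])

estimator = FrechetMean(metric=sphere.metric)
estimator.fit(points, weights=weights)
mean = estimator.estimate_

# Stationarity of the weighted Frechet mean: the weighted sum of the
# logs of the samples, taken at the mean, is numerically zero.
logs = sphere.metric.log(point=points, base_point=mean)
weighted_sum = gs.einsum('n,ni->i', weights, logs)
print(gs.linalg.norm(weighted_sum))  # close to 0, up to convergence tolerance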
Example #27
    def initialize_parameters(self, y):
        """Set initial values for the parameters of the model.

        Set initial parameters for the optimization, depending on the value
        of the attribute `initialization`. The options are:
            - `random` : pick random numbers from a normal distribution,
            then project them to the manifold and the tangent space.
            - `frechet` : compute the Frechet mean of the target points.
            - `data` : pick a random sample from the target points and a
            tangent vector with random coefficients.
            - `warm_start`: pick previous values of the parameters if the
            model was fitted before, otherwise behaves as `random`.

        Parameters
        ----------
        y : array-like, shape=[n_samples, {dim, [n,n]}]
            The target data, used for the options `data` and `frechet`.

        Returns
        -------
        intercept : array-like, shape=[{dim, [n,n]}]
            Initial value for the intercept.
        coef : array-like, shape=[{dim, [n,n]}]
            Initial value for the coefficient.
        """
        init = self.initialization
        shape = (
            y.shape[-1:] if self.space.default_point_type == "vector" else y.shape[-2:]
        )
        if isinstance(init, str):
            if init == "random":
                return gs.random.normal(size=(2,) + shape)
            if init == "frechet":
                mean = FrechetMean(self.metric, verbose=self.verbose).fit(y).estimate_
                return mean, gs.zeros(shape)
            if init == "data":
                return gs.random.choice(y, 1)[0], gs.random.normal(size=shape)
            if init == "warm_start":
                if self.intercept_ is not None:
                    return self.intercept_, self.coef_
                return gs.random.normal(size=(2,) + shape)
            raise ValueError(
                "The initialization string must be one of "
                "random, frechet, data or warm_start"
            )
        return init
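For the `frechet` option described in the docstring, the initial intercept is the Fréchet mean of the targets and the initial coefficient is a zero tangent vector of matching shape. A minimal sketch of that branch in isolation, on hypersphere-valued targets (the variable names are illustrative; the imports are the standard geomstats ones, not taken from this class):

import geomstats.backend as gs
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import FrechetMean

space = Hypersphere(dim=2)
y = space.random_uniform(n_samples=20)

# 'frechet' initialization: intercept at the Frechet mean of the targets,
# coefficient as a zero tangent vector with the shape of a single point.
intercept = FrechetMean(metric=space.metric).fit(y).estimate_
coef = gs.zeros(y.shape[-1:])

print(intercept.shape, coef.shape)  # (3,) and (3,) for the 2-sphere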
Example #28
    def test_coincides_with_frechet_so(self):
        point = self.so.random_uniform(self.n_samples)
        estimator = ExponentialBarycenter(self.so, max_iter=32, epsilon=1e-12)
        estimator.fit(point)
        result = estimator.estimate_

        so_vector = SpecialOrthogonal(3, default_point_type='vector')
        frechet_estimator = FrechetMean(so_vector.bi_invariant_metric,
                                        max_iter=32,
                                        epsilon=1e-10,
                                        point_type='vector')
        vector_point = so_vector.rotation_vector_from_matrix(point)
        frechet_estimator.fit(vector_point)
        mean = frechet_estimator.estimate_
        expected = so_vector.matrix_from_rotation_vector(mean)
        self.assertAllClose(result, expected)
Example #29
    def test_logs_at_mean_default_gradient_descent_sphere(self):
        n_tests = 10
        estimator = FrechetMean(metric=self.sphere.metric, method="default", lr=1.0)

        result = []
        for _ in range(n_tests):
            # take 2 random points, compute their mean, and verify that
            # log of each at the mean is opposite
            points = self.sphere.random_uniform(n_samples=2)
            estimator.fit(points)
            mean = estimator.estimate_

            logs = self.sphere.metric.log(point=points, base_point=mean)
            result.append(gs.linalg.norm(logs[1, :] + logs[0, :]))
        result = gs.stack(result)
        expected = gs.zeros(n_tests)
        self.assertAllClose(expected, result)
Example #30
    def test_hypersphere_kmeans_fit(self):
        gs.random.seed(55)

        manifold = hypersphere.Hypersphere(2)
        metric = hypersphere.HypersphereMetric(2)

        x = manifold.random_von_mises_fisher(kappa=100, n_samples=200)

        kmeans = RiemannianKMeans(metric, 1, tol=1e-3, lr=1.0)
        kmeans.fit(x)
        center = kmeans.centroids

        mean = FrechetMean(metric=metric, lr=1.0)
        mean.fit(x)

        result = metric.dist(center, mean.estimate_)
        expected = 0.0
        self.assertAllClose(expected, result)