Example No. 1
    def test_variance_hyperbolic(self):
        point = gs.array([2.0, 1.0, 1.0, 1.0])
        points = gs.array([point, point])
        result = variance(points, base_point=point, metric=self.hyperbolic.metric)
        expected = gs.array(0.0)

        self.assertAllClose(result, expected)
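For context, these tests assume a fixture that builds the space and imports the variance helper. A minimal self-contained sketch, assuming geomstats' Hyperboloid with dim=3 (the fixture names are assumptions):

import geomstats.backend as gs
from geomstats.geometry.hyperboloid import Hyperboloid
from geomstats.learning.frechet_mean import variance

# [2, 1, 1, 1] satisfies -x0^2 + x1^2 + x2^2 + x3^2 = -1, so it lies on the hyperboloid.
hyperbolic = Hyperboloid(dim=3)
point = gs.array([2.0, 1.0, 1.0, 1.0])
points = gs.array([point, point])
# Two copies of the same point have zero spread around that point.
result = variance(points, base_point=point, metric=hyperbolic.metric)  # expected: 0.0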
Example No. 2
    def test_variance_sphere(self):
        point = gs.array([0., 0., 0., 0., 1.])
        points = gs.array([point, point])

        result = variance(points, base_point=point, metric=self.sphere.metric)
        expected = gs.array(0.)

        self.assertAllClose(expected, result)
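Likewise, a sketch of the sphere fixture this test relies on, assuming geomstats' Hypersphere with dim=4 so that points live in R^5:

import geomstats.backend as gs
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import variance

sphere = Hypersphere(dim=4)
point = gs.array([0.0, 0.0, 0.0, 0.0, 1.0])  # a unit-norm point on S^4
points = gs.array([point, point])
result = variance(points, base_point=point, metric=sphere.metric)  # expected: 0.0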
Example No. 3
    def test_variance_hyperbolic(self):
        point = gs.array([2., 1., 1., 1.])
        points = gs.array([point, point])
        result = variance(points,
                          base_point=point,
                          metric=self.hyperbolic.metric)
        expected = helper.to_scalar(0.)

        self.assertAllClose(result, expected)
Example No. 4
    def test_variance_sphere(self):
        point = gs.array([0., 0., 0., 0., 1.])
        points = gs.zeros((2, point.shape[0]))
        points[0, :] = point
        points[1, :] = point

        result = variance(points, base_point=point, metric=self.sphere.metric)
        expected = helper.to_scalar(0.)

        self.assertAllClose(expected, result)
Example No. 5
    def test_variance_euclidean(self):
        points = gs.array([[1.0, 2.0], [2.0, 3.0], [3.0, 4.0], [4.0, 5.0]])
        weights = gs.array([1.0, 2.0, 1.0, 2.0])
        base_point = gs.zeros(2)
        result = variance(
            points, weights=weights, base_point=base_point, metric=self.euclidean.metric
        )
        # We expect the weighted average of the points' squared norms.
        expected = gs.array((1 * 5.0 + 2 * 13.0 + 1 * 25.0 + 2 * 41.0) / 6.0)

        self.assertAllClose(result, expected)
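The expected value can be verified by hand. A pure-Python sketch of the comment's arithmetic:

# Weighted variance about the origin = weighted mean of squared norms.
sq_norms = [1.0 + 4.0, 4.0 + 9.0, 9.0 + 16.0, 16.0 + 25.0]  # 5, 13, 25, 41
weights = [1.0, 2.0, 1.0, 2.0]
expected = sum(w * n for w, n in zip(weights, sq_norms)) / sum(weights)
assert expected == (5.0 + 26.0 + 25.0 + 82.0) / 6.0 == 23.0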
Example No. 6
    def test_variance_minkowski(self):
        points = gs.array([[1., 0.], [2., math.sqrt(3)], [3., math.sqrt(8)],
                           [4., math.sqrt(24)]])
        weights = gs.array([1., 2., 1., 2.])
        base_point = gs.array([-1., 0.])
        var = variance(points,
                       weights=weights,
                       base_point=base_point,
                       metric=self.minkowski.metric)
        result = helper.to_scalar(var != 0)
        # The variance is the weighted average of the points' Minkowski
        # squared distances to the base point, which is nonzero here.
        expected = helper.to_scalar(gs.array([True]))
        self.assertAllClose(result, expected)
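To see why the variance is nonzero, here is the computation spelled out in plain Python. The metric signature (minus on the first coordinate) is an assumption matching geomstats' Minkowski convention:

import math

base = (-1.0, 0.0)
pts = [(1.0, 0.0), (2.0, math.sqrt(3)), (3.0, math.sqrt(8)), (4.0, math.sqrt(24))]
weights = [1.0, 2.0, 1.0, 2.0]
# Minkowski squared distance: -(dx0)**2 + (dx1)**2.
sq_dists = [-(x0 - base[0]) ** 2 + (x1 - base[1]) ** 2 for x0, x1 in pts]
variance_value = sum(w * d for w, d in zip(weights, sq_dists)) / sum(weights)
print(variance_value)  # -26/6, nonzero, so `var != 0` holds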
Example No. 7
    def test_variance_euclidean(self):
        points = gs.array([[1., 2.], [2., 3.], [3., 4.], [4., 5.]])
        weights = gs.array([1., 2., 1., 2.])
        base_point = gs.zeros(2)
        result = variance(points,
                          weights=weights,
                          base_point=base_point,
                          metric=self.euclidean.metric)
        # We expect the weighted average of the points' squared norms.
        expected = (1 * 5. + 2 * 13. + 1 * 25. + 2 * 41.) / 6.
        expected = helper.to_scalar(expected)

        self.assertAllClose(result, expected)
Example No. 8
    def test_variance_minkowski(self):
        points = gs.array([[1.0, 0.0], [2.0, math.sqrt(3)],
                           [3.0, math.sqrt(8)], [4.0, math.sqrt(24)]])
        weights = gs.array([1.0, 2.0, 1.0, 2.0])
        base_point = gs.array([-1.0, 0.0])
        var = variance(points,
                       weights=weights,
                       base_point=base_point,
                       metric=self.minkowski.metric)
        result = var != 0
        # The variance is the weighted average of the points' Minkowski
        # squared distances to the base point, which is nonzero here.
        expected = True
        self.assertAllClose(result, expected)
Example No. 9
def main():
    r"""Compute and visualize a geodesic regression on the sphere.

    The generative model of the data is:
    :math:`Z = Exp_{\beta_0}(\beta_1.X)` and :math:`Y = Exp_Z(\epsilon)`
    where:
    - :math:`Exp` denotes the Riemannian exponential,
    - :math:`\beta_0` is called the intercept,
    - :math:`\beta_1` is called the coefficient,
    - :math:`\epsilon \sim N(0, 1)` is a standard Gaussian noise,
    - :math:`X` is the input, :math:`Y` is the target.
    """
    # Generate noise-free data
    n_samples = 50
    X = gs.random.rand(n_samples)
    X -= gs.mean(X)

    intercept = SPACE.random_uniform()
    coef = SPACE.to_tangent(5.0 * gs.random.rand(EMBEDDING_DIM),
                            base_point=intercept)
    y = METRIC.exp(X[:, None] * coef, base_point=intercept)

    # Generate normal noise
    normal_noise = gs.random.normal(size=(n_samples, EMBEDDING_DIM))
    noise = SPACE.to_tangent(normal_noise, base_point=y) / gs.pi / 2

    rss = gs.sum(METRIC.squared_norm(noise, base_point=y)) / n_samples

    # Add noise
    y = METRIC.exp(noise, y)

    # True noise level and R2
    estimator = FrechetMean(METRIC)
    estimator.fit(y)
    variance_ = variance(y, estimator.estimate_, metric=METRIC)
    r2 = 1 - rss / variance_

    # Fit geodesic regression
    gr = GeodesicRegression(SPACE,
                            center_X=False,
                            method="extrinsic",
                            verbose=True)
    gr.fit(X, y, compute_training_score=True)
    intercept_hat, coef_hat = gr.intercept_, gr.coef_

    # Measure Mean Squared Error
    mse_intercept = METRIC.squared_dist(intercept_hat, intercept)

    tangent_vec_to_transport = coef_hat
    tangent_vec_of_transport = METRIC.log(intercept, base_point=intercept_hat)
    transported_coef_hat = METRIC.parallel_transport(
        tangent_vec=tangent_vec_to_transport,
        base_point=intercept_hat,
        direction=tangent_vec_of_transport,
    )
    mse_coef = METRIC.squared_norm(transported_coef_hat - coef,
                                   base_point=intercept)

    # Measure goodness of fit
    r2_hat = gr.training_score_

    print(f"MSE on the intercept: {mse_intercept:.2e}")
    print(f"MSE on the coef, i.e. initial velocity: {mse_coef:.2e}")
    print(f"Determination coefficient: R^2={r2_hat:.2f}")
    print(f"True R^2: {r2:.2f}")

    # Plot
    fitted_data = gr.predict(X)
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111, projection="3d")
    sphere_visu = visualization.Sphere(n_meridians=30)
    ax = sphere_visu.set_ax(ax=ax)

    path = METRIC.geodesic(initial_point=intercept_hat,
                           initial_tangent_vec=coef_hat)
    regressed_geodesic = path(
        gs.linspace(0.0, 1.0, 100) * gs.pi * 2 / METRIC.norm(coef))
    regressed_geodesic = gs.to_numpy(gs.autodiff.detach(regressed_geodesic))

    size = 10
    marker = "o"
    sphere_visu.draw_points(ax,
                            gs.array([intercept_hat]),
                            marker=marker,
                            c="r",
                            s=size)
    sphere_visu.draw_points(ax, y, marker=marker, c="b", s=size)
    sphere_visu.draw_points(ax, fitted_data, marker=marker, c="g", s=size)

    ax.plot(
        regressed_geodesic[:, 0],
        regressed_geodesic[:, 1],
        regressed_geodesic[:, 2],
        c="gray",
    )
    sphere_visu.draw(ax, linewidth=1)
    ax.grid(False)
    plt.axis("off")

    plt.show()
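The script refers to module-level constants (SPACE, METRIC, EMBEDDING_DIM) that are not shown. A plausible setup, assuming geomstats' hypersphere; the exact names and dimension are assumptions:

import matplotlib.pyplot as plt

import geomstats.backend as gs
import geomstats.visualization as visualization
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.learning.frechet_mean import FrechetMean, variance
from geomstats.learning.geodesic_regression import GeodesicRegression

EMBEDDING_DIM = 3
SPACE = Hypersphere(dim=2)  # the 2-sphere embedded in R^3
METRIC = SPACE.metric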
Example No. 10
    def fit(self, data):
        """Fit a Gaussian mixture model (GMM) given the data.

        Alternates between Expectation and Maximization steps
        for some number of iterations.

        Parameters
        ----------
        data : array-like, shape=[n_samples, n_features]
            Training data, where n_samples is the number of samples
            and n_features is the number of features.

        Returns
        -------
        self : object
            Return the components of the computed
            Gaussian mixture model: means, variances and mixture_coefficients.
        """
        self._dimension = data.shape[-1]
        if self.initialisation_method == 'kmeans':
            kmeans = RiemannianKMeans(metric=self.metric,
                                      n_clusters=self.n_gaussians,
                                      init='random',
                                      mean_method='batch',
                                      lr=self.lr_mean)

            centroids = kmeans.fit(X=data)
            labels = kmeans.predict(X=data)

            self.means = centroids
            self.variances = gs.zeros(self.n_gaussians)

            labeled_data = gs.vstack([labels, gs.transpose(data)])
            labeled_data = gs.transpose(labeled_data)
            for label, centroid in enumerate(centroids):
                label_mask = gs.where(labeled_data[:, 0] == label)
                grouped_by_label = labeled_data[label_mask][:, 1:]
                v = variance(grouped_by_label, centroid, self.metric)
                if grouped_by_label.shape[0] == 1:
                    v += MIN_VAR_INIT
                self.variances[label] = v
        else:
            self.means = (gs.random.rand(self.n_gaussians, self._dimension) -
                          0.5) / self._dimension
            self.variances = gs.random.rand(self.n_gaussians) / 10 + 0.8

        self.mixture_coefficients = gs.ones(self.n_gaussians) / self.n_gaussians
        posterior_probabilities = gs.ones((data.shape[0], self.means.shape[0]))

        (self.variances_range,
         self.normalization_factor_var,
         self.phi_inv_var) = self.normalization_factor_init(
            gs.arange(ZETA_LOWER_BOUND, ZETA_UPPER_BOUND, ZETA_STEP))

        for epoch in range(self.max_iter):
            old_posterior_probabilities = posterior_probabilities

            posterior_probabilities = self._expectation(data)

            condition = gs.mean(
                gs.abs(old_posterior_probabilities - posterior_probabilities))

            if condition < EM_CONV_RATE and epoch > MINIMUM_EPOCHS:
                logging.info('EM converged in %s iterations', epoch)
                return self.means, self.variances, self.mixture_coefficients

            self._maximization(data, posterior_probabilities)

        logging.warning('EM did not converge. '
                        'Please increase MINIMUM_EPOCHS.')

        return self.means, self.variances, self.mixture_coefficients
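A hedged usage sketch for this fit method, assuming it belongs to an EM estimator along the lines of geomstats' RiemannianEM on the Poincaré ball; the constructor arguments below are illustrative assumptions, not a confirmed API:

import geomstats.backend as gs
from geomstats.geometry.poincare_ball import PoincareBall
from geomstats.learning.expectation_maximization import RiemannianEM  # assumed location

ball = PoincareBall(dim=2)
data = ball.random_point(n_samples=100)  # toy data on the ball
em = RiemannianEM(ball.metric, n_gaussians=2)  # argument names assumed
means, variances, mixture_coefficients = em.fit(data)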
Example No. 11
def main():
    r"""Compute and visualize a geodesic regression on the SE(2).

    The generative model of the data is:
    :math:`Z = Exp_{\beta_0}(\beta_1.X)` and :math:`Y = Exp_Z(\epsilon)`
    where:
    - :math:`Exp` denotes the Riemannian exponential,
    - :math:`\beta_0` is called the intercept,
    - :math:`\beta_1` is called the coefficient,
    - :math:`\epsilon \sim N(0, 1)` is a standard Gaussian noise,
    - :math:`X` is the input, :math:`Y` is the target.
    """
    # Generate noise-free data
    n_samples = 20
    X = gs.random.normal(size=(n_samples, ))
    X -= gs.mean(X)

    intercept = SPACE.random_point()
    coef = SPACE.to_tangent(5.0 * gs.random.rand(3, 3), intercept)
    y = METRIC.exp(X[:, None, None] * coef[None], intercept)

    # Generate normal noise in the Lie algebra
    normal_noise = gs.random.normal(size=(n_samples, 3))
    normal_noise = SPACE.lie_algebra.matrix_representation(normal_noise)
    noise = SPACE.tangent_translation_map(y)(normal_noise) / gs.pi

    rss = gs.sum(METRIC.squared_norm(noise, y)) / n_samples

    # Add noise
    y = METRIC.exp(noise, y)

    # True noise level and R2
    estimator = FrechetMean(METRIC)
    estimator.fit(y)
    variance_ = variance(y, estimator.estimate_, metric=METRIC)
    r2 = 1 - rss / variance_

    # Fit geodesic regression
    gr = GeodesicRegression(
        SPACE,
        metric=METRIC,
        center_X=False,
        method="riemannian",
        max_iter=100,
        init_step_size=0.1,
        verbose=True,
        initialization="frechet",
    )
    gr.fit(X, y, compute_training_score=True)

    intercept_hat, beta_hat = gr.intercept_, gr.coef_

    # Measure Mean Squared Error
    mse_intercept = METRIC.squared_dist(intercept_hat, intercept)
    mse_beta = METRIC.squared_norm(
        METRIC.parallel_transport(beta_hat, intercept_hat,
                                  METRIC.log(intercept_hat, intercept)) - coef,
        intercept,
    )

    # Measure goodness of fit
    r2_hat = gr.training_score_

    print(f"MSE on the intercept: {mse_intercept:.2e}")
    print(f"MSE on the initial velocity beta: {mse_beta:.2e}")
    print(f"Determination coefficient: R^2={r2_hat:.2f}")
    print(f"True R^2: {r2:.2f}")

    # Plot
    fitted_data = gr.predict(X)
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(111)
    sphere_visu = visualization.SpecialEuclidean2()
    ax = sphere_visu.set_ax(ax=ax)

    path = METRIC.geodesic(initial_point=intercept_hat,
                           initial_tangent_vec=beta_hat)
    regressed_geodesic = path(gs.linspace(min(X), max(X), 100))

    sphere_visu.draw_points(ax, y, marker="o", c="black")
    sphere_visu.draw_points(ax, fitted_data, marker="o", c="gray")
    sphere_visu.draw_points(ax, gs.array([intercept]), marker="x", c="r")
    sphere_visu.draw_points(ax,
                            gs.array([intercept_hat]),
                            marker="o",
                            c="green")

    ax.plot(regressed_geodesic[:, 0, 2], regressed_geodesic[:, 1, 2], c="gray")
    plt.show()
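As in the sphere example, this script assumes module-level constants. A plausible setup for SE(2), with names assumed to match geomstats' SpecialEuclidean group in matrix representation:

import matplotlib.pyplot as plt

import geomstats.backend as gs
import geomstats.visualization as visualization
from geomstats.geometry.special_euclidean import SpecialEuclidean
from geomstats.learning.frechet_mean import FrechetMean, variance
from geomstats.learning.geodesic_regression import GeodesicRegression

SPACE = SpecialEuclidean(2)  # SE(2) as 3x3 homogeneous matrices
METRIC = SPACE.left_canonical_metric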