Example #1
    def test_sample_von_mises_fisher(self):
        """
        Check that the maximum likelihood estimates of the mean and
        concentration parameter are close to the real values. A first
        estimation of the concentration parameter is obtained by a
        closed-form expression and improved through the Newton method.
        """
        dim = 2
        n_points = 1000000
        sphere = Hypersphere(dim)

        # check mean value for concentrated distribution
        kappa = 1000000
        points = sphere.random_von_mises_fisher(kappa, n_points)
        sum_points = gs.sum(points, axis=0)
        mean = gs.array([0., 0., 1.])
        mean_estimate = sum_points / gs.linalg.norm(sum_points)
        expected = mean
        result = mean_estimate
        self.assertTrue(gs.allclose(result, expected,
                                    atol=MEAN_ESTIMATION_TOL))
        # check concentration parameter for dispersed distribution
        kappa = 1
        points = sphere.random_von_mises_fisher(kappa, n_points)
        sum_points = gs.sum(points, axis=0)
        mean_norm = gs.linalg.norm(sum_points) / n_points
        kappa_estimate = (mean_norm * (dim + 1. - mean_norm**2) /
                          (1. - mean_norm**2))
        p = dim + 1
        n_steps = 100
        for i in range(n_steps):
            bessel_func_1 = scipy.special.iv(p / 2., kappa_estimate)
            bessel_func_2 = scipy.special.iv(p / 2. - 1., kappa_estimate)
            ratio = bessel_func_1 / bessel_func_2
            denominator = 1. - ratio**2 - (p - 1.) * ratio / kappa_estimate
            kappa_estimate = kappa_estimate - (ratio - mean_norm) / denominator
        expected = kappa
        result = kappa_estimate
        self.assertTrue(
            gs.allclose(result, expected, atol=KAPPA_ESTIMATION_TOL))
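For reference, the loop above is the standard Newton iteration for the maximum likelihood estimate of the von Mises-Fisher concentration. With p = dim + 1 the ambient dimension, \bar r the norm of the sample mean (mean_norm), and A_p the Bessel-function ratio computed in the loop, the closed-form initialization and the Newton update used by the test read:

\[
A_p(\kappa) = \frac{I_{p/2}(\kappa)}{I_{p/2-1}(\kappa)},
\qquad
\kappa_0 = \frac{\bar r\,(p - \bar r^{2})}{1 - \bar r^{2}},
\qquad
\kappa_{k+1} = \kappa_k - \frac{A_p(\kappa_k) - \bar r}{1 - A_p(\kappa_k)^{2} - \frac{p-1}{\kappa_k}\,A_p(\kappa_k)}.
\]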
Example #2
    def setUp(self):
        s2 = Hypersphere(dimension=2)
        r3 = s2.embedding_manifold

        initial_point = [0., 0., 1.]
        initial_tangent_vec_a = [1., 0., 0.]
        initial_tangent_vec_b = [0., 1., 0.]
        initial_tangent_vec_c = [-1., 0., 0.]

        curve_a = s2.metric.geodesic(initial_point=initial_point,
                                     initial_tangent_vec=initial_tangent_vec_a)
        curve_b = s2.metric.geodesic(initial_point=initial_point,
                                     initial_tangent_vec=initial_tangent_vec_b)
        curve_c = s2.metric.geodesic(initial_point=initial_point,
                                     initial_tangent_vec=initial_tangent_vec_c)

        self.n_sampling_points = 10
        sampling_times = gs.linspace(0., 1., self.n_sampling_points)
        discretized_curve_a = curve_a(sampling_times)
        discretized_curve_b = curve_b(sampling_times)
        discretized_curve_c = curve_c(sampling_times)

        self.n_discretized_curves = 5
        self.times = gs.linspace(0., 1., self.n_discretized_curves)
        self.atol = 1e-6
        gs.random.seed(1234)
        self.space_curves_in_euclidean_3d = DiscretizedCurvesSpace(
            embedding_manifold=r3)
        self.space_curves_in_sphere_2d = DiscretizedCurvesSpace(
            embedding_manifold=s2)
        self.l2_metric_s2 = self.space_curves_in_sphere_2d.l2_metric
        self.l2_metric_r3 = self.space_curves_in_euclidean_3d.l2_metric
        self.srv_metric_r3 = self.space_curves_in_euclidean_3d.\
            square_root_velocity_metric
        self.curve_a = discretized_curve_a
        self.curve_b = discretized_curve_b
        self.curve_c = discretized_curve_c
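The fixture above only prepares the curves and metrics. As a minimal usage sketch (assuming the L2 metric exposes geomstats' usual dist method and that gs is the backend imported by the test module, neither of which is shown in this excerpt), a test built on it could look like:

    def test_l2_dist_is_symmetric(self):
        # Sketch only: uses the fixtures prepared in setUp above.
        dist_ab = self.l2_metric_s2.dist(self.curve_a, self.curve_b)
        dist_ba = self.l2_metric_s2.dist(self.curve_b, self.curve_a)
        self.assertTrue(gs.allclose(dist_ab, dist_ba, atol=self.atol))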
Example #3
"""
Plot the result of optimal quantization of the von Mises-Fisher distribution
on the sphere.
"""

import matplotlib.pyplot as plt

import geomstats.visualization as visualization

from geomstats.hypersphere import Hypersphere

SPHERE2 = Hypersphere(dimension=2)
METRIC = SPHERE2.metric
N_POINTS = 1000
N_CENTERS = 4
N_REPETITIONS = 20
KAPPA = 10


def main():
    points = SPHERE2.random_von_mises_fisher(kappa=KAPPA, n_samples=N_POINTS)

    centers, weights, clusters, n_steps = METRIC.optimal_quantization(
        points=points, n_centers=N_CENTERS, n_repetitions=N_REPETITIONS)

    plt.figure(0)
    ax = plt.subplot(111, projection="3d", aspect="equal")
    visualization.plot(points=centers, ax=ax, space='S2', c='r')
    plt.show()

    plt.figure(1)
Example #4
"""Unit tests for visualization module."""

import matplotlib
matplotlib.use('Agg')  # NOQA
import unittest

import geomstats.visualization as visualization
from geomstats.hypersphere import Hypersphere
from geomstats.special_euclidean_group import SpecialEuclideanGroup
from geomstats.special_orthogonal_group import SpecialOrthogonalGroup

SO3_GROUP = SpecialOrthogonalGroup(n=3)
SE3_GROUP = SpecialEuclideanGroup(n=3)
S2 = Hypersphere(dimension=2)

# TODO(nina): add tests for examples


class TestVisualizationMethods(unittest.TestCase):
    _multiprocess_can_split_ = True

    def setUp(self):
        self.n_samples = 10

    def test_plot_points_so3(self):
        points = SO3_GROUP.random_uniform(self.n_samples)
        visualization.plot(points, space='SO3_GROUP')

    def test_plot_points_se3(self):
        points = SE3_GROUP.random_uniform(self.n_samples)
        visualization.plot(points, space='SE3_GROUP')
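The S2 space defined at the top of this snippet is not exercised by the two tests shown. A test in the same style is sketched below; it assumes Hypersphere.random_uniform and the space='S2' option of visualization.plot, both of which appear in the other examples on this page.

    def test_plot_points_s2(self):
        # Sketch only, mirroring the SO3/SE3 tests above.
        points = S2.random_uniform(self.n_samples)
        visualization.plot(points, space='S2')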
Example #5
"""
Plot the result of optimal quantization of the uniform distribution
on the circle.
"""

import matplotlib.pyplot as plt
import os

import geomstats.visualization as visualization

from geomstats.hypersphere import Hypersphere

CIRCLE = Hypersphere(dimension=1)
METRIC = CIRCLE.metric
N_POINTS = 1000
N_CENTERS = 5
N_REPETITIONS = 20
TOLERANCE = 1e-6


def main():
    points = CIRCLE.random_uniform(n_samples=N_POINTS, bound=None)

    centers, weights, clusters, n_iterations = METRIC.optimal_quantization(
        points=points,
        n_centers=N_CENTERS,
        n_repetitions=N_REPETITIONS,
        tolerance=TOLERANCE)

    plt.figure(0)
    visualization.plot(points=centers, space='S1', color='red')
Example #6
    def setUp(self):
        self.n_samples = 10
        self.SO3_GROUP = SpecialOrthogonalGroup(n=3)
        self.SE3_GROUP = SpecialEuclideanGroup(n=3)
        self.S2 = Hypersphere(dimension=2)
        self.H2 = HyperbolicSpace(dimension=2)
Example #7
print(x_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = Sequential()

kernel_size = (3, 3)
# A 3x3 kernel has kernel_size[0] * kernel_size[1] = 9 weights; a point of the
# hypersphere embedded in R^9 has manifold dimension 9 - 1 = 8.
hypersphere_dimension = kernel_size[0] * kernel_size[1] - 1
model.add(
    Conv2D(32,
           kernel_size=kernel_size,
           activation='relu',
           input_shape=input_shape,
           kernel_manifold=Hypersphere(hypersphere_dimension)))
model.add(
    Conv2D(64, (3, 3),
           activation='relu',
           kernel_manifold=Hypersphere(hypersphere_dimension)))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.SGD(lr=0.1),
              metrics=['accuracy'])
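The excerpt stops after compile. A typical next step would be training and evaluation; the sketch below uses hypothetical hyperparameters (batch_size, epochs) and assumes the usual x_train array from which y_train was built, none of which appear in the original excerpt.

model.fit(x_train, y_train,
          batch_size=128,      # hypothetical batch size
          epochs=10,           # hypothetical number of epochs
          verbose=1,
          validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])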

    def setUp(self):
        gs.random.seed(1234)
        self.dimension = 4
        self.space = Hypersphere(dimension=self.dimension)
        self.metric = self.space.metric
        self.n_samples = 3

    def setUp(self):
        gs.random.seed(1234)
        self.metric = HypersphereMetric(dimension=2)
        self.n_points = 1000
        self.points = Hypersphere(dimension=2).random_von_mises_fisher(
            kappa=10, n_samples=self.n_points)
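A minimal sketch of a test using this last fixture (assuming HypersphereMetric.dist and gs.pi from the geomstats backend, neither of which is shown in this excerpt):

    def test_dist_is_bounded_by_pi(self):
        # Sketch only: geodesic distances on the unit sphere never exceed pi.
        dist = self.metric.dist(self.points[0], self.points[1])
        self.assertTrue(gs.all(dist <= gs.pi))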