def empirical_frechet_var_bubble(n_samples, theta, dim, n_expectation=1000):
    """Variance of the empirical Fréchet mean for a bubble distribution.

    Draw n_samples from a bubble distribution, compute its empirical
    Fréchet mean and the squared distance to the asymptotic mean. This
    is repeated n_expectation times to compute an approximation of its
    expectation (i.e. its variance) by sampling.

    The bubble distribution is an isotropic distribution on a Riemannian
    hyper sub-sphere of radius 0 < theta < Pi around the north pole of
    the sphere of dimension dim.

    Parameters
    ----------
    n_samples : int
        Number of samples to draw.
    theta : float
        Radius of the bubble distribution.
    dim : int
        Dimension of the sphere (embedded in R^{dim+1}).
    n_expectation : int, optional (defaults to 1000)
        Number of computations for approximating the expectation.

    Returns
    -------
    tuple (variance, std-dev on the computed variance)
    """
    if dim <= 1:
        raise ValueError(
            'Dim > 1 needed to draw a uniform sample on sub-sphere.')
    var = []
    sphere = Hypersphere(dim=dim)
    bubble = Hypersphere(dim=dim - 1)

    north_pole = gs.zeros(dim + 1)
    north_pole[dim] = 1.0
    for _ in range(n_expectation):
        # Sample n points from the uniform distribution on a sub-sphere
        # of radius theta (i.e. at height cos(theta) in ambient space).
        # TODO (nina): Add this code as a method of hypersphere
        data = gs.zeros((n_samples, dim + 1), dtype=gs.float64)
        directions = bubble.random_uniform(n_samples)
        directions = gs.to_ndarray(directions, to_ndim=2)

        for i in range(n_samples):
            for j in range(dim):
                data[i, j] = gs.sin(theta) * directions[i, j]
            data[i, dim] = gs.cos(theta)

        # TODO (nina): Use FrechetMean here
        current_mean = _adaptive_gradient_descent(
            data, metric=sphere.metric, max_iter=32, init_point=north_pole)

        var.append(sphere.metric.squared_dist(north_pole, current_mean))
    return gs.mean(var), 2 * gs.std(var) / gs.sqrt(n_expectation)
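# Usage sketch (an assumption, not part of the original module): estimate the
# variance of the empirical Fréchet mean for 10 samples drawn on a bubble of
# radius pi / 6 on the 3-sphere. A small n_expectation keeps the run short;
# the second returned value estimates twice the standard error.
if __name__ == '__main__':
    import math

    variance, std_err = empirical_frechet_var_bubble(
        n_samples=10, theta=math.pi / 6, dim=3, n_expectation=50)
    print('empirical variance:', variance, '+/-', std_err)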
def empirical_frechet_mean_random_init_s2(data, n_init=1, init_points=None):
    """Fréchet mean on S2 by gradient descent from multiple starting points.

    Parameters
    ----------
    data : array-like
        Empirical distribution (sample points) on S2.
    n_init : int
        Number of initial points drawn uniformly at random on S2.
    init_points : list
        Initial points for the first gradient descent.

    Returns
    -------
    Fréchet mean achieving the smallest mean squared distance to the
    data among the gradient descent runs.
    """
    assert n_init >= 1, "Gradient descent needs at least one starting point"
    dim = len(data[0]) - 1
    sphere = Hypersphere(dimension=dim)
    if init_points is None or len(init_points) == 0:
        init_points = [sphere.random_uniform()]
        # For a noncompact manifold, this would need to be revised to a
        # ball of maximal radius.
    mean = _adaptive_gradient_descent(
        data, metric=sphere.metric, n_max_iterations=64,
        init_points=init_points)
    sigma_mean = mean_sq_dist_s2(mean, data)
    for _ in range(n_init - 1):
        init_points = [sphere.random_uniform()]
        new_mean = _adaptive_gradient_descent(
            data, metric=sphere.metric, n_max_iterations=64,
            init_points=init_points)
        sigma_new_mean = mean_sq_dist_s2(new_mean, data)
        if sigma_new_mean < sigma_mean:
            # Keep the candidate with the smallest Fréchet variance.
            mean = new_mean
            sigma_mean = sigma_new_mean
    return mean
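# Usage sketch (an assumption, not part of the original module): draw a small
# cloud of points on S2 and run the multi-start gradient descent with 5 random
# restarts. This assumes the helpers used above (`_adaptive_gradient_descent`,
# `mean_sq_dist_s2`) are importable from the same module.
if __name__ == '__main__':
    cloud = Hypersphere(dimension=2).random_uniform(n_samples=20)
    frechet_mean = empirical_frechet_mean_random_init_s2(cloud, n_init=5)
    print('Fréchet mean on S2:', frechet_mean)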
def test_adaptive_gradient_descent_sphere(self):
    n_tests = 100
    result = gs.zeros(n_tests)
    expected = gs.zeros(n_tests)

    for i in range(n_tests):
        # Take 2 random points, compute their mean, and verify that
        # the log of each point at the mean is opposite.
        points = self.sphere.random_uniform(n_samples=2)
        mean = _adaptive_gradient_descent(
            points=points, metric=self.sphere.metric)

        logs = self.sphere.metric.log(point=points, base_point=mean)
        result[i] = gs.linalg.norm(logs[1, :] + logs[0, :])
    self.assertAllClose(expected, result, rtol=1e-10, atol=1e-10)
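# Companion check (a sketch, not part of the original test suite): for two
# points, the Fréchet mean coincides with the geodesic midpoint, which can be
# computed in closed form with the exponential map and compared against the
# result of _adaptive_gradient_descent. The test name is hypothetical.
def test_adaptive_gradient_descent_sphere_midpoint(self):
    points = self.sphere.random_uniform(n_samples=2)
    mean = _adaptive_gradient_descent(
        points=points, metric=self.sphere.metric)

    # Geodesic midpoint: shoot from points[0] halfway towards points[1].
    midpoint = self.sphere.metric.exp(
        tangent_vec=0.5 * self.sphere.metric.log(
            point=points[1], base_point=points[0]),
        base_point=points[0])
    self.assertAllClose(
        gs.squeeze(midpoint), gs.squeeze(mean), rtol=1e-6, atol=1e-6)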
def empirical_frechet_var_bubble(n_samples, theta, dim, n_expectation=1000):
    """Variance of the empirical Fréchet mean for a bubble distribution.

    Draw n_samples from a bubble distribution, compute its empirical
    Fréchet mean and the squared distance to the asymptotic mean. This
    is repeated n_expectation times to compute an approximation of its
    expectation (i.e. its variance) by sampling.

    The bubble distribution is an isotropic distribution on a Riemannian
    hyper sub-sphere of radius theta > 0 around the origin (north pole)
    of the hyperbolic space of dimension dim.

    Parameters
    ----------
    n_samples : int
        Number of samples to draw.
    theta : float
        Radius of the bubble distribution.
    dim : int
        Dimension of the hyperbolic space (embedded in R^{1,dim}).
    n_expectation : int, optional (defaults to 1000)
        Number of computations for approximating the expectation.

    Returns
    -------
    tuple (variance, std-dev on the computed variance)
    """
    if dim <= 1:
        raise ValueError(
            'Dim > 1 needed to draw a uniform sample on sub-sphere.')
    var = []
    hyperbolic = Hyperbolic(dimension=dim)
    bubble = Hypersphere(dimension=dim - 1)

    origin = gs.zeros(dim + 1)
    origin[0] = 1.0
    for _ in range(n_expectation):
        # Sample n points from the uniform distribution on a sub-sphere
        # of radius theta (i.e. first ambient coordinate cosh(theta)).
        data = gs.zeros((n_samples, dim + 1), dtype=gs.float64)
        directions = bubble.random_uniform(n_samples)

        for i in range(n_samples):
            for j in range(dim):
                data[i, j + 1] = gs.sinh(theta) * directions[i, j]
            data[i, 0] = gs.cosh(theta)

        current_mean = _adaptive_gradient_descent(
            data, metric=hyperbolic.metric, n_max_iterations=64,
            init_points=[origin])

        var.append(hyperbolic.metric.squared_dist(origin, current_mean))
    return gs.mean(var), 2 * gs.std(var) / gs.sqrt(n_expectation)
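# Usage sketch (an assumption, not part of the original module): estimate the
# variance of the empirical Fréchet mean for 10 samples drawn on a bubble of
# radius 0.5 in the hyperbolic space H^3, using the function defined above.
if __name__ == '__main__':
    variance, std_err = empirical_frechet_var_bubble(
        n_samples=10, theta=0.5, dim=3, n_expectation=50)
    print('empirical variance (hyperbolic):', variance, '+/-', std_err)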