Code example #1
    def _loss_fn(self, matrix, rels_reversed):
        """Given a numpy array with vectors for u, v and negative samples, computes loss value.

        Parameters
        ----------
        matrix : numpy.array
            Array containing vectors for u, v and negative samples, of shape (2 + negative_size, dim).
        rels_reversed : bool
            Whether the relations are reversed; not used in the computation below,
            but kept because autograd requires this exact signature (see Warnings).

        Returns
        -------
        float
            Computed loss value.

        Warnings
        --------
        Only used for autograd gradients, since autograd requires a specific function signature.
        """
        # `grad_np` is autograd's NumPy wrapper (import autograd.numpy as grad_np),
        # so the whole computation below is differentiable via autograd.
        vector_u = matrix[0]
        vectors_v = matrix[1:]
        euclidean_dists = grad_np.linalg.norm(vector_u - vectors_v, axis=1)
        norm = grad_np.linalg.norm(vector_u)
        all_norms = grad_np.linalg.norm(vectors_v, axis=1)
        # Poincare distance: arccosh(1 + 2||u - v||^2 / ((1 - ||u||^2)(1 - ||v||^2)))
        poincare_dists = grad_np.arccosh(1 + 2 * ((euclidean_dists**2) /
                                                  ((1 - norm**2) *
                                                   (1 - all_norms**2))))
        if self.loss_type == 'nll':
            return PoincareModel._nll_loss_fn(poincare_dists)
        elif self.loss_type == 'neg':
            return PoincareModel._neg_loss_fn(poincare_dists, self.neg_r,
                                              self.neg_t, self.neg_mu)
        elif self.loss_type == 'maxmargin':
            return PoincareModel._maxmargin_loss_fn(poincare_dists,
                                                    self.maxmargin_margin)
        else:
            raise ValueError('Unknown loss type: ' + self.loss_type)
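The method above exists only so autograd can differentiate the loss. As a minimal, self-contained sketch of that pattern, the function below repeats the distance computation and wraps it in a softmax-style negative log-likelihood; since `_nll_loss_fn` is not shown above, the exact objective here is an assumption modeled on the standard Poincaré embeddings NLL. `autograd.grad` then returns gradients with respect to the whole matrix.

import autograd.numpy as grad_np
from autograd import grad

def poincare_nll_loss(matrix):
    # Row 0 is u; row 1 is the positive v; remaining rows are negative samples.
    vector_u = matrix[0]
    vectors_v = matrix[1:]
    euclidean_dists = grad_np.linalg.norm(vector_u - vectors_v, axis=1)
    norm = grad_np.linalg.norm(vector_u)
    all_norms = grad_np.linalg.norm(vectors_v, axis=1)
    poincare_dists = grad_np.arccosh(
        1 + 2 * (euclidean_dists ** 2) / ((1 - norm ** 2) * (1 - all_norms ** 2)))
    # Assumed NLL objective: softmax over negated distances, positive pair first.
    exp_neg = grad_np.exp(-poincare_dists)
    return -grad_np.log(exp_neg[0] / grad_np.sum(exp_neg))

loss_grad = grad(poincare_nll_loss)  # gradient w.r.t. the full (2 + neg, dim) matrix
matrix = grad_np.random.uniform(-0.2, 0.2, size=(5, 10))  # all rows inside the unit ball
gradients = loss_grad(matrix)  # same shape as matrix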
Code example #2
# Assumed imports: numpy as np, numpy.linalg as la, numpy.testing as np_testing.
def test_dist(self):
    # Halving the random points keeps them strictly inside the unit ball.
    x = self.man.rand() / 2
    y = self.man.rand() / 2
    # Per-factor Poincare distances (taken along axis 0) combined as a sum of
    # squares, compared against the squared manifold distance.
    correct_dist = np.sum(
        np.arccosh(1 + 2 * la.norm(x - y, axis=0)**2 /
                   (1 - la.norm(x, axis=0)**2) /
                   (1 - la.norm(y, axis=0)**2))**2)
    np_testing.assert_allclose(correct_dist, self.man.dist(x, y)**2)
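This test exercises the distance on a product of Poincaré balls: per-ball distances are combined as the square root of the sum of squares. A standalone sketch of that product distance, with the shape convention (one ball per column) inferred from the axis=0 norms above:

import numpy as np

def poincare_product_dist(x, y):
    # x, y: (n, k) arrays holding k points of dimension n, one Poincare ball
    # per column, each column strictly inside the unit ball.
    sq_dists = np.linalg.norm(x - y, axis=0) ** 2
    denom = ((1 - np.linalg.norm(x, axis=0) ** 2) *
             (1 - np.linalg.norm(y, axis=0) ** 2))
    per_ball = np.arccosh(1 + 2 * sq_dists / denom)
    return np.sqrt(np.sum(per_ball ** 2))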
Code example #3
File: test_hyperbolic.py  Project: pymanopt/pymanopt
def test_dist(self):
    # Halving the random points keeps them strictly inside the unit ball.
    x = self.manifold.random_point() / 2
    y = self.manifold.random_point() / 2
    correct_dist = np.arccosh(
        1
        + 2
        * np.linalg.norm(x - y) ** 2
        / (1 - np.linalg.norm(x) ** 2)
        / (1 - np.linalg.norm(y) ** 2)
    )
    np_testing.assert_allclose(correct_dist, self.manifold.dist(x, y))
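For a single point pair the same formula reduces to the scalar case, which can be sanity-checked through the identity cosh(d(x, y)) = 1 + 2‖x − y‖² / ((1 − ‖x‖²)(1 − ‖y‖²)). A minimal check, independent of pymanopt:

import numpy as np

def poincare_dist(x, y):
    sq = np.sum((x - y) ** 2)
    denom = (1 - np.sum(x ** 2)) * (1 - np.sum(y ** 2))
    return np.arccosh(1 + 2 * sq / denom)

rng = np.random.default_rng(0)
x = rng.standard_normal(3)
y = rng.standard_normal(3)
x *= 0.5 / np.linalg.norm(x)  # rescale so both points lie inside the ball
y *= 0.5 / np.linalg.norm(y)
d = poincare_dist(x, y)
# cosh(d) should reproduce the arccosh argument exactly
np.testing.assert_allclose(
    np.cosh(d),
    1 + 2 * np.sum((x - y) ** 2) / ((1 - np.sum(x ** 2)) * (1 - np.sum(y ** 2))))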
Code example #4
File: test_scalar_ops.py  Project: zaxtax/autograd
# Older autograd test-helper API: check_grads(fun, *args) checks gradients at args.
def test_arccosh():
    fun = lambda x: 3.0 * np.arccosh(x)
    d_fun = grad(fun)
    # randn()**2 + 1 >= 1, so the input stays inside arccosh's domain.
    check_grads(fun, npr.randn()**2 + 1)
    check_grads(d_fun, npr.randn()**2 + 1)
Code example #5
File: test_scalar_ops.py  Project: HIPS/autograd
# Newer autograd API: check_grads(fun) returns a checker that is applied to the inputs.
def test_arccosh():
    fun = lambda x: 3.0 * np.arccosh(x)
    check_grads(fun)(npr.randn()**2 + 1)
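The check_grads calls above verify autograd's arccosh derivative numerically. The closed form is d/dx arccosh(x) = 1/√(x² − 1) for x > 1, which the sketch below (assuming autograd is installed) compares directly against autograd.grad:

import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad

fun = lambda x: 3.0 * np.arccosh(x)
x = npr.randn() ** 2 + 1.0 + 1e-3  # strictly greater than 1, inside arccosh's domain
analytic = 3.0 / np.sqrt(x ** 2 - 1.0)  # 3 * d/dx arccosh(x)
assert np.allclose(grad(fun)(x), analytic)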