Example #1
0
 def test_api_dygraph(self):
     """Check paddle.diagonal against numpy.diagonal in dygraph mode."""
     paddle.disable_static(self.place)
     tensor_in = paddle.to_tensor(self.x)
     actual = paddle.diagonal(tensor_in)
     expected = np.diagonal(self.x)
     self.assertEqual(np.allclose(actual.numpy(), expected, rtol=1e-08), True)
     # Restore static mode so later tests are unaffected.
     paddle.enable_static()
Example #2
0
    def gwd_loss(self,
                 pred,
                 target,
                 fun='log',
                 tau=1.0,
                 alpha=1.0,
                 normalize=False):
        """Gaussian Wasserstein distance (GWD) loss between rotated boxes.

        Boxes are modeled as 2-D Gaussians via ``self.xywhr2xyrs``; the loss
        is the squared Wasserstein distance between the two Gaussians.

        Args:
            pred (Tensor): predicted boxes; last dim is [x, y, w, h, r]
                (assumed — layout comes from xywhr2xyrs, defined elsewhere).
            target (Tensor): ground-truth boxes, same layout as ``pred``.
            fun (str): if ``'log'``, apply log1p to the distance.
            tau (float): if >= 1.0, return ``1 - 1 / (tau + distance)``.
            alpha (float): weight applied (squared) to the shape term.
            normalize (bool): divide the distance by a scale derived from
                the geometric mean of the box areas.

        Returns:
            Tensor: per-box loss.
        """
        xy_p, R_p, S_p = self.xywhr2xyrs(pred)
        xy_t, R_t, S_t = self.xywhr2xyrs(target)

        # Squared Euclidean distance between the Gaussian centers.
        xy_distance = (xy_p - xy_t).square().sum(axis=-1)

        # Covariance matrices: Sigma = R * S^2 * R^T.
        Sigma_p = R_p.matmul(S_p.square()).matmul(R_p.transpose([0, 2, 1]))
        Sigma_t = R_t.matmul(S_t.square()).matmul(R_t.transpose([0, 2, 1]))

        # Tr(Sigma_p) + Tr(Sigma_t): S appears diagonal here, so summing the
        # squared diagonal gives the trace of R S^2 R^T.
        whr_distance = paddle.diagonal(
            S_p, axis1=-2, axis2=-1).square().sum(axis=-1)

        whr_distance = whr_distance + paddle.diagonal(
            S_t, axis1=-2, axis2=-1).square().sum(axis=-1)
        _t = Sigma_p.matmul(Sigma_t)

        _t_tr = paddle.diagonal(_t, axis1=-2, axis2=-1).sum(axis=-1)
        # Product of diagonal entries = det(S) for a diagonal S.
        _t_det_sqrt = paddle.diagonal(S_p, axis1=-2, axis2=-1).prod(axis=-1)
        _t_det_sqrt = _t_det_sqrt * paddle.diagonal(
            S_t, axis1=-2, axis2=-1).prod(axis=-1)
        # clip(0) guards the sqrt against small negative values from
        # floating-point error.
        whr_distance = whr_distance + (-2) * (
            (_t_tr + 2 * _t_det_sqrt).clip(0).sqrt())

        distance = (xy_distance + alpha * alpha * whr_distance).clip(0)

        if normalize:
            wh_p = pred[..., 2:4].clip(min=1e-7, max=1e7)
            wh_t = target[..., 2:4].clip(min=1e-7, max=1e7)
            # BUG FIX: Paddle's Tensor.sum takes `axis`, not torch's `dim`;
            # the original `.sum(dim=-1)` raised a TypeError whenever
            # normalize=True was used.
            scale = ((wh_p.log() + wh_t.log()).sum(axis=-1) / 4).exp()
            distance = distance / scale

        if fun == 'log':
            distance = paddle.log1p(distance)

        if tau >= 1.0:
            return 1 - 1 / (tau + distance)

        return distance
Example #3
0
    def test_api_eager(self):
        """Check paddle.diagonal against numpy.diagonal (eager mode) for
        several offset/axis combinations."""
        paddle.disable_static(self.place)
        # Each entry is one keyword configuration exercised against both
        # paddle.diagonal and np.diagonal.
        kwargs_table = [
            {},
            {'offset': 0, 'axis1': 2, 'axis2': 1},
            {'offset': 1, 'axis1': 0, 'axis2': 1},
            {'offset': 0, 'axis1': 1, 'axis2': 2},
        ]
        with _test_eager_guard():
            x_tensor = paddle.to_tensor(self.x)
            results = [paddle.diagonal(x_tensor, **kw) for kw in kwargs_table]
        for got, kw in zip(results, kwargs_table):
            expected = np.diagonal(self.x, **kw)
            self.assertEqual(
                np.allclose(got.numpy(), expected, rtol=1e-08), True)

        # Restore static mode so later tests are unaffected.
        paddle.enable_static()
Example #4
0
 def test_api_static(self):
     """Check paddle.diagonal against numpy.diagonal under the static graph."""
     paddle.enable_static()
     main_prog = paddle.static.Program()
     with paddle.static.program_guard(main_prog):
         x_var = paddle.fluid.data('X', self.shape)
         diag_var = paddle.diagonal(x_var)
         executor = paddle.static.Executor(self.place)
         fetched = executor.run(feed={'X': self.x}, fetch_list=[diag_var])
     expected = np.diagonal(self.x)
     for got in fetched:
         self.assertEqual(np.allclose(got, expected, rtol=1e-08), True)
Example #5
0
    def log_det_by_cholesky(self, matrix):
        """Compute log(det(matrix)) via a Cholesky factorization.

        For a positive-definite M = L L^T, log det(M) = 2 * sum(log diag(L)).

        Args:
            matrix: positive-definite matrices of shape [N, C, D, D].

        Returns:
            Tensor: log-determinants, reduced over the last axis.
        """
        factor = paddle.cholesky(matrix)
        diag_entries = paddle.diagonal(factor, offset=0, axis1=-2, axis2=-1)
        # 1e-8 keeps log() finite when a diagonal entry underflows to ~0.
        log_diag = paddle.log(diag_entries + 1e-8)
        return 2.0 * paddle.sum(log_diag, axis=-1)
Example #6
0
 def trace(self, A):
     """Return the trace of each trailing 2-D matrix in `A`
     (sum of the diagonal over the last two axes)."""
     main_diag = paddle.diagonal(A, axis1=-2, axis2=-1)
     return paddle.sum(main_diag, axis=-1)