Esempio n. 1
0
    def testSmoothSVMth_functional(self):
        """Functional smooth top-k SVM matches the pure-python reference."""
        loss_fn = Topk_Smooth_SVM(self.labels, self.k, self.tau)
        out_torch = loss_fn(V(self.x), V(self.y))
        out_ref = svm_topk_smooth_py_1(V(self.x), V(self.y), self.tau, self.k)
        assert_all_close(out_torch, out_ref)
Esempio n. 2
0
    def testSmoothSVM(self):
        """Smooth top-1 SVM module agrees with the python reference."""
        module = SmoothTop1SVM(self.n_classes, tau=self.tau)
        got = module(V(self.x), V(self.y))
        want = smooth_svm_py(V(self.x), V(self.y), self.tau)
        assert_all_close(got, want)
Esempio n. 3
0
    def testMaxSVM(self):
        """Max top-1 SVM module agrees with the python reference."""
        module = MaxTop1SVM(self.n_classes, alpha=self.alpha)
        got = module(V(self.x), V(self.y))
        want = max_svm_py(V(self.x), V(self.y), alpha=self.alpha)
        assert_all_close(got, want)
Esempio n. 4
0
    def testMaxSVMtopk(self):
        """Max top-k SVM module agrees with the python reference."""
        module = MaxTopkSVM(self.n_classes, k=self.k)
        got = module(V(self.x), V(self.y))
        want = svm_topk_max_py(V(self.x), V(self.y), k=self.k)
        assert_all_close(got, want)
Esempio n. 5
0
 def testSmoothSVMth_loss_scales(self):
     """Loss module matches the reference mean loss across input scales."""
     loss_fn = SmoothTopkSVM(self.n_classes, tau=self.tau, k=self.k)
     for factor in (1e-4, 1e-3, 1e-2, 1e-1, 1e0, 1e1, 1e2, 1e3):
         scaled = self.x * factor
         out_torch = loss_fn(V(scaled), V(self.y))
         out_ref = svm_topk_smooth_py_1(V(scaled), V(self.y),
                                        self.tau, self.k).mean()
         assert_all_close(out_torch, out_ref)
Esempio n. 6
0
    def testMulTensors(self):
        """Multiplying two LogTensors adds the underlying log-values."""
        product = LogTensor(V(self.x)) * LogTensor(V(self.y))
        got = product.torch()
        want = self.x.double() + self.y.double()
        assert_all_close(want, got)
Esempio n. 7
0
    def testSumTensors(self):
        """Adding two LogTensors is log-sum-exp of the underlying values."""
        total = LogTensor(V(self.x)) + LogTensor(V(self.y))
        got = total.torch()
        want = torch.log(torch.exp(self.x.double()) +
                         torch.exp(self.y.double()))
        assert_all_close(want, got)
Esempio n. 8
0
    def testSmoothSVMth_loss(self):
        """Loss module matches the reference mean loss for every k >= 2."""
        for topk in range(2, self.k + 1):
            module = SmoothTopkSVM(self.n_classes, tau=self.tau, k=topk)
            got = module(V(self.x), V(self.y))
            want = svm_topk_smooth_py_1(V(self.x), V(self.y),
                                        self.tau, topk).mean()
            assert_all_close(got, want)
Esempio n. 9
0
    def testLogSumProductExp(self):
        """LogSumExp(p=1) yields logs of the order k-1 and k sum-products."""
        self.n_samples = 25
        self.n_classes = 20
        self.k = 7
        self.x = torch.randn(self.n_samples, self.n_classes)

        both = LogSumExp(self.k, p=1)(V(self.x)).squeeze()
        got_km1, got_k = both[0], both[1]
        want_km1 = np.log(sum_product_py(V(torch.exp(self.x)), self.k - 1))
        want_k = np.log(sum_product_py(V(torch.exp(self.x)), self.k))

        assert_all_close(got_km1, want_km1)
        assert_all_close(got_k, want_k)
Esempio n. 10
0
    def test_backward(self):
        """Check LogSumExp's manual backward against double-precision autograd.

        For a range of temperatures tau, rows whose top-k score gap makes the
        computation numerically saturated ("hard") are filtered out; forward
        values and gradients of the manual LogSumExp are then compared to the
        autograd reference log_sum_exp_k_autograd on the remaining rows.
        """
        self.n_samples = 25
        self.n_classes = 1000
        # NOTE(review): removed dead `self.k = 100` -- it was immediately
        # overwritten, so only k = 20 was ever exercised.
        self.k = 20
        self.x = torch.randn(self.n_samples, self.n_classes)
        # sort each row descending so the topk slices below are ordered
        self.x, _ = torch.sort(self.x, dim=1, descending=True)

        for tau in (5e-3, 1e-2, 5e-2, 1e-1, 5e-1, 1, 5, 1e1, 5e2, 1e3):
            x = self.x / (tau * self.k)
            top, _ = x.topk(self.k + 1, 1)
            thresh = 1e2
            # a row is "hard" when the gap between its k-th and (k+1)-th
            # scores exceeds log(thresh); those rows are dropped below
            hard = torch.ge(top[:, self.k - 1] - top[:, self.k],
                            math.log(thresh))
            smooth = hard.eq(0)

            # keep only the numerically smooth rows
            x = x[smooth.unsqueeze(1).expand_as(x)].view(-1, x.size(1))
            if not x.size():
                # NOTE(review): on current torch a 0-row 2-D tensor still has
                # a truthy size; presumably a legacy-torch guard -- confirm
                print('empty tensor')
                return

            # float64 copy for the autograd reference, float32 for the
            # manual implementation under test
            X_auto = Variable(x.double(), requires_grad=True)
            X_man = Variable(x, requires_grad=True)

            res1_auto, res2_auto = log_sum_exp_k_autograd(X_auto, self.k)
            res1_auto, res2_auto = res1_auto.squeeze(), res2_auto.squeeze()

            res_man = LogSumExp(self.k)(X_man).squeeze()
            res1_man = res_man[0]
            res2_man = res_man[1]

            # all-ones projections: every output element contributes to the
            # scalar whose gradient is compared
            proj1 = torch.randn(res1_auto.size()).fill_(1)
            proj2 = torch.randn(res2_auto.size()).fill_(1)

            proj_auto = torch.dot(V(proj1.double()), res1_auto) +\
                torch.dot(V(proj2.double()), res2_auto)
            proj_man = torch.dot(V(proj1), res1_man) +\
                torch.dot(V(proj2), res2_man)
            proj_auto.backward()
            proj_man.backward()

            # check forward
            assert_all_close(res1_auto, res1_man, atol=1e0, rtol=1e-3)
            assert_all_close(res2_auto, res2_man, atol=1e0, rtol=1e-3)

            # check backward
            assert_all_close(X_auto.grad, X_man.grad, atol=0.05, rtol=1e-2)
Esempio n. 11
0
    def testMulZero(self):
        """Multiplying a LogTensor by zero gives -inf everywhere (log of 0)."""
        product = LogTensor(V(self.x)) * 0
        got = product.torch()
        want = -np.inf * np.ones(got.size())
        assert_all_close(want, got)
Esempio n. 12
0
    def testMulNonZero(self):
        """Multiplying by a positive constant shifts log-values by its log."""
        product = LogTensor(V(self.x)) * self.nonzero_const
        got = product.torch()
        want = self.x.double() + math.log(self.nonzero_const)
        assert_all_close(want, got)
Esempio n. 13
0
    def testSumZero(self):
        """Adding zero to a LogTensor leaves its log-values unchanged."""
        total = LogTensor(V(self.x)) + 0
        got = total.torch()
        want = self.x
        assert_all_close(want, got)
Esempio n. 14
0
    def testSumNonZero(self):
        """Adding a constant c to a LogTensor yields log(exp(x) + c)."""
        total = LogTensor(V(self.x)) + self.nonzero_const
        got = total.torch()
        want = torch.log(torch.exp(self.x.double()) + self.nonzero_const)
        assert_all_close(want, got)
Esempio n. 15
0
 def testGradSmoothSVMth_loss(self):
     """gradcheck passes for every k and a wide range of input scales."""
     for topk in range(2, self.k + 1):
         loss_fn = SmoothTopkSVM(self.n_classes, tau=self.tau, k=topk)
         for factor in (1e-4, 1e-3, 1e-2, 1e-1, 1e0, 1e1, 1e2, 1e3, 1e4):
             inp = Variable(self.x * factor, requires_grad=True)
             ok = gradcheck(lambda z: loss_fn(z, V(self.y)),
                            (inp,), atol=1e-2, rtol=1e-3,
                            eps=max(1e-4 * factor, 1e-2))
             assert ok, "failed with scale={}, k={}".format(factor, topk)
Esempio n. 16
0
    def testSmoothSVMpy(self):
        """Both pure-python smooth top-k SVM implementations agree."""
        first = svm_topk_smooth_py_1(V(self.x), V(self.y), self.tau, self.k)
        second = svm_topk_smooth_py_2(V(self.x), V(self.y), self.tau, self.k)
        assert_all_close(first, second)