Ejemplo n.º 1
0
 def test_cumsum_reverse(self, feats, boundaries):
     """Reverse (suffix) cumulative sum within each boundary-delimited segment."""
     result = spc_render.cumsum(feats, boundaries, reverse=True)
     target = torch.tensor([[2, 2], [1, 1], [6, 6], [5, 5], [3, 3], [5, 5]],
                           dtype=torch.float, device='cuda')
     assert torch.equal(result, target)
Ejemplo n.º 2
0
 def test_cumsum_exclusive(self, feats, boundaries):
     """Exclusive forward cumsum: each position sums strictly-preceding entries."""
     out = spc_render.cumsum(
         feats, boundaries, reverse=False, exclusive=True)
     want = torch.tensor([[0, 0], [1, 1], [0, 0], [1, 1], [3, 3], [0, 0]],
                         dtype=torch.float, device='cuda')
     assert torch.equal(out, want)
Ejemplo n.º 3
0
    def test_cumsum_big_backward(self, feats_big, boundaries_big):
        """Gradient of the custom cumsum matches torch.cumsum's gradient."""
        feats_big.requires_grad = True
        fdim = feats_big.shape[-1]

        def reset_grad():
            # Clear any gradient accumulated by a previous backward pass
            # so the two runs below are independent.
            if feats_big.grad is not None:
                feats_big.grad.detach_()
                feats_big.grad.zero_()

        reset_grad()
        spc_render.cumsum(feats_big.reshape(-1, fdim),
                          boundaries_big).sum().backward()
        grad_custom = feats_big.grad.clone()

        reset_grad()
        torch.cumsum(feats_big, dim=1).sum().backward()
        grad_reference = feats_big.grad.clone()

        assert torch.allclose(grad_custom, grad_reference, atol=1e-4)
Ejemplo n.º 4
0
 def test_cumsum_big(self, feats_big, boundaries_big):
     """Custom cumsum agrees with torch.cumsum on a large input."""
     fdim = feats_big.shape[-1]
     flat = feats_big.reshape(-1, fdim)
     reference = torch.cumsum(feats_big, dim=1).reshape(-1, fdim)
     assert torch.allclose(spc_render.cumsum(flat, boundaries_big),
                           reference, atol=1e-5)