def test_cumprod_reverse(self, feats, boundaries):
    """Reverse cumulative product runs back-to-front within each boundary segment."""
    result = spc_render.cumprod(feats, boundaries, reverse=True)
    # Expected values hand-computed for the `feats`/`boundaries` fixtures.
    target = torch.tensor(
        [[1, 1], [1, 1], [6, 6], [6, 6], [3, 3], [5, 5]],
        device='cuda', dtype=torch.float)
    assert torch.equal(result, target)
def test_cumprod_exclusive(self, feats, boundaries):
    """Exclusive cumulative product shifts results by one: entry i excludes feats[i]."""
    result = spc_render.cumprod(feats, boundaries, reverse=False, exclusive=True)
    # Expected values hand-computed for the `feats`/`boundaries` fixtures;
    # the first entry of each segment is the multiplicative identity.
    target = torch.tensor(
        [[1, 1], [1, 1], [1, 1], [1, 1], [2, 2], [1, 1]],
        device='cuda', dtype=torch.float)
    assert torch.equal(result, target)
def test_cumprod_big_backward(self, feats_big, boundaries_big):
    """Gradients of the packed cumprod match torch.cumprod on a large input."""
    # Shift values away from zero so the products stay well-conditioned
    # for the gradient comparison below.
    feats_big += 1e-3
    feats_big.requires_grad = True
    feat_dim = feats_big.shape[-1]

    def _reset_grad():
        # Detach + zero any gradient left over from a previous backward pass.
        if feats_big.grad is not None:
            feats_big.grad.detach_()
            feats_big.grad.zero_()

    # Backward through the packed custom op.
    _reset_grad()
    packed_out = spc_render.cumprod(feats_big.reshape(-1, feat_dim), boundaries_big)
    packed_out.sum().backward()
    grad_custom = feats_big.grad.clone()

    # Backward through the torch reference implementation.
    _reset_grad()
    reference_out = torch.cumprod(feats_big, dim=1)
    reference_out.sum().backward()
    grad_reference = feats_big.grad.clone()

    # Loose tolerance: products of many floats accumulate rounding error.
    assert torch.allclose(grad_custom, grad_reference, atol=1e-2)
def test_cumprod_big(self, feats_big, boundaries_big):
    """Packed cumprod agrees with torch.cumprod along dim=1 on a large input."""
    feat_dim = feats_big.shape[-1]
    result = spc_render.cumprod(feats_big.reshape(-1, feat_dim), boundaries_big)
    # Flatten the torch reference to the same packed (N, feat_dim) layout.
    reference = torch.cumprod(feats_big, dim=1).reshape(-1, feat_dim)
    assert torch.allclose(result, reference, atol=1e-4)