def test_voxelgrids_dim(self, device, dtype):
    """Passing a non-batched (3D) grid must raise ValueError."""
    grid = torch.ones([6, 6, 6], device=device, dtype=dtype)
    with pytest.raises(ValueError,
                       match="Expected voxelgrids to have 4 dimensions "
                             "but got 3 dimensions."):
        vg.downsample(grid, [2, 2, 2])
def test_scale_val_2(self, device, dtype):
    """A downsample ratio below 1 in any dimension must raise ValueError."""
    grid = torch.ones([2, 6, 6, 6], device=device, dtype=dtype)
    expected_msg = ("Downsample ratio must be at least 1 "
                    "along every dimension but got -1 at "
                    "index 0.")
    with pytest.raises(ValueError, match=expected_msg):
        vg.downsample(grid, [-1, 3, 2])
def test_scale_val_1(self, device, dtype):
    """A downsample ratio larger than the grid extent must raise ValueError."""
    grid = torch.ones([2, 6, 6, 6], device=device, dtype=dtype)
    expected_msg = ("Downsample ratio must be less than voxelgrids "
                    "shape of 6 at index 2, but got 7.")
    with pytest.raises(ValueError, match=expected_msg):
        vg.downsample(grid, [1, 2, 7])
def test_scale_dim(self, device, dtype):
    """Scale must be a length-3 list or an int; anything else raises."""
    grid = torch.ones([2, 6, 6, 6], device=device, dtype=dtype)
    # A list scale with only two entries is rejected with a ValueError.
    with pytest.raises(ValueError,
                       match="Expected scale to have 3 dimensions "
                             "but got 2 dimensions."):
        vg.downsample(grid, [2, 2])
    # A scale that is neither list nor int is rejected with a TypeError.
    with pytest.raises(TypeError,
                       match="Expected scale to be type list or int "
                             "but got <class 'str'>."):
        vg.downsample(grid, "2")
def test_bool_input(self, device, dtype):
    """Boolean grids downsample to floating-point occupancy fractions."""
    if dtype != torch.bool:
        pytest.skip("This test is only for torch.bool.")
    grid = torch.ones((2, 4, 4, 4), device=device, dtype=dtype)
    # Clear slices 1 and 3 along dim 2 so every 2x2x2 cell is half occupied.
    grid[:, :, 1, :] = 0
    grid[:, :, 3, :] = 0
    result = vg.downsample(grid, 2)
    # Expected output dtype differs by device: half on CUDA, float32 on CPU.
    out_dtype = torch.half if device == "cuda" else torch.float
    expected = torch.full((2, 2, 2, 2), 0.5, device=device, dtype=out_dtype)
    assert torch.equal(result, expected)
def test_output_batch(self, device, dtype):
    """Batched input keeps its batch dim and averages each cell correctly."""
    if dtype == torch.bool:
        pytest.skip("This test won't work for torch.bool.")
    # The size of the batched input should be correct.
    # For example, an input of size [2, 6, 6, 6] with scale [3, 3, 3]
    # yields an output of size [2, 2, 2, 2].
    # Also check that the averaged values are numerically correct.
    grid_a = torch.ones((4, 4, 4), device=device, dtype=dtype)
    grid_b = torch.ones((4, 4, 4), device=device, dtype=dtype)
    grid_b[1, :2] = 0.8
    grid_b[1, 2:] = 0.4
    grid_b[3] = 0
    batch = torch.stack((grid_a, grid_b))
    result = vg.downsample(batch, [2, 2, 2])
    expected_a = torch.ones((2, 2, 2), device=device, dtype=dtype)
    expected_b = torch.tensor(
        [[[0.9, 0.9], [0.7, 0.7]],
         [[0.5, 0.5], [0.5, 0.5]]],
        device=device, dtype=dtype)
    assert torch.allclose(result, torch.stack((expected_a, expected_b)))
def test_output_size(self, device, dtype):
    """Output shape equals input shape divided element-wise by the scale."""
    grid = torch.ones([3, 6, 6, 6], device=device, dtype=dtype)
    result = vg.downsample(grid, [1, 2, 3])
    assert result.shape == torch.Size([3, 6, 3, 2])