def test_padding_batch(self, padding, device, dtype):
    """Batched crop with padding should reproduce the reference volumes."""
    torch.manual_seed(42)
    batch_size = 2
    # Base 3x3 slice; repeat over batch and depth to get a (B, C, D, H, W) volume.
    base = torch.tensor(
        [[[[0.0, 1.0, 2.0],
           [3.0, 4.0, 5.0],
           [6.0, 7.0, 8.0]]]],
        device=device,
        dtype=dtype,
    )
    inp = base.repeat(batch_size, 1, 3, 1, 1)
    # Reference output: each sample is cropped at a (seeded) random offset,
    # with out-of-bounds regions filled with 10.
    expected = torch.tensor(
        [[[
            [[0., 1., 2., 10.],
             [3., 4., 5., 10.],
             [6., 7., 8., 10.]],
            [[0., 1., 2., 10.],
             [3., 4., 5., 10.],
             [6., 7., 8., 10.]],
        ]],
         [[
            [[3., 4., 5., 10.],
             [6., 7., 8., 10.],
             [10, 10, 10, 10.]],
            [[3., 4., 5., 10.],
             [6., 7., 8., 10.],
             [10, 10, 10, 10.]],
        ]]],
        device=device,
        dtype=dtype,
    )
    aug = RandomCrop3D(size=(2, 3, 4), fill=10., padding=padding, align_corners=True, p=1.)
    result = aug(inp)
    assert_allclose(result, expected, atol=1e-4, rtol=1e-4)
def test_no_padding(self, batch_size, device, dtype):
    """Without padding, the crop must be taken entirely inside the input."""
    torch.manual_seed(42)
    # 5x5 ramp slice repeated over batch and depth -> (B, C, 5, 5, 5) volume.
    base = torch.tensor(
        [[[[[0., 1., 2., 3., 4.],
            [5., 6., 7., 8., 9.],
            [10, 11, 12, 13, 14],
            [15, 16, 17, 18, 19],
            [20, 21, 22, 23, 24]]]]],
        device=device,
        dtype=dtype,
    )
    inp = base.repeat(batch_size, 1, 5, 1, 1)
    aug = RandomCrop3D(size=(2, 3, 4), padding=None, align_corners=True, p=1.)
    result = aug(inp)
    # The seeded RNG yields a different crop offset per sample, hence the
    # two separate reference tensors below.
    if batch_size == 1:
        expected = torch.tensor(
            [[[[
                [11, 12, 13, 14],
                [16, 17, 18, 19],
                [21, 22, 23, 24],
            ]]]],
            device=device,
            dtype=dtype,
        ).repeat(batch_size, 1, 2, 1, 1)
    elif batch_size == 2:
        expected = torch.tensor(
            [
                [[[[6.0000, 7.0000, 8.0000, 9.0000],
                   [11.0000, 12.0000, 13.0000, 14.0000],
                   [16.0000, 17.0000, 18.0000, 19.0000]],
                  [[6.0000, 7.0000, 8.0000, 9.0000],
                   [11.0000, 12.0000, 13.0000, 14.0000],
                   [16.0000, 17.0000, 18.0000, 19.0000]]]],
                [[[[11.0000, 12.0000, 13.0000, 14.0000],
                   [16.0000, 17.0000, 18.0000, 19.0000],
                   [21.0000, 22.0000, 23.0000, 24.0000]],
                  [[11.0000, 12.0000, 13.0000, 14.0000],
                   [16.0000, 17.0000, 18.0000, 19.0000],
                   [21.0000, 22.0000, 23.0000, 24.0000]]]],
            ],
            device=device,
            dtype=dtype,
        )
    assert_allclose(result, expected, atol=1e-4, rtol=1e-4)
def test_smoke(self):
    """The augmentation's __repr__ should expose all configured parameters."""
    f = RandomCrop3D(size=(2, 3, 4), padding=(0, 1, 2), fill=10, pad_if_needed=False, p=1.0)
    # Renamed from `repr` to avoid shadowing the builtin of the same name.
    expected_repr = (
        "RandomCrop3D(crop_size=(2, 3, 4), padding=(0, 1, 2), fill=10, pad_if_needed=False, "
        "padding_mode=constant, resample=BILINEAR, p=1.0, p_batch=1.0, same_on_batch=False, "
        "return_transform=False)"
    )
    assert str(f) == expected_repr
def test_same_on_batch(self, device, dtype):
    """With same_on_batch=True every sample must receive the identical crop.

    Fixes: the original created the input with ``torch.eye(6)`` on the default
    device/dtype, silently ignoring the ``device``/``dtype`` parametrization;
    the local name ``input`` also shadowed the builtin.
    """
    f = RandomCrop3D(size=(2, 3, 4), padding=None, align_corners=True, p=1.0, same_on_batch=True)
    # (2, 3, 5, 6, 6) volume: identity slice repeated across batch/channel/depth.
    inp = (
        torch.eye(6, device=device, dtype=dtype)
        .unsqueeze(dim=0)
        .unsqueeze(dim=0)
        .unsqueeze(dim=0)
        .repeat(2, 3, 5, 1, 1)
    )
    res = f(inp)
    # Both batch elements must be cropped identically.
    assert (res[0] == res[1]).all()
def test_pad_if_needed(self, device, dtype):
    """An input smaller than the crop size is padded up with `fill` first."""
    torch.manual_seed(42)
    # Input is only a single 1x3 row — far smaller than the (2, 3, 4) crop.
    small = torch.tensor([[
        [0., 1., 2.],
    ]], device=device, dtype=dtype)
    # Everything outside the original data is the fill value 9.
    expected = torch.tensor(
        [[[
            [[9., 9., 9., 9.],
             [9., 9., 9., 9.],
             [9., 9., 9., 9.]],
            [[0., 1., 2., 9.],
             [9., 9., 9., 9.],
             [9., 9., 9., 9.]],
        ]]],
        device=device,
        dtype=dtype,
    )
    crop = RandomCrop3D(size=(2, 3, 4), pad_if_needed=True, fill=9, align_corners=True, p=1.)
    result = crop(small)
    assert_allclose(result, expected, atol=1e-4, rtol=1e-4)
def test_gradcheck(self, device, dtype):
    """Gradients through the crop must pass torch.autograd's gradcheck."""
    torch.manual_seed(0)  # for random reproducibility
    tensor = torch.rand((3, 3, 3), device=device, dtype=dtype)
    tensor = utils.tensor_to_gradcheck_var(tensor)  # mark as requiring grad
    assert gradcheck(RandomCrop3D(size=(3, 3, 3), p=1.), (tensor,), raise_exception=True)