Example #1
    def test_normalized_mean_filter(self, device, dtype):
        kernel = torch.ones(1, 3, 3, 3, device=device, dtype=dtype)
        input = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        input = input.expand(2, 2, -1, -1, -1)

        nv = 5. / 27  # normalization value
        expected = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        expected = expected.expand(2, 2, -1, -1, -1)

        actual = kornia.filter3D(input, kernel, normalized=True)

        tol_val: float = utils._get_precision_by_name(device, 'xla', 1e-1,
                                                      1e-4)
        assert_allclose(actual, expected, rtol=tol_val, atol=tol_val)
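
The normalization value used above follows directly from the kernel: with normalized=True the 3x3x3 box kernel of ones is rescaled so that its 27 taps sum to one, so the single voxel of value 5 contributes 5 / 27 to every output voxel in its 3x3x3 neighbourhood. A minimal stand-alone sketch of that arithmetic (plain PyTorch, not part of the test suite):

import torch

kernel = torch.ones(1, 3, 3, 3)
nv = 5.0 / kernel.sum()  # 27 taps of weight 1/27 each, hit by a single voxel of value 5
print(float(nv))         # ~0.1852, the value called `nv` in the test above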
Example #2
    def test_noncontiguous(self, device, dtype):
        batch_size = 3
        inp = torch.rand(3, 5, 5, 5, device=device,
                         dtype=dtype).expand(batch_size, -1, -1, -1, -1)
        kernel = torch.ones(1, 2, 2, 2, device=device, dtype=dtype)

        actual = kornia.filter3D(inp, kernel)
        expected = actual  # no reference output; this test only checks that the call works on non-contiguous input
        assert_allclose(actual, expected)
Example #3
    def test_mean_filter_2batch_2ch(self, device, dtype):
        kernel = torch.ones(1, 3, 3, 3, device=device, dtype=dtype)
        input = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        input = input.expand(2, 2, -1, -1, -1)

        expected = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 5., 5., 5., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        expected = expected.expand(2, 2, -1, -1, -1)

        actual = kornia.filter3D(input, kernel)
        assert_allclose(actual, expected)
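
Without normalization the ones kernel simply sums the 3x3x3 neighbourhood, so the impulse of 5 is copied to all 27 surrounding output voxels. A hedged stand-alone cross-check with plain conv3d (zero padding reproduces the expected values for this particular input; this is only an illustration, not the library's implementation):

import torch
import torch.nn.functional as F

inp = torch.zeros(1, 1, 3, 5, 5)
inp[0, 0, 1, 2, 2] = 5.0
ref = F.conv3d(inp, torch.ones(1, 1, 3, 3, 3), padding=1)
# `ref` equals 5 everywhere inside the 3x3x3 neighbourhood of the impulse and 0 elsewhere,
# matching one batch/channel slice of `expected` in the test above.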
Example #4
    def test_normalized_mean_filter(self, device, dtype):
        kernel = torch.ones(1, 3, 3, 3, device=device, dtype=dtype)
        input = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        input = input.expand(2, 2, -1, -1, -1)

        nv = 5. / 27  # normalization value
        expected = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., nv, nv, nv, 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)
        expected = expected.expand(2, 2, -1, -1, -1)

        actual = kornia.filter3D(input, kernel, normalized=True)
        assert_allclose(actual, expected)
Example #5
    def test_even_sized_filter(self, device, dtype):
        kernel = torch.ones(1, 2, 2, 2, device=device, dtype=dtype)
        input = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)

        expected = torch.tensor([[[
            [[0., 0., 0., 0., 0.],
             [0., 5., 5., 0., 0.],
             [0., 5., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 5., 5., 0., 0.],
             [0., 5., 5., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
            [[0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.],
             [0., 0., 0., 0., 0.]],
        ]]], device=device, dtype=dtype)

        actual = kornia.filter3D(input, kernel)
        assert_allclose(actual, expected)
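
With an even-sized kernel there is no exact centre tap, so the footprint in the expected tensor is asymmetric: the impulse at depth 1, row 2, column 2 spreads to indices i - 1 and i along each dimension. A hedged stand-alone illustration, assuming the extra padding needed for a same-sized output goes on the far side of each dimension (an assumption for this sketch, not a statement about filter3D's internals):

import torch
import torch.nn.functional as F

inp = torch.zeros(1, 1, 3, 5, 5)
inp[0, 0, 1, 2, 2] = 5.0
padded = F.pad(inp, (0, 1, 0, 1, 0, 1))            # pad right/bottom/back by one voxel
out = F.conv3d(padded, torch.ones(1, 1, 2, 2, 2))  # valid convolution restores the 3x5x5 size
# Non-zero entries land at depths {0, 1}, rows {1, 2}, columns {1, 2},
# reproducing the shifted footprint of `expected` in the test above.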
Example #6
def motion_blur3d(input: torch.Tensor,
                  kernel_size: int,
                  angle: Union[Tuple[float, float, float], torch.Tensor],
                  direction: Union[float, torch.Tensor],
                  border_type: str = 'constant',
                  mode: str = 'nearest') -> torch.Tensor:
    r"""Perform motion blur on 3D volumes (5D tensor).

    Args:
        input (torch.Tensor): the input tensor with shape :math:`(B, C, D, H, W)`.
        kernel_size (int): motion kernel width, height and depth. It should be odd and positive.
        angle (torch.Tensor or tuple): yaw (x-axis), pitch (y-axis) and roll (z-axis) angles of the motion
            blur, in degrees. If tensor, it must be :math:`(B, 3)`.
        direction (torch.Tensor or float): forward/backward direction of the motion blur.
            Lower values towards -1.0 point the motion blur towards the back (with the orientation given by
            ``angle``), while higher values towards 1.0 point the motion blur forward. A value of 0.0 leads
            to a uniform (but still angled) motion blur.
            If tensor, it must be :math:`(B,)`.
        border_type (str): the padding mode to be applied before convolving. The expected modes are:
            ``'constant'``, ``'reflect'``, ``'replicate'`` or ``'circular'``. Default: ``'constant'``.
        mode (str): interpolation mode for rotating the kernel. ``'bilinear'`` or ``'nearest'``.
            Default: ``'nearest'``

    Returns:
        torch.Tensor: the blurred image with shape :math:`(B, C, D, H, W)`.

    Example:
        >>> input = torch.randn(1, 3, 120, 80, 90).repeat(2, 1, 1, 1, 1)
        >>> # perform exact motion blur across the batch
        >>> out_1 = motion_blur3d(input, 5, (0., 90., 90.), 1)
        >>> torch.allclose(out_1[0], out_1[1])
        True
        >>> # perform element-wise motion blur across the batch
        >>> out_2 = motion_blur3d(input, 5, torch.tensor([[0., 90., 90.], [90., 180., 0.]]), torch.tensor([1., -1.]))
        >>> torch.allclose(out_2[0], out_2[1])
        False
    """
    assert border_type in ["constant", "reflect", "replicate", "circular"]
    kernel: torch.Tensor = get_motion_kernel3d(kernel_size, angle, direction,
                                               mode)
    return kornia.filter3D(input, kernel, border_type)
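
The direction argument described in the docstring only reweights the taps of the motion kernel along its (rotated) axis. A hedged usage sketch of that knob, assuming motion_blur3d is importable from kornia.filters (the import location is an assumption of this sketch):

import torch
from kornia.filters import motion_blur3d  # assumed import location

vol = torch.randn(1, 1, 7, 7, 7)
# Same orientation, three directions: -1.0 (backward), 0.0 (uniform), 1.0 (forward).
back = motion_blur3d(vol, 3, (0., 0., 90.), -1.)
unif = motion_blur3d(vol, 3, (0., 0., 90.), 0.)
fwd = motion_blur3d(vol, 3, (0., 0., 90.), 1.)
# All three outputs keep the input shape; they generally differ voxel-wise
# because only the kernel weighting changes.
print(back.shape, unif.shape, fwd.shape)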
Example #7
    def test_batch(self, batch_size, device, dtype):
        B: int = batch_size
        kernel = torch.rand(1, 3, 3, 3, device=device, dtype=dtype)
        input = torch.ones(B, 3, 6, 7, 8, device=device, dtype=dtype)
        assert kornia.filter3D(input, kernel).shape == input.shape
Example #8
    def test_smoke(self, device, dtype):
        kernel = torch.rand(1, 3, 3, 3).to(device)
        input = torch.ones(1, 1, 6, 7, 8).to(device)
        assert kornia.filter3D(input, kernel).shape == input.shape