Example #1
 def test_jit(self, device, dtype):
     x = torch.rand(1, 3, 4, 4, device=device, dtype=dtype)
     op = _torch_inverse_cast
     op_jit = torch.jit.script(op)
     assert_allclose(op(x), op_jit(x))
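_torch_inverse_cast is a kornia-internal helper; broadly, it exists because torch.inverse does not support half precision, so the input is cast up, inverted, and cast back. A rough sketch under that assumption (not the library's exact code; the name inverse_cast_sketch is made up):

import torch

def inverse_cast_sketch(input: torch.Tensor) -> torch.Tensor:
    # Cast unsupported dtypes (e.g. float16) to float32, invert, cast back.
    dtype = input.dtype
    if dtype not in (torch.float32, torch.float64):
        input = input.to(torch.float32)
    return torch.inverse(input).to(dtype)

x = torch.rand(1, 3, 4, 4)
print(inverse_cast_sketch(x).shape)  # torch.Size([1, 3, 4, 4])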
Example #2
 def test_module_bgra2bgr(self, device, dtype):
     B, C, H, W = 2, 4, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     ops = kornia.color.RgbaToBgr().to(device, dtype)
     fcn = kornia.color.rgba_to_bgr
     assert_allclose(ops(img), fcn(img))
Example #3
 def test_back_and_forth(self, device, dtype):
     data_bgr = torch.rand(1, 3, 3, 2, device=device, dtype=dtype)
     data_rgb = kornia.color.bgr_to_rgb(data_bgr)
     data_bgr_new = kornia.color.rgb_to_bgr(data_rgb)
     assert_allclose(data_bgr, data_bgr_new)
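A BGR/RGB conversion amounts to reversing the channel dimension, so applying it twice is the identity, which is exactly what this round-trip test checks. A minimal sketch of that idea (kornia's own implementation may differ in details):

import torch

def bgr_to_rgb_sketch(image: torch.Tensor) -> torch.Tensor:
    # Reverse the channel axis of a (..., 3, H, W) tensor.
    return image.flip(-3)

x_bgr = torch.rand(1, 3, 3, 2)
assert torch.allclose(x_bgr, bgr_to_rgb_sketch(bgr_to_rgb_sketch(x_bgr)))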
Example #4
 def test_same_on_batch(self, device, dtype):
     torch.manual_seed(42)
     degrees = torch.tensor([10, 20], device=device, dtype=dtype)
     translate = torch.tensor([0.1, 0.1], device=device, dtype=dtype)
     scale = torch.tensor([0.7, 1.2], device=device, dtype=dtype)
     shear = torch.tensor([[10, 20], [10, 20]], device=device, dtype=dtype)
     res = random_affine_generator(
         batch_size=2, height=200, width=200, degrees=degrees,
         translate=translate, scale=scale, shear=shear, same_on_batch=True)
     expected = dict(
         translations=torch.tensor([[-4.6854, 18.3722], [-4.6854, 18.3722]], device=device, dtype=dtype),
         center=torch.tensor([[99.5000, 99.5000], [99.5000, 99.5000]], device=device, dtype=dtype),
         scale=torch.tensor([[1.1575, 1.1575], [1.1575, 1.1575]], device=device, dtype=dtype),
         angle=torch.tensor([18.8227, 18.8227], device=device, dtype=dtype),
         sx=torch.tensor([13.9045, 13.9045], device=device, dtype=dtype),
         sy=torch.tensor([16.0090, 16.0090], device=device, dtype=dtype)
     )
     assert res.keys() == expected.keys()
     assert_allclose(res['translations'], expected['translations'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['center'], expected['center'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['scale'], expected['scale'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['angle'], expected['angle'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['sx'], expected['sx'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['sy'], expected['sy'], rtol=1e-4, atol=1e-4)
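With same_on_batch=True every batch element receives identical parameters, which is why each expected tensor above repeats the same row twice. A minimal sketch of the idea (sample once, then repeat; this is an illustration, not kornia's actual sampler):

import torch

def uniform_sample(lo: float, hi: float, batch_size: int, same_on_batch: bool) -> torch.Tensor:
    # Draw a single value and repeat it across the batch when same_on_batch is True.
    n = 1 if same_on_batch else batch_size
    sample = torch.empty(n).uniform_(lo, hi)
    return sample.expand(batch_size).clone() if same_on_batch else sample

torch.manual_seed(42)
print(uniform_sample(10.0, 20.0, batch_size=2, same_on_batch=True))  # two identical values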
Example #5
 def test_back_and_forth_bgr(self, device, dtype):
     a_val: float = 1.0
     x_bgr = torch.ones(3, 4, 4, device=device, dtype=dtype)
     x_rgba = kornia.color.bgr_to_rgba(x_bgr, a_val)
     x_bgr_new = kornia.color.rgba_to_bgr(x_rgba)
     assert_allclose(x_bgr, x_bgr_new)
Example #6
def test_precompute_C():
    x, la, y, lb, x_sampler, y_sampler = make_data('gmm_1d', 100)
    F, G = sinkhorn(x=x, la=la, y=y, lb=lb, n_iter=10, precompute_C=False)
    Fp, Gp = sinkhorn(x=x, la=la, y=y, lb=lb, n_iter=10, precompute_C=True)
    assert_allclose(Fp(x), F(x))
    assert_allclose(Gp(y), G(y))
Example #7
    def test_jit(self, device):

        data = torch.rand((2, 3, 4, 5)).to(device)
        luv = kornia.color.rgb_to_luv
        luv_jit = torch.jit.script(kornia.color.rgb_to_luv)
        assert_allclose(luv_jit(data), luv(data))
Example #8
 def test_module(self, device, dtype):
     B, C, H, W = 2, 3, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     ops = kornia.color.YcbcrToRgb().to(device, dtype)
     fcn = kornia.color.ycbcr_to_rgb
     assert_allclose(ops(img), fcn(img))
Example #9
def test_torch_stft(
    n_fft_next_pow,
    hop_ratio,
    win_length,
    window,
    center,
    pad_mode,
    normalized,
    sample_rate,
    pass_length,
    wav_shape,
):
    # Accept 10x looser tolerances for larger windows.
    RTOL = 1e-3 if win_length > 256 else 1e-4
    ATOL = 1e-4 if win_length > 256 else 1e-5
    wav = torch.randn(wav_shape, dtype=torch.float32)
    output_len = wav.shape[-1] if pass_length else None
    n_fft = win_length if not n_fft_next_pow else next_power_of_2(win_length)
    hop_length = win_length // hop_ratio

    window = None if window is None else get_window(
        window, win_length, fftbins=True)
    if window is not None:
        # The signal cannot be reconstructed when there is no overlap and the window is close to zero.
        if hop_ratio == 1 and (window**2 < 1e-11).any():
            pass

    fb = TorchSTFTFB.from_torch_args(
        n_fft=n_fft,
        hop_length=hop_length,
        win_length=win_length,
        window=window,
        center=center,
        pad_mode=pad_mode,
        normalized=normalized,
        onesided=True,
        sample_rate=sample_rate,
    )

    stft = Encoder(fb)
    istft = Decoder(fb)

    spec = torch.stft(
        wav,
        n_fft=n_fft,
        hop_length=hop_length,
        win_length=win_length,
        window=fb.torch_window,
        center=center,
        pad_mode=pad_mode,
        normalized=normalized,
        onesided=True,
    )

    spec_asteroid = stft(wav)
    torch_spec = to_asteroid(spec.float())
    assert_allclose(spec_asteroid, torch_spec, rtol=RTOL, atol=ATOL)

    try:
        wav_back = torch.istft(
            spec,
            n_fft=n_fft,
            hop_length=hop_length,
            win_length=win_length,
            window=fb.torch_window,
            center=center,
            normalized=normalized,
            onesided=True,
            length=output_len,
        )
    except RuntimeError:
        # If there was a RuntimeError, the overlap-add (OLA) had zeros, so we cannot
        # compare outputs directly, but we can make sure that istft warns about it.
        with pytest.warns(RuntimeWarning):
            _ = istft(spec_asteroid, length=output_len)
    else:
        # If there was no RuntimeError, we unit-test against the results.
        wav_back_asteroid = istft(spec_asteroid, length=output_len)
        # Asteroid always returns a longer signal.
        assert wav_back_asteroid.shape[-1] >= wav_back.shape[-1]
        # The unit test is done on the left part of the signal.
        assert_allclose(wav_back_asteroid[:wav_back.shape[-1]],
                        wav_back.float(),
                        rtol=RTOL,
                        atol=ATOL)
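The test above relies on a next_power_of_2 helper to pick n_fft. A typical implementation is shown here only as an assumed sketch; the real helper lives in asteroid's utilities and may differ:

def next_power_of_2(x: int) -> int:
    # Smallest power of two greater than or equal to x, e.g. 400 -> 512.
    return 1 if x <= 0 else 2 ** (x - 1).bit_length()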
Example #10
 def test_even(self, device, dtype):
     kernel = _get_center_kernel2d(2, 2, device=device).to(dtype=dtype)
     expected = torch.ones(2, 2, 2, 2, device=device, dtype=dtype) * 0.25
     expected[0, 1] = 0
     expected[1, 0] = 0
     assert_allclose(kernel, expected, atol=1e-4, rtol=1e-4)
Example #11
    def test_diag(self, device, dtype):
        input = torch.tensor([[
            [[0., 0., 0., 0, 0],
             [0., 0., 0.0, 0, 0.],
             [0., 0, 0., 0, 0.],
             [0., 0., 0, 0, 0.],
             [0., 0., 0., 0, 0.]],

            [[0., 0., 0., 0, 0],
             [0., 0., 1, 0, 0.],
             [0., 1, 1.2, 1.1, 0.],
             [0., 0., 1., 0, 0.],
             [0., 0., 0., 0, 0.]],

            [[0., 0., 0., 0, 0],
             [0., 0., 0.0, 0, 0.],
             [0., 0, 0., 0, 0.],
             [0., 0., 0, 0, 0.],
             [0., 0., 0., 0, 0.],
             ]]], device=device, dtype=dtype)
        input = kornia.gaussian_blur2d(input, (5, 5), (0.5, 0.5)).unsqueeze(0)
        softargmax = kornia.geometry.ConvQuadInterp3d(10)
        expected_val = torch.tensor([[[
            [[0., 0., 0., 0, 0],
             [0., 0., 0.0, 0, 0.],
             [0., 0, 0., 0, 0.],
             [0., 0., 0, 0, 0.],
             [0., 0., 0., 0, 0.]],
            [[2.2504e-04, 2.3146e-02, 1.6808e-01, 2.3188e-02, 2.3628e-04],
             [2.3146e-02, 1.8118e-01, 7.4338e-01, 1.8955e-01, 2.5413e-02],
             [1.6807e-01, 7.4227e-01, 1.1086e+01, 8.0414e-01, 1.8482e-01],
             [2.3146e-02, 1.8118e-01, 7.4338e-01, 1.8955e-01, 2.5413e-02],
             [2.2504e-04, 2.3146e-02, 1.6808e-01, 2.3188e-02, 2.3628e-04]],
            [[0., 0., 0., 0, 0],
             [0., 0., 0.0, 0, 0.],
             [0., 0, 0., 0, 0.],
             [0., 0., 0, 0, 0.],
             [0., 0., 0., 0, 0.]]]]], device=device, dtype=dtype)
        expected_coord = torch.tensor([[[[[[0.0, 0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0, 0.0],
                                           [0.0, 0.0, 0.0, 0.0, 0.0]],

                                          [[1.0, 1.0, 1.0, 1.0, 1.0],
                                           [1.0, 1.0, 1.0, 1.0, 1.0],
                                           [1.0, 1.0, 1.0, 1.0, 1.0],
                                           [1.0, 1.0, 1.0, 1.0, 1.0],
                                           [1.0, 1.0, 1.0, 1.0, 1.0]],

                                          [[2.0, 2.0, 2.0, 2.0, 2.0],
                                           [2.0, 2.0, 2.0, 2.0, 2.0],
                                           [2.0, 2.0, 2.0, 2.0, 2.0],
                                           [2.0, 2.0, 2.0, 2.0, 2.0],
                                           [2.0, 2.0, 2.0, 2.0, 2.0]]],


                                         [[[0.0, 1.0, 2.0, 3.0, 4.0],
                                           [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0]],

                                          [[0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0]],

                                          [[0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0],
                                             [0.0, 1.0, 2.0, 3.0, 4.0]]],


                                         [[[0.0, 0.0, 0.0, 0.0, 0.0],
                                           [1.0, 1.0, 1.0, 1.0, 1.0],
                                             [2.0, 2.0, 2.0, 2.0, 2.0],
                                             [3.0, 3.0, 3.0, 3.0, 3.0],
                                             [4.0, 4.0, 4.0, 4.0, 4.0]],

                                          [[0.0, 0.0, 0.0, 0.0, 0.0],
                                             [1.0, 1.0, 1.0, 1.0, 1.0],
                                             [2.0, 2.0, 2.0495, 2.0, 2.0],
                                             [3.0, 3.0, 3.0, 3.0, 3.0],
                                             [4.0, 4.0, 4.0, 4.0, 4.0]],

                                          [[0.0, 0.0, 0.0, 0.0, 0.0],
                                             [1.0, 1.0, 1.0, 1.0, 1.0],
                                             [2.0, 2.0, 2.0, 2.0, 2.0],
                                             [3.0, 3.0, 3.0, 3.0, 3.0],
                                             [4.0, 4.0, 4.0, 4.0, 4.0]]]]]], device=device, dtype=dtype)
        coords, val = softargmax(input)
        assert_allclose(val, expected_val, atol=1e-4, rtol=1e-4)
        assert_allclose(coords, expected_coord, atol=1e-4, rtol=1e-4)
Example #12
 def test_jit_trace(self, device, dtype):
     input = torch.rand((2, 3, 7, 7), dtype=dtype, device=device)
     op = kornia.spatial_soft_argmax2d
     op_jit = torch.jit.trace(op, (input,))
     assert_allclose(op(input), op_jit(input), rtol=0, atol=1e-5)
Example #13
    def test_batch2_n2(self, device, dtype):
        input = torch.zeros(2, 2, 2, 3, device=device, dtype=dtype)
        input[0, 0, 0, 0] = 1e16  # top-left
        input[0, 1, 0, -1] = 1e16  # top-right
        input[1, 0, -1, 0] = 1e16  # bottom-left
        input[1, 1, -1, -1] = 1e16  # bottom-right

        coord = kornia.spatial_soft_argmax2d(input)
        assert_allclose(coord[0, 0, 0].item(), -1.0, atol=1e-4, rtol=1e-4)  # top-left
        assert_allclose(coord[0, 0, 1].item(), -1.0, atol=1e-4, rtol=1e-4)
        assert_allclose(coord[0, 1, 0].item(), 1.0, atol=1e-4, rtol=1e-4)  # top-right
        assert_allclose(coord[0, 1, 1].item(), -1.0, atol=1e-4, rtol=1e-4)
        assert_allclose(coord[1, 0, 0].item(), -1.0, atol=1e-4, rtol=1e-4)  # bottom-left
        assert_allclose(coord[1, 0, 1].item(), 1.0, atol=1e-4, rtol=1e-4)
        assert_allclose(coord[1, 1, 0].item(), 1.0, atol=1e-4, rtol=1e-4)  # bottom-right
        assert_allclose(coord[1, 1, 1].item(), 1.0, atol=1e-4, rtol=1e-4)
Example #14
    def test_smoke(self, device, dtype):
        a = torch.randn(5, 3, 3, device=device, dtype=dtype)
        u, s, v = _torch_svd_cast(a)

        tol_val: float = 1e-1 if dtype == torch.float16 else 1e-3
        assert_allclose(a, u @ torch.diag_embed(s) @ v.transpose(-2, -1), atol=tol_val, rtol=tol_val)
Example #15
    def test_pinhole_camera_scale_inplace(self, device, dtype):
        batch_size = 2
        height, width = 4, 6
        fx, fy, cx, cy = 1, 2, width / 2, height / 2
        tx, ty, tz = 1, 2, 3
        scale_val = 2.0

        intrinsics = self._create_intrinsics(batch_size, fx, fy, cx, cy, device=device, dtype=dtype)
        extrinsics = self._create_extrinsics(batch_size, tx, ty, tz, device=device, dtype=dtype)
        height = torch.ones(batch_size, device=device, dtype=dtype) * height
        width = torch.ones(batch_size, device=device, dtype=dtype) * width
        scale_factor = torch.ones(batch_size, device=device, dtype=dtype) * scale_val

        pinhole = kornia.PinholeCamera(intrinsics, extrinsics, height, width)
        pinhole_scale = pinhole.clone()
        pinhole_scale.scale_(scale_factor)

        assert_allclose(
            pinhole_scale.intrinsics[..., 0, 0], pinhole.intrinsics[..., 0, 0] * scale_val, atol=1e-4, rtol=1e-4
        )  # fx
        assert_allclose(
            pinhole_scale.intrinsics[..., 1, 1], pinhole.intrinsics[..., 1, 1] * scale_val, atol=1e-4, rtol=1e-4
        )  # fy
        assert_allclose(
            pinhole_scale.intrinsics[..., 0, 2], pinhole.intrinsics[..., 0, 2] * scale_val, atol=1e-4, rtol=1e-4
        )  # cx
        assert_allclose(
            pinhole_scale.intrinsics[..., 1, 2], pinhole.intrinsics[..., 1, 2] * scale_val, atol=1e-4, rtol=1e-4
        )  # cy
        assert_allclose(pinhole_scale.height, pinhole.height * scale_val, atol=1e-4, rtol=1e-4)
        assert_allclose(pinhole_scale.width, pinhole.width * scale_val, atol=1e-4, rtol=1e-4)
Example #16
    def test_get_metric_on_new_object_works(self, device: str):
        recall = UnigramRecall()

        actual_recall = recall.get_metric(reset=True)
        assert_allclose(actual_recall, 0)
Example #17
    def test_fbeta_multilabel_with_mask(self, device: str):
        self.predictions = self.predictions.to(device)
        self.targets = self.targets.to(device)

        mask = torch.tensor([True, True, True, True, True, False], device=device).unsqueeze(-1)

        fbeta = FBetaMultiLabelMeasure()
        fbeta(self.predictions, self.targets, mask)
        metric = fbeta.get_metric()
        precisions = metric["precision"]
        recalls = metric["recall"]
        fscores = metric["fscore"]

        assert_allclose(fbeta._pred_sum.tolist(), [3, 3, 3, 4, 1])
        assert_allclose(fbeta._true_sum.tolist(), [4, 5, 2, 4, 0])
        assert_allclose(fbeta._true_positive_sum.tolist(), [3, 3, 2, 4, 0])

        desired_precisions = [3 / 3, 3 / 3, 2 / 3, 4 / 4, 0 / 1]
        desired_recalls = [3 / 4, 3 / 5, 2 / 2, 4 / 4, 0.00]
        desired_fscores = [
            (2 * p * r) / (p + r) if p + r != 0.0 else 0.0
            for p, r in zip(desired_precisions, desired_recalls)
        ]
        assert_allclose(precisions, desired_precisions)
        assert_allclose(recalls, desired_recalls)
        assert_allclose(fscores, desired_fscores)
Example #18
 def test_module(self, device, dtype):
     B, C, H, W = 2, 3, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     ops = kornia.color.RgbToLab().to(device, dtype)
     fcn = kornia.color.rgb_to_lab
     assert_allclose(ops(img), fcn(img))
Example #19
    def test_jit(self, device):

        data = torch.rand((2, 3, 4, 5)).to(device)
        rgb = kornia.color.luv_to_rgb
        rgb_jit = torch.jit.script(kornia.color.luv_to_rgb)
        assert_allclose(rgb_jit(data), rgb(data))
Example #20
    def test_unit(self, device, dtype):
        data = torch.tensor(
            [[
                [
                    [
                        50.21928787, 23.29810143, 14.98279190, 62.50927353,
                        72.78904724
                    ],
                    [
                        70.86846924, 68.75330353, 52.81696701, 76.17090607,
                        88.63134003
                    ],
                    [
                        46.87160873, 72.38699341, 37.71450806, 82.57386780,
                        74.79967499
                    ],
                    [
                        77.33016968, 47.39180374, 61.76217651, 90.83254242,
                        86.96239471
                    ],
                ],
                [
                    [
                        65.81327057, -3.69859719, 0.16971001, 14.86583614,
                        -65.54960632
                    ],
                    [
                        -41.03258133, -19.52661896, 64.16155243, -58.53935242,
                        -71.78411102
                    ],
                    [
                        112.05227661, -60.13330460, 43.07910538, -51.01456833,
                        -58.25787354
                    ],
                    [
                        -62.37575531, 50.88882065, -39.27450943, 17.00958824,
                        -24.93779755
                    ],
                ],
                [
                    [
                        -69.53346252, -73.34986877, -11.47461891, 66.73863220,
                        70.43983459
                    ],
                    [
                        51.92737579, 58.77009583, 45.97863388, 24.44452858,
                        98.81991577
                    ],
                    [
                        -7.60597992, 78.97976685, -69.31867218, 67.33953857,
                        14.28889370
                    ],
                    [
                        92.31149292, -85.91405487, -32.83668518, -23.45091820,
                        69.99038696
                    ],
                ],
            ]],
            device=device,
            dtype=dtype,
        )

        # Reference output generated using skimage: lab2rgb(data)
        expected = torch.tensor(
            [[
                [
                    [0.63513142, 0.0, 0.10660624, 0.79048697, 0.26823414],
                    [
                        0.48903025, 0.64529494, 0.91140099, 0.15877841,
                        0.45987959
                    ],
                    [1.0, 0.36069696, 0.29236125, 0.55744393, 0.0],
                    [0.41710863, 0.3198324, 0.0, 0.94256868, 0.82748892],
                ],
                [
                    [
                        0.28210726, 0.26080003, 0.15027717, 0.54540429,
                        0.80323837
                    ],
                    [0.748392, 0.68774842, 0.24204415, 0.83695682, 0.9902132],
                    [0.0, 0.79101603, 0.26633725, 0.89223337, 0.82301254],
                    [
                        0.84857086, 0.34455393, 0.66555314, 0.86168397,
                        0.8948667
                    ],
                ],
                [
                    [
                        0.94172458, 0.66390044, 0.21043296, 0.02453515,
                        0.04169043
                    ],
                    [0.28233233, 0.20235374, 0.19803933, 0.55069441, 0.0],
                    [0.50205101, 0.0, 0.79745394, 0.25376936, 0.6114783],
                    [0.0, 1.0, 0.80867314, 1.0, 0.28778443],
                ],
            ]],
            device=device,
            dtype=dtype,
        )

        expected_unclipped = torch.tensor(
            [[
                [
                    [
                        0.63513142, -1.78708635, 0.10660624, 0.79048697,
                        0.26823414
                    ],
                    [
                        0.48903025, 0.64529494, 0.91140099, 0.15877841,
                        0.45987959
                    ],
                    [
                        1.01488435, 0.36069696, 0.29236125, 0.55744393,
                        -0.28090181
                    ],
                    [
                        0.41710863, 0.3198324, -1.81087917, 0.94256868,
                        0.82748892
                    ],
                ],
                [
                    [
                        0.28210726, 0.26080003, 0.15027717, 0.54540429,
                        0.80323837
                    ],
                    [0.748392, 0.68774842, 0.24204415, 0.83695682, 0.9902132],
                    [
                        -1.37862046, 0.79101603, 0.26633725, 0.89223337,
                        0.82301254
                    ],
                    [
                        0.84857086, 0.34455393, 0.66555314, 0.86168397,
                        0.8948667
                    ],
                ],
                [
                    [
                        0.94172458, 0.66390044, 0.21043296, 0.02453515,
                        0.04169043
                    ],
                    [
                        0.28233233, 0.20235374, 0.19803933, 0.55069441,
                        -0.62707704
                    ],
                    [
                        0.50205101, -0.25005965, 0.79745394, 0.25376936,
                        0.6114783
                    ],
                    [
                        -0.55802926, 1.0223477, 0.80867314, 1.07334156,
                        0.28778443
                    ],
                ],
            ]],
            device=device,
            dtype=dtype,
        )

        tol_val: float = utils._get_precision_by_name(device, 'xla', 1e-1,
                                                      1e-4)
        assert_allclose(kornia.color.lab_to_rgb(data),
                        expected,
                        rtol=tol_val,
                        atol=tol_val)
        assert_allclose(kornia.color.lab_to_rgb(data, clip=False),
                        expected_unclipped,
                        rtol=tol_val,
                        atol=tol_val)
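The two reference tensors above differ only by clipping: the clipped output is the unclipped one clamped to [0, 1] (for instance -1.78708635 becomes 0.0 and 1.07334156 becomes 1.0). A tiny illustration:

import torch

unclipped = torch.tensor([0.63513142, -1.78708635, 1.07334156])
print(unclipped.clamp(0.0, 1.0))  # tensor([0.6351, 0.0000, 1.0000])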
Example #21
 def test_random_gen(self, device, dtype):
     torch.manual_seed(42)
     degrees = torch.tensor([10, 20], device=device, dtype=dtype)
     translate = torch.tensor([0.1, 0.1], device=device, dtype=dtype)
     scale = torch.tensor([0.7, 1.2], device=device, dtype=dtype)
     shear = torch.tensor([[10, 20], [10, 20]], device=device, dtype=dtype)
     res = random_affine_generator(
         batch_size=2, height=200, width=200, degrees=degrees,
         translate=translate, scale=scale, shear=shear, same_on_batch=False)
     expected = dict(
         translations=torch.tensor([[-4.3821, -9.7371], [4.0358, 11.7457]], device=device, dtype=dtype),
         center=torch.tensor([[99.5000, 99.5000], [99.5000, 99.5000]], device=device, dtype=dtype),
         scale=torch.tensor([[0.8914, 0.8914], [1.1797, 1.1797]], device=device, dtype=dtype),
         angle=torch.tensor([18.8227, 19.1500], device=device, dtype=dtype),
         sx=torch.tensor([19.4077, 11.3319], device=device, dtype=dtype),
         sy=torch.tensor([19.3460, 15.9358], device=device, dtype=dtype)
     )
     assert res.keys() == expected.keys()
     assert_allclose(res['translations'], expected['translations'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['center'], expected['center'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['scale'], expected['scale'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['angle'], expected['angle'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['sx'], expected['sx'], rtol=1e-4, atol=1e-4)
     assert_allclose(res['sy'], expected['sy'], rtol=1e-4, atol=1e-4)
Example #22
    def test_unit(self, device, dtype):
        data = torch.tensor(
            [
                [
                    [0.0, 1.0, 0.69396782, 0.63581685, 0.09902618],
                    [
                        0.59459005, 0.74215373, 0.89662376, 0.25920381,
                        0.89937686
                    ],
                    [
                        0.29857584, 0.28139791, 0.16441015, 0.55507519,
                        0.06124221
                    ],
                    [
                        0.40908658, 0.10261389, 0.01691456, 0.76006799,
                        0.32971736
                    ],
                ],
                [
                    [0.0, 1.0, 0.79009938, 0.91742945, 0.60044175],
                    [
                        0.42812678, 0.18552390, 0.04186043, 0.38030245,
                        0.15420346
                    ],
                    [
                        0.13552373, 0.53955473, 0.79102736, 0.49050815,
                        0.75271446
                    ],
                    [
                        0.39861023, 0.80680277, 0.82823833, 0.54438462,
                        0.22063386
                    ],
                ],
                [
                    [0.0, 1.0, 0.84317145, 0.59529881, 0.15297393],
                    [
                        0.59235313, 0.36617295, 0.34600773, 0.40304737,
                        0.61720451
                    ],
                    [
                        0.46040250, 0.42006640, 0.54765106, 0.48982632,
                        0.13914755
                    ],
                    [
                        0.58402964, 0.89597990, 0.98276161, 0.25019163,
                        0.69285921
                    ],
                ],
            ],
            device=device,
            dtype=dtype,
        )

        # Reference output generated using skimage: rgb2lab(data)
        expected = torch.tensor(
            [
                [
                    [0.0, 100.0, 79.75208576, 86.38913217, 55.25164186],
                    [
                        51.66668553, 43.81214392, 48.93865503, 39.03804484,
                        52.55152607
                    ],
                    [
                        23.7114063, 52.38661792, 72.54607218, 53.89587489,
                        67.94892652
                    ],
                    [
                        45.02897165, 75.98315061, 78.257619, 61.85069778,
                        33.77972627
                    ],
                ],
                [
                    [
                        0.0, -0.002454937, -5.40909568, -37.74958445,
                        -55.02172792
                    ],
                    [
                        24.16049084, 58.53088654, 75.33566652, -9.65827726,
                        76.94753157
                    ],
                    [
                        36.53113547, -28.57665427, -54.16269089, 6.2586262,
                        -67.69290198
                    ],
                    [
                        12.32708756, -33.04781428, -29.29282657, 13.46090338,
                        42.98737069
                    ],
                ],
                [
                    [0.0, 0.00465342, -9.49591204, 32.9931831, 47.80929165],
                    [
                        -16.11189945, 7.72083678, 19.17820444, -6.90801653,
                        -17.46468994
                    ],
                    [
                        -39.99097133, 9.92432127, 19.90687976, 2.40429413,
                        61.24066709
                    ],
                    [
                        -25.45166461, -22.94347485, -31.32259433, 47.2621717,
                        -60.05694598
                    ],
                ],
            ],
            device=device,
            dtype=dtype,
        )

        tol_val: float = utils._get_precision_by_name(device, 'xla', 1e-1,
                                                      1e-4)
        assert_allclose(kornia.color.rgb_to_lab(data),
                        expected,
                        rtol=tol_val,
                        atol=tol_val)
Example #23
def test_get_motion_kernel2d(ksize, angle, direction):
    kernel = kornia.get_motion_kernel2d(ksize, angle, direction)
    assert kernel.shape == (ksize, ksize)
    assert_allclose(kernel.sum(), 1.)
Example #24
def test_pixel_to_normalized_coordinates():
    expected = torch.tensor([0.0, 0.0])
    actual = pixel_to_normalized_coordinates(torch.tensor([0.5, 2.0]), (5, 2))
    assert_allclose(actual, expected)
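The expected value above is consistent with a pixel-center normalization n = (2 * p + 1) / s - 1, with the x coordinate normalized by the width and y by the height. The exact convention used by pixel_to_normalized_coordinates is an assumption here, but a quick check reproduces the test's numbers:

import torch

def to_normalized(pixel: torch.Tensor, size) -> torch.Tensor:
    # size is (height, width); pixel is in (x, y) order, so pair it with reversed(size).
    s = torch.tensor(list(reversed(size)), dtype=pixel.dtype)
    return (2 * pixel + 1) / s - 1

print(to_normalized(torch.tensor([0.5, 2.0]), (5, 2)))  # tensor([0., 0.])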
Example #25
 def test_module_bgr(self, device, dtype):
     B, C, H, W = 2, 3, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     ops = kornia.color.BgrToRgba(1.0).to(device, dtype)
     fcn = kornia.color.bgr_to_rgba
     assert_allclose(ops(img), fcn(img, 1.0))
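Conceptually, bgr_to_rgba reverses the channel order and appends a constant alpha plane. A self-contained sketch of that behaviour (bgr_to_rgba_sketch is hypothetical; kornia's function may differ internally):

import torch

def bgr_to_rgba_sketch(image: torch.Tensor, alpha_val: float) -> torch.Tensor:
    # Reverse BGR -> RGB on the channel axis, then append a constant alpha channel.
    rgb = image.flip(-3)
    alpha = torch.full_like(rgb[..., :1, :, :], alpha_val)
    return torch.cat([rgb, alpha], dim=-3)

img = torch.ones(2, 3, 4, 4)
print(bgr_to_rgba_sketch(img, 1.0).shape)  # torch.Size([2, 4, 4, 4])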
Example #26
def test_dsnt_forward():
    expected = SIMPLE_OUTPUT
    actual = dsnt(SIMPLE_INPUT)
    assert_allclose(actual, expected)
Example #27
 def test_module_linear(self, device, dtype):
     B, C, H, W = 2, 3, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     ops = kornia.color.LinearRgbToRgb().to(device, dtype)
     fcn = kornia.color.linear_rgb_to_rgb
     assert_allclose(ops(img), fcn(img))
Example #28
def test_dsnt_forward_not_normalized():
    expected = torch.tensor([[[3.0, 2.0]]])
    actual = dsnt(SIMPLE_INPUT, normalized_coordinates=False)
    assert_allclose(actual, expected)
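dsnt produces keypoint coordinates as the spatial expectation of a probability heatmap; with normalized_coordinates=False the expectation is taken over raw pixel indices. SIMPLE_INPUT itself is not shown here, so the sketch below uses a hypothetical one-hot map whose expectation lands on the same [[3.0, 2.0]] value (this illustrates the expectation only, not the library's implementation):

import torch

def spatial_expectation_sketch(heatmap: torch.Tensor) -> torch.Tensor:
    # heatmap: (B, N, H, W), assumed to sum to 1 over each H x W map.
    # Returns (B, N, 2) coordinates in (x, y) pixel order.
    h, w = heatmap.shape[-2:]
    xs = torch.arange(w, dtype=heatmap.dtype)
    ys = torch.arange(h, dtype=heatmap.dtype)
    x = (heatmap.sum(dim=-2) * xs).sum(-1)  # marginal over rows, then expectation
    y = (heatmap.sum(dim=-1) * ys).sum(-1)  # marginal over columns, then expectation
    return torch.stack((x, y), dim=-1)

hm = torch.zeros(1, 1, 4, 5)
hm[0, 0, 2, 3] = 1.0  # all mass at row 2, column 3
print(spatial_expectation_sketch(hm))  # tensor([[[3., 2.]]])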
Example #29
 def test_jit(self, device, dtype):
     B, C, H, W = 2, 3, 4, 4
     img = torch.ones(B, C, H, W, device=device, dtype=dtype)
     op = kornia.color.rgb_to_bgr
     op_jit = torch.jit.script(op)
     assert_allclose(op(img), op_jit(img))
Example #30
    def test_f1_measure(self, device: str):
        f1_measure = F1Measure(positive_label=0)
        predictions = torch.tensor(
            [
                [0.35, 0.25, 0.1, 0.1, 0.2],
                [0.1, 0.6, 0.1, 0.2, 0.0],
                [0.1, 0.6, 0.1, 0.2, 0.0],
                [0.1, 0.5, 0.1, 0.2, 0.0],
                [0.1, 0.2, 0.1, 0.7, 0.0],
                [0.1, 0.6, 0.1, 0.2, 0.0],
            ],
            device=device,
        )
        # [True Positive, True Negative, True Negative,
        #  False Negative, True Negative, False Negative]
        targets = torch.tensor([0, 4, 1, 0, 3, 0], device=device)
        f1_measure(predictions, targets)
        precision, recall, f1 = f1_measure.get_metric()
        assert f1_measure._true_positives == 1.0
        assert f1_measure._true_negatives == 3.0
        assert f1_measure._false_positives == 0.0
        assert f1_measure._false_negatives == 2.0
        f1_measure.reset()
        # check value
        assert_allclose(precision, 1.0)
        assert_allclose(recall, 0.333333333)
        assert_allclose(f1, 0.499999999)
        # check type
        assert isinstance(precision, float)
        assert isinstance(recall, float)
        assert isinstance(f1, float)

        # Test the same thing with a mask:
        mask = torch.tensor([True, False, True, True, True, False], device=device)
        f1_measure(predictions, targets, mask)
        precision, recall, f1 = f1_measure.get_metric()
        assert f1_measure._true_positives == 1.0
        assert f1_measure._true_negatives == 2.0
        assert f1_measure._false_positives == 0.0
        assert f1_measure._false_negatives == 1.0
        f1_measure.reset()
        assert_allclose(precision, 1.0)
        assert_allclose(recall, 0.5)
        assert_allclose(f1, 0.6666666666)
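For reference, the metric values asserted above follow directly from the confusion counts: unmasked, TP=1, FP=0, FN=2; masked, TP=1, FP=0, FN=1.

# Precision / recall / F1 from the counts checked in the test above.
tp, fp, fn = 1.0, 0.0, 2.0                            # unmasked call
precision = tp / (tp + fp)                            # 1.0
recall = tp / (tp + fn)                               # 0.3333...
f1 = 2 * precision * recall / (precision + recall)    # 0.5

tp, fp, fn = 1.0, 0.0, 1.0                            # masked call
precision = tp / (tp + fp)                            # 1.0
recall = tp / (tp + fn)                               # 0.5
f1 = 2 * precision * recall / (precision + recall)    # 0.6667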