Example No. 1
    def spatial(self, x):
        # Sample random affine parameters (rotation, translation, scale) for a
        # single image; `x` is expected to be a (1, H, W) tensor.
        params = kornia.augmentation.RandomAffine(
            degrees=(-20, 20),
            translate=(0.2, 0.2),
            scale=(0.9, 1.1),
            keepdim=True).generate_parameters(x.unsqueeze(0).shape)

        # Build the full 3x3 homogeneous affine matrix from the sampled parameters.
        affine_matrix = kornia.get_affine_matrix2d(**params)
        # Warp the image, adding and removing the batch dimension and keeping
        # its original spatial size.
        x = kornia.geometry.warp_perspective(
            x.unsqueeze(0), affine_matrix, dsize=x.squeeze(0).shape).squeeze(0)
        return x, affine_matrix
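
A short follow-up sketch (not from the original source; a minimal example assuming `x` is a single-channel (1, H, W) image and fixed, illustrative parameter values): because kornia.get_affine_matrix2d returns a full 3 x 3 homogeneous matrix, the warp can be undone by warping again with its inverse.

import torch
import kornia

x = torch.rand(1, 96, 96)                   # (C, H, W) single-channel image
translations = torch.tensor([[5.0, -3.0]])  # 1 x 2
center = torch.tensor([[48.0, 48.0]])       # 1 x 2, image centre
scale = torch.tensor([[1.0, 1.0]])          # 1 x 2
angle = torch.tensor([10.0])                # degrees

matrix = kornia.get_affine_matrix2d(translations, center, scale, angle)  # 1 x 3 x 3
warped = kornia.geometry.warp_perspective(x.unsqueeze(0), matrix, dsize=(96, 96))
restored = kornia.geometry.warp_perspective(warped, matrix.inverse(), dsize=(96, 96))
print(warped.shape, restored.shape)  # torch.Size([1, 1, 96, 96]) twice
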
Example No. 2
    def test_compose_affine_matrix_3x3(self, device, dtype):
        """To get parameters:
        import torchvision as tv
        from PIL import Image
        from torch import Tensor as T
        import math
        import random
        img_size = (96,96)
        seed = 42
        torch.manual_seed(seed)
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)  # if you are using multi-GPU.
        np.random.seed(seed)  # Numpy module.
        random.seed(seed)  # Python random module.
        torch.manual_seed(seed)
        tfm = tv.transforms.RandomAffine(degrees=(-25.0,25.0),
                                        scale=(0.6, 1.4) ,
                                        translate=(0, 0.1),
                                        shear=(-25., 25., -20., 20.))
        angle, translations, scale, shear = tfm.get_params(tfm.degrees, tfm.translate,
                                                        tfm.scale, tfm.shear, img_size)
        print (angle, translations, scale, shear)
        output_size = img_size
        center = (img_size[0] * 0.5 + 0.5, img_size[1] * 0.5 + 0.5)

        matrix = tv.transforms.functional._get_inverse_affine_matrix(center, angle, translations, scale, shear)
        matrix = np.array(matrix).reshape(2,3)
        print (matrix)
        """
        import math

        from torch import Tensor as T

        batch_size, ch, height, width = 1, 1, 96, 96
        angle, translations = 6.971339922894188, (0.0, -4.0)
        scale = [0.7785685905190581, 0.7785685905190581]
        shear = [11.8235607082617, 7.06797949691645]
        matrix_expected = T([[1.27536969, 4.26828945e-01, -3.2876e01],
                             [2.18297196e-03, 1.29424165e00, -1.1717e01]])
        center = T([float(width), float(height)]).view(1, 2) / 2.0 + 0.5
        center = center.expand(batch_size, -1)
        # torchvision's helper returns the inverse affine mapping, so build the
        # forward matrix with kornia and invert it before comparing.
        matrix_kornia = kornia.get_affine_matrix2d(
            T(translations).view(-1, 2),
            center,
            T([scale]).view(-1, 2),
            T([angle]).view(-1),
            T([math.radians(shear[0])]).view(-1, 1),
            T([math.radians(shear[1])]).view(-1, 1),
        )
        matrix_kornia = matrix_kornia.inverse()[0, :2].detach().cpu()
        assert_allclose(matrix_kornia, matrix_expected, atol=1e-4, rtol=1e-4)
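
A small sanity-check sketch along the same lines (not part of the original test): with unit scale, zero angle and no shear, kornia.get_affine_matrix2d reduces to a pure translation (the centre only affects rotation, scale and shear), so its inverse simply negates that translation.

import torch
import kornia

translations = torch.tensor([[3.0, -2.0]])
center = torch.tensor([[48.0, 48.0]])
scale = torch.tensor([[1.0, 1.0]])
angle = torch.tensor([0.0])

matrix = kornia.get_affine_matrix2d(translations, center, scale, angle)
print(matrix[0, :2, 2])            # tensor([ 3., -2.])
print(matrix.inverse()[0, :2, 2])  # tensor([-3.,  2.])
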
Example No. 3
import torch
import kornia
from torch import Tensor


def get_heatmap_transformation_matrix(
    jitter_x: Tensor,
    jitter_y: Tensor,
    scale: Tensor,
    angle: Tensor,
    heatmap_dim: Tensor,
) -> Tensor:
    """
    Generates transfromation matric to revert the transformation on heatmap.

    Args:
        jitter_x (Tensor): x Pixels by which heatmap should be jittered (batch)
        jitter_y (Tensor): y Pixels by which heatmap should be jittered (batch)
        scale (Tensor): Scale factor from crop margin (batch).
        angle (Tensor): Rotation angle (batch)
        heatmap_dim (Tensor): Height and width of heatmap (1x2)

    Returns:
        [Tensor]: Transformation matrix (batch x 2 x3).
    """
    # Build a translation matrix from the per-sample jitter offsets.
    translations = torch.cat(
        [jitter_x.view(-1, 1), jitter_y.view(-1, 1)], dim=1).float()
    origin = torch.zeros_like(translations)
    zero_angle = torch.zeros_like(translations[:, 0])
    unit_scale = torch.ones_like(translations)
    # NOTE: The function below returns a batch x 3 x 3 matrix.
    translation_matrix = kornia.get_affine_matrix2d(translations=translations,
                                                    center=origin,
                                                    angle=zero_angle,
                                                    scale=unit_scale)
    # Making a rotation matrix.
    center_of_rotation = torch.ones_like(translations) * (
        (heatmap_dim / 2).view(1, 2))
    # NOTE: The function below returns a batch x 2 x 3 matrix.
    rotation_matrix = kornia.get_rotation_matrix2d(
        center=center_of_rotation.float(),
        angle=angle.float(),
        scale=scale.view(-1, 1).repeat(1, 2).float(),
    )
    # Compose the transforms: the translation is applied first, then the rotation.
    return torch.bmm(rotation_matrix, translation_matrix)
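
A minimal usage sketch (an assumption, not part of the original source; it runs in the same module as the function above and all parameter values are illustrative): the returned batch x 2 x 3 matrix can be passed directly to kornia.geometry.transform.warp_affine to revert a batch of heatmaps.

import torch
import kornia

batch = 4
heatmaps = torch.rand(batch, 21, 64, 64)           # batch x channels x H x W
jitter_x = torch.randint(-5, 5, (batch,)).float()  # per-sample x offset in pixels
jitter_y = torch.randint(-5, 5, (batch,)).float()  # per-sample y offset in pixels
scale = torch.full((batch,), 1.2)                  # per-sample scale factor
angle = torch.rand(batch) * 30.0                   # per-sample rotation in degrees
heatmap_dim = torch.tensor([[64.0, 64.0]])         # 1 x 2 (height, width)

matrix = get_heatmap_transformation_matrix(jitter_x, jitter_y, scale, angle, heatmap_dim)
reverted = kornia.geometry.transform.warp_affine(heatmaps, matrix, dsize=(64, 64))
print(matrix.shape, reverted.shape)  # torch.Size([4, 2, 3]) torch.Size([4, 21, 64, 64])
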