def test_jit(self, device):
    """JIT script+trace of invert_affine_transform must agree with eager mode."""

    @torch.jit.script
    def op_script(input):
        return kornia.invert_affine_transform(input)

    aff = torch.eye(2, 3).to(device)
    traced = torch.jit.trace(op_script, aff)
    assert_allclose(traced(aff), kornia.invert_affine_transform(aff))
def test_jit(self, device, dtype):
    """Scripted+traced invert_affine_transform matches the eager call."""

    @torch.jit.script
    def op_script(input):
        return kornia.invert_affine_transform(input)

    aff = torch.eye(2, 3, device=device, dtype=dtype)
    traced = torch.jit.trace(op_script, aff)
    out = traced(aff)
    ref = kornia.invert_affine_transform(aff)
    assert_close(out, ref, rtol=1e-4, atol=1e-4)
def test_rot90(self, device, dtype):
    """Inverting a +90 deg rotation about the origin gives the -90 deg matrix."""
    deg90 = torch.tensor([90.0], device=device, dtype=dtype)
    unit_scale = torch.tensor([[1.0, 1.0]], device=device, dtype=dtype)
    origin = torch.tensor([[0.0, 0.0]], device=device, dtype=dtype)
    expected = torch.tensor(
        [[[0.0, -1.0, 0.0],
          [1.0, 0.0, 0.0]]], device=device, dtype=dtype)
    rot = kornia.get_rotation_matrix2d(origin, deg90, unit_scale)
    inv = kornia.invert_affine_transform(rot)
    assert_close(inv, expected, rtol=1e-4, atol=1e-4)
def test_rot90(self, device):
    """The inverse of a +90 deg rotation matrix is the -90 deg rotation."""
    deg = torch.tensor([90.]).to(device)
    sc = torch.tensor([[1., 1.]]).to(device)
    ctr = torch.tensor([[0., 0.]]).to(device)
    expected = torch.tensor([[[0., -1., 0.],
                              [1., 0., 0.]]]).to(device)
    rot = kornia.get_rotation_matrix2d(ctr, deg, sc)
    assert_allclose(kornia.invert_affine_transform(rot), expected)
def test_jit_trace(self):
    """A trace recorded on a single matrix must generalize to a batch input."""

    @torch.jit.script
    def op_script(input):
        return kornia.invert_affine_transform(input)

    single = torch.eye(2, 3)
    batch = torch.eye(2, 3).repeat(2, 1, 1)
    traced = torch.jit.trace(op_script, single)
    assert_allclose(traced(batch), kornia.invert_affine_transform(batch))
def test_rot90_batch(self):
    """Batched inversion of a repeated 90 deg rotation matrix."""
    deg = torch.tensor([90.])
    sc = torch.tensor([1.])
    ctr = torch.tensor([[0., 0.]])
    expected = torch.tensor([[[0., -1., 0.],
                              [1., 0., 0.]]])
    rot = kornia.get_rotation_matrix2d(ctr, deg, sc).repeat(2, 1, 1)
    inv = kornia.invert_affine_transform(rot)
    assert_allclose(inv, expected)
def test_rot90_batch(self, device, dtype):
    """Inverting a batch of identical 90 deg rotations gives the -90 deg matrix."""
    deg = torch.tensor([90.], device=device, dtype=dtype)
    sc = torch.tensor([[1., 1.]], device=device, dtype=dtype)
    ctr = torch.tensor([[0., 0.]], device=device, dtype=dtype)
    expected = torch.tensor([[[0., -1., 0.],
                              [1., 0., 0.]]], device=device, dtype=dtype)
    rot = kornia.get_rotation_matrix2d(ctr, deg, sc)
    inv = kornia.invert_affine_transform(rot.repeat(2, 1, 1))
    assert_allclose(inv, expected, rtol=1e-4, atol=1e-4)
def op_script(input):
    """Scriptable wrapper around kornia.invert_affine_transform."""
    result = kornia.invert_affine_transform(input)
    return result
def test_smoke(self, device):
    """The identity affine transform is its own inverse."""
    eye = torch.eye(2, 3).to(device)[None]
    assert_allclose(eye, kornia.invert_affine_transform(eye))
def test_smoke(self, device, dtype):
    """Identity affine matrix inverts to itself (smoke check)."""
    eye = torch.eye(2, 3, device=device, dtype=dtype)[None]
    inv = kornia.invert_affine_transform(eye)
    assert_allclose(eye, inv, rtol=1e-4, atol=1e-4)
def test_smoke(self):
    """Identity affine transform inverts to itself."""
    eye = torch.eye(2, 3)
    assert_allclose(eye, kornia.invert_affine_transform(eye))
def align_fake(self, margin=40, alignUnaligned=True):
    """Warp the aligned fake face back into the unaligned full frame.

    Reverses the face-alignment transform described by
    ``self.alignment_params`` (rotation angle, scale, eyes-center point,
    desired left-eye position) and re-inserts the generated face
    ``self.fake_B`` into ``self.real_B_unaligned_full``, keeping a safety
    ``margin`` around the face crop that is shrunk to stay inside the
    tensor bounds.

    NOTE(review): this method contains debug leftovers — a ``print``, image
    dumps to a hard-coded user path, and an unconditional ``exit()``.
    Everything after the ``exit()`` call (including the entire
    ``alignUnaligned`` branch) is unreachable as written.

    Args:
        margin: requested pixel margin around the face crop; clamped so the
            expanded crop stays inside ``self.real_B_unaligned_full``.
        alignUnaligned: when False, the (currently unreachable) branch that
            pastes the warped fake into the full-resolution frame runs.
    """
    # get params
    desiredLeftEye = [
        float(self.alignment_params["desiredLeftEye"][0]),
        float(self.alignment_params["desiredLeftEye"][1])
    ]
    rotation_point = self.alignment_params["eyesCenter"]
    # negated to undo the rotation applied during alignment
    angle = -self.alignment_params["angle"]
    h, w = self.fake_B.shape[2:]
    # get original positions
    m1 = round(w * 0.5)
    m2 = round(desiredLeftEye[1] * w)
    # define the scale factor (inverse of the alignment scale)
    scale = 1 / self.alignment_params["scale"]
    width = int(self.alignment_params["shape"][0])
    # long edge of the rotated crop before scaling back up
    long_edge_size = width / abs(np.cos(np.deg2rad(angle)))
    w_original = int(scale * long_edge_size)
    h_original = int(scale * long_edge_size)
    # get offset
    tX = w_original * 0.5
    tY = h_original * desiredLeftEye[1]
    # get rotation center
    center = torch.ones(1, 2)
    center[..., 0] = m1
    center[..., 1] = m2
    # compute the transformation matrix
    M: torch.tensor = kornia.get_rotation_matrix2d(
        center, angle, scale).to(self.device)
    # translate so the rotation center lands at the expected offset
    M[0, 0, 2] += (tX - m1)
    M[0, 1, 2] += (tY - m2)
    # get insertion point (top-left corner of the crop in the full frame)
    x_start = int(rotation_point[0] - (0.5 * w_original))
    y_start = int(rotation_point[1] - (desiredLeftEye[1] * h_original))
    # _, _, h_tensor, w_tensor = self.real_B_unaligned_full.shape
    # get safe margin: largest margin <= requested that keeps the expanded
    # crop fully inside the full tensor (0 if even that is impossible)
    h_size_tensor, w_size_tensor = self.real_B_unaligned_full.shape[2:]
    margin = max(
        min(
            y_start - max(0, y_start - margin),
            x_start - max(0, x_start - margin),
            min(y_start + h_original + margin, h_size_tensor) - y_start - h_original,
            min(x_start + w_original + margin, w_size_tensor) - x_start - w_original,
        ), 0)
    # get face + margin unaligned space
    self.real_B_aligned_margin = self.real_B_unaligned_full[
        :, :,
        y_start - margin: y_start + h_original + margin,
        x_start - margin: x_start + w_original + margin]
    # invert matrix
    M_inverse = kornia.invert_affine_transform(M)
    # update output size to fit the 256 + scaled margin
    old_size = self.real_B_aligned_margin.shape[2]
    new_size = old_size + 2 * round(float(margin * scale))
    # NOTE(review): h_tensor/w_tensor are rebound here to the *cropped*
    # margin shape; the bounds checks further down use these values — verify
    # that is intended rather than the full-frame shape.
    _, _, h_tensor, w_tensor = self.real_B_aligned_margin.shape
    self.real_B_aligned_margin = kornia.warp_affine(
        self.real_B_aligned_margin, M_inverse, dsize=(new_size, new_size))
    # padding_mode="reflection")
    self.fake_B_aligned_margin = self.real_B_aligned_margin.clone(
    ).requires_grad_(True)
    # update margin as we now scale the image!
    # update start point
    start = round(float(margin * scale * new_size / old_size))
    # NOTE(review): debug print left in place
    print(start)
    # point = torch.tensor([0, 0, 1], dtype=torch.float)
    # M_ = M_inverse[0].clone().detach()
    # M_ = torch.cat((M_, torch.tensor([[0, 0, 1]], dtype=torch.float)))
    #
    # M_n = M[0].clone().detach()
    # M_n = torch.cat((M_n, torch.tensor([[0, 0, 1]], dtype=torch.float)))
    #
    # start_tensor = torch.matmul(torch.matmul(point, M_) + margin, M_n)
    # print(start_tensor)
    # start_y, start_x = round(float(start_tensor[0])), round(float(start_tensor[1]))
    # reinsert into tensor
    # assumes the aligned face crop is 256x256 — TODO confirm
    self.fake_B_aligned_margin[0, :, start:start + 256,
                               start:start + 256] = self.real_B
    # NOTE(review): debug image dumps to a hard-coded user path
    Image.fromarray(tensor2im(self.real_B_aligned_margin)).save(
        "/home/mo/datasets/ff_aligned_unaligned/real.png")
    Image.fromarray(tensor2im(self.fake_B_aligned_margin)).save(
        "/home/mo/datasets/ff_aligned_unaligned/fake.png")
    # NOTE(review): unconditional process exit — nothing below ever runs
    exit()
    if not alignUnaligned:
        # Now apply the transformation to original image
        # clone fake
        fake_B_clone = self.fake_B.clone().requires_grad_(True)
        # apply warp
        fake_B_warped: torch.tensor = kornia.warp_affine(
            fake_B_clone, M, dsize=(h_original, w_original))
        # make sure warping does not exceed real_B_unaligned_full dimensions
        if y_start < 0:
            fake_B_warped = fake_B_warped[:, :, abs(y_start):h_original, :]
            h_original += y_start
            y_start = 0
        if x_start < 0:
            fake_B_warped = fake_B_warped[:, :, :, abs(x_start):w_original]
            w_original += x_start
            x_start = 0
        if y_start + h_original > h_tensor:
            h_original -= (y_start + h_original - h_tensor)
            fake_B_warped = fake_B_warped[:, :, 0:h_original, :]
        if x_start + w_original > w_tensor:
            w_original -= (x_start + w_original - w_tensor)
            fake_B_warped = fake_B_warped[:, :, :, 0:w_original]
        # create mask that is true where fake_B_warped is 0
        # This is the background that is not filled with image after the
        # transformation
        mask = ((fake_B_warped[0][0] == 0)
                & (fake_B_warped[0][1] == 0)
                & (fake_B_warped[0][2] == 0))
        # fill fake_B_filled where mask = False with self.real_B_unaligned_full
        fake_B_filled = torch.where(
            mask,
            self.real_B_unaligned_full[:, :, y_start:y_start + h_original,
                                       x_start:x_start + w_original],
            fake_B_warped)
        # reinsert into tensor
        self.fake_B_unaligned = self.real_B_unaligned_full.clone(
        ).requires_grad_(True)
        mask = torch.zeros_like(self.fake_B_unaligned, dtype=torch.bool)
        mask[0, :, y_start:y_start + h_original,
             x_start:x_start + w_original] = True
        self.fake_B_unaligned = self.fake_B_unaligned.masked_scatter(
            mask, fake_B_filled)
        # cutout tensor: recompute the safe margin against the full frame
        h_size_tensor, w_size_tensor = self.real_B_unaligned_full.shape[2:]
        margin = max(
            min(
                y_start - max(0, y_start - margin),
                x_start - max(0, x_start - margin),
                min(y_start + h_original + margin, h_size_tensor) - y_start - h_original,
                min(x_start + w_original + margin, w_size_tensor) - x_start - w_original,
            ), 0)
        self.fake_B_unaligned = self.fake_B_unaligned[
            :, :,
            y_start - margin:y_start + h_original + margin,
            x_start - margin:x_start + w_original + margin]
        self.real_B_unaligned = self.real_B_unaligned_full[
            :, :,
            y_start - margin:y_start + h_original + margin,
            x_start - margin:x_start + w_original + margin]