def test_multiple_broadcast_compose(self):
    """Composing (1) ∘ (N) ∘ (1) batched transforms broadcasts to batch N.

    t1 and t2 have batch size 1; tN has batch size N. The composed
    transform's matrix must broadcast to shape (N, 4, 4) and its 3x3
    scale block must equal the product of the three scale factors.
    """
    t1 = Scale(0.1, 0.1, 0.1)
    t2 = Scale(0.2, 0.2, 0.2)
    N = 10
    scale_n = torch.tensor([0.3] * N)
    tN = Scale(scale_n)
    t1N2 = t1.compose(tN.compose(t2))
    composed_mat = t1N2.get_matrix()
    # assertEqual (not assertTrue on ==) so a failure reports both shapes.
    self.assertEqual(composed_mat.shape, (N, 4, 4))
    expected_mat = torch.eye(3, dtype=torch.float32) * 0.3 * 0.2 * 0.1
    self.assertTrue(torch.allclose(composed_mat[0, :3, :3], expected_mat))
def test_broadcast_compose(self):
    """Composing batch-1 with batch-N transforms broadcasts to batch N.

    Also checks that composing two batch-1 transforms stays batch 1 and
    that composition does not mutate the operands' internal matrices.
    """
    t1 = Scale(0.1, 0.1, 0.1)
    N = 10
    scale_n = torch.tensor([0.3] * N)
    tN = Scale(scale_n)
    t1N = t1.compose(tN)
    # The inputs keep their own batch sizes...
    # assertEqual (not assertTrue on ==) so a failure reports both shapes.
    self.assertEqual(t1._matrix.shape, (1, 4, 4))
    self.assertEqual(tN._matrix.shape, (N, 4, 4))
    # ...while the composed result broadcasts 1 against N.
    self.assertEqual(t1N.get_matrix().shape, (N, 4, 4))
    t11 = t1.compose(t1)
    self.assertEqual(t11.get_matrix().shape, (1, 4, 4))
def test_broadcast_compose_fail(self):
    """Composing batch sizes N and M (neither equal to 1) must raise.

    Broadcasting only reconciles mismatched batch dimensions when one of
    them is 1, so an N-vs-M compose is expected to fail with ValueError.
    """
    N, M = 10, 20
    batch_scale = torch.tensor([0.3] * N)
    transform_n = Scale(batch_scale)
    tx = torch.tensor([0.2] * M)
    ty = torch.tensor([0.3] * M)
    tz = torch.tensor([0.4] * M)
    transform_m = Translate(tx, ty, tz)
    with self.assertRaises(ValueError):
        combined = transform_n.compose(transform_m)
        # The error may surface lazily, when the matrix is materialized.
        combined.get_matrix()
def test_compose_fail(self):
    """compose() must reject arguments that are not Transform3d objects."""
    scale_transform = Scale(0.1, 0.1, 0.1)
    # A raw tensor is not a Transform3d, so compose must raise ValueError.
    with self.assertRaises(ValueError):
        scale_transform.compose(torch.randn(100))