Example #1
    def test_sigmoid_alpha_blend(self):
        """
        Test that the outputs of the tensorised sigmoid alpha blend function
        match those of the naive iterative version, and that the gradients match.
        """

        # Create dummy outputs of rasterization simulating a cube in the centre
        # of the image with surrounding padded values.
        N, S, K = 1, 8, 2
        pix_to_face = -torch.ones((N, S, S, K), dtype=torch.int64)
        h = int(S / 2)
        pix_to_face_full = torch.randint(size=(N, h, h, K), low=0, high=100)
        s = int(S / 4)
        e = int(0.75 * S)
        pix_to_face[:, s:e, s:e, :] = pix_to_face_full
        bary_coords = torch.ones((N, S, S, K, 3))

        # randomly flip the sign of the distance
        # (-) means inside triangle, (+) means outside triangle.
        random_sign_flip = torch.rand((N, S, S, K))
        random_sign_flip[random_sign_flip > 0.5] *= -1.0
        dists = torch.randn(size=(N, S, S, K))
        dists1 = dists * random_sign_flip
        dists2 = dists1.clone()
        dists1.requires_grad = True
        dists2.requires_grad = True
        colors = torch.randn_like(bary_coords)
        fragments1 = Fragments(
            pix_to_face=pix_to_face,
            bary_coords=bary_coords,  # dummy
            zbuf=pix_to_face,  # dummy
            dists=dists1,
        )
        fragments2 = Fragments(
            pix_to_face=pix_to_face,
            bary_coords=bary_coords,  # dummy
            zbuf=pix_to_face,  # dummy
            dists=dists2,
        )
        blend_params = BlendParams(sigma=2e-1)
        images = sigmoid_alpha_blend(colors, fragments1, blend_params)
        images_naive = sigmoid_blend_naive(colors, fragments2, blend_params)
        self.assertTrue(torch.allclose(images, images_naive))

        torch.manual_seed(231)
        images.sum().backward()
        self.assertIsNotNone(dists1.grad)
        images_naive.sum().backward()
        self.assertIsNotNone(dists2.grad)

        self.assertTrue(torch.allclose(dists1.grad, dists2.grad, rtol=1e-5))
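
The test above compares the vectorised pytorch3d.renderer.blending.sigmoid_alpha_blend against a test-local helper sigmoid_blend_naive, which is not part of the library API. Below is a minimal per-pixel sketch of what such a reference could look like, assuming the standard sigmoid blending rule (alpha = 1 - prod_k(1 - sigmoid(-dist_k / sigma)) over faces with pix_to_face >= 0), together with the imports the snippet relies on under PyTorch3D's module layout; the function body is illustrative, not the library's implementation.

import torch

from pytorch3d.renderer.blending import BlendParams, sigmoid_alpha_blend
from pytorch3d.renderer.mesh.rasterizer import Fragments


def sigmoid_blend_naive(colors, fragments, blend_params):
    # Iterative reference: loop over every pixel and its K nearest faces.
    # RGB comes from the closest face; alpha is the probability that at
    # least one valid face covers the pixel.
    N, H, W, K = fragments.pix_to_face.shape
    pixel_colors = torch.ones(
        (N, H, W, 4), dtype=colors.dtype, device=colors.device
    )
    for n in range(N):
        for h in range(H):
            for w in range(W):
                transparency = 1.0
                for k in range(K):
                    if fragments.pix_to_face[n, h, w, k] >= 0:
                        prob = torch.sigmoid(
                            -fragments.dists[n, h, w, k] / blend_params.sigma
                        )
                        transparency = transparency * (1.0 - prob)
                pixel_colors[n, h, w, :3] = colors[n, h, w, 0, :]
                pixel_colors[n, h, w, 3] = 1.0 - transparency
    return pixel_colors

Because dists carries requires_grad=True, both the vectorised function and a looped reference like this backpropagate through the sigmoid, which is what the gradient comparison at the end of the test exercises.
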
Example #2
    def render_torch(self,
                     verts,
                     faces,
                     rgb,
                     bcg_color=(1., 1., 1.),
                     get_depth=False,
                     get_alpha=False):
        # Build a vertex-textured mesh, rasterize it, and blend per-face
        # colors into RGB images; optionally also return depth and alpha maps.
        b = verts.size(0)
        textures = TexturesVertex(verts_features=rgb.view(b, -1, 3))
        mesh = Meshes(verts=verts, faces=faces, textures=textures)

        fragments = self.rasterizer_torch(mesh)
        texels = mesh.sample_textures(fragments)
        materials = Materials(device=verts.device)
        blend_params = BlendParams(background_color=bcg_color)
        images = hard_rgb_blend(texels, fragments, blend_params)
        images = images[..., :3].permute(0, 3, 1, 2)

        out = (images, )
        if get_depth:
            depth = fragments.zbuf[..., 0]
            mask = (depth == -1.0).float()
            max_depth = self.max_depth + 0.5 * (self.max_depth - self.min_depth)
            depth = mask * max_depth * torch.ones_like(depth) + (1 - mask) * depth
            out = out + (depth, )
        if get_alpha:
            colors = torch.ones_like(fragments.bary_coords)
            blend_params = BlendParams(sigma=1e-2,
                                       gamma=1e-4,
                                       background_color=(1., 1., 1.))
            alpha = sigmoid_alpha_blend(colors, fragments, blend_params)[..., -1]
            out = out + (alpha, )
        if len(out) == 1:
            out = out[0]
        return out
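
render_torch depends on attributes defined elsewhere in its class (self.rasterizer_torch, self.min_depth, self.max_depth) and on imports such as Meshes, TexturesVertex, Materials, BlendParams, hard_rgb_blend and sigmoid_alpha_blend from pytorch3d.structures and pytorch3d.renderer. A minimal setup sketch for the missing attributes, assuming a standard PyTorch3D MeshRasterizer with FoV perspective cameras; all concrete values here are hypothetical.

import torch
from pytorch3d.renderer import (
    FoVPerspectiveCameras,
    MeshRasterizer,
    RasterizationSettings,
)

# Hypothetical values; the real class sets these in its constructor.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
cameras = FoVPerspectiveCameras(device=device)
raster_settings = RasterizationSettings(
    image_size=128,       # output resolution (H == W)
    blur_radius=0.0,      # hard rasterization for the hard_rgb_blend path
    faces_per_pixel=1,    # K nearest faces kept per pixel
)
rasterizer_torch = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)
min_depth, max_depth = 0.1, 10.0  # used to fill background pixels in the depth map

If the soft alpha path (get_alpha=True) is used, one would normally choose a small positive blur_radius and faces_per_pixel > 1 so that sigmoid_alpha_blend sees more than one face per pixel.
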
Example #3
def fn():
    # test forward and backward pass
    images = sigmoid_alpha_blend(colors, fragments, blend_params)
    images.sum().backward()
    torch.cuda.synchronize()
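
fn() closes over colors, fragments and blend_params created by the surrounding benchmark harness. A sketch of hypothetical inputs the closure might capture, assuming CUDA tensors sized like a typical soft-rasterization output; the names, shapes and sigma value are illustrative only.

import torch
from pytorch3d.renderer.blending import BlendParams, sigmoid_alpha_blend
from pytorch3d.renderer.mesh.rasterizer import Fragments

# Hypothetical benchmark inputs.
device = torch.device("cuda")
N, S, K = 4, 128, 50
pix_to_face = torch.randint(low=-1, high=1000, size=(N, S, S, K), device=device)
bary_coords = torch.rand((N, S, S, K, 3), device=device)
dists = torch.randn((N, S, S, K), device=device, requires_grad=True)
colors = torch.randn((N, S, S, K, 3), device=device)
fragments = Fragments(
    pix_to_face=pix_to_face,
    bary_coords=bary_coords,
    zbuf=pix_to_face,  # dummy; not used by sigmoid blending
    dists=dists,
)
blend_params = BlendParams(sigma=1e-4)

The closure can then be timed by calling fn() repeatedly and measuring wall-clock time; the torch.cuda.synchronize() call ensures the queued GPU work has finished before the timer stops.
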