def test_forward_rgb_th_3():
    """Same silhouette as blender"""
    # load teapot
    vertices, faces, textures = utils.load_teapot_batch_th()

    # Fill back by reversing face points order
    faces = torch.cat([faces, faces[:, :, faces.new([2, 1, 0]).long()]], dim=1)
    textures = torch.cat([textures, textures.permute(0, 1, 4, 3, 2, 5)], dim=1)
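    # Reversing the vertex order flips each face's winding (and hence its
    # normal) so back-facing triangles are rasterized too; the permute
    # transposes each face's texel cube to match the reversed vertex order.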
    # Add lighting
    light_intensity_ambient = 1
    light_intensity_directional = 0
    light_color_ambient = [1, 1, 1]  # white
    light_color_directional = [1, 1, 1]  # white
    light_direction = [0, 1, 0]  # up-to-down
    faces_lighting = vertices_to_faces_th(vertices, faces)
    textures = lighting_th(faces_lighting, textures, light_intensity_ambient,
                           light_intensity_directional, light_color_ambient,
                           light_color_directional, light_direction)
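    # Pure ambient light (intensity 1, directional 0, white color) passes the
    # textures through lighting_th unchanged.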
    faces_th = preprocess_th(vertices, faces, perspective=True)
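    # RasterizeRGB arguments here are presumably image size (256), near/far
    # clipping planes (0.1/100), rasterization eps, and background color.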
    rasterize_rgb_th = RasterizeRGB(256, 0.1, 100, 1e-3, [0, 0, 0])
    images = rasterize_rgb_th(faces_th, textures)
    image = images[2].cpu().numpy().mean(0)
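    # Averaging the RGB channels of the (flat-white) render collapses the third
    # batch element to a binary silhouette comparable with the Blender mask.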

    # Extract silhouette from blender image
    ref = scipy.misc.imread('./tests/data/teapot_blender.png')
    ref = ref.astype('float32')
    ref = (ref.min(-1) != 255).astype('float32')
    scipy.misc.imsave('./tests/data/test_rasterize_rgb_th3.png', image)
    assert np.mean(np.abs(ref - image)) < 1e-8


def test_forward_rgb_th_2():
    """Different viewpoint"""
    # load teapot
    vertices, faces, textures = utils.load_teapot_batch_th()

    # Fill back by reversing face points order
    faces = torch.cat([faces, faces[:, :, faces.new([2, 1, 0]).long()]], dim=1)
    textures = torch.cat([textures, textures.permute(0, 1, 4, 3, 2, 5)], dim=1)
    # Add lighting
    light_intensity_ambient = 0.5
    light_intensity_directional = 0.5
    light_color_ambient = [1, 1, 1]  # white
    light_color_directional = [1, 1, 1]  # white
    light_direction = [0, 1, 0]  # up-to-down
    faces_lighting = vertices_to_faces_th(vertices, faces)
    textures = lighting_th(faces_lighting, textures, light_intensity_ambient,
                           light_intensity_directional, light_color_ambient,
                           light_color_directional, light_direction)
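    # The 50/50 ambient/directional mix produces actual shading, so the output
    # is checked against a pre-rendered reference with a looser 1e-2 tolerance.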
    faces_th = preprocess_th(vertices,
                             faces,
                             perspective=True,
                             eye=[1, 1, -2.7])
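    # The camera is moved off the z axis (the default eye is roughly
    # [0, 0, -2.732]) to exercise a non-default viewpoint.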
    rasterize_rgb_th = RasterizeRGB(256, 0.1, 100, 1e-3, [0, 0, 0])
    images = rasterize_rgb_th(faces_th, textures)
    image = images[2].cpu().numpy().transpose(1, 2, 0)

    ref = scipy.misc.imread('./tests/data/test_rasterize2.png') / 255

    scipy.misc.imsave('./tests/data/test_rasterize_rgb_th2.png', image)
    assert np.mean(np.abs(ref - image)) < 1e-2


def test_compare_preprocess_teapot():
    """Chainer and PyTorch preprocessing pipelines agree on the teapot batch."""
    vertices, faces, textures = utils.load_teapot_batch()
    viewing_angle = 30
    eye = [0, 0, -(1. / math.tan(math.radians(viewing_angle)) + 1)]
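    # Camera on the -z axis at distance 1 / tan(30 deg) + 1 ~= 2.732 from the
    # origin, the default teapot viewpoint.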
    look_at_vertices = look_at(vertices, eye)
    perspective_vertices = perspective(look_at_vertices, angle=viewing_angle)
    faces_2 = vertices_to_faces(perspective_vertices, faces)

    vertices_th, faces_th, _ = utils.load_teapot_batch_th()
    eye = [0, 0, -(1. / math.tan(math.radians(viewing_angle)) + 1)]
    look_at_vertices_th = look_at_th(vertices_th, eye)
    perspective_vertices_th = perspective_th(look_at_vertices_th,
                                             angle=viewing_angle)
    faces_2_th = vertices_to_faces_th(perspective_vertices_th, faces_th)
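    # The raw vertices must match bit-for-bit; after look_at / perspective and
    # the face gather, the chainer and torch results should agree to float32
    # tolerance (1e-5).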
    assert np.mean(np.abs(vertices.get() - vertices_th.numpy())) == 0
    assert np.mean(
        np.abs(look_at_vertices.data.get() -
               look_at_vertices_th.numpy())) < 1e-5
    assert np.mean(
        np.abs(perspective_vertices.data.get() -
               perspective_vertices_th.numpy())) < 1e-5
    assert np.mean(np.abs(faces_2.data.get() - faces_2_th.numpy())) < 1e-5
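

# For reference, a minimal sketch of the gather that vertices_to_faces /
# vertices_to_faces_th are expected to perform (assumed semantics and a
# hypothetical helper name, not the library implementation): pick up the three
# vertex coordinates of every face, turning (bs, nv, 3) vertices and
# (bs, nf, 3) index triples into a (bs, nf, 3, 3) tensor.
def _vertices_to_faces_sketch(vertices, faces):
    bs, nv = vertices.shape[:2]
    # Offset each batch's indices so a single flat gather covers the batch.
    offsets = (torch.arange(bs, device=faces.device) * nv)[:, None, None]
    return vertices.reshape(bs * nv, 3)[(faces + offsets).long()]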


def test_forward_th():
    """Silhouettes rendered from chainer-loaded and torch-loaded teapots match."""

    # load teapot
    vertices, faces, textures = utils.load_teapot_batch()
    vertices_th, faces_th, textures_th = utils.load_teapot_batch_th()

    # create renderer
    renderer_th = RendererTh()
    renderer_th.image_size = 256
    renderer_th.anti_aliasing = False

    renderer = Renderer()
    renderer.fill_back = False
    renderer.image_size = 256
    renderer.anti_aliasing = False

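    # Both copies of the teapot go through the chainer Renderer (the torch
    # tensors are converted to cupy first), so the comparison isolates the two
    # data loaders; renderer_th above is configured to match but not exercised
    # here.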
    images = renderer.render_silhouettes(vertices, faces)
    images_th = renderer.render_silhouettes(
        cp.asarray(vertices_th.cpu().numpy()),
        cp.asarray(faces_th.cpu().numpy()))
    assert (images_th - images).data.get().sum() == 0


def test_forward_th_2():
    """Rendering a teapot without anti-aliasing."""

    # load teapot
    vertices, faces, textures = utils.load_teapot_batch_th()

    # create renderer
    renderer = Renderer()
    renderer.image_size = 256
    renderer.anti_aliasing = False

    # render
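    # The torch-loaded tensors are moved to cupy so the chainer Renderer can
    # consume them; the result is compared against the reference image from the
    # original chainer pipeline.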
    images_th = renderer.render(cp.asarray(vertices.cpu().numpy()),
                                cp.asarray(faces.cpu().numpy()),
                                cp.asarray(textures.cpu().numpy()))
    images = images_th.data.get()
    image = images[2]
    image = image.transpose((1, 2, 0))

    scipy.misc.imsave('./tests/data/test_rasterize_th1.png', image)
    ref = scipy.misc.imread('./tests/data/test_rasterize1.png')
    ref = ref.astype('float32')
    assert np.abs((ref / 255 - image)).mean() < 1e-3