def test_compare_preprocess_simple():
    # Check that the Chainer/CuPy and PyTorch preprocessing pipelines
    # (look_at -> perspective -> vertices_to_faces) agree on a tiny mesh.
    # Prepare chainer arrays
    viewing_angle = 30
    eye = [0, 0, -(1. / math.tan(math.radians(viewing_angle)) + 1)]
    vertices = np.array([[0.8, 0.8, 1.], [0.0, -0.5, 1.], [0.2, -0.4, 1.]])
    faces = np.array([[0, 1, 2]])

    vertices_ch = cp.array(vertices, 'float32')
    faces_ch = cp.array(faces, 'int32')
    vertices_ch, faces_ch = utils.to_minibatch((vertices_ch, faces_ch))

    # Prepare torch arrays
    vertices_th, faces_th = utils.to_minibatch_th((vertices, faces))

    look_at_vertices = look_at(vertices_ch, eye)
    perspective_vertices = perspective(look_at_vertices, angle=viewing_angle)
    faces_2 = vertices_to_faces(perspective_vertices, faces_ch)

    look_at_vertices_th = look_at_th(vertices_th, eye)
    perspective_vertices_th = perspective_th(look_at_vertices_th,
                                             angle=viewing_angle)
    faces_2_th = vertices_to_faces_th(perspective_vertices_th, faces_th)
    assert np.mean(np.abs(vertices_ch.get() - vertices_th.numpy())) == 0
    assert np.mean(
        np.abs(look_at_vertices.data.get() -
               look_at_vertices_th.numpy())) < 1e-5
    assert np.mean(
        np.abs(perspective_vertices.data.get() -
               perspective_vertices_th.numpy())) < 1e-5
    assert np.mean(np.abs(faces_2.data.get() - faces_2_th.numpy())) < 1e-5
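For reference, the projection steps being compared above reduce to a few lines of NumPy. The sketch below is a simplified, framework-free version of the pipeline under test, assuming the conventions the tests imply (camera at eye looking at the origin with up = +y, viewing angle given in degrees, z unchanged by the perspective divide); it is not the library's implementation.

import math
import numpy as np

def look_at_np(vertices, eye, at=(0., 0., 0.), up=(0., 1., 0.)):
    # Rotate and translate world-space vertices (B, V, 3) into camera space.
    eye, at, up = (np.asarray(v, dtype='float32') for v in (eye, at, up))
    z_axis = (at - eye) / np.linalg.norm(at - eye)      # camera forward
    x_axis = np.cross(up, z_axis)
    x_axis /= np.linalg.norm(x_axis)                    # camera right
    y_axis = np.cross(z_axis, x_axis)                   # camera up
    rotation = np.stack([x_axis, y_axis, z_axis])       # (3, 3)
    return (vertices - eye) @ rotation.T

def perspective_np(vertices, angle=30.):
    # Divide x and y by z * tan(angle); z passes through unchanged.
    width = math.tan(math.radians(angle))
    x, y, z = vertices[..., 0], vertices[..., 1], vertices[..., 2]
    return np.stack([x / (z * width), y / (z * width), z], axis=-1)

def vertices_to_faces_np(vertices, faces):
    # Gather per-face vertex coordinates: (B, V, 3) + (B, F, 3) -> (B, F, 3, 3).
    return np.stack([v[f] for v, f in zip(vertices, faces)])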
Example #2
    def render(self, vertices, faces, textures):
        # Render a batch of RGB images from vertices, faces and textures.
        # fill back: duplicate faces with reversed winding so that back
        # faces are also drawn
        if self.fill_back:
            faces = cf.concat((faces, faces[:, :, ::-1]), axis=1).data
            textures = cf.concat(
                (textures, textures.transpose((0, 1, 4, 3, 2, 5))), axis=1)

        # lighting
        faces_lighting = vertices_to_faces(vertices, faces)
        textures = lighting(faces_lighting, textures,
                            self.light_intensity_ambient,
                            self.light_intensity_directional,
                            self.light_color_ambient,
                            self.light_color_directional, self.light_direction)

        # viewpoint transformation
        if self.camera_mode == 'look_at':
            vertices = look_at(vertices, self.eye)
        elif self.camera_mode == 'look':
            vertices = look(vertices, self.eye, self.camera_direction)

        # perspective transformation
        if self.perspective:
            vertices = perspective(vertices, angle=self.viewing_angle)

        # rasterization
        faces = vertices_to_faces(vertices, faces)
        images = rasterize(faces, textures, self.image_size,
                           self.anti_aliasing, self.near, self.far,
                           self.rasterizer_eps, self.background_color)
        return images
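A minimal usage sketch for this method, assuming it lives on a Renderer class exposed as neural_renderer.Renderer (as in the upstream Chainer implementation) and that textures use the six-dimensional (batch, faces, ts, ts, ts, 3) layout implied by the transpose above; the import name, shapes and constants here are placeholders, not confirmed by this snippet.

import cupy as cp
import numpy as np
import neural_renderer  # assumed package/alias; adjust to this repo's import

batch_size, num_vertices, num_faces, texture_size = 1, 100, 200, 4

# Placeholder geometry with the shapes render() expects; arrays live on the
# GPU (CuPy), as in the tests above, because rasterization runs in CUDA.
vertices = cp.array(np.random.uniform(-1, 1, (batch_size, num_vertices, 3)), 'float32')
faces = cp.array(np.random.randint(0, num_vertices, (batch_size, num_faces, 3)), 'int32')
textures = cp.array(np.random.uniform(0, 1, (batch_size, num_faces, texture_size,
                                              texture_size, texture_size, 3)), 'float32')

renderer = neural_renderer.Renderer()  # assumed class exposing the method above
renderer.eye = [0, 0, -2.732]          # camera position used by the 'look_at' mode
images = renderer.render(vertices, faces, textures)  # (batch, 3, image_size, image_size)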
def test_perspective():
    # Project a single vertex at z=10 using the default viewing angle:
    # x and y are divided by z * tan(30 deg) = 10 / sqrt(3).
    v_in = [1, 2, 10]
    v_out = [np.sqrt(3) / 10, 2 * np.sqrt(3) / 10, 10]
    vertices = np.array(v_in, 'float32')
    vertices = vertices[None, None, :]
    transformed = perspective(vertices)
    chainer.testing.assert_allclose(transformed.data.flatten(),
                                    np.array(v_out, 'float32'))
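The expected values follow directly from the perspective rule: x and y are divided by z * tan(angle) while z passes through. A quick check of the arithmetic (the 30-degree default is an assumption consistent with the expected output):

import math
import numpy as np

angle = math.radians(30)               # default viewing angle assumed by the test
scale = 10 * math.tan(angle)           # z * tan(angle) = 10 / sqrt(3)
print(1 / scale, np.sqrt(3) / 10)      # 0.1732050... == 0.1732050...
print(2 / scale, 2 * np.sqrt(3) / 10)  # 0.3464101... == 0.3464101...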
Example #4
    def render_depth(self, vertices, faces):
        # Render a batch of per-pixel depth maps; no textures or lighting
        # are needed for depth.
        # fill back
        if self.fill_back:
            faces = cf.concat((faces, faces[:, :, ::-1]), axis=1).data

        # viewpoint transformation
        if self.camera_mode == 'look_at':
            vertices = look_at(vertices, self.eye)

        # perspective transformation
        if self.perspective:
            vertices = perspective(vertices, angle=self.viewing_angle)

        # rasterization
        faces = vertices_to_faces(vertices, faces)
        images = neurender.rasterize_depth(faces, self.image_size,
                                           self.anti_aliasing)
        return images
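A usage sketch matching the render() example above, under the same assumption that this method lives on neural_renderer.Renderer; depth rendering needs only geometry.

import cupy as cp
import numpy as np
import neural_renderer  # assumed package/alias, as in the render() sketch above

# Placeholder mesh: 100 vertices, 200 faces, batch size 1.
vertices = cp.array(np.random.uniform(-1, 1, (1, 100, 3)), 'float32')
faces = cp.array(np.random.randint(0, 100, (1, 200, 3)), 'int32')

renderer = neural_renderer.Renderer()  # assumed class exposing render_depth()
renderer.eye = [0, 0, -2.732]
depth = renderer.render_depth(vertices, faces)  # (batch, image_size, image_size) depth map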
def test_compare_preprocess_teapot():
    # Same Chainer-vs-PyTorch preprocessing comparison as above, but on a
    # batched teapot mesh loaded from file.
    vertices, faces, textures = utils.load_teapot_batch()
    viewing_angle = 30
    eye = [0, 0, -(1. / math.tan(math.radians(viewing_angle)) + 1)]
    look_at_vertices = look_at(vertices, eye)
    perspective_vertices = perspective(look_at_vertices, angle=viewing_angle)
    faces_2 = vertices_to_faces(perspective_vertices, faces)

    vertices_th, faces_th, _ = utils.load_teapot_batch_th()
    look_at_vertices_th = look_at_th(vertices_th, eye)
    perspective_vertices_th = perspective_th(look_at_vertices_th,
                                             angle=viewing_angle)
    faces_2_th = vertices_to_faces_th(perspective_vertices_th, faces_th)
    assert np.mean(np.abs(vertices.get() - vertices_th.numpy())) == 0
    assert np.mean(
        np.abs(look_at_vertices.data.get() -
               look_at_vertices_th.numpy())) < 1e-5
    assert np.mean(
        np.abs(perspective_vertices.data.get() -
               perspective_vertices_th.numpy())) < 1e-5
    assert np.mean(np.abs(faces_2.data.get() - faces_2_th.numpy())) < 1e-5
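Both comparison tests rely on batching helpers from utils whose code is not shown here. As an illustration of the pattern only (a hypothetical stand-in, not the actual utils implementation), a to_minibatch-style helper can simply repeat each array along a new leading batch axis:

import numpy as np

def to_minibatch_sketch(arrays, batch_size=4):
    # Hypothetical stand-in for utils.to_minibatch: tile each array along a
    # new leading batch axis so a single mesh becomes a minibatch.
    return [np.broadcast_to(a, (batch_size,) + a.shape).copy() for a in arrays]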