Example #1
# -*- coding: utf-8 -*-

import os
import sys

import numpy as np

BASE_DIR = os.path.dirname(__file__)

# pc_util is resolved from BASE_DIR or its parent, so extend sys.path before importing it
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, "../"))

import pc_util

data = np.load("scannet_scenes/scene0001_01.npy")
scene_points = data[:, 0:3]
colors = data[:, 3:6]
instance_labels = data[:, 6]
semantic_labels = data[:, 7]

output_folder = "demo_output"
if not os.path.exists(output_folder):
    os.mkdir(output_folder)

# Write the scene as OBJ files for visualization: RGB colors, instance labels, semantic labels
pc_util.write_ply_rgb(scene_points, colors,
                      os.path.join(output_folder, "scene.obj"))
pc_util.write_ply_color(scene_points, instance_labels,
                        os.path.join(output_folder, "scene_instance.obj"))
pc_util.write_ply_color(scene_points, semantic_labels,
                        os.path.join(output_folder, "scene_semantic.obj"))
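
The slicing above implies each preprocessed scene is stored as an (N, 8) array: XYZ, RGB, instance label, semantic label. A quick sanity check along those lines (the column layout is inferred from the slicing, not from a documented format) could be:

import numpy as np

data = np.load("scannet_scenes/scene0001_01.npy")
# Expected columns (inferred): x, y, z, r, g, b, instance label, semantic label
assert data.ndim == 2 and data.shape[1] == 8, data.shape
print("points:", data.shape[0])
print("semantic label ids:", np.unique(data[:, 7]).astype(np.int64))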
Example #2
# Module-level imports this snippet relies on; pc_util and sunrgbd_utils are
# project-local helpers, and the sunrgbd_object dataset wrapper is assumed to be
# defined in (or imported into) the enclosing module.
import os

import cv2
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image

import pc_util
import sunrgbd_utils

BASE_DIR = os.path.dirname(os.path.abspath(__file__))


def data_viz(data_dir, dump_dir=os.path.join(BASE_DIR, 'data_viz_dump')):
    ''' Examine and visualize SUN RGB-D data. '''
    sunrgbd = sunrgbd_object(data_dir)
    idxs = np.array(range(1, len(sunrgbd) + 1))
    np.random.seed(0)
    np.random.shuffle(idxs)
    for idx in range(len(sunrgbd)):
        data_idx = idxs[idx]
        print('-' * 10, 'data index: ', data_idx)
        pc = sunrgbd.get_depth(data_idx)
        print('Point cloud shape:', pc.shape)

        # Project points to image
        calib = sunrgbd.get_calibration(data_idx)
        uv, d = calib.project_upright_depth_to_image(pc[:, 0:3])
        print('Point UV:', uv)
        print('Point depth:', d)

        # Build a 256-entry HSV colormap with RGB values in [0, 255] for depth coloring
        cmap = plt.cm.get_cmap('hsv', 256)
        cmap = np.array([cmap(i) for i in range(256)])[:, :3] * 255

        img = sunrgbd.get_image(data_idx)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        for i in range(uv.shape[0]):
            depth = d[i]
            # Pick a colormap bin from the depth; clamp the index so near points stay in range
            color = cmap[min(int(120.0 / depth), 255), :]
            cv2.circle(img, (int(np.round(uv[i, 0])), int(np.round(uv[i, 1]))),
                       2,
                       color=tuple(color),
                       thickness=-1)
        if not os.path.exists(dump_dir):
            os.mkdir(dump_dir)
        Image.fromarray(img).save(os.path.join(dump_dir, 'img_depth.jpg'))

        # Load box labels
        objects = sunrgbd.get_label_objects(data_idx)
        print('Objects:', objects)

        # Draw 2D boxes on image
        img = sunrgbd.get_image(data_idx)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        for i, obj in enumerate(objects):
            cv2.rectangle(img, (int(obj.xmin), int(obj.ymin)),
                          (int(obj.xmax), int(obj.ymax)), (0, 255, 0), 2)
            cv2.putText(img, '%d %s' % (i, obj.classname),
                        (max(int(obj.xmin), 15), max(int(obj.ymin), 15)),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.8, (255, 0, 0), 2)
        Image.fromarray(img).save(os.path.join(dump_dir, 'img_box2d.jpg'))

        # Dump OBJ files for the colored point cloud
        for num_point in [10000, 20000, 40000, 80000]:
            sampled_pcrgb = pc_util.random_sampling(pc, num_point)
            pc_util.write_ply_rgb(
                sampled_pcrgb[:, 0:3],
                # RGB is stored in [0, 1]; scale to [0, 255] and use uint8 to avoid int8 overflow
                (sampled_pcrgb[:, 3:] * 255).astype(np.uint8),
                os.path.join(dump_dir, 'pcrgb_%dk.obj' % (num_point // 1000)))
        # Dump OBJ files for 3D bounding boxes
        # l,w,h correspond to dx,dy,dz
        # heading angle is from +X rotating towards -Y
        # (+X is 0 degrees, -Y is 90 degrees)
        oriented_boxes = []
        for obj in objects:
            obb = np.zeros((7))
            obb[0:3] = obj.centroid
            # obj.l, obj.w, obj.h are half-extents, so double them, and flip the
            # heading sign to match the convention expected by the box writer
            obb[3:6] = np.array([obj.l, obj.w, obj.h]) * 2
            obb[6] = -1 * obj.heading_angle
            print('Object cls, heading, l, w, h:',
                  obj.classname, obj.heading_angle, obj.l, obj.w, obj.h)
            oriented_boxes.append(obb)
        if len(oriented_boxes) > 0:
            oriented_boxes = np.vstack(tuple(oriented_boxes))
            pc_util.write_oriented_bbox(oriented_boxes,
                                        os.path.join(dump_dir, 'obbs.ply'))
        else:
            print('-' * 30)
            continue

        # Draw 3D boxes on depth points
        box3d = []
        ori3d = []
        for obj in objects:
            corners_3d_image, corners_3d = sunrgbd_utils.compute_box_3d(
                obj, calib)
            ori_3d_image, ori_3d = sunrgbd_utils.compute_orientation_3d(
                obj, calib)
            print('Corners 3D: ', corners_3d)
            box3d.append(corners_3d)
            ori3d.append(ori_3d)
        pc_box3d = np.concatenate(box3d, 0)
        pc_ori3d = np.concatenate(ori3d, 0)
        print(pc_box3d.shape)
        print(pc_ori3d.shape)
        pc_util.write_ply(pc_box3d, os.path.join(dump_dir,
                                                 'box3d_corners.ply'))
        pc_util.write_ply(pc_ori3d, os.path.join(dump_dir, 'box3d_ori.ply'))
        print('-' * 30)
        print('Point clouds and bounding boxes saved to PLY files under %s' %
              (dump_dir))
        print('Type anything to continue to the next sample...')
        input()
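
A minimal way to call the function, assuming the module-level setup above; the 'sunrgbd_trainval' directory name is only a placeholder for wherever the extracted SUN RGB-D data lives:

if __name__ == '__main__':
    # Placeholder path: point data_dir at your local SUN RGB-D extraction
    data_viz(os.path.join(BASE_DIR, 'sunrgbd_trainval'))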