Example 1
def matching_keypoints():
    """Returns the matching keypoints of other frames"""

    face_num = off_to_ply_conversion()  # Face numbers of the keypoints associated with the first frame
    rootdir = "/home/aditya/Documents/Sem_3/TDCV/project_2/tracking/ballet_vicon/mesh/"
    path, dirs, files = next(os.walk(rootdir))
    files = sorted(files)

    for i in range(1, len(files)):
        ba_vicon = read_off(rootdir + files[i])

        ba_vicon_points_xyz = ba_vicon["points"][["x", "y", "z"]].values

        vertex1_xyz = np.zeros([np.shape(face_num)[0], 3])
        vertex2_xyz = np.zeros([np.shape(face_num)[0], 3])
        vertex3_xyz = np.zeros([np.shape(face_num)[0], 3])
        for j in range(np.shape(face_num)[0]):
            vertex1_xyz[j] = ba_vicon_points_xyz[ba_vicon["mesh"]["v1"][face_num[j]]]
            vertex2_xyz[j] = ba_vicon_points_xyz[ba_vicon["mesh"]["v2"][face_num[j]]]
            vertex3_xyz[j] = ba_vicon_points_xyz[ba_vicon["mesh"]["v3"][face_num[j]]]

        bary_coords = Barycentric_coordinates(vertex1_xyz, vertex2_xyz,
                                              vertex3_xyz,
                                              np.shape(face_num)[0])
        if files[i].endswith('.off'):
            files[i] = files[i][:-4]
        write_ply(
            "/home/aditya/PycharmProjects/OpenCV-python/Project_2/ply_files_keypoints/"
            + files[i] + ".ply",
            points=bary_coords)
Example 2
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-source',
                        help="path to ModelNet dataset(e.g. ModelNet40/)",
                        default=None)
    parser.add_argument(
        '-target',
        help="path to folder of output points(e.g. ModelNet40_1024_points/)",
        default=None)
    parser.add_argument(
        '-point_num',
        type=int,
        default=1024,
        help="How many points are sampled from each mesh object")
    parser.add_argument('-normal',
                        dest='normal',
                        action='store_true',
                        help="Normalize point clouds while sampling")
    parser.set_defaults(normal=False)
    args = parser.parse_args()

    source_dir = args.source
    categories_all = [
        name for name in os.listdir(source_dir)
        if name not in ['.DS_Store', 'README.txt']
    ]
    target_dir = args.target

    os.mkdir(target_dir)

    for category in categories_all:
        os.mkdir(os.path.join(target_dir, category))
        for mode in ['train', 'test']:
            source_folder = os.path.join(source_dir, category, mode)
            target_folder = os.path.join(target_dir, category, mode)
            os.mkdir(target_folder)

            mesh_names = [
                os.path.join(source_folder, name)
                for name in os.listdir(source_folder) if name != '.DS_Store'
            ]
            for name in mesh_names:
                vertices, faces = get_vertices_faces_from_off_file(name)
                pointcloud = mesh2pointcloud(vertices,
                                             faces,
                                             args.point_num,
                                             normalize=args.normal)
                # save model
                model = pd.DataFrame()
                model['x'] = pointcloud[:, 0]
                model['y'] = pointcloud[:, 1]
                model['z'] = pointcloud[:, 2]
                name = name.split('/')[-1]
                target_name = os.path.join(target_folder, name[:-4] + '.ply')
                write_ply(target_name, points=model)

        print('finished category: {}'.format(category))

    print('Finished generating dataset: {}'.format(target_dir))
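The get_vertices_faces_from_off_file helper used above is not shown. Below is a minimal sketch of such an OFF reader, assuming the standard ASCII OFF layout used by ModelNet (header line, counts line, vertex rows, then triangular face rows); it is illustrative, not the original implementation.

import numpy as np

def get_vertices_faces_from_off_file(path):
    """Illustrative OFF reader: returns (n_vertices, 3) floats and (n_faces, 3) vertex indices."""
    with open(path) as f:
        tokens = f.read().split()
    # Some ModelNet files fuse the "OFF" header with the counts line ("OFF490 518 0").
    if tokens[0] == 'OFF':
        tokens = tokens[1:]
    else:
        tokens[0] = tokens[0][3:]
    n_vertices, n_faces = int(tokens[0]), int(tokens[1])
    data = np.asarray(tokens[3:], dtype=np.float64)
    vertices = data[:n_vertices * 3].reshape(n_vertices, 3)
    # Each face row is "3 i j k"; purely triangular meshes are assumed here.
    faces = data[n_vertices * 3:].astype(np.int64).reshape(n_faces, 4)[:, 1:]
    return vertices, faces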
Example 3
def process_test_sunrgbd(path):

    if isdir(join(path, 'test')):
        rmtree(join(path, 'test'))

    makedirs(join(path, 'test'))

    gt_data = loadmat(join(
        path, 'SUNRGBDMetaStructIOTest.mat'))['SUNRGBDMetaStructIOTest'][0]
    tv_split = loadmat(join(path, 'allsplit.mat'))['trainvalsplit'][0, 0]
    t_dirs = tv_split[0][:, 0]
    v_dirs = tv_split[1][:, 0]

    t_dirs = [str(t[0]) for t in t_dirs]
    v_dirs = [str(v[0]) for v in v_dirs]

    t_dirs.sort()
    v_dirs.sort()
    num_skipped = 0
    scene_index = 0
    for scene_gt in gt_data:
        scene_path = scene_gt[0][0]
        intrinsics = scene_gt[3]
        extrinsics = scene_gt[2]
        d_name = scene_gt[7][0]
        rgb_name = scene_gt[8][0]

        save_path = join(path, 'test', basename(str(scene_path)))

        rgb_img = imread(
            join(path, "sunrgbd_test", scene_path[14:], 'image',
                 str(rgb_name)))
        d_img = imread(
            join(path, "sunrgbd_test", scene_path[14:], 'depth', str(d_name)))
        colored_pc = rgbd2pc(rgb_img, d_img, intrinsics,
                             extrinsics).astype('float32')
        result = pd.DataFrame(dtype='float32')
        result["x"] = colored_pc[:, 0]
        result["y"] = colored_pc[:, 1]
        result["z"] = colored_pc[:, 2]

        result["r"] = colored_pc[:, 3]
        result["g"] = colored_pc[:, 4]
        result["b"] = colored_pc[:, 5]

        write_ply(save_path + '.ply', points=result)
        np.save(save_path + '_rgb.npy', rgb_img)
        np.save(save_path + '_k.npy', intrinsics)
        np.save(save_path + '_d.npy', d_img)
        np.save(save_path + '_rt.npy', extrinsics)
        scene_index += 1

        if scene_index % 100 == 0:
            print('\tProcessed {}/{}.'.format(scene_index, len(gt_data)))

    print('Skipped {} entries'.format(num_skipped))
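Examples 3, 10 and 11 all rely on an rgbd2pc helper that is not shown. The sketch below is the usual pinhole back-projection such a helper performs; the depth encoding (millimetres) and the way the extrinsics are applied are assumptions rather than the original SUN RGB-D code.

import numpy as np

def rgbd2pc(rgb_img, d_img, intrinsics, extrinsics):
    """Illustrative back-projection: returns an (N, 6) array of x, y, z, r, g, b."""
    fx, fy = intrinsics[0, 0], intrinsics[1, 1]
    cx, cy = intrinsics[0, 2], intrinsics[1, 2]
    depth = d_img.astype(np.float32) / 1000.0   # assuming depth stored in millimetres
    v, u = np.nonzero(depth > 0)                # pixel coordinates with valid depth
    z = depth[v, u]
    x = (u - cx) * z / fx
    y = (v - cy) * z / fy
    points = np.stack([x, y, z], axis=-1) @ extrinsics[:3, :3].T
    colors = rgb_img[v, u].astype(np.float32)   # r, g, b per point
    return np.concatenate([points, colors], axis=-1)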
Example 4
def main():
    start_t = time.time()
    room_fname = "scans/1LXtFkjw3qL/region_segmentations/region0.ply"
    label_fname = "scans/1LXtFkjw3qL/region_segmentations/region0.semseg.json"
    fsegs_fname = "scans/1LXtFkjw3qL/region_segmentations/region0.fsegs.json"
    result = ply_to_xyz(room_fname, label_fname, fsegs_fname)
    write_ply("test.ply", points=result)
    end_t = time.time()

    total_time = float(end_t - start_t)
    print("Took {} seconds".format(total_time))
Example 5
def save_ply(save_path, pointcloud):
    print(save_path)
    print(pointcloud)

    result = pd.DataFrame()

    result["x"] = pointcloud[:,0]
    result["y"] = pointcloud[:,1]
    result["z"] = pointcloud[:,2]

    from pyntcloud.io import write_ply
    write_ply(save_path,points=result,as_text=True)
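A small usage sketch for save_ply: it expects an (N, 3) array of xyz coordinates and a target path; both values below are made up for illustration.

import numpy as np

# Hypothetical usage: write 1000 random points to disk as an ASCII .ply file.
pointcloud = np.random.rand(1000, 3).astype(np.float32)
save_ply("random_points.ply", pointcloud)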
Example 6
def off_to_ply_conversion():
    """Function that converts a .off file into .ply file"""

    ballet_vicon = read_off(
        "/home/aditya/Documents/Sem_3/TDCV/project_2/tracking/ballet_vicon/mesh/1746411.off"
    )
    n = 7000

    ballet_vicon_points_xyz = ballet_vicon["points"][["x", "y", "z"]].values  # x, y, z coordinates of every vertex
    ballet_vicon_points_rgb = ballet_vicon["points"][["red", "green", "blue"]].values

    v1_xyz = ballet_vicon_points_xyz[ballet_vicon["mesh"]["v1"]]  # Coordinates of vertex 1 of every triangular face
    v2_xyz = ballet_vicon_points_xyz[ballet_vicon["mesh"]["v2"]]  # Coordinates of vertex 2 of every triangular face
    v3_xyz = ballet_vicon_points_xyz[ballet_vicon["mesh"]["v3"]]  # Coordinates of vertex 3 of every triangular face

    v1_rgb = ballet_vicon_points_rgb[ballet_vicon["mesh"]["v1"]]
    v2_rgb = ballet_vicon_points_rgb[ballet_vicon["mesh"]["v2"]]
    v3_rgb = ballet_vicon_points_rgb[ballet_vicon["mesh"]["v3"]]

    areas = triangle_area(v1_xyz, v2_xyz, v3_xyz)
    probabilities = areas / areas.sum()
    weighted_random_choices = np.random.choice(
        range(len(areas)), size=n,
        p=probabilities)  # Indices of the faces sampled with probability proportional to their area

    v1_xyz = v1_xyz[weighted_random_choices]  # Vertex coordinates of the sampled faces
    v2_xyz = v2_xyz[weighted_random_choices]
    v3_xyz = v3_xyz[weighted_random_choices]

    v1_rgb = v1_rgb[weighted_random_choices]
    v2_rgb = v2_rgb[weighted_random_choices]
    v3_rgb = v3_rgb[weighted_random_choices]

    kp = Barycentric_coordinates(v1_xyz, v2_xyz, v3_xyz, n)
    write_ply(
        "/home/aditya/PycharmProjects/OpenCV-python/Project_2/ply_files_keypoints/1746411.ply",
        points=kp)
    return weighted_random_choices
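The triangle_area helper that drives the area-weighted sampling above is not shown; the standard cross-product formula it presumably implements is sketched here.

import numpy as np

def triangle_area(v1_xyz, v2_xyz, v3_xyz):
    # Area of each triangle is half the norm of the cross product of two edge vectors.
    cross = np.cross(v2_xyz - v1_xyz, v3_xyz - v1_xyz)
    return 0.5 * np.linalg.norm(cross, axis=1)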
Example 7
def write_color_ply(fname, points, color_vals, confidence, normals):
    new_pc = fuse_point_cloud(points, color_vals, confidence, normals)
    write_ply(fname, new_pc.points, as_text=True)
    print(fname)
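The fuse_point_cloud helper used here (and, with only points and colours, in Example 9) is not shown. The sketch below is one plausible shape for it; the column names are chosen for illustration and are not taken from the original code.

import numpy as np
import pandas as pd
from pyntcloud import PyntCloud

def fuse_point_cloud(points, color_vals, confidence, normals):
    # Pack coordinates, colours, per-point confidence and normals into one
    # DataFrame; PyntCloud exposes it again through its .points attribute.
    df = pd.DataFrame({
        'x': points[:, 0], 'y': points[:, 1], 'z': points[:, 2],
        'red': color_vals[:, 0], 'green': color_vals[:, 1], 'blue': color_vals[:, 2],
        'confidence': np.ravel(confidence),
        'nx': normals[:, 0], 'ny': normals[:, 1], 'nz': normals[:, 2],
    })
    return PyntCloud(df)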
Example 8
v[is_a_problem] = 1 - v[is_a_problem]
w = 1 - (u + v)

result = pd.DataFrame()

result_xyz = (v1_xyz * u) + (v2_xyz * v) + (v3_xyz * w)
result_xyz = result_xyz.astype(np.float32)
result["x"] = result_xyz[:,0]
result["y"] = result_xyz[:,1]
result["z"] = result_xyz[:,2]
sum_normals = v1_normals + v2_normals + v3_normals
result_normals = sum_normals / np.linalg.norm(sum_normals, axis = 1)[..., None]
result_normals = result_normals.astype(np.float32)
result["nx"] = result_normals[:,0]
result["ny"] = result_normals[:,1]
result["nz"] = result_normals[:,2]

print(result.head())

# post processing of pcl in result df
#post_Processing()

write_ply("pointcloud.ply", points=result)
print(result)
print("PCL ready.")

### OUTPUT
# --> PCL_output.py
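Example 8 starts mid-function. For context, here is a self-contained sketch of the fold-based barycentric sampling it appears to implement (and which the Barycentric_coordinates helper in Examples 1 and 6 presumably wraps); everything beyond the variable names visible in the snippet is an assumption. The same per-sample weights are applied to the vertex normals in the excerpt above.

import numpy as np
import pandas as pd

def sample_barycentric(v1_xyz, v2_xyz, v3_xyz, n):
    # Draw two uniform coordinates per sample and fold pairs with u + v > 1
    # back into the triangle so all three barycentric weights stay non-negative.
    u = np.random.rand(n, 1)
    v = np.random.rand(n, 1)
    is_a_problem = (u + v) > 1
    u[is_a_problem] = 1 - u[is_a_problem]
    v[is_a_problem] = 1 - v[is_a_problem]
    w = 1 - (u + v)

    result = pd.DataFrame()
    result_xyz = ((v1_xyz * u) + (v2_xyz * v) + (v3_xyz * w)).astype(np.float32)
    result["x"] = result_xyz[:, 0]
    result["y"] = result_xyz[:, 1]
    result["z"] = result_xyz[:, 2]
    return result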


Example 9
def write_color_ply(fname, points, color_vals):
    new_pc = fuse_point_cloud(points, color_vals)
    write_ply(fname, new_pc.points, as_text=True)
Example 10
def process_sunrgbd(path):

    if isdir(join(path, 'train')):
        rmtree(join(path, 'train'))

    if isdir(join(path, 'val')):
        rmtree(join(path, 'val'))

    if isdir(join(path, 'test')):
        rmtree(join(path, 'test'))

    makedirs(join(path, 'train'))
    makedirs(join(path, 'val'))
    makedirs(join(path, 'test'))

    gt_data = loadmat(join(path, 'SUNRGBDMeta3DBB_v2.mat'))['SUNRGBDMeta'][0]
    tv_split = loadmat(join(path, 'allsplit.mat'))['trainvalsplit'][0, 0]
    t_dirs = tv_split[0][:, 0]
    v_dirs = tv_split[1][:, 0]

    t_dirs = [str(t[0]) for t in t_dirs]
    v_dirs = [str(v[0]) for v in v_dirs]

    t_dirs.sort()
    v_dirs.sort()
    num_skipped = 0
    scene_index = 0

    val_split = len(gt_data) * 0.8

    for scene_gt in gt_data:
        scene_path = scene_gt[0][0]
        intrinsics = scene_gt[2]
        extrinsics = scene_gt[1]
        d_name = scene_gt[6][0]
        rgb_name = scene_gt[7][0]

        try:
            annotations = scene_gt[10][0]
        except Exception:
            num_skipped += 1
            continue

        thetas = []
        coeffs = []
        centroids = []
        classnames = []
        for annotation in annotations:
            rot_mat = annotation[0]
            if len(rot_mat.shape) == 1:
                rot_mat = np.reshape(rot_mat, (3, 3))
            thetas.append(np.arccos(rot_mat[0, 0]))
            coeffs.append(annotation[1][0])
            centroids.append(annotation[2][0])
            classnames.append(annotation[3][0])

        bbox_loc = np.concatenate([
            np.array(centroids),
            np.array(coeffs),
            np.array(thetas).reshape(-1, 1)
        ],
                                  axis=-1)
        bbox_cls = np.array(classnames)

        # if join('/n/fs/sun3d/data/', scene_path + '/') in t_dirs:
        #     save_path = join(path, 'train', str(rgb_name)[:-4])
        # elif join('/n/fs/sun3d/data/', scene_path + '/') in v_dirs:
        #     save_path = join(path, 'val', str(rgb_name)[:-4])
        # else:
        #     # print("{} not in train or val".format(join('/n/fs/sun3d/data/', scene_path)))
        #     num_skipped += 1
        #     continue

        if scene_index < val_split:
            save_path = join(path, 'train', basename(str(scene_path)))
        else:
            save_path = join(path, 'val', basename(str(scene_path)))

        rgb_img = imread(join(path, scene_path[8:], 'image', str(rgb_name)))
        d_img = imread(join(path, scene_path[8:], 'depth', str(d_name)))
        colored_pc = rgbd2pc(rgb_img, d_img, intrinsics,
                             extrinsics).astype('float32')
        result = pd.DataFrame(dtype='float32')
        result["x"] = colored_pc[:, 0]
        result["y"] = colored_pc[:, 1]
        result["z"] = colored_pc[:, 2]

        result["r"] = colored_pc[:, 3]
        result["g"] = colored_pc[:, 4]
        result["b"] = colored_pc[:, 5]

        # bbox_pcs = []
        # bbox_loc = []
        # bbox_cls = []
        write_ply(save_path + '.ply', points=result)
        np.save(save_path + '_rgb.npy', rgb_img)
        np.save(save_path + '_k.npy', intrinsics)
        np.save(save_path + '_d.npy', d_img)
        np.save(save_path + '_rt.npy', extrinsics)
        np.save(save_path + '_bboxes.npy', np.array(bbox_loc))
        np.save(save_path + '_labels.npy', np.array(bbox_cls))

        scene_index += 1

        if scene_index % 100 == 0:
            print('\tProcessed {}/{}.'.format(scene_index, len(gt_data)))

    print('Skipped {} entries'.format(num_skipped))
Example 11
def process_folder(data_path, save_path, fullres=False):

    if exists(save_path):
        rmtree(save_path)
    os.makedirs(save_path)

    scene_index = 0

    imgs = listdir(data_path)
    imgs = sorted(imgs)
    for img in imgs:
        if img == '.DS_Store':
            continue
        orig_dir = join(data_path, img)
        next_dir = listdir(orig_dir)[0]
        if next_dir == '.DS_Store':
            next_dir = listdir(orig_dir)[1]
        xtion_dir = join(data_path, img, next_dir)
        if isdir(xtion_dir):
            folder_paths = []
            for num in listdir(xtion_dir):
                folder_paths.append(join(xtion_dir, num))
        else:
            folder_paths = [orig_dir]
        for folder_path in folder_paths:
            if isdir(folder_path):

                try:
                    # extrinsics
                    extrinsics_folder = join(folder_path, 'extrinsics')

                    # sometimes there is more than 1 extrinsics file.
                    extrinsics_file = join(extrinsics_folder,
                                           listdir(extrinsics_folder)[-1])
                    extrinsics_npy = np.loadtxt(extrinsics_file)
                    anno_extrinsics = extrinsics_npy[:3, :3]

                    if fullres:
                        fullres_folder = join(folder_path, 'fullres')
                        if not exists(fullres_folder):
                            continue
                        rgb_img = None
                        d_img = None
                        intrinsics_npy = None
                        for f in listdir(fullres_folder):
                            if f.endswith('.jpg'):
                                rgb_img = imread(join(fullres_folder, f))
                            elif f.endswith('.png'):
                                d_img = imread(join(fullres_folder, f))
                            elif f.endswith('.txt'):
                                intrinsics_npy = np.loadtxt(
                                    join(fullres_folder, f))

                    else:

                        intrinsics_npy = np.loadtxt(
                            join(folder_path, 'intrinsics.txt'))
                        for f in listdir(join(folder_path, 'image')):
                            rgb_img = imread(join(folder_path, 'image', f))
                            image_index = str(f[-8:-4])

                        for f in listdir(join(folder_path, 'depth')):
                            d_img = imread(join(folder_path, 'depth', f))

                        if rgb_img is None or d_img is None or intrinsics_npy is None:
                            print('Image didn\'t load in {}.'.format(
                                folder_path))
                            continue

                    raw_annotations = json.load(
                        open(
                            join(folder_path, 'annotation3Dfinal',
                                 'index.json')))['objects']
                except FileNotFoundError:
                    print(
                        "\tFolder {} was skipped due to missing information.".
                        format(folder_path))
                    continue

                colored_pc = rgbd2pc(rgb_img, d_img, intrinsics_npy,
                                     extrinsics_npy).astype('float32')
                result = pd.DataFrame(dtype='float32')
                result["x"] = colored_pc[:, 0]
                result["y"] = colored_pc[:, 1]
                result["z"] = colored_pc[:, 2]

                result["r"] = colored_pc[:, 3]
                result["g"] = colored_pc[:, 4]
                result["b"] = colored_pc[:, 5]

                bbox_pcs = []
                bbox_loc = []
                bbox_cls = []
                for raw_annot in raw_annotations:
                    if raw_annot is None or type(raw_annot) is not dict:
                        continue
                    for poly in raw_annot['polygon']:
                        bbox = annotation_to_bbox(poly, anno_extrinsics)
                        #bbox_pcs.append(bbox_to_pc(bbox))
                        bbox_loc.append(bbox)
                        bbox_cls.append(raw_annot['name'])

                if len(bbox_loc) > 0 and len(bbox_cls) > 0:
                    write_ply(join(save_path,
                                   'region' + str(image_index) + '.ply'),
                              points=result)
                    np.save(
                        join(save_path,
                             'region' + str(image_index) + '_rgb.npy'),
                        rgb_img)
                    np.save(
                        join(save_path,
                             'region{}_bboxes.npy'.format(image_index)),
                        np.array(bbox_loc))
                    np.save(
                        join(save_path,
                             'region{}_labels.npy'.format(image_index)),
                        np.array(bbox_cls))
                    np.save(
                        join(save_path,
                             'region' + str(image_index) + '_k.npy'),
                        intrinsics_npy)
                    np.save(
                        join(save_path,
                             'region' + str(image_index) + '_rt.npy'),
                        extrinsics_npy)

                else:
                    print(
                        "\tFolder {} was skipped due to missing information.".
                        format(folder_path))

                scene_index += 1

        if scene_index % 100 == 0:
            print('\tProcessed {}/{} scenes from {}.'.format(
                scene_index, len(imgs), data_path))
Example 12
def reorg_fstructure(path):
    already_processed = []
    start_t = time.time()
    num_areas = len(listdir(path))
    area_id = 1
    for i in listdir(path):
        if isdir(join(path, i)):
            print(i)
            try:
                zip_ref = zipfile.ZipFile(
                    join(path, i, 'region_segmentations.zip'), 'r')
                zip_ref.extractall(join(path))
                zip_ref.close()
                remove(join(path, i, 'region_segmentations.zip'))
            except Exception:
                continue
            rs = join(path, i, "region_segmentations")
            if isdir(rs):
                ri = 0
                region_left = True
                processed_path = join(path, i, "processed_regions")
                if not isdir(processed_path):
                    makedirs(processed_path)
                while region_left:
                    if not exists(join(rs, "region{}.ply".format(
                            str(ri)))) or i in already_processed:
                        region_left = False
                        print(
                            "Finished {}/{} Areas. Processed {} regions from area {}."
                            .format(area_id, num_areas, ri, i))
                        continue

                    try:
                        room_fname = join(rs, "region{}.ply".format(str(ri)))
                        label_fname = join(
                            rs, "region{}.semseg.json".format(str(ri)))
                        fsegs_fname = join(
                            rs, "region{}.fsegs.json".format(str(ri)))
                        res, bboxes, labels = ply_to_xyz(
                            room_fname, label_fname, fsegs_fname)

                        res_save = join(processed_path,
                                        "region{}.ply".format(str(ri)))
                        label_save = join(
                            processed_path,
                            "region{}_labels.npy".format(str(ri)))
                        bboxes_save = join(
                            processed_path,
                            "region{}_bboxes.npy".format(str(ri)))

                        write_ply(res_save, points=res)
                        np.save(label_save, labels)
                        np.save(bboxes_save, bboxes)
                    except Exception:
                        print(
                            "Region {} for area {} threw an error and did not get processed."
                            .format(ri, i))
                        pass

                    ri += 1
        area_id += 1
    end_t = time.time()
    print("Total time: {} seconds.".format(end_t - start_t))