# Example 1 (score: 0)
def export_one_scan(scan_name, output_filename_prefix):
    """Export a single ScanNet scan to .npy files.

    Saves the (filtered, optionally subsampled) mesh vertices, per-point
    semantic/instance labels and the instance bounding boxes under
    ``output_filename_prefix``.
    """
    scan_dir = os.path.join(SCANNET_DIR, scan_name)
    mesh_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.ply')
    agg_file = os.path.join(scan_dir, scan_name + '.aggregation.json')
    seg_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.0.010000.segs.json')
    # includes axisAlignment info for the train set scans.
    meta_file = os.path.join(scan_dir, scan_name + '.txt')
    mesh_vertices, semantic_labels, instance_labels, instance_bboxes, \
        instance2semantic = export(mesh_file, agg_file, seg_file, meta_file,
                                   LABEL_MAP_FILE, None)

    # Drop points whose semantic class is in the "don't care" set.
    keep = ~np.in1d(semantic_labels, DONOTCARE_CLASS_IDS)
    mesh_vertices = mesh_vertices[keep, :]
    semantic_labels = semantic_labels[keep]
    instance_labels = instance_labels[keep]

    print('Num of instances: ', len(np.unique(instance_labels)))

    # Keep only boxes whose class id (last column) is an object class.
    care = np.in1d(instance_bboxes[:, -1], OBJ_CLASS_IDS)
    instance_bboxes = instance_bboxes[care, :]
    print('Num of care instances: ', instance_bboxes.shape[0])

    # Randomly subsample down to MAX_NUM_POINT points when necessary.
    num_points = mesh_vertices.shape[0]
    if num_points > MAX_NUM_POINT:
        picks = np.random.choice(num_points, MAX_NUM_POINT, replace=False)
        mesh_vertices = mesh_vertices[picks, :]
        semantic_labels = semantic_labels[picks]
        instance_labels = instance_labels[picks]

    np.save(output_filename_prefix + '_vert.npy', mesh_vertices)
    np.save(output_filename_prefix + '_sem_label.npy', semantic_labels)
    np.save(output_filename_prefix + '_ins_label.npy', instance_labels)
    np.save(output_filename_prefix + '_bbox.npy', instance_bboxes)
def export_one_scan(scan_name,
                    output_filename_prefix,
                    max_num_point,
                    label_map_file,
                    scannet_dir,
                    test_mode=False):
    """Export one scan to .npy files (mmdetection3d-style layout).

    Args:
        scan_name: Scan directory name under ``scannet_dir``.
        output_filename_prefix: Path prefix for the exported .npy files.
        max_num_point: Optional cap on the number of points kept.
        label_map_file: TSV mapping raw labels to class ids, passed to export.
        scannet_dir: Root directory of the raw ScanNet scans.
        test_mode: When True, skip label filtering and label/box export.
    """
    scan_dir = osp.join(scannet_dir, scan_name)
    mesh_file = osp.join(scan_dir, scan_name + '_vh_clean_2.ply')
    agg_file = osp.join(scan_dir, scan_name + '.aggregation.json')
    seg_file = osp.join(scan_dir, scan_name + '_vh_clean_2.0.010000.segs.json')
    # includes axisAlignment info for the train set scans.
    meta_file = osp.join(scan_dir, f'{scan_name}.txt')
    (mesh_vertices, semantic_labels, instance_labels, unaligned_bboxes,
     aligned_bboxes, instance2semantic, axis_align_matrix) = export(
         mesh_file, agg_file, seg_file, meta_file, label_map_file, None,
         test_mode)

    if not test_mode:
        # Drop points whose semantic class is in the "don't care" set.
        keep = ~np.in1d(semantic_labels, DONOTCARE_CLASS_IDS)
        mesh_vertices = mesh_vertices[keep, :]
        semantic_labels = semantic_labels[keep]
        instance_labels = instance_labels[keep]

        print(f'Num of instances: {len(np.unique(instance_labels))}')

        # Keep only boxes of object classes (class id in the last column).
        unaligned_bboxes = unaligned_bboxes[
            np.in1d(unaligned_bboxes[:, -1], OBJ_CLASS_IDS), :]
        aligned_bboxes = aligned_bboxes[
            np.in1d(aligned_bboxes[:, -1], OBJ_CLASS_IDS), :]
        assert unaligned_bboxes.shape[0] == aligned_bboxes.shape[0]
        print(f'Num of care instances: {unaligned_bboxes.shape[0]}')

    if max_num_point is not None:
        max_num_point = int(max_num_point)
        num_points = mesh_vertices.shape[0]
        if num_points > max_num_point:
            # Randomly subsample points (and labels, when present).
            picks = np.random.choice(num_points, max_num_point, replace=False)
            mesh_vertices = mesh_vertices[picks, :]
            if not test_mode:
                semantic_labels = semantic_labels[picks]
                instance_labels = instance_labels[picks]

    np.save(f'{output_filename_prefix}_vert.npy', mesh_vertices)
    if not test_mode:
        np.save(f'{output_filename_prefix}_sem_label.npy', semantic_labels)
        np.save(f'{output_filename_prefix}_ins_label.npy', instance_labels)
        np.save(f'{output_filename_prefix}_unaligned_bbox.npy',
                unaligned_bboxes)
        np.save(f'{output_filename_prefix}_aligned_bbox.npy', aligned_bboxes)
        np.save(f'{output_filename_prefix}_axis_align_matrix.npy',
                axis_align_matrix)
def export_one_scan(scan_name, output_filename_prefix):
    """Export one scan as a single .pth tuple of (coords, colors, labels)."""
    scan_dir = os.path.join(SCANNET_DIR, scan_name)
    labels_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.labels.ply')
    mesh_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.ply')
    agg_file = os.path.join(scan_dir, scan_name + '.aggregation.json')
    seg_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.0.010000.segs.json')

    coords, colors, sem_labels, instance_labels = export(
        scan_name, mesh_file, agg_file, seg_file, labels_file, LABEL_MAP_FILE,
        None)

    print('Num of instances: ', len(np.unique(instance_labels)))

    print("Shape of points: {}".format(coords.shape))

    torch.save((coords, colors, sem_labels, instance_labels),
               output_filename_prefix + '_inst_nostuff.pth')
def export_one_scan(model, scan_name, output_filename_prefix):
    """Export one ScanNet scan in the format expected by ``model``.

    Args:
        model: Either 'votenet' (saves .npy vertex/label/bbox arrays) or
            'pointgroup' (saves a .pth tuple). Any other value is a no-op.
        scan_name: Name of the scan directory under SCANNET_DIR.
        output_filename_prefix: Path prefix for the exported files.
    """
    mesh_file = os.path.join(SCANNET_DIR, scan_name,
                             scan_name + '_vh_clean_2.ply')
    seg_file = os.path.join(SCANNET_DIR, scan_name,
                            scan_name + '_vh_clean_2.0.010000.segs.json')

    if model == 'votenet':
        _export_votenet(scan_name, output_filename_prefix, mesh_file,
                        seg_file)
    elif model == 'pointgroup':
        _export_pointgroup(scan_name, output_filename_prefix, mesh_file,
                           seg_file)


def _export_votenet(scan_name, output_filename_prefix, mesh_file, seg_file):
    """Export aligned/unaligned vertices, labels and bboxes as .npy files."""
    meta_file = os.path.join(
        SCANNET_DIR, scan_name, scan_name +
        '.txt')  # includes axisAlignment info for the train set scans.
    agg_file = os.path.join(SCANNET_DIR, scan_name,
                            scan_name + '_vh_clean.aggregation.json')
    (mesh_vertices, aligned_vertices, semantic_labels, instance_labels,
     instance_bboxes, aligned_instance_bboxes) = export(
         mesh_file, agg_file, seg_file, meta_file, LABEL_MAP_FILE, None)

    # Drop points whose semantic class is in the "don't care" set.
    mask = np.logical_not(np.in1d(semantic_labels, DONOTCARE_CLASS_IDS))
    mesh_vertices = mesh_vertices[mask, :]
    aligned_vertices = aligned_vertices[mask, :]
    semantic_labels = semantic_labels[mask]
    instance_labels = instance_labels[mask]

    if instance_bboxes.shape[0] > 1:
        num_instances = len(np.unique(instance_labels))
        print('Num of instances: ', num_instances)

        # Column -2 holds the semantic class id (matches the mesh2cap layout).
        bbox_mask = np.in1d(instance_bboxes[:, -2], OBJ_CLASS_IDS)
        instance_bboxes = instance_bboxes[bbox_mask, :]
        aligned_instance_bboxes = aligned_instance_bboxes[bbox_mask, :]
        print('Num of care instances: ', instance_bboxes.shape[0])
    else:
        print("No semantic/instance annotation for test scenes")

    # Randomly subsample down to MAX_NUM_POINT points when necessary.
    N = mesh_vertices.shape[0]
    if N > MAX_NUM_POINT:
        choices = np.random.choice(N, MAX_NUM_POINT, replace=False)
        mesh_vertices = mesh_vertices[choices, :]
        aligned_vertices = aligned_vertices[choices, :]
        semantic_labels = semantic_labels[choices]
        instance_labels = instance_labels[choices]

    print("Shape of points: {}".format(mesh_vertices.shape))

    np.save(output_filename_prefix + '_vert.npy', mesh_vertices)
    np.save(output_filename_prefix + '_aligned_vert.npy', aligned_vertices)
    np.save(output_filename_prefix + '_sem_label.npy', semantic_labels)
    np.save(output_filename_prefix + '_ins_label.npy', instance_labels)
    np.save(output_filename_prefix + '_bbox.npy', instance_bboxes)
    np.save(output_filename_prefix + '_aligned_bbox.npy',
            aligned_instance_bboxes)


def _export_pointgroup(scan_name, output_filename_prefix, mesh_file,
                       seg_file):
    """Export (coords, colors[, sem/instance labels]) as a .pth file."""
    # Use a context manager so the scan-list file handle is closed
    # (the original `open(...)` inside `sorted(...)` leaked it).
    with open('meta_data/scannetv2_test.txt') as f:
        test_scan_names = sorted(line.rstrip() for line in f)

    vertices = scannet_utils.read_mesh_vertices_rgb(mesh_file)
    # Center the point cloud and map colors from [0, 255] to [-1, 1].
    coords = np.ascontiguousarray(vertices[:, :3] - vertices[:, :3].mean(0))
    colors = np.ascontiguousarray(vertices[:, 3:6]) / 127.5 - 1

    if scan_name in test_scan_names:
        # Test scans carry no annotations: save geometry only.
        torch.save((coords, colors),
                   output_filename_prefix + '_pointgroup.pth')
        return

    labels_file = os.path.join(SCANNET_DIR, scan_name,
                               scan_name + '_vh_clean_2.labels.ply')
    agg_file = os.path.join(SCANNET_DIR, scan_name,
                            scan_name + '.aggregation.json')

    sem_labels = scannet_utils.get_labels(labels_file, OBJ_CLASS_IDS)
    segid_to_pointid, _ = read_segmentation(seg_file)
    instance_segids = scannet_utils.get_instance_segids(scan_name, agg_file)

    # -100 marks points belonging to no instance (ignore index).
    instance_labels = np.ones(sem_labels.shape[0]) * -100
    for i, segids in enumerate(instance_segids):
        pointids = []
        for segid in segids:
            pointids += segid_to_pointid[segid]
        instance_labels[pointids] = i
        # Sanity check: all points of one instance share a semantic label.
        assert (len(np.unique(sem_labels[pointids])) == 1)

    # Randomly subsample down to MAX_NUM_POINT points when necessary.
    N = len(sem_labels)
    if N > MAX_NUM_POINT:
        choices = np.random.choice(N, MAX_NUM_POINT, replace=False)
        coords = coords[choices, :]
        colors = colors[choices, :]
        sem_labels = sem_labels[choices]
        instance_labels = instance_labels[choices]
    torch.save((coords, colors, sem_labels, instance_labels),
               output_filename_prefix + '_pointgroup.pth')
def export_one_scan(scan_name, output_filename_prefix):
    """Export one scan: raw + axis-aligned vertices, labels and bboxes."""
    scan_dir = os.path.join(SCANNET_DIR, scan_name)
    mesh_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.ply')
    print("###### mesh_file", mesh_file)
    agg_file = os.path.join(scan_dir, scan_name + '_vh_clean.aggregation.json')
    seg_file = os.path.join(scan_dir, scan_name + '_vh_clean_2.0.010000.segs.json')
    # includes axisAlignment info for the train set scans.
    meta_file = os.path.join(scan_dir, scan_name + '.txt')
    (mesh_vertices, aligned_vertices, semantic_labels, instance_labels,
     instance_bboxes, aligned_instance_bboxes) = export(
         mesh_file, agg_file, seg_file, meta_file, LABEL_MAP_FILE, None)

    # Drop points whose semantic class is in the "don't care" set.
    keep = ~np.in1d(semantic_labels, DONOTCARE_CLASS_IDS)
    mesh_vertices = mesh_vertices[keep, :]
    aligned_vertices = aligned_vertices[keep, :]
    semantic_labels = semantic_labels[keep]
    instance_labels = instance_labels[keep]

    if instance_bboxes.shape[0] > 1:
        print('Num of instances: ', len(np.unique(instance_labels)))

        # Column -2 holds the class id (matches the mesh2cap layout).
        care = np.in1d(instance_bboxes[:, -2], OBJ_CLASS_IDS)
        instance_bboxes = instance_bboxes[care, :]
        aligned_instance_bboxes = aligned_instance_bboxes[care, :]
        print('Num of care instances: ', instance_bboxes.shape[0])
    else:
        print("No semantic/instance annotation for test scenes")

    # Randomly subsample down to MAX_NUM_POINT points when necessary.
    num_points = mesh_vertices.shape[0]
    if num_points > MAX_NUM_POINT:
        picks = np.random.choice(num_points, MAX_NUM_POINT, replace=False)
        mesh_vertices = mesh_vertices[picks, :]
        aligned_vertices = aligned_vertices[picks, :]
        semantic_labels = semantic_labels[picks]
        instance_labels = instance_labels[picks]

    print("Shape of points: {}".format(mesh_vertices.shape))

    np.save(output_filename_prefix + '_vert.npy', mesh_vertices)
    np.save(output_filename_prefix + '_aligned_vert.npy', aligned_vertices)
    np.save(output_filename_prefix + '_sem_label.npy', semantic_labels)
    np.save(output_filename_prefix + '_ins_label.npy', instance_labels)
    np.save(output_filename_prefix + '_bbox.npy', instance_bboxes)
    np.save(output_filename_prefix + '_aligned_bbox.npy', aligned_instance_bboxes)