    def __getitem__(self, index):
        if self.split == 'test':
            # pick one of the raw (partial) test scans of this shape at random
            raw_n = self.rng.randint(0, 7)
            raw_pc_name = self.shape_names[index] + "__{}__.ply".format(raw_n)
            raw_ply_path = os.path.join(self.cat_pc_raw_root, raw_pc_name)
            raw_pc = np.array(trimesh.load(raw_ply_path).vertices)
            raw_pc = self._rotate_point_cloud_by_axis_angle(raw_pc)
            raw_pc = sample_point_cloud_by_n(raw_pc, 1024)
            raw_pc = torch.tensor(raw_pc, dtype=torch.float32)

            # load the complete (ground-truth) shape of the same object
            real_shape_name = self.shape_names[index]
            real_ply_path = os.path.join(self.cat_pc_root,
                                         real_shape_name + '.ply')
            real_pc = np.array(trimesh.load(real_ply_path).vertices)
            real_pc = sample_point_cloud_by_n(real_pc, 2048)
            real_pc = torch.tensor(real_pc, dtype=torch.float32)

            return raw_pc, 0, real_pc, real_shape_name
        else:
            # train/valid: each shape has self.num_samples precomputed existing/missing slice pairs
            pc_filename = self.shape_names[index // self.num_samples]
            existing = load_ply(
                join(self.root_dir, 'slices', 'existing', self.cat,
                     str(index % self.num_samples) + '~' + pc_filename))
            missing = load_ply(
                join(self.root_dir, 'slices', 'missing', self.cat,
                     str(index % self.num_samples) + '~' + pc_filename))
            gt = load_ply(
                join(self.root_dir, 'slices', 'gt', self.cat, pc_filename))
            return existing, missing, gt, pc_filename[:-4]
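
    # A plausible companion __len__ (an assumption; it is not shown above): in the
    # train/valid splits every shape contributes self.num_samples slice pairs,
    # addressed via index // self.num_samples (shape) and index % self.num_samples (slice).
    def __len__(self):
        if self.split == 'test':
            return len(self.shape_names)
        return len(self.shape_names) * self.num_samples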
def main(config):
    dataset_dir = config['dataset']['path']

    with open(join(dataset_dir, 'test.list')) as file:
        pc_paths = [line.strip() + '.ply' for line in file]

    # three points spanning the y = 0 plane (used by div_left_right_bin_search)
    plane_points = np.zeros((3, 3))
    plane_points[1][2] = 1
    plane_points[2][0] = 1

    for cat in [
            '02691156', '02933112', '02958343', '03001627', '03636649',
            '04256520', '04379243', '04530566'
    ]:
        makedirs(join(dataset_dir, 'test_gen', 'left', cat), exist_ok=True)
        makedirs(join(dataset_dir, 'test_gen', 'right', cat), exist_ok=True)
        makedirs(join(dataset_dir, 'test_gen', 'gt', cat), exist_ok=True)

    div_left_right_min_y(dataset_dir, pc_paths)

    not_existed_pc = []

    # collect clouds whose left/right slices have not been generated yet
    for pc_path in pc_paths:
        if not (exists(join(dataset_dir, 'test_gen', 'left', pc_path))
                and exists(join(dataset_dir, 'test_gen', 'right', pc_path))):
            not_existed_pc.append(pc_path)

    # div_left_right_bin_search(dataset_dir, plane_points, not_existed_pc)

    # collect clouds whose generated left slice does not contain exactly 1024 points
    not_1024 = []
    for pc_path in pc_paths:
        if load_ply(join(dataset_dir, 'test_gen', 'left',
                         pc_path)).shape[0] != 1024:
            not_1024.append(pc_path)
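
# Hypothetical convenience wrapper (not in the original): load a JSON config
# file and run main(). It assumes the config exposes the dataset.path entry
# read above.
def run_from_config(config_path):
    import json

    with open(config_path) as config_file:
        main(json.load(config_file))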
def div_left_right_min_y(dataset_dir, pc_paths):
    for i, pc_path in tqdm(enumerate(pc_paths), total=len(pc_paths)):
        pc = load_ply(join(dataset_dir, pc_path))

        # sort by y: the 1024 lowest-y points form the "left" half, the rest the "right" half
        order = pc.T[1].argsort()
        left_points = pc[order[:1024]]
        right_points = pc[order[1024:]]

        quick_save_ply_file(left_points,
                            join(dataset_dir, 'test_gen', 'left', pc_path))
        quick_save_ply_file(right_points,
                            join(dataset_dir, 'test_gen', 'right', pc_path))
        quick_save_ply_file(pc, join(dataset_dir, 'test_gen', 'gt', pc_path))
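
# Toy check (not part of the original code) of the split rule above: argsort on
# the y column sends the 1024 lowest-y points to the left half and the rest to
# the right half. It assumes the input clouds hold exactly 2048 points.
def _demo_min_y_split():
    import numpy as np

    pc = np.random.rand(2048, 3).astype(np.float32)
    order = pc.T[1].argsort()
    left_points, right_points = pc[order[:1024]], pc[order[1024:]]
    # every y in the left half is no larger than any y in the right half
    assert left_points.T[1].max() <= right_points.T[1].min()
    return left_points, right_points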
def div_left_right_bin_search(dataset_dir, init_plane_points, pc_paths):
    for i, pc_path in tqdm(enumerate(pc_paths), total=len(pc_paths)):

        pc = load_ply(join(dataset_dir, pc_path))

        points = init_plane_points.copy()

        # bisect the plane offset m along the y axis until the plane splits the cloud in half
        l, r = pc.T[1].min(), pc.T[1].max()

        counter = 0

        while True:

            m = np.divide(l + r, 2)

            points[:, 1] = m  # move all three plane-defining points to y = m

            right = HyperPlane.get_plane_from_3_points(points).check_point(
                pc) > 0
            right_points = pc[right]
            left_points = pc[~right]

            counter += 1
            # fallback: stop after a fixed number of iterations if an exact 50/50 split never appears
            if counter == 100000000:
                quick_save_ply_file(
                    right_points,
                    join(dataset_dir, 'test_gen', 'right', pc_path))
                quick_save_ply_file(
                    left_points, join(dataset_dir, 'test_gen', 'left',
                                      pc_path))
                quick_save_ply_file(
                    pc, join(dataset_dir, 'test_gen', 'gt', pc_path))
                break

            if len(left_points) > len(right_points):
                l = m
            elif len(left_points) < len(right_points):
                r = m
            else:
                quick_save_ply_file(
                    left_points, join(dataset_dir, 'test_gen', 'left',
                                      pc_path))
                quick_save_ply_file(
                    right_points,
                    join(dataset_dir, 'test_gen', 'right', pc_path))
                quick_save_ply_file(
                    pc, join(dataset_dir, 'test_gen', 'gt', pc_path))
                break
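
# The loop above bisects the plane's y offset until the plane splits the cloud
# into two equal halves. A compact restatement of the same idea without the
# HyperPlane helper (illustrative sketch only; sign conventions may differ):
def _equal_split_y(pc, max_iters=64):
    l, r = pc.T[1].min(), pc.T[1].max()
    for _ in range(max_iters):
        m = (l + r) / 2
        right_mask = pc.T[1] > m          # points above the plane y = m
        left_points = pc[~right_mask]
        right_points = pc[right_mask]
        if len(left_points) == len(right_points):
            break
        if len(right_points) > len(left_points):
            l = m                         # raise the plane
        else:
            r = m                         # lower the plane
    return left_points, right_points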
def generate_one_shapenet(category: str,
                          filename: str,
                          dataset_path: str,
                          num_samples: int = 4):
    pc_filepath = join(dataset_path, category, filename)
    points = load_ply(pc_filepath)

    for i in range(num_samples):
        # each pass produces one random existing/missing split of the full cloud
        existing, missing = SlicedDatasetGenerator.generate_item(points)
        quick_save_ply_file(
            existing,
            join(dataset_path, 'slices', 'existing', category,
                 str(i) + '~' + filename))
        quick_save_ply_file(
            missing,
            join(dataset_path, 'slices', 'missing', category,
                 str(i) + '~' + filename))
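
# Hypothetical driver (not part of the original) showing how the function above
# could be applied to every PLY file of one category; the directory layout
# mirrors the paths used by generate_one_shapenet.
def generate_category(category: str, dataset_path: str, num_samples: int = 4):
    from os import listdir, makedirs
    from os.path import join

    for kind in ('existing', 'missing'):
        makedirs(join(dataset_path, 'slices', kind, category), exist_ok=True)
    for filename in listdir(join(dataset_path, category)):
        if filename.endswith('.ply'):
            generate_one_shapenet(category, filename, dataset_path, num_samples)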
Example #6
    def __getitem__(self, idx):
        if self.split == 'train':
            pc_names = self.point_clouds_names_train
        elif self.split == 'valid':
            pc_names = self.point_clouds_names_valid
        elif self.split == 'test':
            pc_names = self.point_clouds_names_test
        else:
            raise ValueError('Invalid split. Should be train, valid or test.')

        pc_category, pc_filename = pc_names.iloc[idx].values

        pc_filepath = join(self.root_dir, pc_category, pc_filename)
        sample = load_ply(pc_filepath)

        if self.transform:
            sample = self.transform(sample)
        return sample, self.synth_id_to_number[pc_category]
Example #7
    def __getitem__(self, idx):
        if self.use_pcn_model_list:
            pc_category, pc_filename = self.point_clouds_names[
                idx // self.num_samples].split('/')
            pc_filename += '.ply'
        else:
            pc_category, pc_filename = self.point_clouds_names.iloc[
                idx // self.num_samples].values

        if self.is_random_rotated:
            from scipy.spatial.transform import Rotation

            # random rotation about the z axis, applied below to existing, missing and gt
            random_rotation_matrix = Rotation.from_euler(
                'z', np.random.randint(360),
                degrees=True).as_matrix().astype(np.float32)

        scan_idx = str(idx % self.num_samples)

        if self.is_gen and self.split == 'test':
            # generation test set: the right half serves as the existing part,
            # the left half as the missing part, both resampled to 1024 points
            existing = resample_pcd(
                load_ply(
                    join(self.root_dir, 'test_gen', 'right', pc_category,
                         pc_filename)), 1024)
            missing = resample_pcd(
                load_ply(
                    join(self.root_dir, 'test_gen', 'left', pc_category,
                         pc_filename)), 1024)
            gt = load_ply(
                join(self.root_dir, 'test_gen', 'gt', pc_category,
                     pc_filename))
        else:
            # train/valid: load one of the precomputed existing/missing slice pairs
            existing = load_ply(
                join(self.root_dir, 'slices', 'existing', pc_category,
                     scan_idx + '~' + pc_filename))
            missing = load_ply(
                join(self.root_dir, 'slices', 'missing', pc_category,
                     scan_idx + '~' + pc_filename))
            gt = load_ply(join(self.root_dir, pc_category, pc_filename))

        if self.is_random_rotated:
            existing = existing @ random_rotation_matrix
            missing = missing @ random_rotation_matrix
            gt = gt @ random_rotation_matrix

        return existing, missing, gt, synth_id_to_number[pc_category]
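
# Note on the rotation above (illustrative sketch, not from the original): with
# points stored as an (N, 3) array, `pc @ R` applies the same z-axis rotation to
# every point, so existing, missing and gt stay mutually aligned.
def _demo_random_z_rotation():
    import numpy as np
    from scipy.spatial.transform import Rotation

    rotation = Rotation.from_euler('z', np.random.randint(360),
                                   degrees=True).as_matrix().astype(np.float32)
    pc = np.random.rand(16, 3).astype(np.float32)
    rotated = pc @ rotation
    # a rotation about the z axis leaves the z coordinates untouched
    assert np.allclose(rotated[:, 2], pc[:, 2])
    return rotated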