Example #1
    def __getitem__(self, index):
        # split the flat dataset index into a scene index and an instance index within that scene
        sid = index // self.scene_size
        pid = index % self.scene_size
        src, tgt, T = self._load_instance(sid, pid)

        #################### Augmentation ###################
        # hard rotation augmentation on the target cloud
        if self.mode == 'train' and not self.opt.no_augmentation:
            if self.opt.model.kpconv:
                tgt, R_aug = pctk.rotate_point_cloud(tgt, max_degree=60)
            else:
                tgt, R_aug = pctk.rotate_point_cloud(tgt)

        # resample both clouds to a fixed number of points
        _, src = pctk.uniform_resample_np(src, self.opt.model.input_num)
        _, tgt = pctk.uniform_resample_np(tgt, self.opt.model.input_num)
        ##########################################################

        if self.opt.model.flag == 'rotation':
            src = pctk.normalize_pc_np(src)
            tgt = pctk.normalize_pc_np(tgt)
            # label the relative rotation T against the anchor set
            R, R_label = label_relative_rotation_np(self.anchors, T)
            data = {'src': torch.from_numpy(src.astype(np.float32)),
                    'tgt': torch.from_numpy(tgt.astype(np.float32)),
                    'T': torch.from_numpy(T.astype(np.float32)),
                    'R': torch.from_numpy(R.astype(np.float32)),
                    'R_label': torch.Tensor([R_label]).long()}
        else:
            data = {'src': torch.from_numpy(src.astype(np.float32)),
                    'tgt': torch.from_numpy(tgt.astype(np.float32)),
                    'T': torch.from_numpy(T.astype(np.float32)),
                    'fn': "None"}

        return data
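# A minimal consumption sketch for the dataset above (the class name SceneDataset
# and the opt/mode constructor arguments are illustrative assumptions):
#
#   dataset = SceneDataset(opt, mode='train')
#   loader = torch.utils.data.DataLoader(dataset, batch_size=8, shuffle=True)
#   batch = next(iter(loader))
#   # batch['src'] / batch['tgt']: [8, input_num, 3]; batch['T']: [8, 3, 3]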
def radius_ball_search_np_radii(pc,
                                kpt_indices,
                                radii,
                                search_radius,
                                input_num=None,
                                msg=None):
    """Crop a ball around each keypoint, scaling the base search radius by the
    keypoint's per-point radius (normalized by 0.026), and optionally resample
    each patch to input_num points."""
    if msg is not None:
        print(msg)

    # keypoints are taken from the original cloud before any downsampling
    keypoints = pc[kpt_indices]
    # cap the cloud size to keep the KD-tree query tractable
    if pc.shape[0] > 50000:
        _, pc = pctk.uniform_resample_np(pc, 50000)

    search = KDTree(pc)

    all_pc = []
    for idx, kpt in enumerate(kpt_indices):
        # per-keypoint radius, normalized by the constant 0.026
        r = search_radius * radii[kpt] / 0.026
        indices = search.query_ball_point(keypoints[idx], r)
        if len(indices) <= 1:
            # degenerate neighborhood: return an all-zero patch of the expected size
            i = 1024 if input_num is None else input_num
            all_pc.append(np.zeros([i, 3], dtype=np.float32))
        else:
            if input_num is not None:
                _, patch = pctk.uniform_resample_np(pc[indices], input_num)
            else:
                patch = pc[indices]
            all_pc.append(patch)

    return all_pc
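# Usage sketch for radius_ball_search_np_radii on synthetic data (all inputs
# below are illustrative assumptions, not values from the original code):
#
#   pc = np.random.rand(20000, 3).astype(np.float32)        # full scene, [N, 3]
#   radii = np.full(pc.shape[0], 0.026, dtype=np.float32)   # per-point radius values
#   kpt_indices = np.random.choice(pc.shape[0], 64, replace=False)
#   patches = radius_ball_search_np_radii(pc, kpt_indices, radii,
#                                         search_radius=0.3, input_num=1024)
#   # patches: list of 64 arrays, each [1024, 3]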
    def next_batch(self):
        """Fetch the next batch of keypoint patches into self.batch_data;
        returns False once all scenes have been consumed."""
        buf = self.current_grouped_points if self.grouped else self.current_kpts
        if self.scene_pt >= self.datasize:
            return False
        if self.batch_pt + self.batch_size >= buf.shape[0]:
            kpts = buf[self.batch_pt:]
        else:
            kpts = buf[self.batch_pt: self.batch_pt + self.batch_size]
        if self.grouped:
            grouped_points = kpts
            # resample pre-grouped patches to exactly knn points if needed
            if grouped_points.shape[1] != self.knn:
                resampled_points = []
                for pc in grouped_points:
                    _, pc_down = pctk.uniform_resample_np(pc, self.knn)
                    resampled_points.append(pc_down)
                grouped_points = np.array(resampled_points)
        else:
            # group on the fly with a ball search around the keypoints
            grouped_indices = self.ball_search(self.current_scene, kpts, self.knn, self.search_radius)
            # grouped_points: [B, knn, 3]
            grouped_points = self.current_scene[grouped_indices]

        self.batch_data = grouped_points
        self.batch_pt += self.batch_size
        if self.batch_pt >= buf.shape[0]:
            self.reload()
        return True
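# Sketch of how a loader exposing next_batch() is typically driven (the loader
# and model names are placeholders, not from the original code):
#
#   while loader.next_batch():
#       patches = loader.batch_data        # [B, knn, 3] keypoint patches
#       features = model(patches)          # downstream consumer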
def radius_ball_search_o3d(pcd, kpt, search_radius, voxel_size=0.015, return_normals=False, input_num=None, name=None):
    """Radius-ball search around the keypoints indexed by `kpt` of an Open3D
    point cloud, querying against a voxel-downsampled copy of the cloud."""
    normals_at_kpt = None
    from_o3d = lambda p: np.asarray(p.points)
    keypoints = from_o3d(pcd)[kpt]
    pcd_down = pcd.voxel_down_sample(voxel_size=voxel_size)
    pc = from_o3d(pcd_down)

    if return_normals:
        if len(pcd.normals) != len(pcd.points):
            raise RuntimeError('[!] The point cloud needs normals.')
        normals_at_kpt = np.asarray(pcd.normals)[kpt]

    search = KDTree(pc)
    results = search.query_ball_point(keypoints, search_radius)
    all_pc = []
    for indices in results:
        if len(indices) <= 1:
            # degenerate neighborhood: return an all-zero patch of the expected size
            i = 1024 if input_num is None else input_num
            all_pc.append(np.zeros([i, 3], dtype=np.float32))
        else:
            if input_num is not None:
                _, patch = pctk.uniform_resample_np(pc[indices], input_num)
            else:
                patch = pc[indices]
            all_pc.append(patch)
    if return_normals:
        all_pc = transform_with_normals(all_pc, normals_at_kpt)

    return all_pc, pcd_down, normals_at_kpt
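# Usage sketch for radius_ball_search_o3d (path and parameters are placeholders;
# assumes Open3D is imported as o3d). With return_normals=True the input cloud
# must already carry normals, e.g. via pcd.estimate_normals().
#
#   pcd = o3d.io.read_point_cloud('scene.ply')
#   kpt = np.random.choice(len(pcd.points), 128, replace=False)
#   patches, pcd_down, _ = radius_ball_search_o3d(pcd, kpt, search_radius=0.4,
#                                                 voxel_size=0.015, input_num=1024)
#   # patches: list of 128 arrays, each [1024, 3], cropped from the downsampled cloud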
Example #5
    def __getitem__(self, index):
        data = sio.loadmat(self.all_data[index])

        if self.mode == 'train':
            _, pc = pctk.uniform_resample_np(data['pc'], self.opt.model.input_num)
        else:
            pc = data['pc']

        pc = p3dtk.normalize_np(pc.T)
        pc = pc.T

        # defaults used when augmentation is disabled
        R = np.eye(3)
        R_label = 29

        if not self.opt.no_augmentation:
            # outside training, reuse the rotation stored with the sample; otherwise draw a random one
            if 'R' in data.keys() and self.mode != 'train':
                pc, R = pctk.rotate_point_cloud(pc, data['R'])
            else:
                pc, R = pctk.rotate_point_cloud(pc)

            # closest anchor to R (classification label) and the corresponding residual rotation
            _, R_label, R0 = rotation_distance_np(R, self.anchors)

            if self.flag == 'rotation':
                R = R0

        return {'pc': torch.from_numpy(pc.astype(np.float32)),
                'label': torch.from_numpy(data['label'].flatten()).long(),
                'fn': data['name'][0],
                'R': R,
                'R_label': torch.Tensor([R_label]).long(),
               }
    def _preprocess(self, pc, R_aug=None, n=None):
        idx, pc = pctk.uniform_resample_np(pc, self.input_num)
        if n is not None:
            n = n[idx]
        if R_aug is not None:
            # rotational augmentation
            pc, _ = pctk.rotate_point_cloud(pc, R_aug)
            if n is not None:
                n, _ = pctk.rotate_point_cloud(n, R_aug)
        if n is not None:
            pc = np.concatenate([pc, n], axis=1)
        return pc
Example #7
    def __getitem__(self, index):
        # data = sio.loadmat(self.all_data[index])
        data = np.load(self.all_data[index])
        # _, pc = pctk.uniform_resample_np(data['pc'], self.opt.model.input_num)

        # data['points'] is expected to be [10000, 3]; resample to input_num points
        _, pc = pctk.uniform_resample_np(data['points'], self.opt.model.input_num)

        # normalization
        pc = p3dtk.normalize_np(pc.T)
        pc = pc.T

        # R = np.eye(3)
        # R_label = 29

        # source shape
        # if 'R' in data.keys() and self.mode != 'train':
        #     pc_src, R_src = pctk.rotate_point_cloud(pc, data['R'])
        # else:
        #     pc_src, R_src = pctk.rotate_point_cloud(pc)

        # Rotated point cloud, rotation matrix
        pc_src, R_src = pctk.rotate_point_cloud(pc)
        # target shape

        # pc_tgt, R_tgt = pctk.rotate_point_cloud(pc)
        pc_tgt = pc

        # if self.mode == 'test':
        #     data['R'] = R
        #     output_path = os.path.join(self.dataset_path, data['cat'][0], 'testR')
        #     os.makedirs(output_path,exist_ok=True)
        #     sio.savemat(os.path.join(output_path, data['name'][0] + '.mat'), data)
        # _, R_label, R0 = rotation_distance_np(R, self.anchors)

        # with the target left unrotated, the relative rotation reduces to R_src
        # (in the general case T = R_src @ R_tgt.T)
        T = R_src

        # RR_regress = np.einsum('abc,bj,ijk -> aick', self.anchors, T, self.anchors)
        # R_label = np.argmax(np.einsum('abii->ab', RR_regress),axis=1)
        # idxs = np.vstack([np.arange(R_label.shape[0]), R_label]).T
        # R = RR_regress[idxs[:,0], idxs[:,1]]
        R, R_label = label_relative_rotation_np(self.anchors, T)
        pc_tensor = np.stack([pc_src, pc_tgt])

        return {'pc':torch.from_numpy(pc_tensor.astype(np.float32)),
                # 'fn': data['name'][0],
                'T' : torch.from_numpy(T.astype(np.float32)),
                'R': torch.from_numpy(R.astype(np.float32)),
                'R_label': torch.Tensor([R_label]).long(),
               }
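# Hedged sketch of the idea behind label_relative_rotation_np (the residual
# convention used by the original helper may differ): pick the anchor rotation
# closest to T and return the residual rotation together with the anchor index.
import numpy as np

def label_relative_rotation_sketch(anchors, T):
    # trace(A_i^T T) = 1 + 2*cos(theta_i), so the largest trace identifies the
    # anchor A_i with the smallest geodesic distance to T
    traces = np.einsum('aij,ij->a', anchors, T)
    label = int(np.argmax(traces))
    # residual rotation w.r.t. the chosen anchor (convention assumed here)
    R = anchors[label].T @ T
    return R, label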
def radius_ball_search_np(pc, kpt, search_radius, input_num=None, log=None):
    """Fixed-radius ball search around the keypoints indexed by `kpt`, optionally
    resampling every patch to input_num points."""
    if log is not None:
        print(log)

    # keypoints are taken from the original cloud before any downsampling
    keypoints = pc[kpt]
    # cap the cloud size to keep the KD-tree query tractable
    maxpoints = 50000
    if pc.shape[0] > maxpoints:
        _, pc = pctk.uniform_resample_np(pc, maxpoints)

    search = KDTree(pc)
    results = search.query_ball_point(keypoints, search_radius)
    all_pc = []
    for indices in results:
        if len(indices) <= 1:
            # degenerate neighborhood: return an all-zero patch of the expected size
            i = 1024 if input_num is None else input_num
            all_pc.append(np.zeros([i, 3], dtype=np.float32))
        else:
            if input_num is not None:
                _, patch = pctk.uniform_resample_np(pc[indices], input_num)
            else:
                patch = pc[indices]
            all_pc.append(patch)
    return all_pc
Example #9
    def __getitem__(self, index):
        # data = sio.loadmat(self.all_data[index])
        data = np.load(self.all_data[index])
        # _, pc = pctk.uniform_resample_np(data['pc'], self.opt.model.input_num)

        # data['points'] is expected to be [10000, 3]; resample to input_num points
        _, pc = pctk.uniform_resample_np(data['points'], self.opt.model.input_num)

        pc = p3dtk.normalize_np(pc.T)
        pc = pc.T

        R = np.eye(3)
        R_label = 29

        if not self.opt.no_augmentation:

            # ###################### HACK #########################
            # ridx = np.random.randint(0, high=self.anchors.shape[0])
            # R = self.anchors[ridx]
            # pcR, _ = pctk.rotate_point_cloud(pc, R)
            #######################################################

            if 'R' in data.keys() and self.mode != 'train':
                pc, R = pctk.rotate_point_cloud(pc, data['R'])
            else:
                pc, R = pctk.rotate_point_cloud(pc)

            # if self.mode == 'test':
            #     data['R'] = R
            #     output_path = os.path.join(self.dataset_path, data['cat'][0], 'testR')
            #     os.makedirs(output_path,exist_ok=True)
            #     sio.savemat(os.path.join(output_path, data['name'][0] + '.mat'), data)

            _, R_label, R0 = rotation_distance_np(R, self.anchors)

            if self.flag == 'rotation':
                R = R0

        return {'pc':torch.from_numpy(pc.astype(np.float32)),
                'label':torch.from_numpy(data['label'].flatten()).long(),
                # 'fn': data['name'][0],
                'R': R,
                'R_label': torch.Tensor([R_label]).long(),
               }
Example #10
    def next_batch(self):
        buf = self.current_grouped_points if self.grouped else self.current_kpts
        if self.scene_pt >= self.datasize:
            return False
        if self.batch_pt + self.batch_size >= buf.shape[0]:
            kpts = buf[self.batch_pt:]
        else:
            kpts = buf[self.batch_pt:self.batch_pt + self.batch_size]
        if self.grouped:
            grouped_points = kpts
            # resample pre-grouped patches to exactly knn points if needed
            if grouped_points.shape[1] != self.knn:
                resampled_points = []
                for pc in grouped_points:
                    _, pc_down = pctk.uniform_resample_np(pc, self.knn)
                    resampled_points.append(pc_down)
                grouped_points = np.array(resampled_points)
        else:
            grouped_indices = self.ball_search(self.current_scene, kpts,
                                               self.knn, self.search_radius)
            # grouped_points: [B, knn, 3]
            grouped_points = self.current_scene[grouped_indices]
        # optional input normalization / per-patch rotation, kept for reference:
        # if self.opt.normalize_input:
        #     grouped_points = proc.normalize_pcbatch_np(grouped_points)
        # batch_data = np.zeros_like(grouped_points)
        # for i in range(grouped_points.shape[0]):
        #     rotated_points, _ = pctk.rotate_point_cloud(grouped_points[i])
        #     batch_data[i] = rotated_points
        # grouped_points = batch_data
        self.batch_data = grouped_points
        self.batch_pt += self.batch_size
        if self.batch_pt >= buf.shape[0]:
            self.reload()
        return True
    def _process(self, pc):
        if pc.shape[0] != self.input_num:
            _, pc = pctk.uniform_resample_np(pc, self.input_num)
        return pc
    def _preprocess(self, pc, n=None):
        idx, pc = pctk.uniform_resample_np(pc, self.input_num)
        if n is not None:
            n = n[idx]
            pc = np.concatenate([pc, n], axis=1)
        return pc