def sample_and_group(npoint, nsample, xyz, points):
    """Sample `npoint` centroids via FPS and group `nsample` neighbors each.

    Args:
        npoint: number of centroids to sample.
        nsample: neighbors gathered per centroid.
        xyz: [B, N, C] point coordinates.
        points: [B, N, D] per-point features.

    Returns:
        new_xyz: [B, npoint, C] sampled centroid coordinates.
        new_points: [B, npoint, nsample, 2*D] centered neighbor features
            concatenated with the repeated centroid feature.
    """
    batch, _, _ = xyz.shape
    centers = npoint

    # Pick well-spread centroid indices, then gather their coords/features.
    centroid_idx = farthest_point_sample(xyz, npoint)      # [B, npoint]
    new_xyz = index_points(xyz, centroid_idx)
    new_points = index_points(points, centroid_idx)

    # k nearest neighbors of each centroid by squared Euclidean distance.
    pairwise = square_distance(new_xyz, xyz)               # [B, npoint, N]
    neighbor_idx = pairwise.argsort()[:, :, :nsample]      # [B, npoint, K]
    grouped = index_points(points, neighbor_idx)

    # Center every neighborhood on its centroid feature, then append the
    # centroid feature itself so relative and absolute info both survive.
    center_feat = new_points.view(batch, centers, 1, -1)
    grouped_norm = grouped - center_feat
    new_points = torch.cat(
        [grouped_norm, center_feat.repeat(1, 1, nsample, 1)],
        dim=-1,
    )
    return new_xyz, new_points
def _get_item(self, index): if index in self.cache: point_set, cls = self.cache[index] else: fn = self.datapath[index] cls = self.classes[self.datapath[index][0]] cls = np.array([cls]).astype(np.int32) point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32) if self.uniform: point_set = farthest_point_sample(point_set, self.npoints) else: point_set = point_set[0:self.npoints,:] point_set[:, 0:3] = pc_normalize(point_set[:, 0:3]) if not self.normal_channel: point_set = point_set[:, 0:3] if len(self.cache) < self.cache_size: self.cache[index] = (point_set, cls) return point_set, cls