def determinize(self, local_rank=0):
    """Freeze the random architecture into a concrete network.

    Runs one dummy forward pass (to populate any shape-dependent state),
    deep-copies the model, then walks the module tree breadth-first and
    replaces every ``RandomModule`` with its determinized counterpart.

    Args:
        local_rank: CUDA device index used for the dummy pass when a GPU
            is available.

    Returns:
        A deep copy of ``self`` containing no ``RandomModule`` instances.
    """
    self.eval()
    feats = torch.randn(1000, 4)
    coords = torch.randn(1000, 4).random_(997)
    coords[:, -1] = 0  # last column is the batch index; single sample -> 0
    dummy = SparseTensor(feats, coords.int())
    if torch.cuda.is_available():
        dummy = dummy.to('cuda:%d' % local_rank)
    with torch.no_grad():
        self.forward(dummy)
    model = copy.deepcopy(self)
    pending = deque([model])
    while pending:
        node = pending.popleft()
        for name, child in node._modules.items():
            # determinize() may itself yield another RandomModule, so
            # repeat until the slot holds a concrete module.
            while isinstance(child, RandomModule):
                child = node._modules[name] = child.determinize()
            pending.append(child)
    return model
def point_to_voxel(x, z):
    """Scatter point features of ``z`` onto the voxel grid of ``x``.

    The point-to-voxel index mapping is cached on ``z.additional_features``
    keyed by the voxel stride ``x.s``, so repeated calls at the same stride
    reuse the hash query instead of recomputing it.

    Args:
        x: Voxel-domain SparseTensor providing coordinates ``C`` and stride ``s``.
        z: Point-domain tensor with features ``F``, float coords ``C`` and an
            ``additional_features`` cache dict.

    Returns:
        A new SparseTensor on ``x``'s grid holding the averaged point features;
        ``coord_maps``/``kernel_maps`` are carried over from ``x``.
    """
    cache_ok = (z.additional_features is not None
                and z.additional_features.get("idx_query") is not None
                and z.additional_features["idx_query"].get(x.s) is not None)
    if cache_ok:
        idx_query = z.additional_features["idx_query"][x.s]
        counts = z.additional_features["counts"][x.s]
    else:
        # Quantize point coords to the voxel grid (keep the batch column),
        # then match each point hash against the voxel hashes.
        quantized = torch.cat(
            [torch.floor(z.C[:, :3] / x.s).int() * x.s,
             z.C[:, -1].int().view(-1, 1)], 1)
        pc_hash = spf.sphash(quantized)
        sparse_hash = spf.sphash(x.C)
        idx_query = spf.sphashquery(pc_hash, sparse_hash)
        counts = spf.spcount(idx_query.int(), x.C.shape[0])
        z.additional_features["idx_query"][x.s] = idx_query
        z.additional_features["counts"][x.s] = counts
    pooled = spf.spvoxelize(z.F, idx_query, counts)
    out = SparseTensor(pooled, x.C, x.s)
    out.coord_maps = x.coord_maps
    out.kernel_maps = x.kernel_maps
    return out
def spcrop(self, inputs):
    """Crop ``inputs`` to the axis-aligned box ``[self.loc_min, self.loc_max)``.

    Keeps only the points whose first three coordinate columns lie inside
    the half-open box; the batch column (last) is not tested.

    Args:
        inputs: SparseTensor with coords ``C``, features ``F`` and stride ``s``.

    Returns:
        A new SparseTensor containing the surviving coords/features at the
        same stride.
    """
    features = inputs.F
    coords = inputs.C
    cur_stride = inputs.s
    # FIX: loc_min/loc_max were referenced as bare names, which raises
    # NameError unless module-level globals happen to exist — the free-
    # function variant of spcrop takes them as parameters, so the method
    # form should read them from the module instance.
    # NOTE(review): assumes self.loc_min/self.loc_max are set in __init__
    # — confirm against the enclosing class definition.
    valid_flag = ((coords[:, :3] >= self.loc_min)
                  & (coords[:, :3] < self.loc_max)).all(-1)
    output_coords = coords[valid_flag]
    output_features = features[valid_flag]
    return SparseTensor(output_features, output_coords, cur_stride)
def initial_voxelize(z, init_res, after_res):
    """Build the initial stride-1 voxel tensor from point tensor ``z``.

    Rescales the point coordinates from ``init_res`` to ``after_res``,
    groups points that fall into the same integer voxel via hashing, and
    averages their features per voxel.  As a side effect the stride-1
    point-to-voxel mapping is cached on ``z.additional_features`` and
    ``z.C`` is replaced by the rescaled float coordinates.

    Returns:
        A stride-1 SparseTensor of voxelized coordinates and features.
    """
    # Rescale spatial coords; the batch column (last) passes through.
    scaled_coords = torch.cat(
        [z.C[:, :3] * init_res / after_res, z.C[:, -1].view(-1, 1)], 1)
    pt_hash = spf.sphash(torch.floor(scaled_coords).int())
    uniq_hash = torch.unique(pt_hash)
    idx_query = spf.sphashquery(pt_hash, uniq_hash)
    counts = spf.spcount(idx_query.int(), len(uniq_hash))
    vox_coords = torch.round(
        spf.spvoxelize(torch.floor(scaled_coords), idx_query, counts)).int()
    vox_feats = spf.spvoxelize(z.F, idx_query, counts)
    out = SparseTensor(vox_feats, vox_coords, 1)
    out.check()
    z.additional_features['idx_query'][1] = idx_query
    z.additional_features['counts'][1] = counts
    z.C = scaled_coords
    return out
def spcrop(inputs: SparseTensor, loc_min, loc_max) -> SparseTensor:
    """Return a copy of ``inputs`` cropped to the box ``[loc_min, loc_max)``.

    Only the first three coordinate columns are tested against the bounds;
    the stride is preserved unchanged.
    """
    xyz = inputs.C[:, :3]
    keep = ((xyz >= loc_min) & (xyz < loc_max)).all(-1)
    return SparseTensor(coords=inputs.C[keep],
                        feats=inputs.F[keep],
                        stride=inputs.s)