def prepare_input(self, index):
    """Build the voxelized SMPL input for frame ``index``.

    Loads the SMPL vertices stored alongside the image sequence, pads a
    world-space bounding box for point sampling, maps the vertices into
    the SMPL (canonical) frame, applies the random canonical-frame
    augmentation, and voxelizes the result.

    Returns:
        (feature, coord, out_sh, can_bounds, bounds, Rh, Th, center,
        rot, trans)
    """

    def _padded_bounds(points):
        # Axis-aligned bounds with the dataset's sampling margin:
        # pad all axes when cfg.big_box is set, otherwise only z.
        lo = np.min(points, axis=0)
        hi = np.max(points, axis=0)
        if cfg.big_box:
            lo -= 0.05
            hi += 0.05
        else:
            lo[2] -= 0.05
            hi[2] += 0.05
        return lo, hi

    # Vertices live in a "vertices/" tree mirroring "ImageSequence/".
    vertex_rel = self.ims[index].replace("ImageSequence", "vertices")[:-4] + ".npy"
    xyz = np.load(os.path.join(self.data_root, vertex_rel)).astype(np.float32)
    nxyz = np.zeros_like(xyz).astype(np.float32)

    # World-space bounds used for ray/point sampling.
    min_xyz, max_xyz = _padded_bounds(xyz)
    can_bounds = np.stack([min_xyz, max_xyz], axis=0)

    # Rigid world -> SMPL transform read from the per-frame params file.
    param_rel = self.ims[index].replace("ImageSequence", "smpl")[:-4] + ".npy"
    params = np.load(os.path.join(self.data_root, param_rel), allow_pickle=True).item()
    Rh = params['Rh']
    R = cv2.Rodrigues(Rh)[0].astype(np.float32)  # rotation vector -> matrix
    Th = params['Th'].astype(np.float32)
    xyz = np.dot(xyz - Th, R)

    # Random canonical-frame augmentation (rotation / translation).
    xyz, center, rot, trans = if_nerf_dutils.transform_can_smpl(xyz)

    # Bounds recomputed after augmentation — these drive the voxel grid.
    min_xyz, max_xyz = _padded_bounds(xyz)
    bounds = np.stack([min_xyz, max_xyz], axis=0)

    # Per-vertex feature: position plus a (zero) normal placeholder.
    cxyz = xyz.astype(np.float32)
    nxyz = nxyz.astype(np.float32)
    feature = np.concatenate([cxyz, nxyz], axis=1).astype(np.float32)

    # Voxel coordinates are stored depth-height-width (z, y, x order).
    dhw = xyz[:, [2, 1, 0]]
    min_dhw = min_xyz[[2, 1, 0]]
    max_dhw = max_xyz[[2, 1, 0]]
    voxel_size = np.array(cfg.voxel_size)
    coord = np.round((dhw - min_dhw) / voxel_size).astype(np.int32)

    # Grid shape per axis, rounded up to the next multiple of 32.
    out_sh = np.ceil((max_dhw - min_dhw) / voxel_size).astype(np.int32)
    x = 32
    out_sh = (out_sh | (x - 1)) + 1

    return feature, coord, out_sh, can_bounds, bounds, Rh, Th, center, rot, trans
def prepare_input(self, i):
    """Build the voxelized SMPL input for frame ``i``.

    Reads the per-frame vertices from ``<data_root>/vertices/<i>.npy``,
    takes pose/translation from ``self.params``, maps the vertices to
    the SMPL (canonical) frame, applies the random augmentation, and
    voxelizes the result.

    Returns:
        (feature, coord, out_sh, can_bounds, bounds, Rh, Th, center,
        rot, trans)
    """

    def _padded_bounds(points):
        # Axis-aligned bounds padded by 0.1 along axis 1 only.
        lo = np.min(points, axis=0)
        hi = np.max(points, axis=0)
        lo[1] -= 0.1
        hi[1] += 0.1
        return lo, hi

    vertex_path = os.path.join(self.data_root, 'vertices', '{}.npy'.format(i))
    xyz = np.load(vertex_path).astype(np.float32)
    nxyz = np.zeros_like(xyz).astype(np.float32)

    # World-space bounds used for ray/point sampling.
    min_xyz, max_xyz = _padded_bounds(xyz)
    can_bounds = np.stack([min_xyz, max_xyz], axis=0)

    # Rigid world -> SMPL transform: global orientation is the first
    # three pose parameters of this frame.
    Rh = self.params['pose'][i][:3]
    R = cv2.Rodrigues(Rh)[0].astype(np.float32)  # rotation vector -> matrix
    Th = self.params['trans'][i].astype(np.float32)
    xyz = np.dot(xyz - Th, R)

    # Random canonical-frame augmentation (rotation / translation).
    xyz, center, rot, trans = if_nerf_dutils.transform_can_smpl(xyz)

    # Bounds recomputed after augmentation — these drive the voxel grid.
    min_xyz, max_xyz = _padded_bounds(xyz)
    bounds = np.stack([min_xyz, max_xyz], axis=0)

    # Per-vertex feature: position plus a (zero) normal placeholder.
    cxyz = xyz.astype(np.float32)
    nxyz = nxyz.astype(np.float32)
    feature = np.concatenate([cxyz, nxyz], axis=1).astype(np.float32)

    # Voxel coordinates are stored depth-height-width (z, y, x order).
    dhw = xyz[:, [2, 1, 0]]
    min_dhw = min_xyz[[2, 1, 0]]
    max_dhw = max_xyz[[2, 1, 0]]
    voxel_size = np.array(cfg.voxel_size)
    coord = np.round((dhw - min_dhw) / voxel_size).astype(np.int32)

    # Grid shape per axis, rounded up to the next multiple of 32.
    out_sh = np.ceil((max_dhw - min_dhw) / voxel_size).astype(np.int32)
    x = 32
    out_sh = (out_sh | (x - 1)) + 1

    return feature, coord, out_sh, can_bounds, bounds, Rh, Th, center, rot, trans
def prepare_input(self, i):
    """Build the voxelized SMPL input for frame ``i``.

    Loads vertices from ``<data_root>/<cfg.vertices>/<i>.npy`` and the
    matching SMPL parameters from ``<data_root>/<cfg.params>/<i>.npy``,
    pads a world-space bounding box for sampling, maps the vertices to
    the SMPL (canonical) frame, applies the random augmentation, and
    voxelizes the result.

    Returns:
        (feature, coord, out_sh, can_bounds, bounds, Rh, Th, center,
        rot, trans)
    """

    def _padded_bounds(points):
        # Axis-aligned bounds with the dataset's sampling margin:
        # pad all axes when cfg.big_box is set, otherwise only z.
        lo = np.min(points, axis=0)
        hi = np.max(points, axis=0)
        if cfg.big_box:
            lo -= 0.05
            hi += 0.05
        else:
            lo[2] -= 0.05
            hi[2] += 0.05
        return lo, hi

    vertex_path = os.path.join(self.data_root, cfg.vertices, "{}.npy".format(i))
    xyz = np.load(vertex_path).astype(np.float32)  # SMPL mesh vertices, one row per vertex
    nxyz = np.zeros_like(xyz).astype(np.float32)   # normal placeholder, all zeros

    # World-space bounds used for ray/point sampling.
    min_xyz, max_xyz = _padded_bounds(xyz)
    can_bounds = np.stack([min_xyz, max_xyz], axis=0)  # (2, 3): [min; max]

    # Rigid world -> SMPL transform read from the per-frame params file.
    param_path = os.path.join(self.data_root, cfg.params, "{}.npy".format(i))
    params = np.load(param_path, allow_pickle=True).item()
    Rh = params["Rh"]
    # cv2.Rodrigues converts between rotation vectors and matrices;
    # here it turns the global-orientation vector into a 3x3 matrix.
    R = cv2.Rodrigues(Rh)[0].astype(np.float32)
    Th = params["Th"].astype(np.float32)
    xyz = np.dot(xyz - Th, R)  # vertices now in the SMPL frame

    # Random canonical-frame augmentation (rotation / translation).
    xyz, center, rot, trans = if_nerf_dutils.transform_can_smpl(xyz)

    # Bounds recomputed after augmentation — these drive the voxel grid.
    min_xyz, max_xyz = _padded_bounds(xyz)
    bounds = np.stack([min_xyz, max_xyz], axis=0)

    # Per-vertex feature: position plus the zero normal placeholder.
    cxyz = xyz.astype(np.float32)
    nxyz = nxyz.astype(np.float32)
    feature = np.concatenate([cxyz, nxyz], axis=1).astype(np.float32)

    # Voxel coordinates are stored depth-height-width (z, y, x order).
    dhw = xyz[:, [2, 1, 0]]
    min_dhw = min_xyz[[2, 1, 0]]
    max_dhw = max_xyz[[2, 1, 0]]
    voxel_size = np.array(cfg.voxel_size)
    coord = np.round((dhw - min_dhw) / voxel_size).astype(np.int32)

    # Grid shape per axis, rounded up to the next multiple of 32.
    out_sh = np.ceil((max_dhw - min_dhw) / voxel_size).astype(np.int32)
    x = 32
    out_sh = (out_sh | (x - 1)) + 1

    return feature, coord, out_sh, can_bounds, bounds, Rh, Th, center, rot, trans