# Try to reuse a precomputed SDF volume for this object; if the cache
# file is absent, fall back to building the conditioning input from data.
try:
    volume = np.load(
        f'sdf{save_fold}/sdf_{split}_{checkpoint}_{conditioned_ind}.npy')
except FileNotFoundError:
    volume = None

if volume is None:
    # ShapeNet evaluation pool (category 02958343 = cars).
    DATA_PATH = 'data/ShapeNet'
    fields = {'inputs': dataset.PointCloudField('pointcloud.npz')}
    category = ['02958343']
    test_dataset = dataset.ShapenetDataset(
        dataset_folder=DATA_PATH,
        fields=fields,
        categories=category,
        split=split,
        partial_input=partial_input,
        data_completeness=data_completeness,
        data_sparsity=data_sparsity,
        evaluation=True)
    # Alternate ShapeNet conditioning path, kept for reference:
    # conditioned_input = test_dataset.__getitem__(conditioned_ind)['points'].unsqueeze(0)

    # Conditioning input actually comes from the KITTI-360 building split.
    ds_kitti = dataset.KITTI360Dataset(
        'data/KITTI-360/data_3d_pointcloud', 'train', 'building',
        evaluation=True)
    # (1, N, 3)-batched target point cloud for object `conditioned_ind`
    # — shape assumed from the unsqueeze(0)/shape[1] usage; confirm upstream.
    conditioned_input = ds_kitti[conditioned_ind]['points_tgt'].unsqueeze(0)
    print("object id:", conditioned_ind + 1,
          "sample points:", conditioned_input.shape[1])
# Move the model to the target device and report its trainable-parameter
# count (frozen parameters are excluded via requires_grad).
net.to(device)
num_params = sum(p.numel() for p in net.parameters() if p.requires_grad)
print('The number of parameters of model is', num_params)

# create dataloader
# ShapeNet: training split, configured entirely from `cfg['data']`.
DATA_PATH = cfg['data']['path']
fields = {
    'inputs': dataset.PointCloudField(cfg['data']['pointcloud_file'])
}
category = cfg['data']['classes']
shapenet_dataset = dataset.ShapenetDataset(
    dataset_folder=DATA_PATH,
    fields=fields,
    categories=category,
    split='train',
    with_normals=use_normal,
    points_batch=points_batch,
    partial_input=partial_input,
    data_completeness=data_completeness,
    data_sparsity=data_sparsity)
# num_workers=0 keeps loading in the main process; drop_last=True so every
# batch has exactly batch_size_shapenet samples.
shapenet_loader = torch.utils.data.DataLoader(
    shapenet_dataset,
    batch_size=batch_size_shapenet,
    num_workers=0,
    shuffle=True,
    drop_last=True,
    pin_memory=True)

# KITTI
# NOTE(review): this call is continued beyond the visible chunk — the
# remaining KITTI360Dataset arguments follow in the rest of the file.
kitti_dataset = dataset.KITTI360Dataset(cfg['data']['kitti_pcl_path'], 'train',