def test(model, device, test_loader):
    """Evaluate part-segmentation mean IoU over ``test_loader``.

    Fix: the previous version computed ``save_test_loss`` / ``save_correct``
    and then silently discarded them (no ``return``); the results are now
    returned as ``(save_test_loss, save_correct)``.  Callers that ignored
    the old ``None`` return are unaffected.
    """
    save_test_loss = []
    save_correct = []
    model.eval()
    # Per-category boolean mask selecting the part classes valid for that
    # shape category.
    y_mask = test_loader.dataset.y_mask
    ious = [[] for _ in range(len(test_loader.dataset.categories))]
    with torch.no_grad():
        for data in test_loader:
            data = data.to(device)
            output = model(data)
            # Index of the max log-probability == predicted part class.
            pred = output.argmax(dim=1)
            i, u = i_and_u(pred, data.y, test_loader.dataset.num_classes,
                           data.batch)
            iou = i.cpu().to(torch.float) / u.cpu().to(torch.float)
            # A class absent from both prediction and target (0/0 -> NaN)
            # counts as perfect, per this file's convention.
            iou[torch.isnan(iou)] = 1
            # Keep only the part classes relevant to each shape's category.
            # (renamed loop variable: the original shadowed ``iou``)
            for shape_iou, category in zip(iou.unbind(),
                                           data.category.unbind()):
                ious[category.item()].append(shape_iou[y_mask[category]])
        # Mean IoU per category, then record the overall mean.
        ious = [torch.stack(iou).mean(0).mean(0) for iou in ious]
        save_test_loss.append(torch.tensor(ious).mean().item())
        save_correct.append(ious)
    return save_test_loss, save_correct
def test(model, test_loader, device):
    """Evaluate node accuracy and class-averaged mean IoU.

    Returns ``(node_accuracy, mean_class_iou)``.

    Fix: the original read the module-level ``test_dataset`` global for
    ``num_classes`` even though the loader is passed in as a parameter;
    it now uses ``test_loader.dataset`` consistently.
    """
    model.eval()
    correct_nodes = total_nodes = 0
    intersections, unions, categories = [], [], []
    for data in test_loader:
        data = data.to(device)
        with torch.no_grad():
            out = model(data)
        pred = out.max(dim=1)[1]
        correct_nodes += pred.eq(data.y).sum().item()
        total_nodes += data.num_nodes
        # Use the loader's own dataset, not the ``test_dataset`` global.
        i, u = i_and_u(pred, data.y, test_loader.dataset.num_classes,
                       data.batch)
        intersections.append(i.to(torch.device('cpu')))
        unions.append(u.to(torch.device('cpu')))
        categories.append(data.category.to(torch.device('cpu')))
    category = torch.cat(categories, dim=0)
    intersection = torch.cat(intersections, dim=0)
    union = torch.cat(unions, dim=0)
    ious = [[] for _ in range(len(test_loader.dataset.categories))]
    for j in range(len(test_loader.dataset)):
        # Restrict I/U to the part classes valid for this shape's category.
        i = intersection[j, test_loader.dataset.y_mask[category[j]]]
        u = union[j, test_loader.dataset.y_mask[category[j]]]
        iou = i.to(torch.float) / u.to(torch.float)
        # 0/0 -> NaN means the class is absent everywhere: count as perfect.
        iou[torch.isnan(iou)] = 1
        ious[category[j]].append(iou.mean().item())
    # Average per category, then across categories.
    for cat in range(len(test_loader.dataset.categories)):
        ious[cat] = torch.tensor(ious[cat]).mean().item()
    return correct_nodes / total_nodes, torch.tensor(ious).mean().item()
def test(loader):
    """Evaluate node accuracy and mean IoU over the categories present.

    Uses the module-level ``model``, ``device`` and ``test_dataset``
    globals (script style).  Returns ``(node_accuracy, mean_iou)`` where
    the mean is taken only over categories that actually occur in
    ``loader`` (``unique_cats``).

    Fix: the category count was hard-coded to 16 (ShapeNet); it is now
    derived from ``loader.dataset.categories`` like the rest of the file,
    which is identical for ShapeNet but no longer silently wrong for
    other datasets.
    """
    model.eval()
    # Trim the part-class mask to the number of classes in the test split.
    loader.dataset.y_mask = loader.dataset.y_mask[:, 0:test_dataset.num_classes]
    num_categories = len(loader.dataset.categories)  # was hard-coded 16
    correct_nodes = total_nodes = 0
    intersections, unions, categories = [], [], []
    for data in loader:
        data = data.to(device)
        with torch.no_grad():
            out, _, _ = model(data)
        pred = out.max(dim=1)[1]
        correct_nodes += pred.eq(data.y).sum().item()
        total_nodes += data.num_nodes
        i, u = i_and_u(pred, data.y, test_dataset.num_classes, data.batch)
        intersections.append(i.to(torch.device('cpu')))
        unions.append(u.to(torch.device('cpu')))
        categories.append(data.category.to(torch.device('cpu')))
    category = torch.cat(categories, dim=0)
    unique_cats = torch.unique(category)
    intersection = torch.cat(intersections, dim=0)
    union = torch.cat(unions, dim=0)
    # Diagnostic: histogram of shapes per category (kept as visible output).
    hist = torch.zeros(num_categories)
    for j in range(len(loader.dataset)):
        hist[category[j]] += 1
    print(hist)
    ious = [[] for _ in range(num_categories)]
    for j in range(len(loader.dataset)):
        i = intersection[j, loader.dataset.y_mask[category[j]]]
        u = union[j, loader.dataset.y_mask[category[j]]]
        iou = i.to(torch.float) / u.to(torch.float)
        # 0/0 -> NaN: class absent everywhere counts as perfect.
        iou[torch.isnan(iou)] = 1
        ious[category[j]].append(iou.mean().item())
    for cat in range(num_categories):
        ious[cat] = torch.tensor(ious[cat]).mean().item()
    print("IOUS:", ious)
    print(unique_cats)
    ious = torch.tensor(ious)
    # Average only over categories that appear in this loader; absent
    # categories would contribute NaN.
    return correct_nodes / total_nodes, torch.tensor(
        ious[unique_cats]).mean().item()
def test_sem_seg(model, loader, num_classes=13):
    """Evaluate semantic segmentation (S3DIS: 13 classes by default).

    ``loader`` is a DataListLoader: each ``data`` is a list of Data
    objects, so the flat label / batch vectors are rebuilt by hand.

    Fixes:
    * inference now runs under ``torch.no_grad()`` — no autograd graphs
      are built during evaluation (consistent with the other eval
      functions in this file);
    * ``i_total / u_total`` no longer divides by zero when a class never
      occurs in the split: such classes get IoU 1, matching the nan->1
      convention used elsewhere in this file, instead of poisoning the
      mean with NaN;
    * the class count is a backward-compatible keyword parameter.
    """
    model.eval()
    correct = 0
    total = 0
    i_total = np.zeros((num_classes, ))
    u_total = np.zeros((num_classes, ))
    for data in loader:
        with torch.no_grad():
            out_dict = model(data)
        out = out_dict['out']
        pred = out.argmax(dim=1)
        # Adaptation to the DataListLoader: concatenate labels and build
        # the batch-assignment vector manually.
        data_y = torch.cat([d.y for d in data]).to(out.device)
        data_batch = torch.cat([
            torch.ones(d.y.shape) * i for i, d in enumerate(data)
        ]).type(torch.LongTensor).to(out.device)
        correct += (pred == data_y).sum()
        total += pred.size(0)
        i, u = i_and_u(pred, data_y, num_classes, data_batch)
        i_total += i.cpu().to(torch.float).numpy().sum(axis=0)
        u_total += u.cpu().to(torch.float).numpy().sum(axis=0)
    # Point accuracy.
    accuracy = correct.type(torch.FloatTensor) / total
    # Mean IoU; classes with zero union default to IoU 1 (see docstring).
    ious = np.divide(i_total, u_total, out=np.ones_like(i_total),
                     where=u_total != 0)
    mean_iou = ious.sum() / num_classes
    return {
        'primary': mean_iou.item(),
        'i_total': i_total.tolist(),
        'u_total': u_total.tolist(),
        'mean_IoU': mean_iou.item(),
        'correct': correct.item(),
        'total': total,
        'accuracy': accuracy.item()
    }
def test(loader):
    """Evaluate class-averaged part-segmentation mean IoU.

    Uses the module-level ``model`` and ``device`` globals.  Returns the
    mean IoU as a Python float.

    Fix: inference is now wrapped in ``torch.no_grad()`` so evaluation
    does not build autograd graphs (consistent with the other eval
    functions in this file).
    """
    model.eval()
    # Per-category mask of valid part classes.
    y_mask = loader.dataset.y_mask
    ious = [[] for _ in range(len(loader.dataset.categories))]
    for data in loader:
        data = data.to(device)
        with torch.no_grad():
            pred = model(data).argmax(dim=1)
        i, u = i_and_u(pred, data.y, loader.dataset.num_classes, data.batch)
        iou = i.cpu().to(torch.float) / u.cpu().to(torch.float)
        # 0/0 -> NaN: class absent from prediction and target is perfect.
        iou[torch.isnan(iou)] = 1
        # Find and filter the relevant classes for each category.
        # (renamed loop variable: the original shadowed ``iou``)
        for shape_iou, category in zip(iou.unbind(), data.category.unbind()):
            ious[category.item()].append(shape_iou[y_mask[category]])
    # Compute mean IoU: per category first, then across categories.
    ious = [torch.stack(iou).mean(0).mean(0) for iou in ious]
    return torch.tensor(ious).mean().item()
def test_part_seg(model, loader):
    """Evaluate part segmentation with a DataListLoader.

    Returns a class-averaged mean IoU as a Python float.

    NOTE(review): this function is explicitly work-in-progress (see TODO
    below).  Unlike the other part-seg evaluators in this file, it does
    NOT mask the IoU vector with ``y_mask`` per category.
    """
    model.eval()
    ious = [[] for _ in range(len(loader.dataset.categories))]
    for data in loader:
        out_dict = model(data)
        out = out_dict['out']
        pred = out.argmax(dim=1)
        # Adaptation to the DataListLoader: ``data`` is a list of Data
        # objects, so rebuild the flat label / batch / category vectors.
        data_y = torch.cat([d.y for d in data]).to(out.device)
        data_batch = torch.cat([torch.ones(d.y.shape) * i for i, d in
                                enumerate(data)
                                ]).type(torch.LongTensor).to(out.device)
        data_category = torch.cat([d.category for d in data])
        # TODO: finalize the computation here.  The intended final form was
        # ``i_and_u(pred, data_y, loader.dataset.y_mask.size(-1), data_batch)``
        # (ShapeNet's ``num_classes`` attribute was missing from the installed
        # wheel, see pytorch_geometric commit 5c5509e); the current quick fix
        # shifts labels to start at 0 and derives the class count per batch.
        data_y = data_y - data_y.min()  # part class id should start at 0
        # NOTE(review): ``data_y.max() + 1`` is a 0-dim tensor and varies
        # per batch, so per-batch IoU vectors can differ in length; if they
        # do, the ``torch.stack`` below will raise — confirm before relying
        # on this across heterogeneous batches.
        i, u = i_and_u(pred, data_y, data_y.max() + 1, data_batch)
        iou = i.cpu().to(torch.float) / u.cpu().to(torch.float)
        # 0/0 -> NaN: class absent everywhere counts as perfect.
        iou[torch.isnan(iou)] = 1
        # Bucket each shape's (unmasked) IoU vector under its category.
        for iou, category in zip(iou.unbind(), data_category):
            ious[category.item()].append(iou)
    # Compute mean IoU: per category first, then across categories.
    ious = [torch.stack(iou).mean(0).mean(0) for iou in ious]
    return torch.tensor(ious).mean().item()
def test(loader):
    """Evaluate a DataListLoader: accuracy, instance mIoU, class mIoU.

    Uses the module-level ``model`` and ``device`` globals.  Returns
    ``(node_accuracy, instance_miou, class_miou)``.

    Fixes:
    * ``num_classes`` is taken from ``loader.dataset`` — the function
      already uses ``loader.dataset`` for the mask and categories, so
      reading the module-level ``test_dataset`` global was inconsistent;
    * the instance-mIoU flatten is O(n) instead of the quadratic
      ``sum(sum(ious, []))``.
    """
    model.eval()
    cpu = torch.device('cpu')  # hoisted: was rebuilt on every append
    correct_nodes = total_nodes = 0
    intersections, unions, categories = [], [], []
    for data_list in loader:
        with torch.no_grad():
            out = model(data_list)
        pred = out.max(dim=1)[1]
        # Rebuild flat label / batch vectors from the list of Data objects.
        y = torch.cat([data.y for data in data_list]).to(device)
        correct_nodes += pred.eq(y).sum().item()
        total_nodes += sum(data.num_nodes for data in data_list)
        b = torch.cat([
            torch.ones_like(data.y) * i for i, data in enumerate(data_list)
        ]).to(device)
        i, u = i_and_u(pred, y, loader.dataset.num_classes, b)
        intersections.append(i.to(cpu))
        unions.append(u.to(cpu))
        c = torch.cat([data.category for data in data_list])
        categories.append(c.to(cpu))
    category = torch.cat(categories, dim=0)
    intersection = torch.cat(intersections, dim=0)
    union = torch.cat(unions, dim=0)
    ious = [[] for _ in range(len(loader.dataset.categories))]
    for j in range(len(loader.dataset)):
        # Restrict I/U to the part classes valid for this shape's category.
        mask = loader.dataset.y_mask[category[j]]
        i = intersection[j, mask]
        u = union[j, mask]
        iou = i.to(torch.float) / u.to(torch.float)
        # 0/0 -> NaN: class absent everywhere counts as perfect.
        iou[torch.isnan(iou)] = 1
        ious[category[j]].append(iou.mean().item())
    # Instance-averaged mIoU over every shape in the dataset.
    miiou = sum(v for cat_ious in ious for v in cat_ious) / len(loader.dataset)
    # Class-averaged mIoU.
    for cat in range(len(loader.dataset.categories)):
        ious[cat] = torch.tensor(ious[cat]).mean().item()
    return correct_nodes / total_nodes, miiou, torch.tensor(ious).mean().item()
def test_one_epoch(args, loader, logger, epoch):
    """Run one evaluation epoch for completion / classification / segmentation.

    Logs the epoch metric to ``logger`` (TensorBoard-style ``add_scalar``)
    and prints it.  Returns a scalar where higher is better: accuracy,
    mIoU, or the *negated* Chamfer distance for completion.

    Uses module-level globals: ``model``, ``device``,
    ``simulate_partial_point_clouds``, ``i_and_u``.
    """
    model.eval()
    results = []
    intersections, unions, categories = [], [], []
    for j, data in enumerate(loader, 0):
        data = data.to(device)
        pos, batch, label = data.pos, data.batch, data.y
        # Only segmentation uses the shape category as a model input.
        category = data.category if args.task == 'segmentation' else None
        if args.is_simuOcc:
            # Simulate occlusion: evaluate on partial point clouds.
            data_observed = simulate_partial_point_clouds(
                data, args.num_pts_observed, args.task)
            pos_observed, batch_observed, label_observed = data_observed.pos, data_observed.batch, data_observed.y
        else:
            pos_observed, batch_observed, label_observed = pos, batch, label
        # inference
        with torch.no_grad():
            pred, loss = model(None, pos_observed, batch_observed, category,
                               label_observed)
        if args.task == 'completion':
            # ``loss`` is the model-reported reconstruction (Chamfer) term.
            # NOTE(review): ``torch.cat`` below assumes ``loss`` is at least
            # 1-D per batch — confirm against the model's return shape.
            results.append(loss)
        elif args.task == 'classification':
            pred = pred.max(1)[1]
            results.append(pred.eq(label).float())
        elif args.task == 'segmentation':
            pred = pred.max(1)[1]
            # IoU is computed against the *observed* labels when occlusion
            # is simulated.
            i, u = i_and_u(pred, label_observed, loader.dataset.num_classes,
                           batch_observed)
            intersections.append(i.to(torch.device('cpu')))
            unions.append(u.to(torch.device('cpu')))
            categories.append(category.to(torch.device('cpu')))
    if args.task == 'completion':
        results = torch.cat(results, dim=0).mean().item()
        logger.add_scalar('test_chamfer_dist', results, epoch)
        print('Epoch: {:03d}, Test Chamfer: {:.4f}'.format(epoch, results))
        # Negate so the caller can uniformly maximize the returned metric.
        results = -results
    elif args.task == 'classification':
        results = torch.cat(results, dim=0).mean().item()
        logger.add_scalar('test_acc', results, epoch)
        print('Epoch: {:03d}, Test Acc: {:.4f}'.format(epoch, results))
    elif args.task == 'segmentation':
        category = torch.cat(categories, dim=0)
        intersection = torch.cat(intersections, dim=0)
        union = torch.cat(unions, dim=0)
        ious = [[] for _ in range(len(loader.dataset.categories))]
        for j in range(category.size(0)):
            # Restrict I/U to the part classes valid for this category.
            i = intersection[j, loader.dataset.y_mask[category[j]]]
            u = union[j, loader.dataset.y_mask[category[j]]]
            iou = i.to(torch.float) / u.to(torch.float)
            # 0/0 -> NaN: class absent everywhere counts as perfect.
            iou[torch.isnan(iou)] = 1
            ious[category[j]].append(iou.mean().item())
        # Class-averaged mean IoU.
        for cat in range(len(loader.dataset.categories)):
            ious[cat] = torch.tensor(ious[cat]).mean().item()
        results = torch.tensor(ious).mean().item()
        logger.add_scalar('test_mIoU', results, epoch)
        print('Epoch: {:03d}, Test mIoU: {:.4f}'.format(epoch, results))
    return results
def evaluate(args, loader, save_dir):
    """Final evaluation for completion / classification / segmentation.

    Prints per-task metrics and, when ``args.save`` is set, dumps sample
    inputs/outputs as ``.npy`` files under ``save_dir``.  Returns nothing.

    Uses module-level globals: ``model``, ``device``, ``np``, ``os``,
    ``simulate_partial_point_clouds``, ``i_and_u``.
    """
    model.eval()
    results = []
    intersections, unions, categories = [], [], []
    if args.task == 'completion':
        # Per-category accumulator of Chamfer distances, keyed by category id.
        categories_summary = {k: [] for k in loader.dataset.idx2cat.keys()}
        idx2cat = loader.dataset.idx2cat
    # NOTE(review): this single-iteration loop looks vestigial (perhaps a
    # former multi-pass/vote loop) — it only adds an indentation level.
    for _ in range(1):
        for j, data in enumerate(loader, 0):
            data = data.to(device)
            pos, batch, label = data.pos, data.batch, data.y
            # Completion datasets may not carry a category attribute.
            try:
                category = data.category
            except AttributeError:
                category = None
            if args.is_simuOcc:
                # Simulate occlusion: evaluate on partial point clouds.
                data_observed = simulate_partial_point_clouds(
                    data, args.num_pts_observed, args.task)
                pos_observed, batch_observed, label_observed = data_observed.pos, data_observed.batch, data_observed.y
            else:
                pos_observed, batch_observed, label_observed = pos, batch, label
            with torch.no_grad():
                pred, loss = model(None, pos_observed, batch_observed,
                                   category, label_observed)
                if args.task == 'completion':
                    # sampling in the latent space to generate diverse
                    # prediction: blend the optimal latent with one random
                    # contributing mean, then decode it.
                    latent = model.module.optimal_z[0, :].view(1, -1)
                    idx = np.random.choice(args.num_vote_test, 1, False)
                    random_latent = model.module.contrib_mean[0, idx, :].view(
                        1, -1)
                    random_latent = (random_latent + latent) / 2
                    pred_diverse = model.module.generate_pc_from_latent(
                        random_latent)
            if args.task == 'classification':
                pred = pred.max(1)[1]
                results.append(pred.eq(label).float())
            elif args.task == 'segmentation':
                pred = pred.max(1)[1]
                i, u = i_and_u(pred, label_observed,
                               loader.dataset.num_classes, batch_observed)
                intersections.append(i.to(torch.device('cpu')))
                unions.append(u.to(torch.device('cpu')))
                categories.append(category.to(torch.device('cpu')))
                if args.save:
                    # Save only the first sample of the batch ([0]).
                    pos = pos.cpu().detach().numpy().reshape(
                        -1, args.num_pts, 3)[0]
                    pos_observed = pos_observed.cpu().detach().numpy().reshape(
                        -1, args.num_pts_observed, 3)[0]
                    pred = pred.cpu().detach().numpy().reshape(
                        -1, args.num_pts_observed)[0]
                    label = label.cpu().detach().numpy().reshape(
                        -1, args.num_pts)[0]
                    np.save(os.path.join(save_dir, 'pos_{}'.format(j)), pos)
                    np.save(
                        os.path.join(save_dir,
                                     'pos_observed_{}'.format(j)),
                        pos_observed)
                    np.save(os.path.join(save_dir, 'pred_{}'.format(j)), pred)
                    np.save(os.path.join(save_dir, 'label_{}'.format(j)),
                            label)
            elif args.task == 'completion':
                results.append(loss)
                categories.append(category.to(torch.device('cpu')))
                if args.save:
                    # For completion the ground truth is ``label`` (the full
                    # cloud); save the first sample of the batch.
                    pos = label.cpu().detach().numpy().reshape(
                        -1, args.num_pts, 3)[0]
                    pos_observed = pos_observed.cpu().detach().numpy().reshape(
                        -1, args.num_pts_observed, 3)[0]
                    pred = pred.cpu().detach().numpy()[0]
                    pred_diverse = pred_diverse.cpu().detach().numpy()[0]
                    np.save(os.path.join(save_dir, 'pos_{}'.format(j)), pos)
                    np.save(
                        os.path.join(save_dir, 'pos_observed_{}'.format(j)),
                        pos_observed)
                    np.save(os.path.join(save_dir, 'pred_{}'.format(j)), pred)
                    np.save(
                        os.path.join(save_dir, 'pred_diverse_{}'.format(j)),
                        pred_diverse)
    if args.task == 'completion':
        # Per-category, then class-averaged, Chamfer distance.
        results = torch.cat(results, dim=0)
        category = torch.cat(categories, dim=0)
        for i in range(category.size(0)):
            categories_summary[category[i].item()].append(results[i])
        total_chamfer_distance = 0
        for idx in categories_summary:
            chamfer_distance_cat = torch.stack(categories_summary[idx],
                                               dim=0).mean().item()
            total_chamfer_distance += chamfer_distance_cat
            print('{}: {:.7f}'.format(idx2cat[idx], chamfer_distance_cat))
        print('Mean Class Chamfer Distance: {:.6f}'.format(
            total_chamfer_distance / len(categories_summary)))
    elif args.task == 'classification':
        results = torch.cat(results, dim=0).mean().item()
        print('Test Acc: {:.4f}'.format(results))
    elif args.task == 'segmentation':
        category = torch.cat(categories, dim=0)
        intersection = torch.cat(intersections, dim=0)
        union = torch.cat(unions, dim=0)
        ious = [[] for _ in range(len(loader.dataset.categories))]
        for j in range(category.size(0)):
            # Restrict I/U to the part classes valid for this category.
            i = intersection[j, loader.dataset.y_mask[category[j]]]
            u = union[j, loader.dataset.y_mask[category[j]]]
            iou = i.to(torch.float) / u.to(torch.float)
            # 0/0 -> NaN: class absent everywhere counts as perfect.
            iou[torch.isnan(iou)] = 1
            ious[category[j]].append(iou.mean().item())
        # Class-averaged mean IoU.
        for cat in range(len(loader.dataset.categories)):
            ious[cat] = torch.tensor(ious[cat]).mean().item()
        miou = torch.tensor(ious).mean().item()
        print('Test class mIoU: {:.4f}'.format(miou))
    if args.task == 'completion' or args.task == 'segmentation':
        print('Sample results are saved to: {}'.format(save_dir))
    print('{} point clouds are evaluated.'.format(len(loader.dataset)))