Exemplo n.º 1
0
        running_scale_loss += scale_loss.item()
        running_center_loss += center_loss.item()
        running_rotation_loss += rotation_loss.item()
        running_miou += miou.item()

    epoch_loss = running_loss / len(data_set)
    epoch_l1_loss = running_l1_loss / len(data_set)
    epoch_scale_loss = running_scale_loss / len(data_set)
    epoch_center_loss = running_center_loss / len(data_set)
    epoch_rotation_loss = running_rotation_loss / len(data_set)
    epoch_miou = running_miou / len(data_set)

    print('Loss: {:.4f}'.format(float(epoch_loss)))
    print('L1 Loss: {:.4f}'.format(float(epoch_l1_loss)))
    print('Scale L1 Loss: {:.4f}'.format(float(epoch_scale_loss)))
    print('Center L1 Loss: {:.4f}'.format(float(epoch_center_loss)))
    print('M IOU: {:.4f}'.format(float(epoch_miou)))


if __name__ == '__main__':

    # Build the network, move it to GPU when available, and wrap it for
    # multi-GPU execution before loading the trained weights.
    model = MeshNet(cfg=cfg['MeshNet'], require_fea=True)
    if use_gpu:
        model.cuda()
    model = nn.DataParallel(model)

    # Checkpoint path is resolved relative to the project root from config.
    weights_path = os.path.join(root_path, cfg['load_model'])
    state_dict = torch.load(weights_path)
    model.load_state_dict(state_dict)

    # Switch to inference mode (disables dropout / fixes batch-norm stats).
    model.eval()

    test_model(model)
Exemplo n.º 2
0
            _, feas = model(centers, corners, normals, neighbor_index)
            ft_all = feas.cpu().squeeze(0).numpy()
            embed_dict[file_id] = ft_all

    ids = sorted(list(embed_dict.keys()))
    for i in ids:
        embed_npy.append(embed_dict[i])

    print(f'Number of embeddings in testset: {len(ids)}')
    np.save(f'./results/test/embed_{args.fold}.npy', embed_npy)


if __name__ == '__main__':
    # Class count depends on the segmentation task being evaluated.
    if args.task == 'Shape':
        num_classes = 8
    else:
        num_classes = 6

    # Build the network, move it to GPU, and wrap it for multi-GPU
    # execution before loading the trained weights.
    model = MeshNet(cfg=cfg['MeshNet'],
                    num_classes=num_classes,
                    require_fea=True)
    model.cuda()
    model = nn.DataParallel(model)
    model.load_state_dict(torch.load(args.weight))

    # exist_ok avoids the check-then-create race of exists()+makedirs();
    # plain string literal — the original f-string had no placeholders.
    os.makedirs('./results/test', exist_ok=True)

    # Switch to inference mode (disables dropout / fixes batch-norm stats).
    model.eval()

    inference(model)