# ---------------------------------------------------------------------------
# Data preparation.
# Training uses a random crop + horizontal flip for augmentation; testing
# uses TenCrop, so each test sample becomes a stack of 10 crop tensors
# (shape: ncrops x C x H x W) that are averaged later at evaluation time.
# ---------------------------------------------------------------------------
print('==> Preparing data..')

transform_train = transforms.Compose([
    transforms.RandomCrop(cut_size),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
])

transform_test = transforms.Compose([
    transforms.TenCrop(cut_size),
    # Convert the tuple of 10 PIL crops into a single stacked tensor.
    transforms.Lambda(lambda crops: torch.stack(
        [transforms.ToTensor()(crop) for crop in crops])),
])

trainset = CK(split='Training', fold=opt.fold, transform=transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=opt.bs, shuffle=True, num_workers=1)

testset = CK(split='Testing', fold=opt.fold, transform=transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=5, shuffle=False, num_workers=1)

# ---------------------------------------------------------------------------
# Model selection, driven by the --model command-line option.
# ---------------------------------------------------------------------------
if opt.model == 'VGG19':
    net = VGG('VGG19')
elif opt.model == 'Resnet18':
    net = ResNet18()
elif opt.model == 'Resnet18': net = ResNet18() correct = 0 total = 0 all_target = [] for i in xrange(10): print("%d fold" % (i + 1)) path = os.path.join(opt.dataset + '_' + opt.model, '%d' % (i + 1)) checkpoint = torch.load(os.path.join(path, 'Test_model.t7')) net.load_state_dict(checkpoint['net']) net.cuda() net.eval() testset = CK(split='Testing', fold=i + 1, transform=transform_test) testloader = torch.utils.data.DataLoader(testset, batch_size=5, shuffle=False, num_workers=1) for batch_idx, (inputs, targets) in enumerate(testloader): bs, ncrops, c, h, w = np.shape(inputs) inputs = inputs.view(-1, c, h, w) inputs, targets = inputs.cuda(), targets.cuda() inputs, targets = Variable(inputs, volatile=True), Variable(targets) outputs = net(inputs) outputs_avg = outputs.view(bs, ncrops, -1).mean(1) # avg over crops _, predicted = torch.max(outputs_avg.data, 1) total += targets.size(0) correct += predicted.eq(targets.data).cpu().sum()