def FER_batch_test():
    global PublicTestset
    global PublicTestloader
    global PrivateTestset
    global PrivateTestloader

    # FER2013 split sizes: 28709 Training / 3589 PublicTest / 3589 PrivateTest
    data_file = './data/Fer2013.h5'
    t_length = 28709
    v_length = 3589
    te_length = 3589
    re_length = 96
    batchsize = 8

    PublicTestset = FER2013(split='PublicTest', filename=data_file,
                            train_length=t_length, validate_length=v_length,
                            test_length=te_length, resize_length=re_length,
                            transform=transform_test)
    PublicTestloader = torch.utils.data.DataLoader(PublicTestset,
                                                   batch_size=batchsize,
                                                   shuffle=False)

    PrivateTestset = FER2013(split='PrivateTest', filename=data_file,
                             train_length=t_length, validate_length=v_length,
                             test_length=te_length, resize_length=re_length,
                             transform=transform_test)
    PrivateTestloader = torch.utils.data.DataLoader(PrivateTestset,
                                                    batch_size=batchsize,
                                                    shuffle=False)

    detailed_batch_test()
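# Usage sketch (illustrative, not part of the original listing): FER_batch_test()
# only builds the loaders; detailed_batch_test() is assumed to iterate them.
# Each PrivateTestloader batch then yields 8 samples, plus whatever crop
# dimension transform_test adds.
#
# FER_batch_test()
# for inputs, targets in PrivateTestloader:
#     print(inputs.shape, targets.shape)
#     break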
               'Angry', 'Disgust', 'Fear', 'Happy', 'Sad', 'Surprise', 'Neutral']

# Model
if opt.model == 'VGG19':
    net = VGG('VGG19')
elif opt.model == 'Resnet18':
    net = ResNet18()

path = os.path.join(opt.dataset + '_' + opt.model)
checkpoint = torch.load(os.path.join(path, opt.split + '_model.t7'))
net.load_state_dict(checkpoint['net'])
net.cuda()
net.eval()

Testset = FER2013(split=opt.split, transform=transform_test)
Testloader = torch.utils.data.DataLoader(Testset, batch_size=128,
                                         shuffle=False, num_workers=1)
correct = 0
total = 0
all_target = []

for batch_idx, (inputs, targets) in enumerate(Testloader):
    bs, ncrops, c, h, w = np.shape(inputs)   # TenCrop batch: (bs, ncrops, c, h, w)
    inputs = inputs.view(-1, c, h, w)        # fold the crops into the batch dim
    inputs, targets = inputs.cuda(), targets.cuda()
    # Variable(..., volatile=True) is deprecated; torch.no_grad() disables autograd
    with torch.no_grad():
        outputs = net(inputs)
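    # The loop is truncated here in the listing; a minimal sketch of the usual
    # TenCrop evaluation step follows (an assumption, not necessarily the
    # original continuation): the ten crop scores are averaged back per image
    # before updating the running accuracy.
    outputs_avg = outputs.view(bs, ncrops, -1).mean(1)   # average over the 10 crops
    _, predicted = torch.max(outputs_avg.data, 1)
    total += targets.size(0)
    correct += predicted.eq(targets.data).cpu().sum().item()

print('Test accuracy on %s: %.3f%%' % (opt.split, 100. * correct / total))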
# Data
print('==> Preparing data..')
transform_train = transforms.Compose([
    transforms.RandomCrop(44),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
])
transform_test = transforms.Compose([
    transforms.TenCrop(cut_size),
    transforms.Lambda(lambda crops: torch.stack(
        [transforms.ToTensor()(crop) for crop in crops])),
])

trainset = FER2013(split='Training', transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=opt.bs,
                                          shuffle=True, num_workers=1)
PublicTestset = FER2013(split='PublicTest', transform=transform_test)
PublicTestloader = torch.utils.data.DataLoader(PublicTestset, batch_size=opt.bs,
                                               shuffle=False, num_workers=1)
PrivateTestset = FER2013(split='PrivateTest', transform=transform_test)
PrivateTestloader = torch.utils.data.DataLoader(PrivateTestset, batch_size=opt.bs,
                                                shuffle=False, num_workers=1)
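# Quick shape check (illustrative, not part of the original script): TenCrop turns
# every test image into a stack of ten cut_size x cut_size crops, so a batch from
# PublicTestloader arrives as (opt.bs, 10, c, 44, 44) and is typically flattened
# into the batch dimension before the forward pass.
inputs, targets = next(iter(PublicTestloader))
print(inputs.shape)                            # -> (opt.bs, 10, c, 44, 44)
inputs = inputs.view(-1, *inputs.shape[2:])    # -> (opt.bs * 10, c, 44, 44)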
from torch.autograd import Variable
from models import *
from torchvision.models import resnext101_32x8d

use_cuda = torch.cuda.is_available()
cut_size = 44
alpha = 0.5

transform_test = transforms.Compose([
    transforms.TenCrop(cut_size),
    transforms.Lambda(lambda crops: torch.stack(
        [transforms.ToTensor()(crop) for crop in crops])),
])

PrivateTestset = FER2013(split='PrivateTest', transform=transform_test)
PrivateTestloader = torch.utils.data.DataLoader(PrivateTestset, batch_size=64,
                                                shuffle=False, num_workers=1)
criterion = nn.CrossEntropyLoss()


def PrivateTest_adv(net):
    net.eval()
    PrivateTest_loss = 0
    correct = 0
    total = 0
    err0 = 0.005  # for FGSM

    for batch_idx, (inputs, targets) in enumerate(PrivateTestloader):
        bs, ncrops, c, h, w = np.shape(inputs)
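        # The loop body is truncated in the listing; below is a minimal FGSM
        # sketch consistent with the err0 budget above (outputs_avg and
        # adv_inputs are illustrative names, not necessarily the original
        # continuation): shift each pixel by err0 along the sign of the loss
        # gradient.
        inputs = inputs.view(-1, c, h, w)
        if use_cuda:
            inputs, targets = inputs.cuda(), targets.cuda()
        inputs.requires_grad_(True)
        outputs = net(inputs)
        outputs_avg = outputs.view(bs, ncrops, -1).mean(1)   # average the ten crops
        loss = criterion(outputs_avg, targets)
        loss.backward()
        adv_inputs = (inputs + err0 * inputs.grad.sign()).clamp(0, 1).detach()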
# Use the trained VGG19 model
path = "FER2013_VGG19"
model = VGG("VGG19")
checkpoint = torch.load(os.path.join(path, 'PrivateTest_model.t7'))
model.load_state_dict(checkpoint['net'])
model.to(device)

# Preprocess the images, sampled from the training set
cut_size = 44
transform_train = transforms.Compose([
    transforms.RandomCrop(44),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
])
trainset = FER2013(split='Training', transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size,
                                          shuffle=True, num_workers=1)

# Sample the 23rd batch
index = 0
for cln_data, true_label in trainloader:
    index += 1
    if index == 23:
        break
bs, c, h, w = np.shape(cln_data)

# Print the true labels
print(true_label)
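# A small follow-up sketch (an assumption, not part of the original listing):
# run the sampled clean batch through the loaded model and compare its
# predictions with the true labels printed above (pred_label is illustrative).
with torch.no_grad():
    outputs = model(cln_data.to(device))
    pred_label = outputs.argmax(dim=1)
print(pred_label)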
])
transform_test = transforms.Compose([
    transforms.FiveCrop(cut_size),
    transforms.Lambda(lambda crops: torch.stack(
        [transforms.ToTensor()(crop) for crop in crops])),
])
# transform_test = transforms.Compose([
#     transforms.ToTensor(),
# ])

trainset = FER2013(split='Training', filename=data_file, train_length=t_length,
                   validate_length=v_length, test_length=te_length,
                   resize_length=re_length, transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=opt.bs, shuffle=True)

PublicTestset = FER2013(split='PublicTest', filename=data_file, train_length=t_length,
                        validate_length=v_length, test_length=te_length,
                        resize_length=re_length, transform=transform_test)
PublicTestloader = torch.utils.data.DataLoader(PublicTestset, batch_size=16,