def __getitem__(self, index):
    """Fetch one sample: returns (image_tensor, label_tensor, original_shape, name).

    The image is forced to RGB; the label keeps its native mode
    (presumably a class-id map — confirm against the dataset files).
    Only normalization (ImageNet statistics) and tensor conversion are
    applied — no augmentation, so this path suits evaluation.
    """
    entry = self.files[index]
    img = Image.open(entry["img"]).convert('RGB')
    lbl = Image.open(entry["label"])
    # Record the pre-transform (H, W, C) shape so callers can restore scale.
    original_shape = np.asarray(img).shape
    pipeline = transforms.Compose([
        tr.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
        tr.ToTensor(),
    ])
    out = pipeline({'image': img, 'label': lbl})
    return out['image'], out['label'], np.array(original_shape), entry["name"]
def __getitem__(self, index):
    """Fetch one training sample with augmentation.

    Returns (image_tensor, label_tensor, original_shape, name).
    Augmentations: random horizontal flip, random scale-crop (label
    fill value 255, presumably the ignore index — confirm against the
    loss setup), random Gaussian blur, then ImageNet normalization and
    tensor conversion.
    NOTE(review): RandomRotate(180) was commented out in the original —
    assumed intentionally disabled.
    """
    entry = self.files[index]
    img = Image.open(entry["img"]).convert('RGB')
    lbl = Image.open(entry["label"])
    # Record the pre-transform (H, W, C) shape so callers can restore scale.
    original_shape = np.asarray(img).shape
    pipeline = transforms.Compose([
        tr.RandomHorizontalFlip(),
        tr.RandomScaleCrop(base_size=self.base_size, crop_size=self.crop_size, fill=255),
        tr.RandomGaussianBlur(),
        tr.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
        tr.ToTensor(),
    ])
    out = pipeline({'image': img, 'label': lbl})
    return out['image'], out['label'], np.array(original_shape), entry["name"]
elif args.model_name == 'FlowNet': model = FlowNet(args) elif args.model_name == 'CircleFlowSRNet': model = CircleFlowSRNet(args) elif args.model_name == 'TestSRNet': model = TestSRNet(args) elif args.model_name == 'TestCircleSRNet': model = TestCircleSRNet(args) elif args.model_name == 'TestGNet': model = TestGNet(args) elif args.model_name == 'TestContinueNet': model = TestContinueNet(args) else: raise ValueError('no model named: {}'.format(args.model_name)) transform = transforms.Compose([transforms.Normalize()]) transform = transforms.Compose([transforms.Normalize()]) if args.model_name.find('Un') >= 0: dataset = Unsupervised_Dataset_Loader(args.test_dir, args.batch_size, args.scale, args.im_crop_H, args.im_crop_W, transform, args.random_crop) else: #dataset = Dataset_Loader(args.test_dir, args.adv_data_dir,args.batch_size,args.im_crop_H,args.im_crop_W,transform,args.random_crop) dataset = Dataset_Loader(args.test_dir, args.batch_size, args.im_crop_H, args.im_crop_W, transform, args.random_crop) if args.test_new: #dataset = Dataset_Loader(args.test_dir, args.adv_data_dir,args.batch_size,args.im_crop_H,args.im_crop_W,transform,args.random_crop) test_dataloader = torch.utils.data.DataLoader(