import os

import torch
from torch.utils.data import DataLoader
from torchvision import transforms

# Assumed context for these snippets: EasyDR, generator and weight_to_cpu are
# defined elsewhere in this repo, and `args` is the script-level argparse
# namespace (args.data, args.batch_size, args.cuda, args.co_power).
from utils.read_data import EasyDR


def load_dataset():
    global mean, std
    if args.data == './data/target_128':
        traindir = os.path.join(args.data, 'train')
        valdir = os.path.join(args.data, 'val')
        mean = [0.5, 0.5, 0.5]
        std = [0.5, 0.5, 0.5]
        normalize = transforms.Normalize(mean, std)
        post_transforms = transforms.Compose([
            transforms.ToTensor(),
            normalize
        ])
        val_transforms = transforms.Compose([
            transforms.ToTensor(),
            normalize,
        ])
        train_dataset = EasyDR(traindir, None, post_transforms, args.co_power)
        val_dataset = EasyDR(valdir, None, val_transforms, args.co_power)
        print('loaded the easy-classified diabetic retinopathy dataset (size 128) to pretrain the UNet')
    else:
        raise ValueError(
            "parameter 'data' (path to dataset) must be './data/target_128'")
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=4,
                              drop_last=False,
                              pin_memory=True if args.cuda else False)
    val_loader = DataLoader(val_dataset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            drop_last=False,
                            num_workers=4,
                            pin_memory=True if args.cuda else False)
    return train_loader, val_loader
def load_dataset(data_dir):
    if data_dir == '../data/target_128':
        mean = [0.651, 0.4391, 0.2991]
        std = [0.1046, 0.0846, 0.0611]
        print('loaded the DR dataset (size 128) successfully')
    else:
        raise ValueError(
            "parameter 'data_dir' (path to dataset) must be "
            "'../data/target_128'")
    traindir = os.path.join(data_dir, 'train')
    valdir = os.path.join(data_dir, 'val')
    normalize = transforms.Normalize(mean, std)
    post_transforms = transforms.Compose([transforms.ToTensor(), normalize])
    train_dataset = EasyDR(traindir, None, post_transforms, alpha=0)
    val_dataset = EasyDR(valdir, None, post_transforms, alpha=0)
    train_loader = DataLoader(train_dataset,
                              batch_size=64,
                              shuffle=False,
                              num_workers=2,
                              pin_memory=False)
    val_loader = DataLoader(val_dataset,
                            batch_size=64,
                            shuffle=False,
                            num_workers=2,
                            pin_memory=False)
    return train_loader, val_loader
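# Usage sketch (an assumption, not part of the original scripts): build the
# loaders for the 128px DR dataset and inspect one batch. EasyDR batches
# unpack into four values, matching the loop in get_mean_and_std below.
#
# train_loader, val_loader = load_dataset('../data/target_128')
# inputs, targets, _, _ = next(iter(train_loader))
# print(inputs.shape, targets.shape)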
def init():
    data = '../data/target_128'
    traindir = os.path.join(data, 'train')
    mean = [0.5, 0.5, 0.5]
    std = [0.5, 0.5, 0.5]
    normalize = transforms.Normalize(mean, std)
    pre_transforms = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomVerticalFlip(),
        transforms.ColorJitter(0.05, 0.05, 0.05, 0.05)
    ])
    post_transforms = transforms.Compose([
        transforms.ToTensor(),
        normalize
    ])
    train_dataset = EasyDR(traindir, pre_transforms, post_transforms, 2)
    train_loader = DataLoader(train_dataset,
                              batch_size=1,
                              shuffle=True,
                              num_workers=1,
                              drop_last=True,
                              pin_memory=False)
    # restore the UNet generator from the epoch-400 GAN checkpoint
    unet = generator()
    unet.load_state_dict(weight_to_cpu('../gan112/epoch_400/g.pkl'))
    return unet, train_loader
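# Usage sketch (an assumption, not part of the original scripts): run the
# pretrained generator over a single image. The forward signature of
# generator() is assumed to take one image tensor.
#
# unet, train_loader = init()
# unet.eval()
# with torch.no_grad():
#     inputs, _, _, _ = next(iter(train_loader))
#     outputs = unet(inputs)
#     print(outputs.shape)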
def load_dataset():
    if args.data == './data/flip':
        traindir = os.path.join(args.data, 'train')
        valdir = os.path.join(args.data, 'val')
        mean = [0.5, 0.5, 0.5]
        std = [0.5, 0.5, 0.5]
        normalize = transforms.Normalize(mean, std)
        # pre_transforms = transforms.Compose([
        #     transforms.RandomHorizontalFlip(),
        #     transforms.RandomVerticalFlip(),
        #     transforms.RandomRotation(10),
        #     transforms.ColorJitter(0.05, 0.05, 0.05, 0.05)
        # ])
        pre_transforms = transforms.Compose([
            transforms.RandomRotation(5),
            transforms.ColorJitter(0.05, 0.05, 0.05, 0.05)
        ])
        post_transforms = transforms.Compose(
            [transforms.ToTensor(), normalize])
        val_transforms = transforms.Compose([
            transforms.ToTensor(),
            normalize,
        ])
        train_dataset = EasyDR(traindir, pre_transforms, post_transforms, alpha=0)
        val_dataset = EasyDR(valdir, None, val_transforms, alpha=0)
        print('loaded the flipped DR dataset successfully')
    else:
        raise ValueError(
            "parameter 'data' (path to dataset) must be './data/flip'")
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=4,
                              pin_memory=True if args.cuda else False)
    val_loader = DataLoader(val_dataset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            num_workers=1,
                            pin_memory=True if args.cuda else False)
    return train_loader, val_loader
def load_dataset():
    if args.data == './data/target_128':
        mean = [0.651, 0.4391, 0.2991]
        std = [0.1046, 0.0846, 0.0611]
        print('loaded the DR dataset (size 128) successfully')
    elif args.data == './data/split_contrast_dataset':
        mean = [0.7432, 0.661, 0.6283]
        std = [0.0344, 0.0364, 0.0413]
        print('loaded the custom skin dataset successfully')
    else:
        raise ValueError(
            "parameter 'data' (path to dataset) must be in "
            "['./data/target_128', './data/split_contrast_dataset']")
    traindir = os.path.join(args.data, 'train')
    valdir = os.path.join(args.data, 'val')
    normalize = transforms.Normalize(mean, std)
    pre_transforms = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomVerticalFlip(),
        transforms.RandomRotation(10),
        transforms.ColorJitter(0.05, 0.05, 0.05, 0.05)
    ])
    post_transforms = transforms.Compose([transforms.ToTensor(), normalize])
    val_transforms = transforms.Compose([
        transforms.ToTensor(),
        normalize,
    ])
    train_dataset = EasyDR(traindir, pre_transforms, post_transforms, alpha=0)
    val_dataset = EasyDR(valdir, None, val_transforms, alpha=0)
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=2,
                              pin_memory=True if args.cuda else False)
    val_loader = DataLoader(val_dataset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            num_workers=2,
                            pin_memory=True if args.cuda else False)
    return train_loader, val_loader
def load_dataset():
    if args.data == './gan174_output':
        mean = [0.7672, 0.5209, 0.3615]
        std = [0.0865, 0.079, 0.0618]
        print('loaded the UNet outputs successfully')
    else:
        raise ValueError(
            "parameter 'data' (path to dataset) must be in "
            "['./gan174_output']")
    traindir = os.path.join(args.data, 'train')
    valdir = os.path.join(args.data, 'val')
    normalize = transforms.Normalize(mean, std)
    # NOTE: pre_transforms is built here but never passed to EasyDR below;
    # the training set is loaded without augmentation.
    pre_transforms = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomVerticalFlip(),
        transforms.RandomRotation(10),
        transforms.ColorJitter(0.05, 0.05, 0.05, 0.05)
    ])
    post_transforms = transforms.Compose([
        transforms.ToTensor(),
        normalize
    ])
    val_transforms = transforms.Compose([
        transforms.ToTensor(),
        normalize,
    ])
    train_dataset = EasyDR(traindir, None, post_transforms, alpha=0)
    val_dataset = EasyDR(valdir, None, val_transforms, alpha=0)
    train_loader = DataLoader(train_dataset,
                              batch_size=args.batch_size,
                              shuffle=True,
                              num_workers=2,
                              pin_memory=True if args.cuda else False)
    val_loader = DataLoader(val_dataset,
                            batch_size=args.batch_size,
                            shuffle=False,
                            num_workers=2,
                            pin_memory=True if args.cuda else False)
    return train_loader, val_loader
def get_mean_and_std(path,
                     transform=transforms.Compose([transforms.ToTensor()]),
                     channels=3):
    """Average the per-image channel means and stds over a dataset directory."""
    from utils.read_data import EasyDR
    dataset = EasyDR(path, None, transform)
    dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=2)
    mean = torch.zeros(channels)
    std = torch.zeros(channels)
    print('==> Computing mean and std..')
    for inputs, targets, _, _ in dataloader:
        for i in range(channels):
            mean[i] += inputs[:, i, :, :].mean()
            std[i] += inputs[:, i, :, :].std()
    # batch_size is 1, so each iteration adds one image's statistics
    mean.div_(len(dataset))
    std.div_(len(dataset))
    mean, std = mean.numpy().tolist(), std.numpy().tolist()
    return [round(x, 4) for x in mean], [round(y, 4) for y in std]
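# Usage sketch (an assumption): recompute the per-channel statistics that the
# hard-coded mean/std lists above were presumably derived from, then feed
# them into Normalize.
#
# if __name__ == '__main__':
#     mean, std = get_mean_and_std('./data/target_128/train')
#     print(mean, std)
#     normalize = transforms.Normalize(mean, std)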