def get_dataset(args):
    """Build the training DataLoader for the matting dataset.

    Args:
        args: parsed CLI namespace; reads ``size``, ``trainList``, ``imgDir``,
            ``mskDir``, ``alphaDir``, ``threads`` and ``batchSize``.

    Returns:
        A shuffling ``DataLoader`` over ``MatDataset``.
    """
    # Map each RGB channel from [0, 1] to [-1, 1].
    Normalize = transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
    train_transform = MatTransform(args.size, flip=True)
    train_set = MatDataset(
        args.trainList,
        args.imgDir,
        args.mskDir,
        args.alphaDir,
        normalize=Normalize,
        transform=train_transform,
    )
    return DataLoader(
        dataset=train_set,
        num_workers=args.threads,
        batch_size=args.batchSize,
        shuffle=True,
    )
def get_dataset(args):
    """Build the training DataLoader for the offline matting dataset.

    Parses the comma-separated crop-size strings on ``args`` into int lists
    (stored back onto ``args``, as ``MatDatasetOffline`` reads them there).

    Args:
        args: parsed CLI namespace; reads ``crop_h``, ``crop_w``, ``threads``
            and ``batchSize``.

    Returns:
        A shuffling ``DataLoader`` over ``MatDatasetOffline``.
    """
    train_transform = MatTransform(flip=True)
    # Guard with isinstance so a second call with an already-parsed args
    # namespace does not crash on list.split (the original mutated
    # unconditionally and was therefore not idempotent).
    if isinstance(args.crop_h, str):
        args.crop_h = [int(i) for i in args.crop_h.split(',')]
    if isinstance(args.crop_w, str):
        args.crop_w = [int(i) for i in args.crop_w.split(',')]
    train_set = MatDatasetOffline(args, train_transform)
    train_loader = DataLoader(
        dataset=train_set,
        num_workers=args.threads,
        batch_size=args.batchSize,
        shuffle=True,
    )
    return train_loader
def get_dataset(args):
    """Build the training DataLoader for the offline matting dataset.

    Parses the comma-separated crop-size strings on ``args`` into int lists
    (stored back onto ``args``, as ``MatDatasetOffline`` reads them there)
    and wires in ImageNet-statistics normalization.

    Args:
        args: parsed CLI namespace; reads ``crop_h``, ``crop_w``, ``threads``
            and ``batchSize``.

    Returns:
        A shuffling ``DataLoader`` over ``MatDatasetOffline``.
    """
    train_transform = MatTransform(flip=True)
    # Guard with isinstance so a second call with an already-parsed args
    # namespace does not crash on list.split (the original mutated
    # unconditionally and was therefore not idempotent).
    if isinstance(args.crop_h, str):
        args.crop_h = [int(i) for i in args.crop_h.split(',')]
    if isinstance(args.crop_w, str):
        args.crop_w = [int(i) for i in args.crop_w.split(',')]
    # Standard ImageNet channel statistics.
    normalize = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225]),
    ])
    train_set = MatDatasetOffline(args, train_transform, normalize)
    train_loader = DataLoader(
        dataset=train_set,
        num_workers=args.threads,
        batch_size=args.batchSize,
        shuffle=True,
    )
    return train_loader