Example #1
    def create_test_dataloaders(self):
        # Test-time preprocessing: both branches apply a power transform; with
        # self.args.bayer a Bayer mosaic is simulated, otherwise images are
        # converted to grayscale with a discrete intensity scale.
        if self.args.bayer:
            transform_test = transforms.Compose([
                transforms.ToTensor(),
                preprocess_pg.PowerTransform(2.5),
                preprocess_pg.Bayer([0.5, 0.5]),
            ])
        else:
            transform_test = transforms.Compose([
                img_dataset.ToGrayscale(),
                transforms.ToTensor(),
                preprocess_pg.PowerTransform(2.5),
                preprocess_pg.DiscreteIntensityScale([0.5, 1]),
            ])

        testsets = [
            (img_dataset.PlainImageFolder(root=denoising_data.set12_val_dir,
                                          transform=transform_test,
                                          cache=True), "Set12"),
            (img_dataset.PlainImageFolder(
                root=denoising_data.bsds500_val68_dir,
                transform=transform_test,
                cache=True), "val68"),
            (img_dataset.PlainImageFolder(root=denoising_data.urban_val_dir,
                                          transform=transform_test,
                                          cache=True), "Urban100")
        ]
        # One image per batch, no shuffling: deterministic per-image evaluation.
        testloaders = [(torch.utils.data.DataLoader(testset,
                                                    batch_size=1,
                                                    shuffle=False,
                                                    num_workers=1), name)
                       for testset, name in testsets]

        return testloaders
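The method returns a list of (DataLoader, name) pairs. A minimal sketch of consuming that list, assuming PlainImageFolder yields one image tensor per item (not confirmed by the snippet); summarize_testsets is a hypothetical helper, not part of the original code:

import torch

def summarize_testsets(testloaders):
    # Iterate each named loader once; batch_size=1 means one image per batch.
    for loader, name in testloaders:
        n_images = len(loader)
        with torch.no_grad():
            mean_val = sum(batch.mean().item() for batch in loader) / max(n_images, 1)
        print(f"{name}: {n_images} images, mean intensity {mean_val:.3f}")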
Example #2
    def create_test_dataloaders(self):
        # Test-time preprocessing: grayscale conversion followed by tensor conversion only.
        transform_test = transforms.Compose([
            img_dataset.ToGrayscale(),
            transforms.ToTensor(),
        ])

        testsets = [
            (img_dataset.PlainImageFolder(root=denoising_data.set12_val_dir,
                                          transform=transform_test,
                                          cache=True), "Set12"),
            (img_dataset.PlainImageFolder(
                root=denoising_data.bsds500_val68_dir,
                transform=transform_test,
                cache=True), "val68"),
            (img_dataset.PlainImageFolder(root=denoising_data.urban_val_dir,
                                          transform=transform_test,
                                          cache=True), "Urban100")
        ]
        testloaders = [(torch.utils.data.DataLoader(testset,
                                                    batch_size=1,
                                                    shuffle=False,
                                                    num_workers=1), name)
                       for testset, name in testsets]

        return testloaders
Example #3
    def create_train_dataloaders(self, patchsize, batchsize, trainsetiters):
        transform_train = transforms.Compose([
            transforms.RandomCrop(patchsize),
            preprocess.RandomOrientation90(),
            transforms.RandomVerticalFlip(),
            #img_dataset.ToGrayscale(),
            transforms.ToTensor(),
        ])
        self.batchsize = batchsize

        # Only cmr_perf_train_dir is active; the other folders are kept
        # commented out as alternative data sources.
        train_folders = [
            #denoising_data.bsds500_train_dir,
            #denoising_data.bsds500_test_dir
            #denoising_data.cmr_cine_train_dir
            denoising_data.cmr_perf_train_dir
        ]

        trainset = img_dataset.PlainImageFolder(root=train_folders,
                                                transform=transform_train,
                                                cache=True,
                                                depth=2)
        print('Input training data has', len(trainset), 'images')

        # Repeat the dataset trainsetiters times so a single epoch covers it multiple times.
        trainset_multiple = [trainset] * trainsetiters
        print(trainset_multiple)

        trainset_used = torch.utils.data.ConcatDataset(trainset_multiple)
        print('Total amount of images for training ', len(trainset_used))

        # Warm-up pass: touch every sample once so the cache is filled and
        # unreadable files are reported before training starts.
        for n in tqdm(range(len(trainset_used))):
            try:
                img = trainset_used[n]
            except Exception:
                print("Error in loading sample ", n)

        trainloader = torch.utils.data.DataLoader(trainset_used,
                                                  batch_size=batchsize,
                                                  shuffle=True,
                                                  num_workers=20)

        return trainloader
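The [trainset] * trainsetiters pattern concatenates the same dataset with itself, so one DataLoader epoch walks the data trainsetiters times. A self-contained sketch of the effect, using a toy TensorDataset in place of PlainImageFolder:

import torch
from torch.utils.data import TensorDataset, ConcatDataset, DataLoader

base = TensorDataset(torch.arange(10).float().unsqueeze(1))  # stands in for PlainImageFolder
repeated = ConcatDataset([base] * 4)                         # same idea as [trainset] * trainsetiters

print(len(base), len(repeated))    # 10 40
loader = DataLoader(repeated, batch_size=5, shuffle=True)
print(len(loader))                 # 8 batches per epoch instead of 2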
Example #4
    def create_train_dataloaders(self, patchsize, batchsize, trainsetiters):
        # Training augmentation: random crop, random 90-degree orientation, and vertical
        # flip, followed by a power transform; with self.args.bayer the crop is doubled
        # and a Bayer mosaic is simulated, otherwise grayscale conversion and a
        # continuous intensity scale are applied.
        if self.args.bayer:
            transform_train = transforms.Compose([
                transforms.RandomCrop(patchsize * 2),
                preprocess.RandomOrientation90(),
                transforms.RandomVerticalFlip(),
                transforms.ToTensor(),
                preprocess_pg.PowerTransform(1.25, 10),
                preprocess_pg.Bayer([0.4, 0.7]),
            ])
        else:
            transform_train = transforms.Compose([
                transforms.RandomCrop(patchsize),
                preprocess.RandomOrientation90(),
                transforms.RandomVerticalFlip(),
                img_dataset.ToGrayscale(),
                transforms.ToTensor(),
                preprocess_pg.PowerTransform(1.25, 10),
                preprocess_pg.ContinuousIntensityScale([0.25, 1]),
            ])
        self.batchsize = batchsize

        train_folders = [
            denoising_data.bsds500_train_dir, denoising_data.bsds500_test_dir,
            denoising_data.div2k_train_dir, denoising_data.waterloo_train_dir
        ]

        trainset = img_dataset.PlainImageFolder(root=train_folders,
                                                transform=transform_train,
                                                cache=False)
        trainset = torch.utils.data.ConcatDataset([trainset] * trainsetiters)
        trainloader = torch.utils.data.DataLoader(trainset,
                                                  batch_size=batchsize,
                                                  shuffle=True,
                                                  num_workers=20)

        return trainloader
Example #5
    def create_train_dataloaders(self, patchsize, batchsize, trainsetiters):
        # Training augmentation: random crop, random 90-degree orientation, vertical flip,
        # grayscale conversion, then tensor conversion.
        transform_train = transforms.Compose([
            transforms.RandomCrop(patchsize),
            preprocess.RandomOrientation90(),
            transforms.RandomVerticalFlip(),
            img_dataset.ToGrayscale(),
            transforms.ToTensor(),
        ])
        self.batchsize = batchsize

        train_folders = [
            denoising_data.bsds500_train_dir, denoising_data.bsds500_test_dir
        ]

        trainset = img_dataset.PlainImageFolder(root=train_folders,
                                                transform=transform_train,
                                                cache=True)
        trainset = torch.utils.data.ConcatDataset([trainset] * trainsetiters)
        trainloader = torch.utils.data.DataLoader(trainset,
                                                  batch_size=batchsize,
                                                  shuffle=True,
                                                  num_workers=20)

        return trainloader
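The grayscale training pipeline above relies on project-specific helpers (preprocess.RandomOrientation90, img_dataset.ToGrayscale). A self-contained approximation using only torchvision, with transforms.Grayscale substituted for ToGrayscale and the 90-degree orientation step dropped; the synthetic image and patch size are illustrative assumptions:

import numpy as np
from PIL import Image
from torchvision import transforms

patchsize = 64
transform_train = transforms.Compose([
    transforms.RandomCrop(patchsize),
    transforms.RandomVerticalFlip(),
    transforms.Grayscale(),        # stand-in for img_dataset.ToGrayscale()
    transforms.ToTensor(),
])

img = Image.fromarray(np.uint8(np.random.rand(180, 180, 3) * 255))  # synthetic RGB image
patch = transform_train(img)
print(patch.shape)                 # torch.Size([1, 64, 64])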