Example #1
    def __init__(self,
                 root=DATA_PATH,
                 download=True,
                 num_bits=8,
                 pil_transforms=[],
                 conditional=False,
                 super_resolution=False,
                 sr_scale_factor=4,
                 resize_hw=None):

        self.root = root
        self.sr_scale_factor = sr_scale_factor

        # Define transformations
        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]
        if resize_hw is not None:
            trans_train.insert(0, Resize((resize_hw, resize_hw)))
            trans_test.insert(0, Resize((resize_hw, resize_hw)))

        # Load data
        sub_root = os.path.join(root, 'SVHN')

        if super_resolution:
            self.train = SuperResolutionSVHNDataset(
                sub_root,
                split='train',
                transform=Compose(trans_train),
                download=download,
                sr_scale_factor=sr_scale_factor)
            self.test = SuperResolutionSVHNDataset(
                sub_root,
                split='test',
                transform=Compose(trans_test),
                download=download,
                sr_scale_factor=sr_scale_factor)
        elif conditional:
            self.train = SupervisedSVHNDataset(sub_root,
                                               split='train',
                                               transform=Compose(trans_train),
                                               download=download)
            self.test = SupervisedSVHNDataset(sub_root,
                                              split='test',
                                              transform=Compose(trans_test),
                                              download=download)
        else:
            self.train = UnsupervisedSVHNDataset(
                sub_root,
                split='train',
                transform=Compose(trans_train),
                download=download)
            self.test = UnsupervisedSVHNDataset(sub_root,
                                                split='test',
                                                transform=Compose(trans_test),
                                                download=download)
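All of these wrappers rely on a `Quantize` transform that is not shown in the snippets. A minimal sketch of what such a transform typically does, assuming `ToTensor()` has already scaled pixels to [0, 1] and that `Quantize` reduces them to `num_bits` integer levels (the actual implementation in the repository may differ):

import torch

class Quantize:
    """Reduce pixel values to num_bits levels (sketch only; the real transform may differ)."""

    def __init__(self, num_bits=8):
        self.num_bits = num_bits

    def __call__(self, image):
        # ToTensor() yields floats in [0, 1]; rescale to integer pixel values in [0, 255].
        image = image * 255
        if self.num_bits != 8:
            # Keep only the num_bits most significant bits, e.g. num_bits=5 -> 32 levels.
            image = torch.floor(image / 2 ** (8 - self.num_bits))
        return image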
Example #2
    def __init__(self,
                 root=DATA_PATH,
                 download=True,
                 num_bits=8,
                 pil_transforms=[],
                 conditional=False,
                 super_resolution=False,
                 sr_scale_factor=4,
                 resize_hw=None):

        self.root = root
        self.y_classes = 10
        self.sr_scale_factor = sr_scale_factor

        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]
        if resize_hw is not None:
            trans_train.insert(0, Resize((resize_hw, resize_hw)))
            trans_test.insert(0, Resize((resize_hw, resize_hw)))

        # Load data
        if super_resolution:
            self.train = SuperResolutionCIFAR10Dataset(
                root,
                train=True,
                transform=Compose(trans_train),
                download=download,
                sr_scale_factor=sr_scale_factor)
            self.test = SuperResolutionCIFAR10Dataset(
                root,
                train=False,
                transform=Compose(trans_test),
                sr_scale_factor=sr_scale_factor)
        elif conditional:
            one_hot_encode = lambda target: F.one_hot(torch.tensor(target),
                                                      self.y_classes)
            self.train = SupervisedCIFAR10Dataset(
                root,
                train=True,
                transform=Compose(trans_train),
                target_transform=one_hot_encode,
                download=download)
            self.test = SupervisedCIFAR10Dataset(
                root,
                train=False,
                transform=Compose(trans_test),
                target_transform=one_hot_encode)
        else:
            self.train = UnsupervisedCIFAR10Dataset(
                root,
                train=True,
                transform=Compose(trans_train),
                download=download)
            self.test = UnsupervisedCIFAR10Dataset(
                root, train=False, transform=Compose(trans_test))
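A hedged usage sketch for this wrapper; the enclosing class name `CIFAR10` is an assumption, since only the `__init__` body is shown:

from torch.utils.data import DataLoader

# Hypothetical usage; the wrapper class name `CIFAR10` is an assumption.
data = CIFAR10(num_bits=5, conditional=True)
train_loader = DataLoader(data.train, batch_size=64, shuffle=True)

x, y = next(iter(train_loader))
# x: batch of quantized CIFAR-10 images, y: one-hot labels over 10 classes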
Example #3
    def __init__(self, root=DATA_PATH, download=True, num_bits=8, pil_transforms=[]):

        self.root = root

        # Define transformations
        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]

        # Load data
        self.train = UnsupervisedMNIST(root, train=True, transform=Compose(trans_train), download=download)
        self.test = UnsupervisedMNIST(root, train=False, transform=Compose(trans_test))
Example #4
    def __init__(self, root=DATA_PATH, num_bits=8, pil_transforms=[]):

        self.root = root

        # Define transformations
        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]

        # Load data
        self.train = CelebADataset(root, split='train', transform=Compose(trans_train))
        self.valid = CelebADataset(root, split='valid', transform=Compose(trans_test))
        self.test = CelebADataset(root, split='test', transform=Compose(trans_test))
Example #5
    def __init__(self,
                 resize_hw,
                 root=DATA_PATH,
                 num_bits=8,
                 pil_transforms=[],
                 sr_scale_factor=4,
                 bicubic=False,
                 crop=None,
                 repeats=1):
        super(Set5, self).__init__()
        self.root = root

        if crop is not None:
            if crop == "random":
                trans_test = Compose([RandomCrop(resize_hw), ToTensor(), Quantize(num_bits)])
            elif crop == "center":
                trans_test = Compose([CenterCrop(resize_hw), ToTensor(), Quantize(num_bits)])
            else:
                raise ValueError("crop must be None, 'random', or 'center'")
                                 
            self.test = Set5Dataset(resize_hw=None, root=root, split='test', transform=trans_test, sr_scale_factor=sr_scale_factor, bicubic=bicubic, repeats=repeats)
        else:
            trans_test = Compose([ToTensor(), Quantize(num_bits)])
            self.test = Set5Dataset(resize_hw=resize_hw,
                                    root=root,
                                    split='test',
                                    transform=trans_test,
                                    sr_scale_factor=sr_scale_factor,
                                    bicubic=bicubic,
                                    repeats=repeats)
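For reference, a hedged usage sketch of this `Set5` wrapper (the keyword names mirror the signature above; the exact evaluation setup is an assumption):

from torch.utils.data import DataLoader

# Evaluate on 128x128 center crops with 4x bicubic low-resolution conditioning inputs.
set5 = Set5(resize_hw=128, crop="center", sr_scale_factor=4, bicubic=True)
test_loader = DataLoader(set5.test, batch_size=1, shuffle=False)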
Example #6
    def __init__(self,
                 root=DATA_PATH,
                 download=True,
                 num_bits=8,
                 pil_transforms=[],
                 conditional=False,
                 super_resolution=False,
                 sr_scale_factor=4,
                 resize_hw=None):

        self.root = root

        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]
        if resize_hw is not None:
            trans_train.insert(0, Resize((resize_hw, resize_hw)))
            trans_test.insert(0, Resize((resize_hw, resize_hw)))

        # Load data
        if super_resolution:
            self.train = SuperResolutionImageNet32Dataset(
                root,
                train=True,
                transform=Compose(trans_train),
                download=download,
                sr_scale_factor=sr_scale_factor)
            self.test = SuperResolutionImageNet32Dataset(
                root,
                train=False,
                transform=Compose(trans_test),
                sr_scale_factor=sr_scale_factor)
        else:
            self.train = UnsupervisedImageNet32Dataset(
                root,
                train=True,
                transform=Compose(trans_train),
                download=download)
            self.test = UnsupervisedImageNet32Dataset(
                root, train=False, transform=Compose(trans_test))
Example #7
    def __init__(self,
                 root=DATA_PATH,
                 download=True,
                 num_bits=8,
                 pil_transforms=[]):

        self.root = root

        # Define transformations
        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]

        # Load data
        sub_root = os.path.join(root, 'SVHN')
        self.train = UnsupervisedSVHN(sub_root,
                                      split='train',
                                      transform=Compose(trans_train),
                                      download=download)
        self.test = UnsupervisedSVHN(sub_root,
                                     split='test',
                                     transform=Compose(trans_test),
                                     download=download)
Example #8
    def __init__(self,
                 input_size,
                 root=DATA_PATH,
                 num_bits=8,
                 pil_transforms=[],
                 conditional=False,
                 super_resolution=False,
                 sr_scale_factor=4,
                 resize_hw=None,
                 bicubic=False):
        super(CelebA, self).__init__()

        assert len(input_size) == 3

        self.root = root
        self.input_size = input_size
        self.y_classes = 40

        trans_train = pil_transforms + [ToTensor(), Quantize(num_bits)]
        trans_test = [ToTensor(), Quantize(num_bits)]
        if resize_hw is not None:
            trans_train.insert(0, Resize((resize_hw, resize_hw)))
            trans_test.insert(0, Resize((resize_hw, resize_hw)))

        if conditional:
            raise ValueError(f"Conditional CelebA dataset not available yet.")

        # Load data
        if super_resolution:
            if input_size[-1] == 32:
                Dataset = SuperResolutionCelebA32Dataset
            elif input_size[-1] == 64:
                Dataset = SuperResolutionCelebA64Dataset
            elif input_size[-1] == 128:
                Dataset = SuperResolutionCelebA128Dataset
            else:
                raise ValueError(f"Invalid input size {input_size}")

            self.train = Dataset(root,
                                 split='train',
                                 transform=Compose(trans_train),
                                 sr_scale_factor=sr_scale_factor,
                                 bicubic=bicubic)
            self.valid = Dataset(root,
                                 split='valid',
                                 transform=Compose(trans_test),
                                 sr_scale_factor=sr_scale_factor,
                                 bicubic=bicubic)
            self.test = Dataset(root,
                                split='test',
                                transform=Compose(trans_test),
                                sr_scale_factor=sr_scale_factor,
                                bicubic=bicubic)

        else:
            if input_size[-1] == 32:
                Dataset = UnsupervisedCelebA32Dataset
            elif input_size[-1] == 64:
                Dataset = UnsupervisedCelebA64Dataset
            elif input_size[-1] == 128:
                Dataset = UnsupervisedCelebA128Dataset
            else:
                raise ValueError(f"Invalid input size {input_size}")

            self.train = Dataset(root,
                                 split='train',
                                 transform=Compose(trans_train))
            self.valid = Dataset(root,
                                 split='valid',
                                 transform=Compose(trans_test))
            self.test = Dataset(root,
                                split='test',
                                transform=Compose(trans_test))
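A hedged usage sketch of the `CelebA` wrapper: `input_size[-1]` selects the 32/64/128 resolution-specific dataset class, and `conditional=True` currently raises. The specific argument values below are illustrative assumptions:

# 64x64 CelebA in super-resolution mode with 4x downscaled conditioning inputs.
celeba = CelebA(input_size=(3, 64, 64),
                super_resolution=True,
                sr_scale_factor=4,
                bicubic=True)
train, valid, test = celeba.train, celeba.valid, celeba.test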