Example #1
0
    def get_data_loaders(self, nomask=False):
        """Build CIFAR-100 train/test loaders for this setting.

        When ``nomask`` is False (default), targets are converted to long
        tensors and the loaders come from ``store_masked_loaders``;
        otherwise plain ``DataLoader`` objects are returned.
        """
        train_transform = self.TRANSFORM
        eval_transform = transforms.Compose(
            [transforms.ToTensor(), self.get_normalization_transform()])

        train_dataset = MyCIFAR100(base_path() + 'CIFAR100', train=True,
                                   download=True, transform=train_transform)
        if not self.args.validation:
            test_dataset = CIFAR100(base_path() + 'CIFAR100', train=False,
                                    download=True, transform=eval_transform)
        else:
            train_dataset, test_dataset = get_train_val(
                train_dataset, eval_transform, self.NAME)

        if nomask:
            # Plain loaders, no per-task masking.
            train_loader = DataLoader(train_dataset, batch_size=32,
                                      shuffle=True, num_workers=2)
            test_loader = DataLoader(test_dataset, batch_size=32,
                                     shuffle=False, num_workers=2)
            return train_loader, test_loader

        # store_masked_loaders indexes targets, so ensure tensor form.
        for dataset in (train_dataset, test_dataset):
            if isinstance(dataset.targets, list):
                dataset.targets = torch.tensor(dataset.targets,
                                               dtype=torch.long)
        return store_masked_loaders(train_dataset, test_dataset, self)
Example #2
0
    def get_joint_loaders(self, nomask=False):
        """Return plain CORe50 train/test DataLoaders (joint, no masking)."""
        eval_transform = transforms.Compose(
            [transforms.ToTensor(),
             self.get_normalization_transform()])

        train_dataset = MyCore50(base_path() + 'CORE50', train=True,
                                 download=True, transform=self.TRANSFORM)
        if not self.args.validation:
            test_dataset = Core50(base_path() + 'CORE50', train=False,
                                  download=True, transform=eval_transform)
        else:
            train_dataset, test_dataset = get_train_val(
                train_dataset, eval_transform, self.NAME)

        # Train loader shuffles; test loader keeps the natural order.
        train_loader, test_loader = (
            DataLoader(dataset, batch_size=32, shuffle=shuffle, num_workers=2)
            for dataset, shuffle in ((train_dataset, True),
                                     (test_dataset, False)))
        return train_loader, test_loader
Example #3
0
    def init_train_loaders(self) -> None:
        """
        Initializes the per-class train loaders.

        For each class ``j``, the MNIST training samples of that class are
        split into ``self.num_rounds * 2`` consecutive chunks; each chunk
        becomes a batch-size-1 ``DataLoader`` appended to
        ``self.train_loaders[j]``, and its size is recorded in
        ``self.remaining_training_items[j]``.
        """
        train_dataset = MyMNIST(base_path() + 'MNIST',
                                train=True,
                                download=True)
        if self.args.validation:
            test_transform = transforms.ToTensor()
            train_dataset, self.val_dataset = get_train_val(
                train_dataset, test_transform, self.NAME)

        for j in range(self.N_CLASSES):
            self.train_loaders.append([])
            self.remaining_training_items.append([])
            train_mask = np.isin(np.array(train_dataset.targets), [j])
            # One rotation object is shared by every chunk of class j, so
            # the angle keeps advancing across chunks — presumably one full
            # 360° turn over all samples of the class (TODO confirm).
            train_rotation = IncrementalRotation(init_deg=(j - 1) * 60,
                                                 increase_per_iteration=360.0 /
                                                 train_mask.sum())
            # Chunk size does not depend on k; hoisted out of the inner loop.
            numbers_per_batch = train_mask.sum() // (self.num_rounds * 2) + 1
            for k in range(self.num_rounds * 2):
                tmp_train_dataset = deepcopy(train_dataset)
                # Keep only class-j samples, then take the k-th chunk.
                tmp_train_dataset.data = tmp_train_dataset.data[train_mask][
                    k * numbers_per_batch:(k + 1) * numbers_per_batch]
                tmp_train_dataset.targets = tmp_train_dataset.targets[
                    train_mask][k * numbers_per_batch:(k + 1) *
                                numbers_per_batch]
                tmp_train_dataset.transform = transforms.Compose(
                    [train_rotation, transforms.ToTensor()])
                self.train_loaders[-1].append(
                    DataLoader(tmp_train_dataset, batch_size=1, shuffle=True))
                self.remaining_training_items[-1].append(
                    tmp_train_dataset.data.shape[0])
Example #4
0
    def get_data_loaders(self):
        """Build masked MNIST train/test loaders for this setting."""
        to_tensor = transforms.ToTensor()
        train_dataset = MyMNIST(base_path() + 'MNIST', train=True,
                                download=True, transform=to_tensor)
        if not self.args.validation:
            test_dataset = MNIST(base_path() + 'MNIST', train=False,
                                 download=True, transform=to_tensor)
        else:
            train_dataset, test_dataset = get_train_val(
                train_dataset, to_tensor, self.NAME)

        return store_masked_loaders(train_dataset, test_dataset, self)
Example #5
0
    def get_data_loaders(self):
        """Build masked Tiny-ImageNet train/test loaders for this setting."""
        eval_transform = transforms.Compose(
            [transforms.ToTensor(), self.get_normalization_transform()])

        train_dataset = MyTinyImagenet(base_path() + 'TINYIMG', train=True,
                                       download=True,
                                       transform=self.TRANSFORM)
        if not self.args.validation:
            test_dataset = TinyImagenet(base_path() + 'TINYIMG', train=False,
                                        download=True,
                                        transform=eval_transform)
        else:
            train_dataset, test_dataset = get_train_val(
                train_dataset, eval_transform, self.NAME)

        return store_masked_loaders(train_dataset, test_dataset, self)
Example #6
0
def store_mnist_loaders(transform, setting):
    """Create MNIST train/test loaders and register them on *setting*.

    Appends the test loader to ``setting.test_loaders``, sets
    ``setting.train_loader``, and returns ``(train_loader, test_loader)``.
    """
    train_dataset = MyMNIST(base_path() + 'MNIST', train=True,
                            download=True, transform=transform)
    if not setting.args.validation:
        test_dataset = MNIST(base_path() + 'MNIST', train=False,
                             download=True, transform=transform)
    else:
        train_dataset, test_dataset = get_train_val(
            train_dataset, transform, setting.NAME)

    batch_size = setting.args.batch_size
    train_loader = DataLoader(train_dataset, batch_size=batch_size,
                              shuffle=True)
    test_loader = DataLoader(test_dataset, batch_size=batch_size,
                             shuffle=False)

    setting.test_loaders.append(test_loader)
    setting.train_loader = train_loader
    return train_loader, test_loader