Example #1
    def get_joint_loaders(self, nomask=False):
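        """
        Returns the train and test loaders for joint (non-incremental)
        training on CORe50.
        """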
        transform = self.TRANSFORM

        test_transform = transforms.Compose(
            [transforms.ToTensor(),
             self.get_normalization_transform()])

        train_dataset = MyCore50(base_path() + 'CORE50',
                                 train=True,
                                 download=True,
                                 transform=transform)
        if self.args.validation:
            train_dataset, test_dataset = get_train_val(
                train_dataset, test_transform, self.NAME)
        else:
            test_dataset = Core50(base_path() + 'CORE50',
                                  train=False,
                                  download=True,
                                  transform=test_transform)

        train_loader = DataLoader(train_dataset,
                                  batch_size=32,
                                  shuffle=True,
                                  num_workers=2)
        test_loader = DataLoader(test_dataset,
                                 batch_size=32,
                                 shuffle=False,
                                 num_workers=2)
        return train_loader, test_loader
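A minimal usage sketch for the loaders returned above. Here `dataset` and `model` are assumed stand-ins for an instance of the class defining get_joint_loaders and for any torch model; neither is part of the original snippet.

import torch

# Hypothetical caller: evaluate a model on the joint CORe50 test split.
train_loader, test_loader = dataset.get_joint_loaders()

model.eval()
correct, total = 0, 0
with torch.no_grad():
    for inputs, labels in test_loader:
        outputs = model(inputs)
        correct += (outputs.argmax(dim=1) == labels).sum().item()
        total += labels.size(0)
print('joint test accuracy: {:.4f}'.format(correct / total))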
Example #2
    def get_data_loaders(self):
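        """
        Returns the train and test loaders for the current task.
        """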
        transform = transforms.ToTensor()
        train_dataset = MyMNIST(base_path() + 'MNIST',
                                train=True, download=True, transform=transform)
        if self.args.validation:
            train_dataset, test_dataset = get_train_val(train_dataset,
                                                        transform, self.NAME)
        else:
            test_dataset = MNIST(base_path() + 'MNIST',
                                 train=False, download=True, transform=transform)

        train, test = store_masked_loaders(train_dataset, test_dataset, self)
        return train, test
Example #3
    def not_aug_dataloader(self, args, batch_size):
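        """
        Returns the dataloader of the current task,
        not applying data augmentation.
        """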

        if hasattr(args, 'iba') and args.iba:
            transform = transforms.Compose([transforms.ToTensor()])
        else:
            transform = transforms.Compose(
                [transforms.ToTensor(),
                 self.get_normalization_transform()])

        train_dataset = MyCIFAR10(base_path() + 'CIFAR10',
                                  train=True,
                                  download=True,
                                  transform=transform)

        if isinstance(train_dataset.targets, list):
            train_dataset.targets = torch.tensor(train_dataset.targets,
                                                 dtype=torch.long)

        train_mask = np.logical_and(
            np.array(train_dataset.targets) >=
            (self.i - 1) * self.N_CLASSES_PER_TASK,
            np.array(train_dataset.targets) < self.i * self.N_CLASSES_PER_TASK)

        train_dataset.data = train_dataset.data[train_mask]
        train_dataset.targets = np.array(train_dataset.targets)[train_mask]

        train_loader = get_previous_train_loader(train_dataset, batch_size,
                                                 self)

        return train_loader
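The mask built above keeps only the current task's samples, i.e. labels in the half-open range [(i - 1) * K, i * K) with K = N_CLASSES_PER_TASK. A self-contained sketch of the same arithmetic on toy labels (all values invented for illustration):

import numpy as np

targets = np.array([0, 1, 2, 3, 4, 5, 2, 3])  # toy labels, not real data
i, n_classes_per_task = 2, 2                  # second task, two classes each

# keep labels in [(i - 1) * K, i * K)  ->  classes {2, 3} here
mask = np.logical_and(targets >= (i - 1) * n_classes_per_task,
                      targets < i * n_classes_per_task)
print(targets[mask])  # -> [2 3 2 3]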
Example #4
    def init_train_loaders(self) -> None:
        """
        Initializes the test loader.
        """
        train_dataset = MyMNIST(base_path() + 'MNIST',
                                train=True,
                                download=True)
        if self.args.validation:
            test_transform = transforms.ToTensor()
            train_dataset, self.val_dataset = get_train_val(
                train_dataset, test_transform, self.NAME)

        for j in range(self.N_CLASSES):
            self.train_loaders.append([])
            self.remaining_training_items.append([])
            train_mask = np.isin(np.array(train_dataset.targets), [j])
            train_rotation = IncrementalRotation(init_deg=(j - 1) * 60,
                                                 increase_per_iteration=360.0 /
                                                 train_mask.sum())
            for k in range(self.num_rounds * 2):
                tmp_train_dataset = deepcopy(train_dataset)
                numbers_per_batch = train_mask.sum() // (self.num_rounds *
                                                         2) + 1
                tmp_train_dataset.data = tmp_train_dataset.data[train_mask][
                    k * numbers_per_batch:(k + 1) * numbers_per_batch]
                tmp_train_dataset.targets = tmp_train_dataset.targets[
                    train_mask][k * numbers_per_batch:(k + 1) *
                                numbers_per_batch]
                tmp_train_dataset.transform = transforms.Compose(
                    [train_rotation, transforms.ToTensor()])
                self.train_loaders[-1].append(
                    DataLoader(tmp_train_dataset, batch_size=1, shuffle=True))
                self.remaining_training_items[-1].append(
                    tmp_train_dataset.data.shape[0])
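The inner loop above slices one digit's samples into num_rounds * 2 contiguous chunks of numbers_per_batch items each; the "+ 1" rounding guarantees the chunks cover every sample, with the last chunk possibly shorter. A toy sketch of that slicing (all numbers invented):

import numpy as np

data = np.arange(10)                 # stand-in for one class's samples
chunks = 2 * 2                       # num_rounds * 2, with num_rounds = 2
per_chunk = len(data) // chunks + 1  # same "+ 1" rounding as above

for k in range(chunks):
    print(k, data[k * per_chunk:(k + 1) * per_chunk])
# -> 0 [0 1 2]   1 [3 4 5]   2 [6 7 8]   3 [9]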
Example #5
    def not_aug_dataloader(self, batch_size):
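        """
        Returns the dataloader of the current task,
        not applying data augmentation.
        """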
        transform = transforms.Compose(
            [transforms.ToTensor(),
             self.get_denormalization_transform()])

        train_dataset = MyTinyImagenet(base_path() + 'TINYIMG',
                                       train=True,
                                       download=True,
                                       transform=transform)
        train_loader = get_previous_train_loader(train_dataset, batch_size,
                                                 self)

        return train_loader
Example #6
    def get_data_loaders(self):
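        """
        Returns the train and test loaders for the current task.
        """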
        transform = self.TRANSFORM

        test_transform = transforms.Compose(
            [transforms.ToTensor(), self.get_normalization_transform()])

        train_dataset = MyTinyImagenet(base_path() + 'TINYIMG',
                                       train=True, download=True,
                                       transform=transform)
        if self.args.validation:
            train_dataset, test_dataset = get_train_val(
                train_dataset, test_transform, self.NAME)
        else:
            test_dataset = TinyImagenet(base_path() + 'TINYIMG',
                                        train=False, download=True,
                                        transform=test_transform)

        train, test = store_masked_loaders(train_dataset, test_dataset, self)
        return train, test
Example #7
    def get_data_loaders(self, nomask=False):
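        """
        Returns the train and test loaders for the current task,
        or plain loaders over the full dataset if nomask is True.
        """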
        transform = self.TRANSFORM

        test_transform = transforms.Compose(
            [transforms.ToTensor(),
             self.get_normalization_transform()])

        train_dataset = MyCIFAR10(base_path() + 'CIFAR10',
                                  train=True,
                                  download=True,
                                  transform=transform)
        if self.args.validation:
            train_dataset, test_dataset = get_train_val(
                train_dataset, test_transform, self.NAME)
        else:
            test_dataset = CIFAR10(base_path() + 'CIFAR10',
                                   train=False,
                                   download=True,
                                   transform=test_transform)

        if not nomask:
            if isinstance(train_dataset.targets, list):
                train_dataset.targets = torch.tensor(train_dataset.targets,
                                                     dtype=torch.long)
            if isinstance(test_dataset.targets, list):
                test_dataset.targets = torch.tensor(test_dataset.targets,
                                                    dtype=torch.long)
            train, test = store_masked_loaders(train_dataset, test_dataset,
                                               self)
            return train, test
        else:
            train_loader = DataLoader(train_dataset,
                                      batch_size=32,
                                      shuffle=True,
                                      num_workers=2)
            test_loader = DataLoader(test_dataset,
                                     batch_size=32,
                                     shuffle=False,
                                     num_workers=2)
            return train_loader, test_loader
Example #8
    def not_aug_dataloader(self, batch_size):
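        """
        Returns the dataloader of the current task,
        not applying data augmentation.
        """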
        transform = transforms.ToTensor()
        train_dataset = MyMNIST(base_path() + 'MNIST',
                                train=True, download=True, transform=transform)
        train_mask = np.logical_and(
            np.array(train_dataset.targets) >= self.i - self.N_CLASSES_PER_TASK,
            np.array(train_dataset.targets) < self.i)

        train_dataset.data = train_dataset.data[train_mask]
        train_dataset.targets = np.array(train_dataset.targets)[train_mask]

        train_loader = DataLoader(train_dataset,
                                  batch_size=batch_size, shuffle=True)
        return train_loader
Example #9
    def write(self, args: Dict[str, Any]) -> None:
        """
        writes out the logged value along with its arguments.
        :param args: the namespace of the current experiment
        """
        for cc in useless_args:
            if cc in args:
                del args[cc]

        columns = list(args.keys())

        new_cols = []
        for i, acc in enumerate(self.accs):
            args['task' + str(i + 1)] = acc
            new_cols.append('task' + str(i + 1))

        columns = new_cols + columns

        create_if_not_exists(base_path() + "results/" + self.setting)
        create_if_not_exists(base_path() + "results/" + self.setting + "/" +
                             self.dataset)
        create_if_not_exists(base_path() + "results/" + self.setting + "/" +
                             self.dataset + "/" + self.model)

        write_headers = False
        path = base_path() + "results/" + self.setting + "/" + self.dataset\
               + "/" + self.model + "/mean_accs.csv"
        if not os.path.exists(path):
            write_headers = True
        with open(path, 'a') as tmp:
            writer = csv.DictWriter(tmp, fieldnames=columns)
            if write_headers:
                writer.writeheader()
            writer.writerow(args)

        if self.setting == 'class-il':
            create_if_not_exists(base_path() + "results/task-il/" +
                                 self.dataset)
            create_if_not_exists(base_path() + "results/task-il/" +
                                 self.dataset + "/" + self.model)

            for i, acc in enumerate(self.accs_mask_classes):
                args['task' + str(i + 1)] = acc
            write_headers = False
            path = base_path() + "results/task-il" + "/" + self.dataset + "/"\
                   + self.model + "/mean_accs.csv"
            if not os.path.exists(path):
                write_headers = True
            with open(path, 'a') as tmp:
                writer = csv.DictWriter(tmp, fieldnames=columns)
                if write_headers:
                    writer.writeheader()
                writer.writerow(args)
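Both branches rely on the same append-mode CSV idiom: write the header row only when the file does not exist yet, then append one row per call. A condensed, self-contained sketch of that idiom (file name and row contents are invented):

import csv
import os

row = {'model': 'demo', 'task1': 0.91, 'task2': 0.87}  # invented values
path = '/tmp/mean_accs_demo.csv'                       # hypothetical path

write_headers = not os.path.exists(path)
with open(path, 'a') as f:
    writer = csv.DictWriter(f, fieldnames=list(row.keys()))
    if write_headers:
        writer.writeheader()
    writer.writerow(row)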
Example #10
def store_mnist_loaders(transform, setting):
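    """
    Creates the train and test loaders for MNIST and registers them
    on the given setting.
    :param transform: the transform to apply to the data
    :param setting: the continual-learning setting at hand
    """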
    train_dataset = MyMNIST(base_path() + 'MNIST',
                            train=True,
                            download=True,
                            transform=transform)
    if setting.args.validation:
        train_dataset, test_dataset = get_train_val(train_dataset, transform,
                                                    setting.NAME)
    else:
        test_dataset = MNIST(base_path() + 'MNIST',
                             train=False,
                             download=True,
                             transform=transform)

    train_loader = DataLoader(train_dataset,
                              batch_size=setting.args.batch_size,
                              shuffle=True)
    test_loader = DataLoader(test_dataset,
                             batch_size=setting.args.batch_size,
                             shuffle=False)
    setting.test_loaders.append(test_loader)
    setting.train_loader = train_loader

    return train_loader, test_loader
Example #11
    def not_aug_dataloader(self, args, batch_size):
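        """
        Returns the dataloader of the current task,
        not applying data augmentation.
        """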
        if hasattr(args, 'iba') and args.iba:
            transform = transforms.Compose([transforms.ToTensor()])
        else:
            transform = transforms.Compose(
                [transforms.ToTensor(),
                 self.get_normalization_transform()])

        train_dataset = MyCore50(base_path() + 'CORE50',
                                 train=True,
                                 download=True,
                                 transform=transform)

        train_loader = get_previous_train_loader(train_dataset, batch_size,
                                                 self)

        return train_loader
Example #12
    def not_aug_dataloader(self, args, batch_size):
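        """
        Returns the dataloader of the current task,
        not applying data augmentation.
        """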
        if hasattr(args, 'iba') and args.iba:
            raise ValueError('IBA is not compatible with F-MNIST')
        transform = transforms.ToTensor()
        train_dataset = MyFMNIST(base_path() + 'FMNIST',
                                 train=True,
                                 download=True,
                                 transform=transform)

        train_mask = np.logical_and(
            np.array(train_dataset.targets) >=
            (self.i - 1) * self.N_CLASSES_PER_TASK,
            np.array(train_dataset.targets) < self.i * self.N_CLASSES_PER_TASK)

        train_dataset.data = train_dataset.data[train_mask]
        train_dataset.targets = np.array(train_dataset.targets)[train_mask]

        train_loader = DataLoader(train_dataset,
                                  batch_size=batch_size,
                                  shuffle=True)
        return train_loader
Example #13
def create_stash(model: nn.Module, args: Namespace,
                 dataset: ContinualDataset) -> Dict[str, Any]:
    """
    Creates the dictionary where to save the model status.
    :param model: the model
    :param args: the current arguments
    :param dataset: the dataset at hand
    """
    now = datetime.now()
    model_stash = {'task_idx': 0, 'epoch_idx': 0, 'batch_idx': 0}
    name_parts = [args.dataset, model.NAME]
    if 'buffer_size' in vars(args).keys():
        name_parts.append('buf_' + str(args.buffer_size))
    name_parts.append(now.strftime("%Y%m%d_%H%M%S_%f"))
    model_stash['model_name'] = '/'.join(name_parts)
    model_stash['mean_accs'] = []
    model_stash['args'] = args
    model_stash['backup_folder'] = os.path.join(base_path(), 'backups',
                                                dataset.SETTING,
                                                model_stash['model_name'])
    return model_stash
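A hedged usage sketch for create_stash: `model`, `args` and `dataset` are assumed to come from the surrounding training script, and `dataset.N_TASKS` is an assumed attribute of the continual dataset.

# Hypothetical caller: build the stash once, then keep its counters
# in sync with the training loop so a crashed run can be resumed.
model_stash = create_stash(model, args, dataset)

for task_idx in range(dataset.N_TASKS):
    model_stash['task_idx'] = task_idx
    for epoch_idx in range(args.n_epochs):
        model_stash['epoch_idx'] = epoch_idx
        # ... train one epoch ...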
Example #14
    def __init__(self,
                 args: Namespace,
                 setting: str,
                 stash: Dict[str, Any] = None) -> None:
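        """
        Initializes a TensorBoard SummaryWriter for each applicable setting.
        :param args: the arguments of the current execution
        :param setting: the setting of the current experiment
        :param stash: the model stash, providing the run name and resume step
        """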
        from torch.utils.tensorboard import SummaryWriter

        self.settings = [setting]
        if setting == 'class-il':
            self.settings.append('task-il')
        self.loggers = {}
        self.name = stash['model_name']
        for a_setting in self.settings:
            self.loggers[a_setting] = SummaryWriter(
                os.path.join(base_path(), 'tensorboard_runs', a_setting,
                             self.name),
                purge_step=stash['task_idx'] * args.n_epochs +
                stash['epoch_idx'] + 1)
        config_text = ', '.join([
            "%s=%s" % (name, getattr(args, name)) for name in args.__dir__()
            if not name.startswith('_')
        ])
        for a_logger in self.loggers.values():
            a_logger.add_text('config', config_text)
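The purge_step argument above makes a resumed run overwrite any TensorBoard events that were logged at or after the step where the previous run stopped, avoiding duplicated curves. A minimal sketch of the same pattern in isolation (path and step values are invented):

from torch.utils.tensorboard import SummaryWriter

# Resume at global step 100: events previously logged at step >= 100
# are purged so the restarted run does not log them twice.
writer = SummaryWriter('/tmp/tb_demo', purge_step=100)
writer.add_scalar('loss', 0.5, global_step=100)
writer.close()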
Example #15
    def init_test_loaders(self) -> None:
        """
        Initializes the test loaders.
        """
        if self.args.validation:
            test_dataset = self.val_dataset
        else:
            test_dataset = MNIST(base_path() + 'MNIST',
                                 train=False,
                                 download=True)
        for j in range(self.N_CLASSES):
            tmp_test_dataset = deepcopy(test_dataset)
            test_mask = np.isin(np.array(tmp_test_dataset.targets), [j])
            tmp_test_dataset.data = tmp_test_dataset.data[test_mask]
            tmp_test_dataset.targets = tmp_test_dataset.targets[test_mask]
            test_rotation = IncrementalRotation(increase_per_iteration=360.0 /
                                                test_mask.sum())
            tmp_test_dataset.transform = transforms.Compose(
                [test_rotation, transforms.ToTensor()])
            self.test_loaders.append(
                DataLoader(tmp_test_dataset,
                           batch_size=self.args.batch_size,
                           shuffle=True))