Example #1

def _before_task(self, train_loader, val_loader):
    # On incremental tasks, restart from a copy of the first-task model and
    # grow the classifier by all classes accumulated since then.
    if self._task != 0:
        self._network = self._first_model.copy()
        self._nb_inc_classes += self._task_size
        utils.add_new_weights(self._network, "basic", self._n_classes,
                              self._nb_inc_classes, self.inc_dataset)
    else:
        utils.add_new_weights(self._network, "basic", self._n_classes,
                              self._task_size, self.inc_dataset)
    self._network.classifier.reset_weights()

    self._n_classes += self._task_size
    logger.info("Now {} exemplars per class.".format(self._memory_per_class))
    logger.info(
        f"Nb classes in classifier {len(self._network.classifier.weights)}")

    # Optionally scale the learning rate per parameter group; a factor of 0
    # freezes that group entirely.
    if self._groupwise_factors and isinstance(self._groupwise_factors, dict):
        if self._groupwise_factors_bis and self._task > 0:
            logger.info("Using second set of groupwise lr.")
            groupwise_factor = self._groupwise_factors_bis
        else:
            groupwise_factor = self._groupwise_factors

        params = []
        groups = self._network.get_group_parameters()
        for group_name, group_params in groups.items():
            if group_params is None or group_name == "last_block":
                continue
            factor = groupwise_factor.get(group_name, 1.0)
            if factor == 0.:
                continue
            params.append({"params": group_params, "lr": self._lr * factor})
            logger.info(f"Group: {group_name}, lr: {self._lr * factor}.")
    else:
        params = self._network.parameters()

    self._optimizer = factory.get_optimizer(params, self._opt_name, self._lr,
                                            self.weight_decay)
    self._scheduler = factory.get_lr_scheduler(self._scheduling,
                                               self._optimizer,
                                               nb_epochs=self._n_epochs,
                                               lr_decay=self._lr_decay,
                                               task=self._task)
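The groupwise-lr branch above builds the standard PyTorch per-parameter-group list, where each dict can override the default learning rate. A minimal standalone sketch of the same idea; the group names `backbone`/`classifier` and the factor values are illustrative, not taken from the example:

import torch
from torch import nn

model = nn.Sequential()
model.add_module("backbone", nn.Linear(32, 16))
model.add_module("classifier", nn.Linear(16, 10))

base_lr = 0.1
factors = {"backbone": 0.1, "classifier": 1.0}  # hypothetical per-group factors

params = []
for name, module in model.named_children():
    factor = factors.get(name, 1.0)
    if factor == 0.:  # a zero factor freezes the group, as in the example
        continue
    params.append({"params": module.parameters(), "lr": base_lr * factor})

# Groups without an explicit "lr" fall back to the optimizer-level default.
optimizer = torch.optim.SGD(params, lr=base_lr, momentum=0.9)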
Example #2

def _before_task(self, train_loader, val_loader):
    # Grow the classifier with weights for the new task's classes.
    utils.add_new_weights(self._network, {"type": "basic"},
                          self._n_classes, self._task_size,
                          self.inc_dataset)
    self._n_classes += self._task_size

    # Only the convnet backbone is optimized during the unsupervised phase;
    # its hyper-parameters come from the `unsupervised_training` config dict.
    self._optimizer = factory.get_optimizer(
        [{"params": self._network.convnet.parameters()}],
        self.unsupervised_training["optimizer"],
        self.unsupervised_training["lr"], self._weight_decay)

    self._scheduler = factory.get_lr_scheduler(
        self.unsupervised_training["scheduling"],
        self._optimizer,
        nb_epochs=self.unsupervised_training["epochs"],
        lr_decay=self.unsupervised_training.get("lr_decay", 0.1),
        task=self._task)
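Both examples delegate optimizer construction to `factory.get_optimizer`, dispatching on a name string from the config. A minimal sketch of what such a factory and the `unsupervised_training` dict could look like; the real `factory` module may differ, and the config values shown are purely illustrative:

import torch

def get_optimizer(params, opt_name, lr, weight_decay):
    # Dispatch on the optimizer name given in the config.
    if opt_name == "sgd":
        return torch.optim.SGD(params, lr=lr, weight_decay=weight_decay,
                               momentum=0.9)
    if opt_name == "adam":
        return torch.optim.Adam(params, lr=lr, weight_decay=weight_decay)
    raise NotImplementedError(f"Unknown optimizer {opt_name}.")

unsupervised_training = {  # hypothetical config for the example above
    "optimizer": "sgd",
    "lr": 0.05,
    "epochs": 30,
    "scheduling": [20, 25],
    "lr_decay": 0.1,
}

params = [torch.nn.Linear(8, 2).weight]  # toy parameter list
optimizer = get_optimizer(params, unsupervised_training["optimizer"],
                          unsupervised_training["lr"], 5e-4)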
Example #3
def _gen_weights(self):
    # Initialize the new task's classifier weights with the configured
    # generation strategy on incremental tasks, or plain "basic" random
    # initialization on the very first task.
    if self._weight_generation:
        utils.add_new_weights(
            self._network,
            self._weight_generation if self._task != 0 else "basic",
            self._n_classes, self._task_size, self.inc_dataset)
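When `self._weight_generation` is set to a data-driven strategy such as "imprinted", the new classes' weight vectors are typically initialized from features rather than at random. A minimal sketch of weight imprinting under that assumption; `imprint_weights` is a hypothetical helper, not part of the repository's API:

import torch
import torch.nn.functional as F

def imprint_weights(features, labels, new_classes):
    # One weight vector per new class: the L2-normalized mean of the
    # L2-normalized features of that class's training samples.
    weights = []
    for class_id in new_classes:
        class_features = F.normalize(features[labels == class_id], dim=1)
        weights.append(F.normalize(class_features.mean(dim=0), dim=0))
    return torch.stack(weights)

features = torch.randn(100, 64)            # toy extracted features
labels = torch.randint(10, 12, (100,))     # toy labels for two new classes
new_weights = imprint_weights(features, labels, new_classes=[10, 11])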