Example No. 1
    def __init__(self, args):
        super().__init__()

        self._device = args["device"]
        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]

        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        self._k = args["memory_size"]
        self._n_classes = 0
        self._epochs = args["epochs"]

        self._network = network.BasicNet(args["convnet"],
                                         use_bias=True,
                                         use_multi_fc=False,
                                         device=self._device)

        self._examplars = {}
        self._old_model = []

        self._task_idxes = []
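For reference, a minimal sketch of the args dictionary this constructor reads; every value below is only an illustrative assumption, and the convnet name must match one that network.BasicNet actually knows.

    # Placeholder configuration covering the keys accessed above.
    args = {
        "device": "cuda:0",      # torch device (assumed format)
        "optimizer": "sgd",
        "lr": 0.1,
        "weight_decay": 5e-4,
        "epochs": 70,
        "scheduling": [49, 63],  # milestones later passed to MultiStepLR
        "lr_decay": 0.2,         # scheduler gamma
        "memory_size": 2000,     # exemplar budget (self._k)
        "convnet": "resnet18",   # assumed to be a valid BasicNet convnet name
    }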
Example No. 2
    def __init__(self, args):
        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._lr_decay = args["lr_decay"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]
        self._scheduling = args["scheduling"]

        self._distillation_config = args["distillation_config"]
        self._attention_config = args.get("attention_config", {})

        logger.info("Initializing LwM")

        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs=args.get("classifier_config", {
                "type": "fc",
                "use_bias": True
            }),
            device=self._device,
            gradcam_hook=True)

        self._n_classes = 0
        self._old_model = None
Example No. 3
    def __init__(self, args):
        super().__init__()

        self._device = args["device"]
        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]

        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        self._memory_size = args["memory_size"]
        self._n_classes = 0

        self._network = network.BasicNet(args["convnet"],
                                         device=self._device,
                                         use_bias=True)

        self._examplars = {}
        self._means = None

        self._old_model = None

        self._clf_loss = F.binary_cross_entropy_with_logits
        self._distil_loss = F.binary_cross_entropy_with_logits

        self._herding_matrix = []
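Since both _clf_loss and _distil_loss are binary_cross_entropy_with_logits, the training step presumably builds iCaRL-style targets. A generic sketch of that construction, not necessarily the exact loss used by this repository:

    import torch
    import torch.nn.functional as F

    def icarl_style_loss(logits, old_logits, targets, n_old_classes):
        # One-hot targets for all classes; for the old classes, replace the
        # labels with the sigmoid of the previous model's logits (distillation).
        onehot = F.one_hot(targets, num_classes=logits.shape[1]).float()
        if old_logits is not None and n_old_classes > 0:
            onehot[:, :n_old_classes] = torch.sigmoid(old_logits[:, :n_old_classes])
        return F.binary_cross_entropy_with_logits(logits, onehot)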
Example No. 4
    def __init__(self, args):
        self._disable_progressbar = args.get("no_progressbar", False)

        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]

        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]
        self._herding_selection = args.get("herding_selection", {"type": "icarl"})

        self._memory_size = args["memory_size"]
        self._fixed_memory = args["fixed_memory"]
        self._n_classes = 0

        self._eval_every_x_epochs = args.get("eval_every_x_epochs")

        self._use_mimic_score = args.get("mimic_score")
        self._use_less_forget = args.get("less_forget")
        self._lambda_schedule = args.get("lambda_schedule", True)
        self._use_ranking = args.get("ranking_loss")

        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs=args.get("classifier_config", {}),
            postprocessor_kwargs=args.get("postprocessor_config", {}),
            device=self._device,
            return_features=True,
            extract_no_act=True,
            classifier_no_act=True,
        )

        self._examplars = {}
        self._means = None

        self._old_model = None

        self._finetuning_config = args.get("finetuning_config")

        self._lambda = args.get("base_lambda", 5)
        self._nb_negatives = args.get("nb_negatives", 2)
        self._margin = args.get("ranking_margin", 0.2)

        self._weight_generation = args.get("weight_generation")

        self._herding_indexes = []

        self._eval_type = args.get("eval_type", "nme")

        self._meta_transfer = args.get("meta_transfer", False)
        if self._meta_transfer:
            assert args["convnet"] == "rebuffi_mtl"

        self._args = args
        self._args["_logs"] = {}
Example No. 5
    def __init__(self, args):
        super().__init__()

        self._device = args["device"]
        self._opt_name = args["optimizer"]
        #self._lr = args["lr"]
        #self._weight_decay = args["weight_decay"]
        #self._n_epochs = args["epochs"]

        #self._scheduling = args["scheduling"]
        #self._lr_decay = args["lr_decay"]

        self._k = args["memory_size"]
        self._n_classes = 0

        self._temperature = args["temperature"]

        self._network = network.BasicNet(args["convnet"],
                                         use_bias=True,
                                         use_multi_fc=True,
                                         device=self._device,
                                         extract_no_act=True,
                                         classifier_no_act=False)

        self._data_memory, self._targets_memory = {}, {}
        self._old_model = []

        self._task_idxes = []
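This constructor keeps a temperature and the previous model, which suggests temperature-scaled knowledge distillation. A generic sketch of such a term (not necessarily this repository's exact loss):

    import torch.nn.functional as F

    def temperature_distillation(new_logits, old_logits, temperature):
        # Soften both distributions with the temperature and compare them;
        # the T**2 factor keeps gradient magnitudes comparable across T.
        log_p = F.log_softmax(new_logits / temperature, dim=1)
        q = F.softmax(old_logits / temperature, dim=1)
        return F.kl_div(log_p, q, reduction="batchmean") * (temperature ** 2)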
Example No. 6
    def __init__(self, args):
        super().__init__()

        self._disable_progressbar = args.get("no_progressbar", False)

        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]

        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        self._warmup_config = args.get("warmup", {})
        if self._warmup_config and self._warmup_config["total_epoch"] > 0:
            self._lr /= self._warmup_config["multiplier"]

        self._eval_every_x_epochs = args.get("eval_every_x_epochs")
        self._early_stopping = args.get("early_stopping", {})

        self._memory_size = args["memory_size"]
        self._fixed_memory = args["fixed_memory"]
        self._herding_selection = args.get("herding_selection",
                                           {"type": "icarl"})
        self._n_classes = 0
        self._last_results = None
        self._validation_percent = args["validation"]

        self._rotations_config = args.get("rotations_config", {})
        self._random_noise_config = args.get("random_noise_config", {})

        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs=args.get("classifier_config", {
                "type": "fc",
                "use_bias": True
            }),
            device=self._device,
            extract_no_act=True,
            classifier_no_act=False,
            rotations_predictor=bool(self._rotations_config))

        self._examplars = {}
        self._means = None
        self._herding_indexes = []
        self._data_memory, self._targets_memory = None, None

        self._old_model = None

        self._clf_loss = F.binary_cross_entropy_with_logits
        self._distil_loss = F.binary_cross_entropy_with_logits

        self._epoch_metrics = collections.defaultdict(list)

        self._meta_transfer = args.get("meta_transfer", {})
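The constructor divides the base learning rate by the warmup multiplier, so a warmup schedule presumably scales it back up over the first total_epoch epochs. A linear sketch of that intent (the repository may instead rely on a dedicated warmup scheduler):

    def warmup_factor(epoch, total_epoch, multiplier):
        # Ramp the effective learning rate from lr / multiplier back up to lr
        # over the first total_epoch epochs, then keep it at lr.
        if epoch >= total_epoch:
            return multiplier
        return 1.0 + (multiplier - 1.0) * epoch / total_epoch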
Example No. 7
    def __init__(self, args):
        super().__init__()

        self._device = args["device"]
        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]

        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        self._memory_size = args["memory_size"]
        self._n_classes = 0

        self._network = network.BasicNet(args["convnet"],
                                         device=self._device,
                                         use_bias=True)

        self._examplars = {}
        self._means = None

        self._old_model = None

        self._clf_loss = F.binary_cross_entropy_with_logits
        self._distil_loss = F.binary_cross_entropy_with_logits

        self._herding_matrix = []

        # store the original data
        self._data_memory = None
        self._targets_memory = None

        # store the transformed data
        self._data_transform_memory = None
        self._targets_transform_memory = None

        # exemplar metrics used to store distances
        self.metric = None
        self.metric_2 = None
        self.metric_3 = None
        self.margin = 10.0
        self.idx = 0
Example No. 8
    def __init__(self, args):
        self._disable_progressbar = args.get("no_progressbar", False)

        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        self.unsupervised_training = args["unsupervised_training"]
        self.new_supervised_training = args["new_supervised_training"]
        self.all_supervised_training = args["all_supervised_training"]
        self._weight_decay = args["weight_decay"]

        self._finetuning_config = args.get("finetuning", {})

        # Losses definition
        self.memory_bank = args["memory_bank"]
        self.nce_loss = args["nce_loss"]

        self.rotations_prediction = args.get("rotation_prediction")

        logger.info("Initializing ULL")

        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs=args.get("classifier_config", {"type": "fc"}),
            device=self._device,
            extract_no_act=True,
            classifier_no_act=True,
            return_features=True,
            rotations_predictor=bool(self.rotations_prediction))

        self._n_classes = 0
        self._old_model = None

        self._data_memory, self._targets_memory = None, None
        self._examplars = {}
        self._means = None
        self._herding_indexes = []
        self._fixed_memory = args.get("fixed_memory", True)
        self._memory_size = args["memory_size"]
        self._herding_selection = {"type": "icarl"}
Example No. 9
    def _before_task(self, data_loader, val_loader):
        self._n_classes += self._task_size

        self._network = network.BasicNet(
            self._args["convnet"],
            convnet_kwargs=self._args.get("convnet_config", {}),
            classifier_kwargs=self._args.get("classifier_config", {
                "type": "fc",
                "use_bias": True
            }),
            device=self._device)
        self._network.add_classes(self._n_classes)

        self._optimizer = factory.get_optimizer(self._network.parameters(),
                                                self._opt_name, self._lr,
                                                self._weight_decay)
        if self._scheduling is None:
            self._scheduler = None
        else:
            self._scheduler = torch.optim.lr_scheduler.MultiStepLR(
                self._optimizer, self._scheduling, gamma=self._lr_decay)
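A schematic training loop that would consume the optimizer and MultiStepLR scheduler created in _before_task; _train_task and _compute_loss are hypothetical names used only for illustration.

    def _train_task(self, train_loader, val_loader):
        for epoch in range(self._n_epochs):
            for inputs, targets in train_loader:
                inputs = inputs.to(self._device)
                targets = targets.to(self._device)

                self._optimizer.zero_grad()
                loss = self._compute_loss(inputs, targets)  # hypothetical helper
                loss.backward()
                self._optimizer.step()

            if self._scheduler is not None:
                self._scheduler.step()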
Example No. 10
    def __init__(self, args):
        self._disable_progressbar = args.get("no_progressbar", False)

        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        # Optimization:
        self._batch_size = args["batch_size"]
        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]
        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        # Rehearsal Learning:
        self._memory_size = args["memory_size"]
        self._fixed_memory = args["fixed_memory"]
        self._herding_selection = args.get("herding_selection",
                                           {"type": "icarl"})
        self._n_classes = 0
        self._last_results = None
        self._validation_percent = args.get("validation")

        self._pod_flat_config = args.get("pod_flat", {})
        self._pod_spatial_config = args.get("pod_spatial", {})

        self._nca_config = args.get("nca", {})
        self._softmax_ce = args.get("softmax_ce", False)

        self._perceptual_features = args.get("perceptual_features")
        self._perceptual_style = args.get("perceptual_style")

        self._groupwise_factors = args.get("groupwise_factors", {})
        self._groupwise_factors_bis = args.get("groupwise_factors_bis", {})

        self._class_weights_config = args.get("class_weights_config", {})

        self._evaluation_type = args.get("eval_type", "icarl")
        self._evaluation_config = args.get("evaluation_config", {})

        self._eval_every_x_epochs = args.get("eval_every_x_epochs")
        self._early_stopping = args.get("early_stopping", {})

        self._gradcam_distil = args.get("gradcam_distil", {})

        classifier_kwargs = args.get("classifier_config", {})
        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs=classifier_kwargs,
            postprocessor_kwargs=args.get("postprocessor_config", {}),
            device=self._device,
            return_features=True,
            extract_no_act=True,
            classifier_no_act=args.get("classifier_no_act", True),
            attention_hook=True,
            gradcam_hook=bool(self._gradcam_distil),
            args=args)

        self._examplars = {}
        self._means = None

        self._old_model = None

        self._finetuning_config = args.get("finetuning_config")

        self._herding_indexes = []

        self._weight_generation = args.get("weight_generation")

        self._meta_transfer = args.get("meta_transfer", {})
        if self._meta_transfer:
            assert "mtl" in args["convnet"]

        self._post_processing_type = None
        self._data_memory, self._targets_memory = None, None

        self._args = args
        self._args["_logs"] = {}

        # sim clr
        self.nt_xent_loss = NT_Xent(batch_size=args['batch_size'],
                                    temperature=args['nt_xent_temperature'],
                                    device=self._device)

        # sv regularization
        self.sv_loss = SV_regularization_loss(args)
Example No. 11
    def __init__(self, args):
        self._disable_progressbar = args.get("no_progressbar", False)

        self._device = args["device"][0]
        self._multiple_devices = args["device"]

        # Optimization:
        self._batch_size = args["batch_size"]
        self._opt_name = args["optimizer"]
        self._lr = args["lr"]
        self._weight_decay = args["weight_decay"]
        self._n_epochs = args["epochs"]
        self._first_task_n_epochs = args["first_task_epochs"]
        self._scheduling = args["scheduling"]
        self._lr_decay = args["lr_decay"]

        # Rehearsal Learning:
        self._memory_size = args["memory_size"]
        self._fixed_memory = args["fixed_memory"]
        self._herding_selection = {"type": "first"}
        self._n_classes = 0
        self._nb_inc_classes = 0
        self._last_results = None
        self._validation_percent = args.get("validation")

        self._groupwise_factors = args.get("groupwise_factors", {})
        self._groupwise_factors_bis = args.get("groupwise_factors_bis", {})

        self._evaluation_type = "cnn"

        self._eval_every_x_epochs = args.get("eval_every_x_epochs")
        self._early_stopping = args.get("early_stopping", {})

        self._network = network.BasicNet(
            args["convnet"],
            convnet_kwargs=args.get("convnet_config", {}),
            classifier_kwargs={
                "type": "fc",
                "use_bias": True
            },
            device=self._device,
            return_features=True,
            extract_no_act=True,
            classifier_no_act=False,
        )

        self._cutmix_alpha = args.get("cutmix_alpha", 1.0)
        self._cutmix_prob = args.get("cutmix_prob", 0.5)

        self._grad_clip = args.get("grad_clip", 10.0)

        self._examplars = {}
        self._means = None

        self._old_model = None
        self._first_model = None

        self._herding_indexes = []

        self._post_processing_type = None
        self._data_memory, self._targets_memory = None, None

        self._args = args
        self._args["_logs"] = {}