Example No. 1
    def prune_updates(self, previous_weights):
        """ Prune aggregated updates. """

        updates = self.compute_weight_updates(previous_weights)
        updates_model = models_registry.get()
        updates_model.load_state_dict(updates, strict=True)

        parameters_to_prune = []
        for _, module in updates_model.named_modules():
            if isinstance(module, torch.nn.Conv2d) or isinstance(
                    module, torch.nn.Linear):
                parameters_to_prune.append((module, 'weight'))

        if (hasattr(Config().clients, 'pruning_method')
                and Config().clients.pruning_method == 'random'):
            pruning_method = prune.RandomUnstructured
        else:
            pruning_method = prune.L1Unstructured

        prune.global_unstructured(
            parameters_to_prune,
            pruning_method=pruning_method,
            amount=Config().clients.pruning_amount,
        )

        for module, name in parameters_to_prune:
            prune.remove(module, name)

        return updates_model.cpu().state_dict()
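
For context, a minimal self-contained sketch of the same global unstructured pruning pattern follows; the toy network and the 0.2 pruning amount are illustrative assumptions, not values taken from Plato.

from torch import nn
from torch.nn.utils import prune

# Toy model standing in for the one returned by models_registry.get()
toy = nn.Sequential(nn.Conv2d(1, 4, kernel_size=3), nn.Flatten(),
                    nn.Linear(4 * 26 * 26, 10))

parameters_to_prune = [(m, 'weight') for m in toy.modules()
                       if isinstance(m, (nn.Conv2d, nn.Linear))]

# Zero out the 20% smallest-magnitude weights across all listed tensors at once
prune.global_unstructured(parameters_to_prune,
                          pruning_method=prune.L1Unstructured,
                          amount=0.2)

# Make the pruning permanent by folding each mask into its weight tensor
for module, name in parameters_to_prune:
    prune.remove(module, name)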
Example No. 2
    def __init__(self, model=None):
        """Initializing the trainer with the provided model.

        Arguments:
        model: The model to train.
        """
        super().__init__()

        if hasattr(Config().trainer, 'cpuonly') and Config().trainer.cpuonly:
            mindspore.context.set_context(mode=mindspore.context.PYNATIVE_MODE,
                                          device_target='CPU')
        else:
            mindspore.context.set_context(mode=mindspore.context.PYNATIVE_MODE,
                                          device_target='GPU')

        if model is None:
            self.model = models_registry.get()
        else:
            self.model = model

        # Initializing the loss criterion
        loss_criterion = SoftmaxCrossEntropyWithLogits(sparse=True,
                                                       reduction='mean')

        # Initializing the optimizer
        optimizer = nn.Momentum(self.model.trainable_params(),
                                Config().trainer.learning_rate,
                                Config().trainer.momentum)

        self.mindspore_model = mindspore.Model(
            self.model,
            loss_criterion,
            optimizer,
            metrics={"Accuracy": Accuracy()})
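
Note that mindspore.Model bundles the network, loss function, optimizer, and metrics into a single wrapper, so training and evaluation are then typically driven through this wrapper (for example via its train() and eval() methods) rather than through the raw network.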
Example No. 3
    def __init__(self, model=None):
        """Initializing the trainer with the provided model.

        Arguments:
        model: The model to train.
        """
        super().__init__()

        if model is None:
            model = models_registry.get()

        # Use data parallelism if multiple GPUs are available and the configuration specifies it
        if Config().is_parallel():
            logging.info("Using Data Parallelism.")
            # DataParallel will divide and allocate batch_size to all available GPUs
            self.model = nn.DataParallel(model)
        else:
            self.model = model

        if (hasattr(Config().trainer, 'differential_privacy')
                and Config().trainer.differential_privacy):
            logging.info("Using differential privacy during training.")

            errors = ModuleValidator.validate(self.model, strict=False)
            if len(errors) > 0:
                self.model = ModuleValidator.fix(self.model)
                errors = ModuleValidator.validate(self.model, strict=False)
                assert len(errors) == 0

            self.model = GradSampleModule(self.model)
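
A rough, self-contained sketch of the same Opacus pattern is shown below; the toy network is an assumption, and BatchNorm is included deliberately because it is one of the layers Opacus cannot compute per-sample gradients for.

from torch import nn
from opacus.grad_sample import GradSampleModule
from opacus.validators import ModuleValidator

# Toy network with a BatchNorm layer, which Opacus rejects
net = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3), nn.BatchNorm2d(8),
                    nn.Flatten(), nn.Linear(8 * 30 * 30, 10))

if ModuleValidator.validate(net, strict=False):
    net = ModuleValidator.fix(net)  # e.g. swaps BatchNorm for GroupNorm

# Wrap the model so per-sample gradients are collected during backward()
net = GradSampleModule(net)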
Example No. 4
    def load_trainer(self):
        """ Setting up the global model to be trained via federated learning. """
        if self.trainer is None:
            self.trainer = trainers_registry.get(model=self.model)

        self.trainer.set_client_id(0)

        # Reset model for new episode
        self.trainer.model = models_registry.get()

        self.algorithm = algorithms_registry.get(self.trainer)
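
Reinstantiating the model from models_registry here gives each new episode a freshly initialized global model while the trainer object itself is reused; the algorithm is then rebuilt so it operates on the reset trainer.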
Example No. 5
    def __init__(self, model=None):
        """Initializing the trainer with the provided model.

        Arguments:
        model: The model to train.
        """
        super().__init__()

        if model is None:
            self.model = models_registry.get()
        else:
            self.model = model
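
This is the minimal form of the pattern the other trainers extend: fall back to models_registry.get() when no model is supplied, and otherwise keep the caller's model.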
Example No. 6
    def setUp(self):
        super().setUp()
        __ = Config()  # instantiate the Config singleton before overriding its fields

        fields = [
            'optimizer', 'lr_schedule', 'learning_rate', 'momentum',
            'weight_decay', 'lr_gamma', 'lr_milestone_steps',
            'lr_warmup_steps', 'model_name'
        ]
        params = ['SGD', 'LambdaLR', 0.1, 0.5, 0.0, 0.0, '', '', 'resnet_18']
        Config().trainer = namedtuple('trainer', fields)(*params)

        self.model = models_registry.get()
        self.optimizer = optimizers.get_optimizer(self.model)
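
Given these settings, get_optimizer presumably builds plain SGD; a hedged raw-PyTorch equivalent (illustrative only, not Plato's actual implementation) would be:

import torch

# 'model' is the resnet_18 instance obtained from models_registry.get() above;
# the hyperparameters mirror the fields set in setUp()
optimizer = torch.optim.SGD(model.parameters(),
                            lr=0.1,
                            momentum=0.5,
                            weight_decay=0.0)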