Example #1
File: optimizers.py  Project: lyouqi/plato
# NOTE: imports reconstructed from usage; Config, Model, and the Fed* optimizer
# classes are defined elsewhere in the lyouqi/plato project.
from torch import optim


def get_optimizer(model: Model) -> optim.Optimizer:
    """Obtain the optimizer used for training the model."""
    # Read the trainer settings once instead of re-instantiating Config()
    # for every keyword argument.
    trainer = Config().trainer

    if trainer.optimizer == 'SGD':
        return optim.SGD(model.parameters(),
                         lr=trainer.learning_rate,
                         momentum=trainer.momentum,
                         weight_decay=trainer.weight_decay)
    elif trainer.optimizer == 'Adam':
        return optim.Adam(model.parameters(),
                          lr=trainer.learning_rate,
                          weight_decay=trainer.weight_decay)
    elif trainer.optimizer == 'FedProx':
        return FedProxOptimizer(model.parameters(),
                                lr=trainer.learning_rate,
                                momentum=trainer.momentum,
                                weight_decay=trainer.weight_decay)
    elif trainer.optimizer == 'Scaffold':
        return ScaffoldOptimizer(model.parameters(),
                                 lr=trainer.learning_rate,
                                 momentum=trainer.momentum,
                                 weight_decay=trainer.weight_decay)
    elif trainer.optimizer == 'FedSarah':
        return FedSarahOptimizer(model.parameters(),
                                 lr=trainer.learning_rate,
                                 momentum=trainer.momentum,
                                 weight_decay=trainer.weight_decay)

    raise ValueError(f'No such optimizer: {trainer.optimizer}')
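
Both snippets implement the same factory pattern: a single get_optimizer entry point dispatches on a configuration string, so training code never hard-codes an optimizer class. Below is a minimal usage sketch for Example #1, assuming the project's Config() has already been initialized so that Config().trainer names an optimizer; the model and all values here are illustrative, not from the source:

import torch
import torch.nn as nn

# Hypothetical model; get_optimizer only needs .parameters(), so any nn.Module works.
model = nn.Linear(10, 2)

# Assumes Config().trainer resolves to optimizer='SGD', learning_rate=0.01,
# momentum=0.9, weight_decay=5e-4 (illustrative values).
optimizer = get_optimizer(model)

# One standard training step with the returned optimizer.
inputs, targets = torch.randn(8, 10), torch.randint(0, 2, (8,))
loss = nn.functional.cross_entropy(model(inputs), targets)
optimizer.zero_grad()
loss.backward()
optimizer.step()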
Example #2
# NOTE: import reconstructed from usage; TrainingHparams and Model are
# defined elsewhere in the source project.
import torch


def get_optimizer(training_hparams: TrainingHparams, model: Model) -> torch.optim.Optimizer:
    """Build the optimizer named by the training hyperparameters."""
    if training_hparams.optimizer_name == 'sgd':
        return torch.optim.SGD(
            model.parameters(),
            lr=training_hparams.lr,
            # Use whichever momentum field is set, falling back to 0 if neither is.
            momentum=training_hparams.momentum or training_hparams.nesterov_momentum or 0,
            weight_decay=training_hparams.weight_decay or 0,
            # Enable Nesterov updates only when a positive nesterov_momentum is given.
            nesterov=training_hparams.nesterov_momentum is not None and training_hparams.nesterov_momentum > 0
        )
    elif training_hparams.optimizer_name == 'adam':
        return torch.optim.Adam(
            model.parameters(),
            lr=training_hparams.lr,
            weight_decay=training_hparams.weight_decay or 0
        )

    raise ValueError(f'No such optimizer: {training_hparams.optimizer_name}')
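
A similar sketch for Example #2. The dataclass below is a hypothetical stand-in for the project's TrainingHparams type, carrying just the fields get_optimizer reads; with nesterov_momentum left as None, the function falls back to plain momentum and nesterov=False:

import dataclasses

import torch.nn as nn

# Hypothetical stand-in for the project's TrainingHparams; field names
# match exactly what get_optimizer above reads.
@dataclasses.dataclass
class TrainingHparams:
    optimizer_name: str = 'sgd'
    lr: float = 0.1
    momentum: float = 0.9
    nesterov_momentum: float = None  # None -> plain (non-Nesterov) momentum
    weight_decay: float = 1e-4

model = nn.Linear(10, 2)  # hypothetical model
optimizer = get_optimizer(TrainingHparams(), model)
print(type(optimizer).__name__)  # -> SGD, with momentum=0.9 and nesterov=False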