Example #1
from typing import Optional

import torch
from torch.optim import Optimizer

# `named_child_modules`, `_make_trainable` and `filter_params` are helpers
# defined elsewhere in the same source module.


def unfreeze_and_add_param_group(module: torch.nn.Module,
                                 optimizer: Optimizer,
                                 lr: Optional[float] = None,
                                 unfreeze_end: Optional[str] = None,
                                 unfreeze_start: Optional[str] = None,
                                 train_bn: bool = True):
    """Unfreezes a module and adds its parameters to an optimizer."""
    if (unfreeze_start is not None) or (unfreeze_end is not None):
        unfreeze_modules = []
        unfreeze_flag = unfreeze_start is None
        # named_modules() yields the full model itself as an unnamed first entry,
        # hence the iteration over named child modules only.
        # https://discuss.pytorch.org/t/module-children-vs-module-modules/4551/4
        for name, _module in named_child_modules(module):
            if unfreeze_flag:
                unfreeze_modules.append(_module)
            if unfreeze_start is not None and name == unfreeze_start:
                unfreeze_flag = True
            if unfreeze_end is not None and unfreeze_end == name:
                break
        module = torch.nn.Sequential(*unfreeze_modules)
    _make_trainable(module)
    params_lr = optimizer.param_groups[0]['lr'] if lr is None else float(lr)
    optimizer.add_param_group({
        'params': filter_params(module=module, train_bn=train_bn),
        'lr': params_lr,
    })
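A minimal usage sketch, assuming a torchvision `resnet18` backbone and that `named_child_modules` yields the model's direct named children (`conv1` ... `layer4`, `fc`); with the arguments below only `layer4` ends up in the new param group, since collection starts after `unfreeze_start` and stops at `unfreeze_end`:

import torch
from torchvision.models import resnet18

model = resnet18()
# Start with everything frozen except the classification head.
for name, param in model.named_parameters():
    param.requires_grad = name.startswith("fc")

optimizer = torch.optim.SGD(model.fc.parameters(), lr=1e-2)

# Unfreeze the children after "layer3" up to and including "layer4" and
# register them as a new param group at a smaller learning rate.
unfreeze_and_add_param_group(model, optimizer,
                             lr=1e-3,
                             unfreeze_start="layer3",
                             unfreeze_end="layer4",
                             train_bn=True)
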
Example #2
# Static method of the BaseFinetuning callback
# (pytorch_lightning.callbacks.finetuning); make_trainable, filter_params
# and filter_on_optimizer are sibling static methods on that class.
from typing import Iterable, Optional, Union

from torch.nn import Module
from torch.optim import Optimizer


def unfreeze_and_add_param_group(
    modules: Union[Module, Iterable[Union[Module, Iterable]]],
    optimizer: Optimizer,
    lr: Optional[float] = None,
    initial_denom_lr: float = 10.0,
    train_bn: bool = True,
) -> None:
    """Unfreezes a module and adds its parameters to an optimizer.

    Args:
        modules: A module or iterable of modules to unfreeze.
            Their parameters will be added to an optimizer as a new param group.
        optimizer: The optimizer to which the new param group is added via
            `add_param_group`.
        lr: Learning rate for the new param group.
        initial_denom_lr: If no lr is provided, the learning rate from the first
            param group will be used and divided by `initial_denom_lr`.
        train_bn: Whether to train the BatchNormalization layers.
    """
    BaseFinetuning.make_trainable(modules)
    params_lr = optimizer.param_groups[0]["lr"] if lr is None else float(lr)
    denom_lr = initial_denom_lr if lr is None else 1.0
    params = BaseFinetuning.filter_params(modules, train_bn=train_bn, requires_grad=True)
    params = BaseFinetuning.filter_on_optimizer(optimizer, params)
    if params:
        optimizer.add_param_group({"params": params, "lr": params_lr / denom_lr})
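The same functionality is available through the public API; a sketch assuming `BaseFinetuning` is exported from `pytorch_lightning.callbacks` and that its `freeze` and `unfreeze_and_add_param_group` static methods behave as shown above:

import torch
from torch import nn
from pytorch_lightning.callbacks import BaseFinetuning

model = nn.Sequential(nn.Linear(32, 16), nn.ReLU(), nn.Linear(16, 2))

# Freeze the first linear layer; train only the remaining layers at first.
BaseFinetuning.freeze(model[0])
optimizer = torch.optim.SGD(
    (p for p in model.parameters() if p.requires_grad), lr=0.1
)

# Later (e.g. at a chosen epoch): unfreeze the first layer and add its
# parameters as a new param group at lr / initial_denom_lr = 0.01.
BaseFinetuning.unfreeze_and_add_param_group(model[0], optimizer=optimizer)
print([g["lr"] for g in optimizer.param_groups])  # [0.1, 0.01]
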
Example #3
from typing import Optional

from torch.nn import Module
from torch.optim import Optimizer

# `_make_trainable` and `filter_params` are helpers defined in the same source module.


def _unfreeze_and_add_param_group(module: Module,
                                  optimizer: Optimizer,
                                  lr: Optional[float] = None,
                                  train_bn: bool = True):
    """Unfreezes a module and adds its parameters to an optimizer."""
    _make_trainable(module)
    params_lr = optimizer.param_groups[0]['lr'] if lr is None else float(lr)
    optimizer.add_param_group({
        'params': filter_params(module=module, train_bn=train_bn),
        'lr': params_lr / 10.,
    })
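For reference, roughly what this helper expands to in plain PyTorch, ignoring the BatchNorm filtering done by `filter_params` (a sketch with a toy two-layer model):

import torch
from torch import nn

model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 2))
for p in model[0].parameters():          # the initially frozen part
    p.requires_grad = False
optimizer = torch.optim.SGD(model[1].parameters(), lr=0.1)

# Manual equivalent: make the frozen part trainable again and register it
# as a new param group at one tenth of the base learning rate.
for p in model[0].parameters():
    p.requires_grad = True
optimizer.add_param_group({
    "params": list(model[0].parameters()),
    "lr": optimizer.param_groups[0]["lr"] / 10.0,
})
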
Example #4
def add_model_to_optimizer(optimizer: Optimizer, model: Module):
    # add_param_group expects a dict with a "params" key, not a bare parameter iterator.
    optimizer.add_param_group({'params': model.parameters()})
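A quick usage sketch of the corrected helper (the dict form is what `torch.optim.Optimizer.add_param_group` requires; `backbone` and `head` below are made-up modules for illustration):

import torch
from torch import nn

backbone = nn.Linear(16, 8)
head = nn.Linear(8, 2)  # a new module whose parameters should also be optimized
optimizer = torch.optim.SGD(backbone.parameters(), lr=0.1)

add_model_to_optimizer(optimizer, head)
print(len(optimizer.param_groups))  # 2: the original group plus the new one

Because no lr is given for the new group, it inherits the optimizer's defaults (lr=0.1 here).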