from hydra.utils import instantiate
from omegaconf import DictConfig
from torch import nn
from torch.optim import Optimizer

def build_optimizer(conf: DictConfig, model: nn.Module) -> Optimizer:
    parameters = model.parameters()
    p = conf.params
    if 'weight_decay' in p and p.weight_decay > 0:
        # Each group from add_weight_decay carries its own weight_decay,
        # so zero the optimizer-level value to avoid applying it twice.
        parameters = add_weight_decay(model, p.weight_decay)
        p.weight_decay = 0.0
    return instantiate(conf, parameters)
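
# All of these snippets rely on an add_weight_decay helper that is not
# shown here. As context, this is a minimal sketch of what such a helper
# conventionally does (modeled on the timm-style split; the actual
# implementation in this codebase may differ): biases and 1-D tensors
# such as norm scales go in a no-decay group, everything else decays.
def add_weight_decay(model, weight_decay=1e-5, skip_list=()):
    decay, no_decay = [], []
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue  # frozen parameters are excluded entirely
        if param.ndim <= 1 or name.endswith(".bias") or name in skip_list:
            no_decay.append(param)
        else:
            decay.append(param)
    # Per-group weight_decay lets the optimizer-level default be zero.
    return [
        {"params": no_decay, "weight_decay": 0.0},
        {"params": decay, "weight_decay": weight_decay},
    ]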

def _reset_sync_opt(self):
    """Reset the synced optimizer's param groups from the current model."""
    params = add_weight_decay(self.model, self.cfg.optim.weight_decay)
    self.optimizer.param_groups = []  # drop the stale groups
    param_groups = list(params)
    if not isinstance(param_groups[0], dict):
        # Bare tensors: wrap them in a single default group.
        param_groups = [{'params': param_groups}]
    for param_group in param_groups:
        self.optimizer.add_param_group(param_group)
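
# A standalone sketch of the same reset pattern, using the
# add_weight_decay sketch above; `model` and `optimizer` here are purely
# illustrative. Rebuilding the groups in place keeps the optimizer object
# (and any external references to it) while re-deriving the decay split.
import torch
from torch import nn

model = nn.Linear(8, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

optimizer.param_groups = []  # drop the stale groups
for group in add_weight_decay(model, weight_decay=1e-4):
    optimizer.add_param_group(group)  # missing keys (e.g. lr) come from defaults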

def build_param_dicts(self):
    """Build the parameter dicts for optimization."""
    if self.filter_wd:
        # Two groups come back: [no-decay, decay]; give both the same lr.
        param_lists = add_weight_decay(
            self.model,
            weight_decay=self.wd,
            skip_list=self.model.no_weight_decay(),
        )
        param_lists[0]["lr"] = self.lr
        param_lists[1]["lr"] = self.lr
    else:
        ps = trainable_params(self.model)
        # torch optimizers expect the key 'weight_decay' (not 'wd'); wrap
        # in a list so both branches return a list of group dicts.
        param_lists = [dict(params=ps, lr=self.lr, weight_decay=self.wd)]
    return param_lists
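
# The trainable_params helper used in the else-branch is not shown in this
# excerpt. A plausible minimal version (an assumption, not the codebase's
# actual code) just filters out frozen parameters:
def trainable_params(model):
    return [p for p in model.parameters() if p.requires_grad]

# Hypothetical usage: either branch's return value can be fed directly to
# a torch optimizer, e.g. torch.optim.AdamW(self.build_param_dicts()).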