Example no. 1
0
    def _scheduler_step(scheduler, reduced_metric=None):
        """Advance ``scheduler`` one step and report the optimizer's state.

        ``ReduceLROnPlateau`` is special-cased because its ``step`` expects
        the monitored metric; every other scheduler is stepped without
        arguments.

        Args:
            scheduler: a ``torch.optim.lr_scheduler`` instance to step.
            reduced_metric: metric value forwarded only to
                ``ReduceLROnPlateau`` schedulers.

        Returns:
            Tuple of (learning rates per param group, momentum list).
        """
        if not isinstance(scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
            scheduler.step()
        else:
            scheduler.step(reduced_metric)

        optimizer = scheduler.optimizer
        lr_list = [group["lr"] for group in optimizer.param_groups]
        return lr_list, get_optimizer_momentum_list(optimizer)
Example no. 2
0
 def _get_lr_momentum_stats(self) -> Dict:
     """Collect lr/momentum stats for the optimizer's first param group.

     Returns:
         Dict mapping ``self._prefix_lr`` and ``self._prefix_momentum`` to
         the learning rate and momentum of the first parameter group.
     """
     optimizer = self.optimizer
     learning_rates = [group["lr"] for group in optimizer.param_groups]
     momenta = get_optimizer_momentum_list(optimizer)
     return {
         self._prefix_lr: learning_rates[0],
         self._prefix_momentum: momenta[0],
     }