def load_scheduler(
    cfg: Config, optimizer: Optimizer
) -> Union[torch.optim.lr_scheduler.LambdaLR, Type[DummyScheduler]]:
    """Build the learning-rate scheduler configured under ``scheduler.*``.

    Looks up ``scheduler.class`` in *cfg*; when it is absent (or empty),
    returns the :class:`DummyScheduler` class itself (not an instance) as a
    no-op placeholder. Otherwise the dotted class path is imported and
    instantiated with *optimizer* plus the keyword arguments found under
    ``scheduler.params``.

    Args:
        cfg: Experiment configuration (OmegaConf-backed).
        optimizer: Optimizer the scheduler will drive.

    Returns:
        An instantiated scheduler, or the ``DummyScheduler`` class when no
        scheduler is configured.
    """
    cls_str = get_by_dotkey(cfg, "scheduler.class")
    if not cls_str:
        return DummyScheduler
    cls = cast(Type[torch.optim.lr_scheduler.LambdaLR], import_attr(cls_str))
    # Fetch the params node with the same tolerant dotted lookup used for the
    # class key: the previous direct `cfg.scheduler.params` access raised on
    # configs that set `scheduler.class` but omit `scheduler.params`, even
    # though the `or {}` fallback clearly intended "missing -> no kwargs".
    params_node = get_by_dotkey(cfg, "scheduler.params")
    params = (OmegaConf.to_container(params_node) if params_node is not None else None) or {}
    return cls(optimizer, **params)
def create_optimizer(self, params: OptimizerParameters, **kwargs) -> Optimizer:
    """Instantiate the optimizer described by ``self.optimizer_config``.

    The ``"class"`` entry is a dotted import path resolved via
    ``import_attr``; the optional ``"params"`` entry supplies keyword
    arguments for the optimizer constructor.
    """
    config = self.optimizer_config
    optimizer_cls = import_attr(config["class"])
    optimizer_kwargs = config.get("params", {})
    return optimizer_cls(params, **optimizer_kwargs)
def create_optimizer(
    self, params: Iterable[Union[torch.Tensor, Dict[str, Any]]], **kwargs
) -> Optimizer:
    """Build an optimizer over *params* from ``self.optimizer_config``.

    Resolves the dotted path under the ``"class"`` key with ``import_attr``
    and forwards the (optional) ``"params"`` mapping as constructor kwargs.
    """
    config = self.optimizer_config
    optimizer_cls = import_attr(config["class"])
    optimizer_kwargs = config.get("params", {})
    return optimizer_cls(params, **optimizer_kwargs)