def get_lr_scheduler(self) -> Optional[pytorch.LRScheduler]:
    """
    Return the LR scheduler registered for this trial, or ``None`` if none exists.

    This function should not be called from:
        * ``__init__``
        * ``build_model()``
        * ``optimizer()``
        * ``create_lr_scheduler()``
    """
    # TODO(DET-3267): deprecate this when releasing pytorch flexible primitives.
    schedulers = self.lr_schedulers
    # At most one scheduler may be registered through this legacy interface.
    check.lt_eq(len(schedulers), 1)
    return schedulers[0] if schedulers else None
def get_lr_scheduler(self) -> Optional[pytorch.LRScheduler]:
    """
    Return the LR scheduler registered for this trial, or ``None`` if none exists.

    This function should not be called from:
        * ``__init__``
        * ``build_model()``
        * ``optimizer()``
        * ``create_lr_scheduler()``

    .. warning::
        This is deprecated.
    """
    # TODO(DET-3262): remove this backward compatibility of old interface.
    logging.warning(
        "PyTorchTrialContext.get_lr_scheduler is deprecated. "
        "Please directly use the model wrapped by context.wrap_lr_scheduler()."
    )
    schedulers = self.lr_schedulers
    # The legacy interface supports at most a single registered scheduler.
    check.lt_eq(len(schedulers), 1)
    return schedulers[0] if schedulers else None