def on_stage_start(self, state: State) -> None:
    """Stage start hook.

    Resolves the metric to monitor, grabs the stage scheduler, infers the
    stepping mode ("batch" vs "epoch") when not set explicitly, and resets
    warmup schedulers that step per batch.

    Args:
        state (State): current state
    """
    # Fall back to the experiment's main metric when none was configured.
    if not self.reduced_metric:
        self.reduced_metric = state.main_metric

    scheduler = state.get_attr(key="scheduler", inner_key=self.scheduler_key)
    assert scheduler is not None
    self._scheduler = scheduler

    # Infer stepping granularity from the scheduler type if unspecified.
    if self.mode is None:
        self.mode = "batch" if isinstance(scheduler, BatchScheduler) else "epoch"

    # Per-batch warmup schedulers must restart their internal counter
    # at the beginning of every stage.
    if self.mode == "batch" and isinstance(scheduler, OneCycleLRWithWarmup):
        scheduler.reset()
    assert self.mode is not None
def on_stage_start(self, state: State):
    """Stage start hook.

    Captures the stage optimizer and remembers its initial learning rate.

    Args:
        state (State): current state
    """
    found = state.get_attr(key="optimizer", inner_key=self.optimizer_key)
    assert found is not None
    self._optimizer = found
    # Baseline LR taken from the optimizer defaults, used for later scaling.
    self.init_lr = found.defaults["lr"]
def on_stage_start(self, state: State):
    """Checks that the current stage has correct optimizer.

    Args:
        state (State): current state
    """
    optimizer = state.get_attr(key="optimizer", inner_key=self.optimizer_key)
    assert optimizer is not None
    self._optimizer = optimizer
def on_stage_start(self, state: State):
    """Checks that the current stage has correct criterion.

    Args:
        state (State): current state
    """
    self._criterion = state.get_attr(key="criterion", inner_key=self.criterion_key)
    assert self._criterion is not None
def on_stage_start(self, state: State) -> None:
    """Stage start hook.

    Fetches the stage optimizer and records its starting learning rate.

    Args:
        state (State): current state
    """
    opt = state.get_attr(key="optimizer", inner_key=self.optimizer_key)
    assert opt is not None
    self._optimizer = opt
    # Remember the configured base LR for subsequent adjustments.
    self.init_lr = opt.defaults["lr"]