def connect(self, trainer: 'pl.Trainer', *args: Any, **kwargs: Any) -> None:
    """Connects the loop with all necessary parts like trainer and accelerators."""
    super().connect(trainer, *args, **kwargs)
    self.batch_loop = TrainingBatchLoop()
    self.batch_loop.connect(trainer)
    self.val_loop = loops.EvaluationLoop()
    self.val_loop.connect(trainer)
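# A minimal, self-contained sketch of the connect pattern above: a parent loop
# propagates one shared trainer reference down to its child loops. `Trainer`,
# `Loop`, and `EpochLoop` here are hypothetical stand-ins for illustration,
# not the real pytorch_lightning classes.
from typing import Optional


class Trainer:
    pass


class Loop:
    def __init__(self) -> None:
        self.trainer: Optional[Trainer] = None

    def connect(self, trainer: Trainer) -> None:
        self.trainer = trainer


class EpochLoop(Loop):
    def connect(self, trainer: Trainer) -> None:
        super().connect(trainer)
        # wire the sub-loops to the same trainer so the hierarchy shares state
        self.batch_loop = Loop()
        self.batch_loop.connect(trainer)
        self.val_loop = Loop()
        self.val_loop.connect(trainer)


# usage: a single trainer instance is shared by the whole loop hierarchy
trainer = Trainer()
epoch_loop = EpochLoop()
epoch_loop.connect(trainer)
assert epoch_loop.batch_loop.trainer is trainer
assert epoch_loop.val_loop.trainer is trainer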
def __init__(self, min_steps: Optional[int] = None, max_steps: int = -1) -> None:
    super().__init__()
    if max_steps is None:
        rank_zero_deprecation(
            "Setting `max_steps = None` is deprecated in v1.5 and will no longer be supported in v1.7."
            " Use `max_steps = -1` instead."
        )
        max_steps = -1
    elif max_steps < -1:
        raise MisconfigurationException(
            f"`max_steps` must be a non-negative integer or -1 (infinite steps). You passed in {max_steps}."
        )
    self.min_steps = min_steps
    self.max_steps = max_steps

    self.batch_progress = BatchProgress()
    self.scheduler_progress = SchedulerProgress()

    self.batch_loop = TrainingBatchLoop()
    self.val_loop = loops.EvaluationLoop(verbose=False)

    self._results = _ResultCollection(training=True)
    self._outputs: _OUTPUTS_TYPE = []
    self._warning_cache = WarningCache()
    # caches the loaded dataloader state until dataloader objects are available
    self._dataloader_state_dict: Dict[str, Any] = {}
    self._batches_that_stepped: int = 0
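# A standalone sketch of the `max_steps` normalization performed above: `None`
# is a deprecated spelling of "infinite" and is coerced to -1, while values
# below -1 are rejected. `_normalize_max_steps` is an illustrative helper,
# not a pytorch_lightning API; plain warnings/ValueError stand in for
# rank_zero_deprecation/MisconfigurationException.
import warnings
from typing import Optional


def _normalize_max_steps(max_steps: Optional[int]) -> int:
    if max_steps is None:
        warnings.warn(
            "Setting `max_steps = None` is deprecated. Use `max_steps = -1` instead.",
            DeprecationWarning,
        )
        return -1
    if max_steps < -1:
        raise ValueError(
            f"`max_steps` must be a non-negative integer or -1 (infinite steps). You passed in {max_steps}."
        )
    return max_steps


assert _normalize_max_steps(None) == -1  # deprecated spelling of "infinite"
assert _normalize_max_steps(-1) == -1    # run until another stopping condition hits
assert _normalize_max_steps(100) == 100  # hard cap on optimizer steps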
def __init__(self, min_steps: int, max_steps: int):
    super().__init__()
    self.min_steps: int = min_steps
    self.max_steps: int = max_steps
    self.global_step: int = 0
    # the total batch index across all epochs
    self.total_batch_idx: int = 0
    # the current batch index in the loop that runs over the dataloader(s)
    self.iteration_count: int = 0
    # the current split index when the batch gets split into chunks in truncated backprop through time
    self.split_idx: Optional[int] = None
    # the number of batches seen this run, updates immediately after batch_loop.run()
    self.batches_seen: int = 0
    self.is_last_batch: Optional[bool] = None

    self.progress = TrainingEpochProgress()
    self.batch_loop = TrainingBatchLoop()
    self.val_loop = loops.EvaluationLoop()

    self._results = ResultCollection(training=True)
    self._dataloader_idx: Optional[int] = None
    self._warning_cache: WarningCache = WarningCache()
    self._epoch_output: Optional[List[List[STEP_OUTPUT]]] = None
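# A toy sketch of how the counters initialized above advance: `iteration_count`
# and `batches_seen` restart each epoch, while `total_batch_idx` and
# `global_step` accumulate across epochs. `Counters` and `run_epoch` are
# hypothetical, showing only the bookkeeping, not the real loop logic; it
# assumes one optimizer step per batch (no gradient accumulation).
from dataclasses import dataclass
from typing import List


@dataclass
class Counters:
    global_step: int = 0
    total_batch_idx: int = 0
    iteration_count: int = 0
    batches_seen: int = 0
    is_last_batch: bool = False


def run_epoch(c: Counters, batches: List[int]) -> None:
    # per-epoch counters restart; cross-epoch counters keep accumulating
    c.iteration_count = 0
    c.batches_seen = 0
    for batch_idx, _batch in enumerate(batches):
        c.is_last_batch = batch_idx == len(batches) - 1
        # ... the batch would be processed here ...
        c.batches_seen += 1     # bumped immediately after the batch runs
        c.iteration_count += 1  # index within the current dataloader
        c.total_batch_idx += 1  # index across all epochs
        c.global_step += 1      # assuming one optimizer step per batch


c = Counters()
run_epoch(c, batches=[0, 1, 2])
run_epoch(c, batches=[0, 1, 2])
assert c.iteration_count == 3 and c.total_batch_idx == 6 and c.global_step == 6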