Example #1
    def __init__(self, verbose: bool = True) -> None:
        super().__init__()

        rank_zero_deprecation(
            "The `XLAStatsMonitor` callback was deprecated in v1.5 and will be removed in v1.7."
            " Please use the `DeviceStatsMonitor` callback instead.")

        if not _TPU_AVAILABLE:
            raise MisconfigurationException(
                "Cannot use XLAStatsMonitor with TPUs are not available")

        self._verbose = verbose
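Every snippet in this collection funnels through the same helper. Below is a minimal sketch of its behavior, assuming it simply gates `warnings.warn` on the process rank; the real helper in `pytorch_lightning.utilities` also uses a dedicated `LightningDeprecationWarning` category and more robust rank detection than the single environment variable shown here.

import os
import warnings


class LightningDeprecationWarning(DeprecationWarning):
    """Marker category so deprecation warnings can be filtered separately."""


def rank_zero_deprecation(message: str) -> None:
    # Warn only on (local) rank 0, so distributed jobs do not repeat the
    # message once per worker process.
    if int(os.environ.get("LOCAL_RANK", "0")) == 0:
        warnings.warn(message, category=LightningDeprecationWarning, stacklevel=2)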
Example #2
 def __init__(self) -> None:
     super().__init__()
     # TODO: remove in 1.7
     if hasattr(self, "is_using_lsf") and callable(self.is_using_lsf):
         rank_zero_deprecation(
             f"`{self.__class__.__name__}.is_using_lsf` has been deprecated in v1.6 and will be removed in v1.7."
             " Implement the static method `detect()` instead (do not forget to add the `@staticmethod` decorator)."
         )
     self._main_address = self._get_main_address()
     self._main_port = self._get_main_port()
     self._node_rank = self._get_node_rank()
     self._set_init_progress_group_env_vars()
Example #3
    def size(self, dim=None) -> Union[Tuple, List[Tuple]]:
        """Return the dimension of each input either as a tuple or list of tuples. You can index this just as you
        would with a torch tensor.

        .. deprecated:: v1.5
            Will be removed in v1.7.0.
        """
        rank_zero_deprecation("DataModule property `size` was deprecated in v1.5 and will be removed in v1.7.")

        if dim is not None:
            return self.dims[dim]

        return self.dims
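A hypothetical usage of the deprecated property, relying on the `dims` constructor argument shown in Example #20:

class ToyDataModule(LightningDataModule):  # hypothetical subclass
    def __init__(self):
        super().__init__(dims=(1, 28, 28))  # `dims` is itself deprecated in v1.5


dm = ToyDataModule()
dm.size()   # (1, 28, 28) -- emits the deprecation warning
dm.size(0)  # 1, indexed like a torch.Size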
Example #4
 def is_using_torchelastic(self) -> bool:
     """
     .. deprecated:: v1.3
         Will be removed in v1.5.0.

     Returns:
         ``True`` if the current process was launched using the torchelastic command.
     """
     rank_zero_deprecation(
         "The property `AcceleratorConnector.is_using_torchelastic` was deprecated in v1.3"
         " and will be removed in 1.5. Use `TorchElasticEnvironment.is_using_torchelastic()` instead.",
     )
     return TorchElasticEnvironment.is_using_torchelastic()
Example #5
    def hpc_load(self, checkpoint_path: str) -> None:
        """
        Attempts to restore the full training and model state from an HPC checkpoint file.

        .. deprecated:: v1.4
            Will be removed in v1.6. Use :meth:`restore` instead.
        """
        rank_zero_deprecation(
            "`CheckpointConnector.hpc_load()` was deprecated in v1.4 and will be removed in v1.6."
            " Use `CheckpointConnector.restore()` instead."
        )
        self.restore(checkpoint_path)
Example #6
    def test_transforms(self):
        """
        Optional transforms (or collection of transforms) you can apply to the test dataset.

        .. deprecated:: v1.5
            Will be removed in v1.7.0.
        """

        rank_zero_deprecation(
            "DataModule property `test_transforms` was deprecated in v1.5 and will be removed in v1.7."
        )
        return self._test_transforms
Example #7
    def on_before_optimizer_step(self, optimizer, optimizer_idx):
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_before_optimizer_step` was deprecated in v1.6 and will be removed in v1.8.

        Called after on_after_backward() once the gradient is accumulated and before optimizer.step().
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_before_optimizer_step` was deprecated in v1.6 and will be removed in v1.8."
        )
        for callback in self.callbacks:
            callback.on_before_optimizer_step(self, self.lightning_module,
                                              optimizer, optimizer_idx)
Example #8
def auc(
    x: torch.Tensor,
    y: torch.Tensor,
) -> torch.Tensor:
    """
    .. deprecated::
        Use :func:`torchmetrics.functional.auc`. Will be removed in v1.4.0.
    """
    rank_zero_deprecation(
        "This `auc` was deprecated in v1.2.0 in favor of"
        " `pytorch_lightning.metrics.functional.auc import auc`."
        " It will be removed in v1.4.0")
    return __auc(x, y)
Example #9
    def on_before_zero_grad(self, optimizer):
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_before_zero_grad` was deprecated in v1.6 and will be removed in v1.8.

        Called after optimizer.step() and before optimizer.zero_grad().
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_before_zero_grad` was deprecated in v1.6 and will be removed in v1.8."
        )
        for callback in self.callbacks:
            callback.on_before_zero_grad(self, self.lightning_module,
                                         optimizer)
Example #10
 def init_optimizers(
         self,
         model: Optional["pl.LightningModule"]) -> Tuple[List, List, List]:
     r"""
     .. deprecated:: v1.6
         `TrainerOptimizersMixin.init_optimizers` was deprecated in v1.6 and will be removed in v1.8.
     """
     rank_zero_deprecation(
         "`TrainerOptimizersMixin.init_optimizers` was deprecated in v1.6 and will be removed in v1.8."
     )
     pl_module = self.lightning_module or model
     assert isinstance(pl_module, pl.LightningModule)
     return _init_optimizers_and_lr_schedulers(pl_module)
Example #11
    def on_test_batch_start(self, batch, batch_idx, dataloader_idx):
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_test_batch_start` was deprecated in v1.6 and will be removed in v1.8.

        Called when the test batch begins.
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_test_batch_start` was deprecated in v1.6 and will be removed in v1.8."
        )
        for callback in self.callbacks:
            callback.on_test_batch_start(self, self.lightning_module, batch,
                                         batch_idx, dataloader_idx)
Example #12
    def has_setup_test(self) -> bool:
        """Return bool letting you know if ``datamodule.setup(stage='test')`` has been called or not.

        Returns:
            bool: True if ``datamodule.setup(stage='test')`` has been called. False by default.

        .. deprecated:: v1.4
            Will be removed in v1.6.0.
        """
        rank_zero_deprecation(
            "DataModule property `has_setup_test` was deprecated in v1.4 and will be removed in v1.6."
        )
        return self._has_setup_test
Example #13
    def has_teardown_predict(self) -> bool:
        """Return bool letting you know if ``datamodule.teardown(stage='predict')`` has been called or not.

        Returns:
            bool: True if ``datamodule.teardown(stage='predict')`` has been called. False by default.

        .. deprecated:: v1.4
            Will be removed in v1.6.0.
        """
        rank_zero_deprecation(
            'DataModule property `has_teardown_predict` was deprecated in v1.4 and will be removed in v1.6.'
        )
        return self._has_teardown_predict
Example #14
 def max_steps(self, value: int) -> None:
     """Sets the maximum number of steps (forwards to epoch_loop)"""
     # TODO(@awaelchli): This setter is required by debugging connector (fast dev run), should be avoided
     if value is None:
         rank_zero_deprecation(
             "Setting `max_steps = None` is deprecated in v1.5 and will no longer be supported in v1.7."
             " Use `max_steps = -1` instead.")
         value = -1
     elif value < -1:
         raise MisconfigurationException(
             f"`max_steps` must be a non-negative integer or -1 (infinite steps). You passed in {value}."
         )
     self.epoch_loop.max_steps = value
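The migration the warning asks for, expressed with the `Trainer` argument that ultimately feeds this setter (a sketch, not the full `Trainer` signature):

trainer = Trainer(max_steps=None)  # deprecated in v1.5: None meant "no limit"
trainer = Trainer(max_steps=-1)    # replacement: -1 means infinite steps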
Example #15
def is_overridden(
    method_name: str,
    instance: Optional[object] = None,
    parent: Optional[Type[object]] = None,
    model: Optional[Union['pl.LightningModule',
                          'pl.LightningDataModule']] = None,
) -> bool:
    if model is not None and instance is None:
        rank_zero_deprecation(
            '`is_overridden(model=...)` has been deprecated and will be removed in v1.6.'
            ' Please use `is_overridden(instance=...)` instead.')
        instance = model

    if instance is None:
        # if `self.lightning_module` was passed as instance, it can be `None`
        return False

    if parent is None:
        if isinstance(instance, pl.LightningModule):
            parent = pl.LightningModule
        elif isinstance(instance, pl.LightningDataModule):
            parent = pl.LightningDataModule
        if parent is None:
            raise ValueError("Expected a parent")

    instance_attr = getattr(instance, method_name, None)
    # `functools.wraps()` support
    if hasattr(instance_attr, '__wrapped__'):
        instance_attr = instance_attr.__wrapped__
    # `Mock(wraps=...)` support
    if isinstance(instance_attr, Mock):
        # access the wrapped function
        instance_attr = instance_attr._mock_wraps
    # `partial` support
    elif isinstance(instance_attr, partial):
        instance_attr = instance_attr.func
    if instance_attr is None:
        return False

    parent_attr = getattr(parent, method_name, None)
    if parent_attr is None:
        raise ValueError("The parent should define the method")

    # cannot pickle `__code__` so cannot verify if `PatchDataloader`
    # exists which shows dataloader methods have been overwritten.
    # so, we hack it by using the string representation
    instance_code = getattr(instance_attr, 'patch_loader_code',
                            None) or instance_attr.__code__
    parent_code = parent_attr.__code__

    return instance_code != parent_code
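A small illustration of the override check, assuming `pl` is the imported `pytorch_lightning` package:

class MyModel(pl.LightningModule):  # hypothetical user module
    def training_step(self, batch, batch_idx):
        ...


model = MyModel()
is_overridden("training_step", instance=model)    # True: the hook is redefined
is_overridden("validation_step", instance=model)  # False: inherited default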
Example #16
    def on_predict_batch_end(self, outputs: STEP_OUTPUT, batch: Any,
                             batch_idx: int, dataloader_idx: int) -> None:
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_predict_batch_end` was deprecated in v1.6 and will be removed in v1.8.

        Called when the predict batch ends.
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_predict_batch_end` was deprecated in v1.6 and will be removed in v1.8."
        )
        for callback in self.callbacks:
            callback.on_predict_batch_end(self, self.lightning_module, outputs,
                                          batch, batch_idx, dataloader_idx)
Example #17
    def on_before_accelerator_backend_setup(self) -> None:
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_before_accelerator_backend_setup` was deprecated in v1.6
            and will be removed in v1.8.

        Called at the beginning of fit (train + validate), validate, test, predict, or tune.
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_before_accelerator_backend_setup` was deprecated in v1.6 "
            "and will be removed in v1.8.")
        for callback in self.callbacks:
            callback.on_before_accelerator_backend_setup(
                self, self.lightning_module)
Example #18
def auto_move_data(fn: Callable) -> Callable:
    """
    Decorator for :class:`~pytorch_lightning.core.lightning.LightningModule` methods for which
    input arguments should be moved automatically to the correct device.
    It has no effect if applied to a method of an object that is not an instance of
    :class:`~pytorch_lightning.core.lightning.LightningModule` and is typically applied to ``__call__``
    or ``forward``.

    Args:
        fn: A LightningModule method for which the arguments should be moved to the device
            the parameters are on.

    Example::

        # directly in the source code
        class LitModel(LightningModule):

            @auto_move_data
            def forward(self, x):
                return x

        # or outside
        LitModel.forward = auto_move_data(LitModel.forward)

        model = LitModel()
        model = model.to('cuda')
        model(torch.zeros(1, 3))

        # input gets moved to device
        # tensor([[0., 0., 0.]], device='cuda:0')

    """
    @wraps(fn)
    def auto_transfer_args(self, *args, **kwargs):
        from pytorch_lightning.core.lightning import LightningModule

        if not isinstance(self, LightningModule):
            return fn(self, *args, **kwargs)

        args, kwargs = self.transfer_batch_to_device((args, kwargs),
                                                     device=self.device,
                                                     dataloader_idx=None)
        return fn(self, *args, **kwargs)

    rank_zero_deprecation(
        "The `@auto_move_data` decorator is deprecated in v1.3 and will be removed in v1.5."
        f" Please use `trainer.predict` instead for inference. The decorator was applied to `{fn.__name__}`"
    )

    return auto_transfer_args
Example #19
 def __init__(self,
              trainer: "pl.Trainer",
              resume_from_checkpoint: Optional[_PATH] = None) -> None:
     self.trainer = trainer
     self.resume_checkpoint_path: Optional[_PATH] = None
     # TODO: remove resume_from_checkpoint_fit_path in v1.7
     self.resume_from_checkpoint_fit_path: Optional[
         _PATH] = resume_from_checkpoint
     if resume_from_checkpoint is not None:
         rank_zero_deprecation(
             "Setting `Trainer(resume_from_checkpoint=)` is deprecated in v1.5 and"
             " will be removed in v1.7. Please pass `Trainer.fit(ckpt_path=)` directly instead."
         )
     self._loaded_checkpoint: Dict[str, Any] = {}
Example #20
    def __init__(self,
                 train_transforms=None,
                 val_transforms=None,
                 test_transforms=None,
                 dims=None):
        super().__init__()
        if train_transforms is not None:
            rank_zero_deprecation(
                "DataModule property `train_transforms` was deprecated in v1.5 and will be removed in v1.7."
            )
        if val_transforms is not None:
            rank_zero_deprecation(
                "DataModule property `val_transforms` was deprecated in v1.5 and will be removed in v1.7."
            )
        if test_transforms is not None:
            rank_zero_deprecation(
                "DataModule property `test_transforms` was deprecated in v1.5 and will be removed in v1.7."
            )
        if dims is not None:
            rank_zero_deprecation(
                "DataModule property `dims` was deprecated in v1.5 and will be removed in v1.7."
            )
        self._train_transforms = train_transforms
        self._val_transforms = val_transforms
        self._test_transforms = test_transforms
        self._dims = dims if dims is not None else ()

        # Pointer to the trainer object
        self.trainer = None
Example #21
    def _map_deprecated_devices_specific_info_to_accelerator_and_device_flag(
        self,
        devices: Optional[Union[List[int], str, int]],
        num_processes: Optional[int],
        gpus: Optional[Union[List[int], str, int]],
        ipus: Optional[int],
        tpu_cores: Optional[Union[List[int], str, int]],
    ) -> None:
        """Emit deprecation warnings for num_processes, gpus, ipus, tpu_cores and set the `devices_flag` and
        `accelerator_flag`."""
        if num_processes is not None:
            rank_zero_deprecation(
                f"Setting `Trainer(num_processes={num_processes})` is deprecated in v1.7 and will be removed"
                f" in v2.0. Please use `Trainer(accelerator='cpu', devices={num_processes})` instead."
            )
        if gpus is not None:
            rank_zero_deprecation(
                f"Setting `Trainer(gpus={gpus!r})` is deprecated in v1.7 and will be removed"
                f" in v2.0. Please use `Trainer(accelerator='gpu', devices={gpus!r})` instead."
            )
        if tpu_cores is not None:
            rank_zero_deprecation(
                f"Setting `Trainer(tpu_cores={tpu_cores!r})` is deprecated in v1.7 and will be removed"
                f" in v2.0. Please use `Trainer(accelerator='tpu', devices={tpu_cores!r})` instead."
            )
        if ipus is not None:
            rank_zero_deprecation(
                f"Setting `Trainer(ipus={ipus})` is deprecated in v1.7 and will be removed"
                f" in v2.0. Please use `Trainer(accelerator='ipu', devices={ipus})` instead."
            )
        self._gpus: Optional[Union[List[int], str, int]] = gpus
        self._tpu_cores: Optional[Union[List[int], str, int]] = tpu_cores
        deprecated_devices_specific_flag = num_processes or gpus or ipus or tpu_cores
        if deprecated_devices_specific_flag and deprecated_devices_specific_flag not in (
            [], 0, "0"):
            if devices:
                # TODO: @awaelchli improve error message
                rank_zero_warn(
                    f"The flag `devices={devices}` will be ignored, "
                    f"instead the device specific number {deprecated_devices_specific_flag} will be used"
                )

            if [(num_processes is not None), (gpus is not None),
                (ipus is not None), (tpu_cores is not None)].count(True) > 1:
                # TODO: @awaelchli improve error message
                rank_zero_warn(
                    "more than one device specific flag has been set")
            self._devices_flag = deprecated_devices_specific_flag

            if self._accelerator_flag is None:
                # set accelerator type based on num_processes, gpus, ipus, tpu_cores
                if ipus:
                    self._accelerator_flag = "ipu"
                if tpu_cores:
                    self._accelerator_flag = "tpu"
                if gpus:
                    self._accelerator_flag = "cuda"
                if num_processes:
                    self._accelerator_flag = "cpu"
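Side by side, the old-to-new mapping that these four warnings describe:

Trainer(num_processes=4)  # deprecated -> Trainer(accelerator="cpu", devices=4)
Trainer(gpus=2)           # deprecated -> Trainer(accelerator="gpu", devices=2)
Trainer(tpu_cores=8)      # deprecated -> Trainer(accelerator="tpu", devices=8)
Trainer(ipus=1)           # deprecated -> Trainer(accelerator="ipu", devices=1)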
Example #22
    def prepare_dataloader(self, dataloader: Any, shuffle: bool, mode: Optional[RunningStage] = None) -> Any:
        r"""
        .. deprecated:: v1.6
            `TrainerDataLoadingMixin.prepare_dataloader` was deprecated in v1.6
            and will be removed in v1.8.

        This function handles the following functionalities:

        - Injecting a `DistributedSampler` into the `DataLoader` if running in a distributed environment
        - Wrapping the datasets and samplers into fault-tolerant components
        """
        rank_zero_deprecation(
            "`TrainerDataLoadingMixin.prepare_dataloader` was deprecated in v1.6 and will be removed in v1.8."
        )
        return self._data_connector._prepare_dataloader(dataloader, shuffle, mode)
Example #23
 def on_save_checkpoint(self, checkpoint: Dict[str, Any]) -> Dict[Type, dict]:
     """Called when saving a model checkpoint."""
     callback_states = {}
     for callback in self.callbacks:
         if self.__is_old_signature(callback.on_save_checkpoint):
             rank_zero_deprecation(
                 "`Callback.on_save_checkpoint` signature has changed in v1.3."
                 " A `checkpoint` parameter has been added."
                 " Support for the old signature will be removed in v1.5"
             )
             state = callback.on_save_checkpoint(self, self.lightning_module)  # noqa: parameter-unfilled
         else:
             state = callback.on_save_checkpoint(self, self.lightning_module, checkpoint)
         if state:
             callback_states[type(callback)] = state
     return callback_states
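The signature change being detected, sketched as two hypothetical user callbacks:

class OldStyleCallback(Callback):
    # pre-v1.3 signature: takes this method down the deprecation path above
    def on_save_checkpoint(self, trainer, pl_module):
        return {"my": "state"}


class NewStyleCallback(Callback):
    # v1.3+ signature: the checkpoint dict is passed as a third argument
    def on_save_checkpoint(self, trainer, pl_module, checkpoint):
        return {"my": "state"}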
Example #24
    def __init__(
        self,
        dirpath: Optional[Union[str, Path]] = None,
        filename: Optional[str] = None,
        monitor: Optional[str] = None,
        verbose: bool = False,
        save_last: Optional[bool] = None,
        save_top_k: int = 1,
        save_weights_only: bool = False,
        mode: str = "min",
        auto_insert_metric_name: bool = True,
        every_n_train_steps: Optional[int] = None,
        train_time_interval: Optional[timedelta] = None,
        every_n_epochs: Optional[int] = None,
        save_on_train_epoch_end: Optional[bool] = None,
        period: Optional[int] = None,
        every_n_val_epochs: Optional[int] = None,
    ):
        super().__init__()
        self.monitor = monitor
        self.verbose = verbose
        self.save_last = save_last
        self.save_top_k = save_top_k
        self.save_weights_only = save_weights_only
        self.auto_insert_metric_name = auto_insert_metric_name
        self._save_on_train_epoch_end = save_on_train_epoch_end
        self._last_global_step_saved = -1
        self._last_time_checked: Optional[float] = None
        self.current_score = None
        self.best_k_models = {}
        self.kth_best_model_path = ""
        self.best_model_score = None
        self.best_model_path = ""
        self.last_model_path = ""

        if every_n_val_epochs is not None:
            rank_zero_deprecation(
                '`ModelCheckpoint(every_n_val_epochs)` is deprecated in v1.4 and will be removed in v1.6.'
                ' Please use `every_n_epochs` instead.')
            every_n_epochs = every_n_val_epochs

        self.__init_monitor_mode(mode)
        self.__init_ckpt_dir(dirpath, filename)
        self.__init_triggers(every_n_train_steps, every_n_epochs,
                             train_time_interval, period)
        self.__validate_init_configuration()
        self._save_function = None
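The rename this deprecation asks for:

ModelCheckpoint(every_n_val_epochs=2)  # deprecated in v1.4
ModelCheckpoint(every_n_epochs=2)      # replacement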
Example #25
def _normalize_parse_gpu_string_input(
        s: Union[int, str, List[int]]) -> Union[int, List[int]]:
    if not isinstance(s, str):
        return s
    if s == '-1':
        return -1
    if ',' in s:
        return [int(x.strip()) for x in s.split(',') if len(x) > 0]
    num_gpus = int(s.strip())
    if _compare_version("pytorch_lightning", operator.lt, "1.5"):
        rank_zero_deprecation(
            f"Parsing of the Trainer argument gpus='{s}' (string) will change in the future."
            " In the current version of Lightning, this will select"
            f" CUDA device with index {num_gpus}, but from v1.5 it will select gpus"
            f" {list(range(num_gpus))} (same as gpus={s} (int)).", )
        return [num_gpus]
    return num_gpus
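Worked examples of the parsing rules above; the last case is the one whose meaning changed in v1.5:

_normalize_parse_gpu_string_input(2)      # 2 (non-strings pass through)
_normalize_parse_gpu_string_input("-1")   # -1 (all available GPUs)
_normalize_parse_gpu_string_input("0,2")  # [0, 2]
_normalize_parse_gpu_string_input("3")    # [3] before v1.5 (the device with index 3);
                                          # 3 from v1.5 on (devices [0, 1, 2])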
Example #26
    def __deprecation_check(self, profiled_functions: Optional[List[str]],
                            record_functions: Optional[Set[str]]) -> Set[str]:
        if record_functions is None:
            record_functions = set()

        if profiled_functions is not None:
            rank_zero_deprecation(
                "`PyTorchProfiler.profiled_functions` has been renamed to"
                " `record_functions` in v1.3 and will be removed in v1.5")
            if not record_functions:
                record_functions |= set(profiled_functions)
            else:
                raise MisconfigurationException(
                    "You set `PytorchProfiler.profiled_functions` and `PyTorchProfiler.record_functions`."
                    "  Please use only the later.")

        return record_functions
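The rename being handled; per the signature above, `record_functions` is a set rather than a list:

PyTorchProfiler(profiled_functions=["training_step"])  # deprecated in v1.3
PyTorchProfiler(record_functions={"training_step"})    # replacement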
Example #27
    def request_dataloader(
        self, stage: RunningStage, model: Optional["pl.LightningModule"] = None
    ) -> Union[DataLoader, List[DataLoader]]:
        r"""
        .. deprecated:: v1.6
            `TrainerDataLoadingMixin.request_dataloader` was deprecated in v1.6
            and will be removed in v1.8.

        Requests a dataloader from the given model by calling dataloader hooks corresponding to the given stage.

        Returns:
            The requested dataloader
        """
        rank_zero_deprecation(
            "`TrainerDataLoadingMixin.request_dataloader` was deprecated in v1.6 and will be removed in v1.8."
        )
        return self._data_connector._request_dataloader(stage)
Example #28
 def __init__(
     self,
     accelerator: Optional["pl.accelerators.accelerator.Accelerator"] = None,
     checkpoint_io: Optional[CheckpointIO] = None,
     precision_plugin: Optional[PrecisionPlugin] = None,
 ) -> None:
     self.accelerator = accelerator
     self._model: Optional[Module] = None
     self.checkpoint_io = checkpoint_io
     self.precision_plugin = precision_plugin
     self.optimizers: List[Optimizer] = []
     self.lr_schedulers: List[LRSchedulerConfig] = []
     self.optimizer_frequencies: List[int] = []
     if is_overridden("post_dispatch", self, parent=Strategy):
         rank_zero_deprecation(
             f"`{self.__class__.__name__}.post_dispatch()` has been deprecated in v1.6 and will be removed in v1.7."
             f" Move your implementation to `{self.__class__.__name__}.teardown()` instead."
         )
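The migration the warning describes, sketched on a hypothetical custom strategy:

class MyStrategy(Strategy):
    # Deprecated in v1.6: overriding post_dispatch() triggers the warning above.
    # From v1.7, the same cleanup logic belongs here instead:
    def teardown(self) -> None:
        ...  # e.g. release process groups or close file handles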
Example #29
    def on_save_checkpoint(self, checkpoint: Dict[str,
                                                  Any]) -> Dict[str, dict]:
        r"""
        .. deprecated:: v1.6
            `TrainerCallbackHookMixin.on_save_checkpoint` was deprecated in v1.6 and will be removed in v1.8.

        Called when saving a model checkpoint.
        """
        rank_zero_deprecation(
            "`TrainerCallbackHookMixin.on_save_checkpoint` was deprecated in v1.6 and will be removed in v1.8."
        )
        callback_states = {}
        for callback in self.callbacks:
            state = callback.on_save_checkpoint(self, self.lightning_module,
                                                checkpoint)
            if state:
                callback_states[callback.state_key] = state
        return callback_states
Example #30
def multiclass_auroc(
    pred: torch.Tensor,
    target: torch.Tensor,
    sample_weight: Optional[Sequence] = None,
    num_classes: Optional[int] = None,
) -> torch.Tensor:
    """
    .. deprecated::
        Use :func:`torchmetrics.functional.auroc`. Will be removed in v1.4.0.
    """
    rank_zero_deprecation(
        "This `multiclass_auroc` was deprecated in v1.2.0 in favor of"
        " `pytorch_lightning.metrics.functional.auroc import auroc`."
        " It will be removed in v1.4.0")

    return __auroc(preds=pred,
                   target=target,
                   sample_weights=sample_weight,
                   num_classes=num_classes)
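The replacement named in the docstring, assuming a torchmetrics release contemporary with these snippets (newer versions require a `task` argument instead of `num_classes` alone):

import torch
from torchmetrics.functional import auroc

preds = torch.softmax(torch.randn(16, 3), dim=-1)
target = torch.randint(3, (16,))
auroc(preds, target, num_classes=3)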