Example #1
def test_module_exists():
    """Test if the some 3rd party libs are available."""
    assert _module_available("torch")
    assert _module_available("torch.nn.parallel")
    assert not _module_available("torch.nn.asdf")
    assert not _module_available("asdf")
    assert not _module_available("asdf.bla.asdf")
Example #2
import logging
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union

import torch
from torch import is_tensor

import pytorch_lightning as pl
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only
from pytorch_lightning.utilities.exceptions import MisconfigurationException

log = logging.getLogger(__name__)
_COMET_AVAILABLE = _module_available("comet_ml")

if _COMET_AVAILABLE:
    import comet_ml
    from comet_ml import ExistingExperiment as CometExistingExperiment
    from comet_ml import Experiment as CometExperiment
    from comet_ml import OfflineExperiment as CometOfflineExperiment

    try:
        from comet_ml.api import API
    except ImportError:  # pragma: no-cover
        # For more information, see: https://www.comet.ml/docs/python-sdk/releases/#release-300
        from comet_ml.papi import API  # pragma: no-cover
else:
    # needed for test mocks, these tests shall be updated
    comet_ml = None
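
The `comet_ml = None` fallback keeps the module importable for test mocks; code built on it would then fail fast at construction time instead. A hedged sketch of that guard, reusing the names imported above (class name and message wording are illustrative, not the library's exact code):

class MyCometBackedLogger:
    def __init__(self):
        if comet_ml is None:
            # fail at use time with an actionable message, not at import time
            raise MisconfigurationException(
                "You want to use `comet_ml` which is not installed yet."
                " Install it with `pip install comet-ml`."
            )
        self._experiment = None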
Example #3
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Tube Logger
----------------
"""
from argparse import Namespace
from typing import Any, Dict, Optional, Union

from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available
from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn

_TESTTUBE_AVAILABLE = _module_available("test_tube")

if _TESTTUBE_AVAILABLE:
    from test_tube import Experiment
else:
    Experiment = None


class TestTubeLogger(LightningLoggerBase):
    r"""
    Log to local file system in `TensorBoard <https://www.tensorflow.org/tensorboard>`_ format
    but using a nicer folder structure (see `full docs <https://williamfalcon.github.io/test-tube>`_).

    Install it with pip:

    .. code-block:: bash
Example #4
    >>> _compare_version("torch", operator.ge, "0.1")
    True
    """
    try:
        pkg = importlib.import_module(package)
    except (ModuleNotFoundError, DistributionNotFound):
        return False
    try:
        pkg_version = Version(pkg.__version__)
    except TypeError:
        # this is mocked by Sphinx, so it shall return True to generate all summaries
        return True
    return op(pkg_version, Version(version))


_NATIVE_AMP_AVAILABLE: bool = _module_available("torch.cuda.amp") and hasattr(
    torch.cuda.amp, "autocast")

_TORCHVISION_AVAILABLE: bool = _module_available("torchvision")
_GYM_AVAILABLE: bool = _module_available("gym")
_SKLEARN_AVAILABLE: bool = _module_available("sklearn")
_PIL_AVAILABLE: bool = _module_available("PIL")
_OPENCV_AVAILABLE: bool = _module_available("cv2")
_WANDB_AVAILABLE: bool = _module_available("wandb")
_MATPLOTLIB_AVAILABLE: bool = _module_available("matplotlib")
_TORCHVISION_LESS_THAN_0_9_1: bool = _compare_version("torchvision",
                                                      operator.lt, "0.9.1")
_PL_GREATER_EQUAL_1_4 = _compare_version("pytorch_lightning", operator.ge,
                                         "1.4.0")
_PL_GREATER_EQUAL_1_4_5 = _compare_version("pytorch_lightning", operator.ge,
                                           "1.4.5")
Example #5
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Type, Union

from pytorch_lightning.callbacks import Callback
from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.trainer.trainer import Trainer
from pytorch_lightning.utilities import _module_available
from pytorch_lightning.utilities.seed import seed_everything

_JSONARGPARSE_AVAILABLE = _module_available("jsonargparse")
if _JSONARGPARSE_AVAILABLE:
    from jsonargparse import ActionConfigFile, ArgumentParser
else:
    ArgumentParser = object


class LightningArgumentParser(ArgumentParser):
    """Extension of jsonargparse's ArgumentParser for pytorch-lightning"""
    def __init__(self, *args, parse_as_dict: bool = True, **kwargs) -> None:
        """Initialize argument parser that supports configuration file input

        For full details of accepted arguments see `ArgumentParser.__init__
        <https://jsonargparse.readthedocs.io/en/stable/#jsonargparse.core.ArgumentParser.__init__>`_.
        """
        if not _JSONARGPARSE_AVAILABLE:
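
The snippet cuts off inside `__init__`. The natural continuation of such a guard is to raise an actionable error when jsonargparse is missing and otherwise delegate to the parent class; a sketch of that continuation (message wording illustrative):

        if not _JSONARGPARSE_AVAILABLE:
            raise ModuleNotFoundError(
                "`jsonargparse` is not installed but it is required for the CLI."
                " Install it with `pip install jsonargparse[signatures]`."
            )
        super().__init__(*args, parse_as_dict=parse_as_dict, **kwargs)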
Example #6
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Optional

import torch

from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.plugins.ddp_plugin import DDPPlugin
from pytorch_lightning.utilities import _module_available, RPC_AVAILABLE

DEFAULT_RPC_TIMEOUT_SEC = 60.
if RPC_AVAILABLE:
    from torch.distributed import rpc
    if _module_available("torch.distributed.rpc.constants") and hasattr(
            torch.distributed.rpc.constants, "DEFAULT_RPC_TIMEOUT_SEC"):
        from torch.distributed.rpc.constants import DEFAULT_RPC_TIMEOUT_SEC


class RPCPlugin(DDPPlugin):
    """
    Backbone for RPC Plugins built on top of DDP.
    RPC introduces different communication behaviour than DDP. Unlike DDP, processes are potentially
    not required to run the same code as the main process.
    This leads to edge cases where logic needs to be re-defined. This class handles the special cases
    that need to be addressed when building custom RPC plugins that use RPC communication.
    """

    def __init__(self, rpc_timeout_sec: float = DEFAULT_RPC_TIMEOUT_SEC, **kwargs):
        self.rpc_timeout_sec = rpc_timeout_sec
        self.rpc_initialized = False
Example #7

# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import operator

from pytorch_lightning.utilities import _module_available
from pytorch_lightning.utilities.imports import _compare_version

_BOLTS_AVAILABLE = _module_available("pl_bolts") and _compare_version(
    "pl_bolts", operator.ge, "0.4.0")
_BOLTS_GREATER_EQUAL_0_5_0 = _module_available(
    "pl_bolts") and _compare_version("pl_bolts", operator.ge, "0.5.0")
_WANDB_AVAILABLE = _module_available("wandb")
Example #8
import os

from pytorch_lightning.utilities import _module_available

_EXAMPLES_ROOT = os.path.dirname(__file__)
_PACKAGE_ROOT = os.path.dirname(_EXAMPLES_ROOT)
_DATASETS_PATH = os.path.join(_PACKAGE_ROOT, 'Datasets')

_TORCHVISION_AVAILABLE = _module_available("torchvision")
_DALI_AVAILABLE = _module_available("nvidia.dali")

LIGHTNING_LOGO = """
                    ####
                ###########
             ####################
         ############################
    #####################################
##############################################
#########################  ###################
#######################    ###################
####################      ####################
##################       #####################
################        ######################
#####################        #################
######################     ###################
#####################    #####################
####################   #######################
###################  #########################
##############################################
    #####################################
         ############################
Example #9
# Copyright 2020 The PyTorch Lightning team and The HuggingFace Team. All rights reserved.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re

from pytorch_lightning.utilities import _module_available

nltk = None
if _module_available('nltk'):
    import nltk

    nltk.download("punkt", quiet=True)


def add_newline_to_end_of_each_sentence(x: str) -> str:
    """This was added to get rougeLsum scores matching published rougeL scores for BART and PEGASUS."""
    x = re.sub("<n>", "", x)  # remove the pegasus "<n>" newline token (re.sub returns a new string)
    assert nltk, "nltk must be installed to separate newlines between sentences. (pip install nltk)"
    return "\n".join(nltk.sent_tokenize(x))
Example #10
import json
import os
from pathlib import Path

import pytest
import torch
from PIL import Image
from pytorch_lightning.utilities import _module_available
from torchvision import transforms as T

from flash.vision.detection.data import ImageDetectionData

_COCO_AVAILABLE = _module_available("pycocotools")
if _COCO_AVAILABLE:
    from pycocotools.coco import COCO


def _create_dummy_coco_json(dummy_json_path):

    dummy_json = {
        "images": [{
            "id": 0,
            "width": 1920,
            "height": 1080,
            "file_name": "sample_one.png",
        }, {
            "id": 1,
            "width": 1920,
            "height": 1080,
            "file_name": "sample_two.png",
        }],
Example #11
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional

import torch
from torch.utils.data import DataLoader

from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.utilities import _module_available
from tests.helpers.datasets import MNIST, SklearnDataset, TrialMNIST

_SKLEARN_AVAILABLE = _module_available("sklearn")
if _SKLEARN_AVAILABLE:
    from sklearn.datasets import make_classification, make_regression
    from sklearn.model_selection import train_test_split
else:
    make_classification = None
    make_regression = None
    train_test_split = None


class MNISTDataModule(LightningDataModule):
    def __init__(self, data_dir: str = "./", batch_size: int = 32, use_trials: bool = False) -> None:
        super().__init__()

        self.data_dir = data_dir
        self.batch_size = batch_size
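
Because the sklearn helpers are stubbed to `None` rather than raising at import time, test modules can always be collected and individual tests skipped. A sketch of that pattern, reusing `_SKLEARN_AVAILABLE` and `make_classification` from the snippet above:

import pytest


@pytest.mark.skipif(not _SKLEARN_AVAILABLE, reason="scikit-learn is not installed")
def test_make_classification_smoke():
    X, y = make_classification(n_samples=8, n_features=4)
    assert X.shape == (8, 4)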
Example #12

import torch
from pytorch_lightning.utilities import _module_available

_NATIVE_AMP_AVAILABLE = _module_available("torch.cuda.amp") and hasattr(
    torch.cuda.amp, "autocast")

_TORCHVISION_AVAILABLE = _module_available("torchvision")
_GYM_AVAILABLE = _module_available("gym")
_SKLEARN_AVAILABLE = _module_available("sklearn")
_PIL_AVAILABLE = _module_available("PIL")
_OPENCV_AVAILABLE = _module_available("cv2")
Example #13
# Copyright 2020 The PyTorch Lightning team and The HuggingFace Team. All rights reserved.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re

from pytorch_lightning.utilities import _module_available

nltk = None
if _module_available("nltk"):
    import nltk

    nltk.download("punkt", quiet=True)


def add_newline_to_end_of_each_sentence(x: str) -> str:
    """This was added to get rougeLsum scores matching published rougeL scores for BART and PEGASUS."""
    x = re.sub("<n>", "", x)  # remove the pegasus "<n>" newline token (re.sub returns a new string)
    assert nltk, "nltk must be installed to separate newlines between sentences. (pip install nltk)"
    return "\n".join(nltk.sent_tokenize(x))
Example #14

from importlib.util import find_spec

import torch
from pytorch_lightning.utilities import _module_available

_NATIVE_AMP_AVAILABLE = _module_available("torch.cuda.amp") and hasattr(
    torch.cuda.amp, "autocast")

_TORCHVISION_AVAILABLE = find_spec("torchvision") is not None
_GYM_AVAILABLE = find_spec("gym") is not None
_SKLEARN_AVAILABLE = find_spec("sklearn") is not None
_PIL_AVAILABLE = find_spec("PIL") is not None
_OPENCV_AVAILABLE = find_spec("cv2") is not None
Example #15
MLflow Logger
-------------
"""
import logging
import os
import re
from argparse import Namespace
from time import time
from typing import Any, Dict, Optional, Union

from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only, rank_zero_warn

log = logging.getLogger(__name__)
LOCAL_FILE_URI_PREFIX = "file:"
_MLFLOW_AVAILABLE = _module_available("mlflow")
try:
    import mlflow
    from mlflow.tracking import context, MlflowClient
    from mlflow.utils.mlflow_tags import MLFLOW_RUN_NAME
# todo: there still seems to be some remaining import error with the Conda env
except ImportError:
    _MLFLOW_AVAILABLE = False
    mlflow, MlflowClient, context = None, None, None
    MLFLOW_RUN_NAME = "mlflow.runName"

# mlflow < 1.1.0 exposes `resolve_tags` directly on the context module
if hasattr(context, "resolve_tags"):
    from mlflow.tracking.context import resolve_tags

Example #16
"""
Neptune Logger
--------------
"""
import logging
from argparse import Namespace
from typing import Any, Dict, Iterable, Optional, Union

import torch
from torch import is_tensor

from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only

log = logging.getLogger(__name__)
_NEPTUNE_AVAILABLE = _module_available("neptune")

if _NEPTUNE_AVAILABLE:
    import neptune
    from neptune.experiments import Experiment
else:
    # needed for test mocks, these tests shall be updated
    neptune, Experiment = None, None


class NeptuneLogger(LightningLoggerBase):
    r"""
    Log using `Neptune <https://neptune.ai>`_.

    Install it with pip:
Example #17
"""
Weights and Biases Logger
-------------------------
"""
import os
from argparse import Namespace
from typing import Any, Dict, Optional, Union

import torch.nn as nn

from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_experiment
from pytorch_lightning.utilities import _module_available, rank_zero_only
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.warnings import WarningCache

_WANDB_AVAILABLE = _module_available("wandb")

try:
    import wandb
    from wandb.wandb_run import Run
except ImportError:
    # needed for test mocks, these tests shall be updated
    wandb, Run = None, None
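
Unlike Examples #2 and #16, this module computes the availability flag with `_module_available` but still wraps the import in try/except, so an installed-but-broken `wandb` also degrades to the `None` stubs. An equivalent flag-based guard would look like this (sketch; it would not catch a failing import of an installed package):

if _WANDB_AVAILABLE:
    import wandb
    from wandb.wandb_run import Run
else:
    # needed for test mocks
    wandb, Run = None, None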


class WandbLogger(LightningLoggerBase):
    r"""
    Log using `Weights and Biases <https://www.wandb.com/>`_.

    Install it with pip:
Example #18
import os
from urllib.error import HTTPError

from six.moves import urllib

from pytorch_lightning.utilities import _module_available

# TorchVision hotfix https://github.com/pytorch/vision/issues/1938
opener = urllib.request.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib.request.install_opener(opener)

_EXAMPLES_ROOT = os.path.dirname(__file__)
_PACKAGE_ROOT = os.path.dirname(_EXAMPLES_ROOT)
_DATASETS_PATH = os.path.join(_PACKAGE_ROOT, 'Datasets')

_TORCHVISION_MNIST_AVAILABLE = not bool(
    os.environ.get("PL_USE_MOCKED_MNIST", False))
_DALI_AVAILABLE = _module_available("nvidia.dali")

if _TORCHVISION_MNIST_AVAILABLE:
    try:
        from torchvision.datasets.mnist import MNIST
        MNIST(_DATASETS_PATH, download=True)
    except HTTPError:
        _TORCHVISION_MNIST_AVAILABLE = False

LIGHTNING_LOGO = """
                    ####
                ###########
             ####################
         ############################
    #####################################
##############################################
Example #19

    Compare package version with some requirements
    >>> _compare_version("torch", operator.ge, "0.1")
    True
    """
    try:
        pkg = importlib.import_module(package)
    except (ModuleNotFoundError, DistributionNotFound):
        return False
    try:
        pkg_version = Version(pkg.__version__)
    except TypeError:
        # this is mocked by Sphinx, so it shall return True to generate all summaries
        return True
    return op(pkg_version, Version(version))


_NATIVE_AMP_AVAILABLE: bool = _module_available("torch.cuda.amp") and hasattr(
    torch.cuda.amp, "autocast")

_TORCHVISION_AVAILABLE: bool = _module_available("torchvision")
_GYM_AVAILABLE: bool = _module_available("gym")
_SKLEARN_AVAILABLE: bool = _module_available("sklearn")
_PIL_AVAILABLE: bool = _module_available("PIL")
_OPENCV_AVAILABLE: bool = _module_available("cv2")
_WANDB_AVAILABLE: bool = _module_available("wandb")
_MATPLOTLIB_AVAILABLE: bool = _module_available("matplotlib")
_TORCHVISION_LESS_THAN_0_9_1: bool = _compare_version("torchvision",
                                                      operator.lt, "0.9.1")

__all__ = ["BatchGradientVerification"]
Example #20
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.nn as nn

import pytorch_lightning as pl
from pytorch_lightning.overrides.base import _LightningModuleWrapperBase, unwrap_lightning_module
from pytorch_lightning.utilities import _IS_WINDOWS, _module_available

_FAIRSCALE_AVAILABLE = not _IS_WINDOWS and _module_available("fairscale.nn")

if _FAIRSCALE_AVAILABLE:
    from fairscale.nn.data_parallel.sharded_ddp import ShardedDataParallel

    class LightningShardedDataParallel(_LightningModuleWrapperBase):
        # defined as a named subclass so that later docstrings can reference it
        pass

    def unwrap_lightning_module_sharded(
            wrapped_model: nn.Module) -> "pl.LightningModule":
        model = wrapped_model
        if isinstance(model, ShardedDataParallel):
            model = model.module

        return unwrap_lightning_module(model)