Example #1
def _test_loggers_pickle(tmpdir, monkeypatch, logger_class):
    """Verify that pickling trainer with logger works."""
    _patch_comet_atexit(monkeypatch)

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = logger_class(**logger_args)

    # this can cause pickle error if the experiment object is not picklable
    # the logger needs to remove it from the state before pickle
    _ = logger.experiment

    # logger also has to avoid adding un-picklable attributes to self in .save
    logger.log_metrics({"a": 1})
    logger.save()

    # test pickling loggers
    pickle.dumps(logger)

    trainer = Trainer(max_epochs=1, logger=logger)
    pkl_bytes = pickle.dumps(trainer)

    trainer2 = pickle.loads(pkl_bytes)
    trainer2.logger.log_metrics({"acc": 1.0})

    # make sure we restored properly
    assert trainer2.logger.name == logger.name
    assert trainer2.logger.save_dir == logger.save_dir
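These examples call a `_get_logger_args` helper that never appears in the listing. A minimal sketch consistent with how it is used here (the offline flag names are assumptions, chosen so no logger contacts an external service during tests):

import inspect


def _get_logger_args(logger_class, save_dir):
    """Build constructor kwargs for ``logger_class``, passing only the
    arguments its __init__ actually accepts (sketch)."""
    params = inspect.signature(logger_class.__init__).parameters
    logger_args = {}
    if "save_dir" in params:
        logger_args["save_dir"] = str(save_dir)
    # keep tests offline; these flag names vary between logger integrations
    if "offline" in params:
        logger_args["offline"] = True
    if "offline_mode" in params:
        logger_args["offline_mode"] = True
    return logger_args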
Example #2
def test_loggers_pickle(neptune, tmpdir, monkeypatch, logger_class):
    """Verify that pickling trainer with logger works."""
    _patch_comet_atexit(monkeypatch)

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = logger_class(**logger_args)

    # this can cause pickle error if the experiment object is not picklable
    # the logger needs to remove it from the state before pickle
    _ = logger.experiment

    # test pickling loggers
    pickle.dumps(logger)

    trainer = Trainer(
        max_epochs=1,
        logger=logger,
    )
    pkl_bytes = pickle.dumps(trainer)

    trainer2 = pickle.loads(pkl_bytes)
    trainer2.logger.log_metrics({'acc': 1.0})

    # make sure we restored properly
    assert trainer2.logger.name == logger.name
    assert trainer2.logger.save_dir == logger.save_dir
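Nearly every test starts with `_patch_comet_atexit(monkeypatch)`, another helper not shown in the listing. Its likely job is to stop comet_ml from registering an atexit hook that interferes with pytest's output capture, roughly:

import atexit


def _patch_comet_atexit(monkeypatch):
    """Disable atexit registration so comet_ml cannot install exit hooks
    that break pytest's stdout/stderr redirection (a sketch)."""
    monkeypatch.setattr(atexit, "register", lambda *args, **kwargs: None)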
Example #3
def test_loggers_fit_test_all(tmpdir, monkeypatch):
    """Verify that basic functionality of all loggers."""

    _test_loggers_fit_test(tmpdir, TensorBoardLogger)

    with mock.patch("pytorch_lightning.loggers.comet.comet_ml"), mock.patch(
            "pytorch_lightning.loggers.comet.CometOfflineExperiment"):
        _patch_comet_atexit(monkeypatch)
        _test_loggers_fit_test(tmpdir, CometLogger)

    with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch(
            "pytorch_lightning.loggers.mlflow.MlflowClient"):
        _test_loggers_fit_test(tmpdir, MLFlowLogger)

    with mock.patch("pytorch_lightning.loggers.neptune.neptune",
                    new_callable=create_neptune_mock):
        _test_loggers_fit_test(tmpdir, NeptuneLogger)

    with mock.patch("pytorch_lightning.loggers.test_tube.Experiment"):
        _test_loggers_fit_test(tmpdir, TestTubeLogger)

    with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb:
        wandb.run = None
        wandb.init().step = 0
        _test_loggers_fit_test(tmpdir, WandbLogger)
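The Neptune branch above patches the `neptune` module with `new_callable=create_neptune_mock`. That factory is also not part of the listing; since `mock.patch` calls `new_callable()` with no arguments to build the replacement object, a plausible sketch is simply a `MagicMock` (which attributes would need pre-configuring depends on the neptune client version in use):

from unittest import mock


def create_neptune_mock():
    """Return a MagicMock standing in for the ``neptune`` module so that
    NeptuneLogger never talks to a real backend (hypothetical sketch)."""
    return mock.MagicMock()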
Example #4
def test_logger_created_on_rank_zero_only(tmpdir, monkeypatch, logger_class):
    """ Test that loggers get replaced by dummy loggers on global rank > 0"""
    _patch_comet_atexit(monkeypatch)
    try:
        _test_logger_created_on_rank_zero_only(tmpdir, logger_class)
    except (ImportError, ModuleNotFoundError):
        pytest.xfail(f"multi-process test requires {logger_class.__class__} dependencies to be installed.")
Example #5
def test_loggers_pickle_all(tmpdir, monkeypatch, logger_class):
    """ Test that the logger objects can be pickled. This test only makes sense if the packages are installed. """
    _patch_comet_atexit(monkeypatch)
    try:
        _test_loggers_pickle(tmpdir, monkeypatch, logger_class)
    except (ImportError, ModuleNotFoundError):
        pytest.xfail(f"pickle test requires {logger_class.__class__} dependencies to be installed.")
Example #6
def test_logger_with_prefix_all(tmpdir, monkeypatch):
    """Test that prefix is added at the beginning of the metric keys."""
    prefix = "tmp"

    # Comet
    with mock.patch("pytorch_lightning.loggers.comet.comet_ml"), mock.patch(
            "pytorch_lightning.loggers.comet.CometOfflineExperiment"):
        _patch_comet_atexit(monkeypatch)
        logger = _instantiate_logger(CometLogger,
                                     save_dir=tmpdir,
                                     prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log_metrics.assert_called_once_with(
            {"tmp-test": 1.0}, epoch=None, step=0)

    # MLflow
    with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch(
            "pytorch_lightning.loggers.mlflow.MlflowClient"):
        logger = _instantiate_logger(MLFlowLogger,
                                     save_dir=tmpdir,
                                     prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log_metric.assert_called_once_with(
            ANY, "tmp-test", 1.0, ANY, 0)

    # Neptune
    with mock.patch("pytorch_lightning.loggers.neptune.neptune"):
        logger = _instantiate_logger(NeptuneLogger,
                                     api_key="test",
                                     project="project",
                                     save_dir=tmpdir,
                                     prefix=prefix)
        assert logger.experiment.__getitem__.call_count == 2
        logger.log_metrics({"test": 1.0}, step=0)
        assert logger.experiment.__getitem__.call_count == 3
        logger.experiment.__getitem__.assert_called_with("tmp/test")
        logger.experiment.__getitem__().log.assert_called_once_with(1.0)

    # TensorBoard
    with mock.patch("pytorch_lightning.loggers.tensorboard.SummaryWriter"):
        logger = _instantiate_logger(TensorBoardLogger,
                                     save_dir=tmpdir,
                                     prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.add_scalar.assert_called_once_with(
            "tmp-test", 1.0, 0)

    # WandB
    with mock.patch("pytorch_lightning.loggers.wandb.wandb") as wandb:
        logger = _instantiate_logger(WandbLogger,
                                     save_dir=tmpdir,
                                     prefix=prefix)
        wandb.run = None
        wandb.init().step = 0
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log.assert_called_once_with({
            "tmp-test": 1.0,
            "trainer/global_step": 0
        })
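`_instantiate_logger` is another helper that never appears in the listing. Given how it is called above, it presumably merges default constructor arguments with per-call overrides, e.g. building on the `_get_logger_args` sketch from earlier:

def _instantiate_logger(logger_class, save_dir, **override_kwargs):
    """Construct a logger with sensible test defaults, letting the caller
    override or extend them (sketch; mirrors the calls above)."""
    args = _get_logger_args(logger_class, save_dir)
    args.update(override_kwargs)
    return logger_class(**args)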
Example #7
def test_loggers_save_dir_and_weights_save_path_all(tmpdir, monkeypatch, logger_class):
    """Test the combinations of save_dir, weights_save_path and default_root_dir."""

    with contextlib.ExitStack() as stack:
        for mgr in LOGGER_CTX_MANAGERS:
            stack.enter_context(mgr)
        _patch_comet_atexit(monkeypatch)
        _test_loggers_save_dir_and_weights_save_path(tmpdir, CometLogger)
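The `LOGGER_CTX_MANAGERS` constant entered via the ExitStack above is not defined in the listing. Judging from Example #14, it is presumably a collection of `mock.patch` context managers covering the third-party logger backends, roughly:

from unittest import mock

# hypothetical; the real constant may patch more or fewer backends
LOGGER_CTX_MANAGERS = (
    mock.patch("pytorch_lightning.loggers.comet.comet_ml"),
    mock.patch("pytorch_lightning.loggers.comet.CometOfflineExperiment"),
    mock.patch("pytorch_lightning.loggers.mlflow.mlflow"),
    mock.patch("pytorch_lightning.loggers.mlflow.MlflowClient"),
    mock.patch("pytorch_lightning.loggers.wandb.wandb"),
)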
Example #8
def test_loggers_save_dir_and_weights_save_path(wandb, tmpdir, monkeypatch,
                                                logger_class):
    """ Test the combinations of save_dir, weights_save_path and default_root_dir.  """
    _patch_comet_atexit(monkeypatch)

    class TestLogger(logger_class):
        # for this test it does not matter what these attributes are
        # so we standardize them to make testing easier
        @property
        def version(self):
            return 'version'

        @property
        def name(self):
            return 'name'

    model = EvalModelTemplate()
    trainer_args = dict(
        default_root_dir=tmpdir,
        max_steps=1,
    )

    # no weights_save_path given
    save_dir = tmpdir / 'logs'
    weights_save_path = None
    logger = TestLogger(**_get_logger_args(TestLogger, save_dir))
    trainer = Trainer(**trainer_args,
                      logger=logger,
                      weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == trainer.default_root_dir
    assert trainer.checkpoint_callback.dirpath == os.path.join(
        logger.save_dir, 'name', 'version', 'checkpoints')
    assert trainer.default_root_dir == tmpdir

    # with weights_save_path given, the logger path and checkpoint path should be different
    save_dir = tmpdir / 'logs'
    weights_save_path = tmpdir / 'weights'
    logger = TestLogger(**_get_logger_args(TestLogger, save_dir))
    trainer = Trainer(**trainer_args,
                      logger=logger,
                      weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == weights_save_path
    assert trainer.logger.save_dir == save_dir
    assert trainer.checkpoint_callback.dirpath == weights_save_path / 'name' / 'version' / 'checkpoints'
    assert trainer.default_root_dir == tmpdir

    # no logger given
    weights_save_path = tmpdir / 'weights'
    trainer = Trainer(**trainer_args,
                      logger=False,
                      weights_save_path=weights_save_path)
    trainer.fit(model)
    assert trainer.weights_save_path == weights_save_path
    assert trainer.checkpoint_callback.dirpath == weights_save_path / 'checkpoints'
    assert trainer.default_root_dir == tmpdir
Example #9
def test_logger_created_on_rank_zero_only(tmpdir, monkeypatch, logger_class):
    """Test that loggers get replaced by dummy loggers on global rank > 0."""
    _patch_comet_atexit(monkeypatch)
    try:
        if logger_class is TestTubeLogger:
            with pytest.deprecated_call(match="TestTubeLogger is deprecated since v1.5"):
                _test_logger_created_on_rank_zero_only(tmpdir, logger_class)
        else:
            _test_logger_created_on_rank_zero_only(tmpdir, logger_class)
    except (ImportError, ModuleNotFoundError):
        pytest.xfail(f"multi-process test requires {logger_class.__class__} dependencies to be installed.")
Example #10
def test_loggers_pickle_all(tmpdir, monkeypatch, logger_class):
    """Test that the logger objects can be pickled.

    This test only makes sense if the packages are installed.
    """
    _patch_comet_atexit(monkeypatch)
    try:
        if logger_class is TestTubeLogger:
            with pytest.deprecated_call(match="TestTubeLogger is deprecated since v1.5"):
                _test_loggers_pickle(tmpdir, monkeypatch, logger_class)
        else:
            _test_loggers_pickle(tmpdir, monkeypatch, logger_class)
    except (ImportError, ModuleNotFoundError):
        pytest.xfail(f"pickle test requires {logger_class.__class__} dependencies to be installed.")
Example #11
def test_loggers_fit_test(wandb, neptune, tmpdir, monkeypatch, logger_class):
    """Verify that basic functionality of all loggers."""
    os.environ['PL_DEV_DEBUG'] = '0'
    _patch_comet_atexit(monkeypatch)

    model = EvalModelTemplate()

    class StoreHistoryLogger(logger_class):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.history = []

        def log_metrics(self, metrics, step):
            super().log_metrics(metrics, step)
            self.history.append((step, metrics))

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = StoreHistoryLogger(**logger_args)

    if logger_class == WandbLogger:
        # required mocks for Trainer
        logger.experiment.id = 'foo'
        logger.experiment.project_name.return_value = 'bar'

    trainer = Trainer(
        max_epochs=1,
        logger=logger,
        limit_train_batches=0.2,
        limit_val_batches=0.5,
        fast_dev_run=True,
        default_root_dir=tmpdir,
    )
    trainer.fit(model)
    trainer.test()

    log_metric_names = [(s, sorted(m.keys())) for s, m in logger.history]
    if logger_class == TensorBoardLogger:
        expected = [(0, ['hp_metric']), (0, ['epoch', 'train_some_val']),
                    (0, ['early_stop_on', 'epoch', 'val_acc']),
                    (0, ['hp_metric']), (1, ['epoch', 'test_acc',
                                             'test_loss'])]
        assert log_metric_names == expected
    else:
        expected = [(0, ['epoch', 'train_some_val']),
                    (0, ['early_stop_on', 'epoch', 'val_acc']),
                    (1, ['epoch', 'test_acc', 'test_loss'])]
        assert log_metric_names == expected
Example #12
def test_logger_with_prefix_all(tmpdir, monkeypatch):
    """
    Test that prefix is added at the beginning of the metric keys.
    """
    prefix = 'tmp'

    # Comet
    with mock.patch('pytorch_lightning.loggers.comet.comet_ml'), \
         mock.patch('pytorch_lightning.loggers.comet.CometOfflineExperiment'):
        _patch_comet_atexit(monkeypatch)
        logger = _instantiate_logger(CometLogger, save_dir=tmpdir, prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log_metrics.assert_called_once_with({"tmp-test": 1.0}, epoch=None, step=0)

    # MLflow
    with mock.patch('pytorch_lightning.loggers.mlflow.mlflow'), \
         mock.patch('pytorch_lightning.loggers.mlflow.MlflowClient'):
        logger = _instantiate_logger(MLFlowLogger, save_dir=tmpdir, prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log_metric.assert_called_once_with(ANY, "tmp-test", 1.0, ANY, 0)

    # Neptune
    with mock.patch('pytorch_lightning.loggers.neptune.neptune'):
        logger = _instantiate_logger(NeptuneLogger, save_dir=tmpdir, prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log_metric.assert_called_once_with("tmp-test", x=0, y=1.0)

    # TensorBoard
    with mock.patch('pytorch_lightning.loggers.tensorboard.SummaryWriter'):
        logger = _instantiate_logger(TensorBoardLogger, save_dir=tmpdir, prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.add_scalar.assert_called_once_with("tmp-test", 1.0, 0)

    # TestTube
    with mock.patch('pytorch_lightning.loggers.test_tube.Experiment'):
        logger = _instantiate_logger(TestTubeLogger, save_dir=tmpdir, prefix=prefix)
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log.assert_called_once_with({"tmp-test": 1.0}, global_step=0)

    # WandB
    with mock.patch('pytorch_lightning.loggers.wandb.wandb') as wandb:
        logger = _instantiate_logger(WandbLogger, save_dir=tmpdir, prefix=prefix)
        wandb.run = None
        wandb.init().step = 0
        logger.log_metrics({"test": 1.0}, step=0)
        logger.experiment.log.assert_called_once_with({'tmp-test': 1.0}, step=0)
Example #13
def test_logger_created_on_rank_zero_only(neptune, tmpdir, monkeypatch,
                                          logger_class):
    """ Test that loggers get replaced by dummy loggers on global rank > 0"""
    _patch_comet_atexit(monkeypatch)

    logger_args = _get_logger_args(logger_class, tmpdir)
    logger = logger_class(**logger_args)
    model = EvalModelTemplate()
    trainer = Trainer(
        logger=logger,
        default_root_dir=tmpdir,
        distributed_backend='ddp_cpu',
        num_processes=2,
        max_steps=1,
        checkpoint_callback=True,
        callbacks=[RankZeroLoggerCheck()],
    )
    result = trainer.fit(model)
    assert result == 1
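The `RankZeroLoggerCheck` callback used above is not shown in the listing. A sketch of what it plausibly asserts, assuming `DummyExperiment` from `pytorch_lightning.loggers.base` is the stand-in that non-zero ranks receive:

from pytorch_lightning import Callback
from pytorch_lightning.loggers.base import DummyExperiment


class RankZeroLoggerCheck(Callback):
    # defined at module scope so the trainer stays picklable under ddp_cpu
    def on_train_batch_start(self, trainer, pl_module, batch, batch_idx, dataloader_idx):
        is_dummy = isinstance(trainer.logger.experiment, DummyExperiment)
        if trainer.is_global_zero:
            # rank 0 keeps the real experiment object
            assert not is_dummy
        else:
            # every other rank gets a no-op dummy experiment
            assert is_dummy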
Example #14
def test_loggers_save_dir_and_weights_save_path_all(tmpdir, monkeypatch):
    """Test the combinations of save_dir, weights_save_path and default_root_dir."""

    _test_loggers_save_dir_and_weights_save_path(tmpdir, TensorBoardLogger)

    with mock.patch("pytorch_lightning.loggers.comet.comet_ml"), mock.patch(
            "pytorch_lightning.loggers.comet.CometOfflineExperiment"):
        _patch_comet_atexit(monkeypatch)
        _test_loggers_save_dir_and_weights_save_path(tmpdir, CometLogger)

    with mock.patch("pytorch_lightning.loggers.mlflow.mlflow"), mock.patch(
            "pytorch_lightning.loggers.mlflow.MlflowClient"):
        _test_loggers_save_dir_and_weights_save_path(tmpdir, MLFlowLogger)

    with mock.patch("pytorch_lightning.loggers.test_tube.Experiment"):
        _test_loggers_save_dir_and_weights_save_path(tmpdir, TestTubeLogger)

    with mock.patch("pytorch_lightning.loggers.wandb.wandb"):
        _test_loggers_save_dir_and_weights_save_path(tmpdir, WandbLogger)
Example #15
def test_loggers_fit_test_all(tmpdir, monkeypatch):
    """ Verify that basic functionality of all loggers. """

    _test_loggers_fit_test(tmpdir, TensorBoardLogger)

    with mock.patch('pytorch_lightning.loggers.comet.comet_ml'), \
         mock.patch('pytorch_lightning.loggers.comet.CometOfflineExperiment'):
        _patch_comet_atexit(monkeypatch)
        _test_loggers_fit_test(tmpdir, CometLogger)

    with mock.patch('pytorch_lightning.loggers.mlflow.mlflow'), \
         mock.patch('pytorch_lightning.loggers.mlflow.MlflowClient'):
        _test_loggers_fit_test(tmpdir, MLFlowLogger)

    with mock.patch('pytorch_lightning.loggers.neptune.neptune'):
        _test_loggers_fit_test(tmpdir, NeptuneLogger)

    with mock.patch('pytorch_lightning.loggers.test_tube.Experiment'):
        _test_loggers_fit_test(tmpdir, TestTubeLogger)

    with mock.patch('pytorch_lightning.loggers.wandb.wandb'):
        _test_loggers_fit_test(tmpdir, WandbLogger)