Example #1
# Assumed imports; DummyClassifier and the task helpers are local to the test module (see the sketch after Example #3).
import pytest
import torch.nn.functional as F
from flash import Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException

def test_resolve_callbacks_multi_error(tmpdir):
    # A task that provides more than one default finetune callback must be rejected.
    model = DummyClassifier()
    trainer = Trainer(fast_dev_run=True, default_root_dir=tmpdir)
    task = MultiFinetuneClassificationTask(model, loss_fn=F.nll_loss)
    with pytest.raises(MisconfigurationException,
                       match="should create a list with only 1 callback"):
        trainer._resolve_callbacks(task, None)
Example #2
# Assumed imports; helpers as in Example #1.
import pytest
import torch.nn.functional as F
from flash import Trainer

def test_resolve_callbacks_override_warning(tmpdir):
    # Passing an explicit strategy while the task already ships a default finetune callback should warn.
    model = DummyClassifier()
    trainer = Trainer(fast_dev_run=True, default_root_dir=tmpdir)
    task = FinetuneClassificationTask(model, loss_fn=F.nll_loss)
    with pytest.warns(UserWarning,
                      match="The model contains a default finetune callback"):
        trainer._resolve_callbacks(task, "test")
Example #3
# Assumed imports; helpers as in Example #1.
import pytest
import torch.nn.functional as F
from flash import ClassificationTask, Trainer
from pytorch_lightning.callbacks import EarlyStopping
from pytorch_lightning.utilities.exceptions import MisconfigurationException

def test_resolve_callbacks_invalid_strategy(tmpdir):
    # A finetune strategy must be a BaseFinetuning callback; EarlyStopping is not one.
    model = DummyClassifier()
    trainer = Trainer(fast_dev_run=True, default_root_dir=tmpdir)
    task = ClassificationTask(model, loss_fn=F.nll_loss)
    with pytest.raises(
            MisconfigurationException,
            match="should be a ``pytorch_lightning.callbacks.BaseFinetuning``"
    ):
        trainer._resolve_callbacks(task, EarlyStopping())
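
All three examples rely on helpers that the listing does not show: a small DummyClassifier model and task subclasses that register default finetune callbacks, apparently via flash's configure_finetune_callback hook. The snippet below is a minimal, hypothetical stand-in for those fixtures, not the real test module: class bodies, layer sizes, and the BaseFinetuning method signatures (taken from the PyTorch Lightning versions contemporary with these tests) are assumptions, and only the names come from the examples above.

# Hypothetical stand-ins for the fixtures used in Examples #1-#3.
# The real definitions live in the lightning-flash test module and may differ.
import torch
from torch import nn
from flash import ClassificationTask
from pytorch_lightning.callbacks import BaseFinetuning


class DummyClassifier(nn.Module):
    # Tiny model: a "backbone" plus a log-softmax output, compatible with F.nll_loss.
    def __init__(self, num_features=16, num_classes=10):
        super().__init__()
        self.backbone = nn.Linear(num_features, num_features)
        self.head = nn.Linear(num_features, num_classes)

    def forward(self, x):
        return torch.log_softmax(self.head(self.backbone(x)), dim=-1)


class NoFreeze(BaseFinetuning):
    # Minimal finetuning callback: freezes nothing and never unfreezes anything.
    def freeze_before_training(self, pl_module):
        pass

    def finetune_function(self, pl_module, current_epoch, optimizer, opt_idx):
        pass


class FinetuneClassificationTask(ClassificationTask):
    # Task that advertises exactly one default finetune callback (Example #2).
    def configure_finetune_callback(self):
        return [NoFreeze()]


class MultiFinetuneClassificationTask(ClassificationTask):
    # Task that (incorrectly) advertises two finetune callbacks (Example #1).
    def configure_finetune_callback(self):
        return [NoFreeze(), NoFreeze()]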