Example #1
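These excerpts assume shared test scaffolding that is not shown: the module imports and a small DummyDataset helper. The sketch below is a reconstruction from how the names are used in the examples; the import paths and the exact shape of DummyDataset are assumptions and may differ in the original suite.

# A sketch of the scaffolding the excerpts below assume; import paths and the
# DummyDataset definition are reconstructions, not the original module header.
import pytest
import torch
import torch.nn.functional as F
from torch import nn
from torch.utils.data import DataLoader
from pytorch_lightning.utilities.exceptions import MisconfigurationException

import flash
from flash.core.classification import ClassificationTask
from flash.core.utilities.imports import _TRANSFORMERS_AVAILABLE


class DummyDataset(torch.utils.data.Dataset):
    """Random 28x28 inputs with integer class labels, matching the 28 * 28 -> 10 model."""

    def __getitem__(self, index):
        return torch.rand(1, 28, 28), torch.randint(10, size=(1,)).item()

    def __len__(self):
        return 100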
# `optim`, `sched`, and `interval` come from @pytest.mark.parametrize
# decorators omitted from this excerpt (see the sketch after this example).
def test_optimizers_and_schedulers(tmpdir, optim, sched, interval):

    model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10),
                          nn.LogSoftmax(dim=1))
    task = ClassificationTask(model, optimizer=optim, lr_scheduler=sched)
    train_dl = torch.utils.data.DataLoader(DummyDataset())

    if sched is None:
        optimizer = task.configure_optimizers()
        assert isinstance(optimizer, torch.optim.Adadelta)
    else:
        optimizer, scheduler = task.configure_optimizers()
        assert isinstance(optimizer[0], torch.optim.Adadelta)

        scheduler = scheduler[0]
        assert isinstance(scheduler["scheduler"],
                          torch.optim.lr_scheduler.StepLR)
        assert scheduler["interval"] == interval

    # run a short fit to generate a checkpoint under tmpdir
    trainer = flash.Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=10,
        max_epochs=1,
    )
    trainer.fit(task, train_dl)
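The decorators that drive this test are not part of the excerpt. Below is a plausible reconstruction with hypothetical values chosen to match the asserts (an Adadelta optimizer, a StepLR scheduler, and a None case); the original suite's value lists may be longer or use different forms.

# Hypothetical parametrization consistent with the asserts above.
@pytest.mark.parametrize("optim", ["Adadelta"])
@pytest.mark.parametrize("sched, interval", [
    (None, "epoch"),
    (("StepLR", {"step_size": 1}), "epoch"),
])
def test_optimizers_and_schedulers(tmpdir, optim, sched, interval):
    ...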
Example #2
def test_optimization(tmpdir):
    # NOTE: this excerpt appears to target a Flash version that used
    # `scheduler` / `scheduler_kwargs`; the other examples use `lr_scheduler`.

    model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10), nn.LogSoftmax(dim=1))
    optim = torch.optim.Adam(model.parameters())
    task = ClassificationTask(model, optimizer=optim, scheduler=None)

    optimizer = task.configure_optimizers()
    assert optimizer == optim

    task = ClassificationTask(
        model,
        optimizer=torch.optim.Adadelta,
        optimizer_kwargs={"eps": 0.5},
        scheduler=None
    )
    optimizer = task.configure_optimizers()
    assert isinstance(optimizer, torch.optim.Adadelta)
    assert optimizer.defaults["eps"] == 0.5

    task = ClassificationTask(
        model,
        optimizer=torch.optim.Adadelta,
        scheduler=torch.optim.lr_scheduler.StepLR,
        scheduler_kwargs={"step_size": 1}
    )
    optimizer, scheduler = task.configure_optimizers()
    assert isinstance(optimizer[0], torch.optim.Adadelta)
    assert isinstance(scheduler[0], torch.optim.lr_scheduler.StepLR)

    optim = torch.optim.Adadelta(model.parameters())
    task = ClassificationTask(
        model,
        optimizer=optim,
        scheduler=torch.optim.lr_scheduler.StepLR(optim, step_size=1)
    )
    optimizer, scheduler = task.configure_optimizers()
    assert isinstance(optimizer[0], torch.optim.Adadelta)
    assert isinstance(scheduler[0], torch.optim.lr_scheduler.StepLR)

    if _TRANSFORMERS_AVAILABLE:
        from transformers.optimization import get_linear_schedule_with_warmup

        assert task.available_schedulers() == [
            'constant_schedule', 'constant_schedule_with_warmup', 'cosine_schedule_with_warmup',
            'cosine_with_hard_restarts_schedule_with_warmup', 'linear_schedule_with_warmup',
            'polynomial_decay_schedule_with_warmup'
        ]

        optim = torch.optim.Adadelta(model.parameters())
        # warmup schedulers need the trainer to infer the number of training
        # steps, so configuring one before `fit` raises
        with pytest.raises(MisconfigurationException, match="The LightningModule isn't attached to the trainer yet."):
            task = ClassificationTask(model, optimizer=optim, scheduler="linear_schedule_with_warmup")
            optimizer, scheduler = task.configure_optimizers()

        task = ClassificationTask(
            model,
            optimizer=optim,
            scheduler="linear_schedule_with_warmup",
            scheduler_kwargs={"num_warmup_steps": 0.1},
            loss_fn=F.nll_loss,
        )
        trainer = flash.Trainer(max_epochs=1, limit_train_batches=2)
        ds = DummyDataset()
        trainer.fit(task, train_dataloader=DataLoader(ds))
        optimizer, scheduler = task.configure_optimizers()
        assert isinstance(optimizer[0], torch.optim.Adadelta)
        assert isinstance(scheduler[0], torch.optim.lr_scheduler.LambdaLR)
        expected = get_linear_schedule_with_warmup.__name__
        assert scheduler[0].lr_lambdas[0].__qualname__.split('.')[0] == expected
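The indexing in these asserts (`optimizer[0]`, `scheduler[0]`, and Example #1's `scheduler["scheduler"]`) follows Lightning's `configure_optimizers` contract: with a scheduler configured, the method returns a list of optimizers and a list of schedulers, where each scheduler entry may be a dict. A minimal sketch of that convention (a hypothetical module, not Flash's actual implementation):

import torch
import pytorch_lightning as pl
from torch import nn


class TwoListTask(pl.LightningModule):
    """Hypothetical minimal module illustrating the two-list return convention."""

    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(28 * 28, 10)

    def configure_optimizers(self):
        optimizer = torch.optim.Adadelta(self.parameters())
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
        # Two lists: hence the optimizer[0] / scheduler[0] indexing above.
        # Each scheduler entry may also be a plain scheduler object; the dict
        # form carries extra keys such as "interval", which Example #1 checks.
        return [optimizer], [{"scheduler": scheduler, "interval": "epoch"}]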
Example #3
def test_external_optimizers_torch_optimizer(tmpdir, optim):
    # `optim` comes from a @pytest.mark.parametrize decorator omitted here
    # (see the sketch after this example).

    model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10),
                          nn.LogSoftmax(dim=1))
    task = ClassificationTask(model,
                              optimizer=optim,
                              lr_scheduler=None,
                              loss_fn=F.nll_loss)
    trainer = flash.Trainer(max_epochs=1,
                            limit_train_batches=2,
                            gpus=torch.cuda.device_count())
    ds = DummyDataset()
    trainer.fit(task, train_dataloader=DataLoader(ds))

    from torch_optimizer import Yogi

    optimizer = task.configure_optimizers()
    assert isinstance(optimizer, Yogi)
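Here too the `optim` argument arrives via a parametrize decorator that the excerpt omits; the Yogi assert implies the values resolve to torch_optimizer's Yogi. A hypothetical reconstruction:

# Hypothetical; the original suite may iterate over several torch_optimizer
# names, with the Yogi assert adjusted accordingly.
@pytest.mark.parametrize("optim", ["Yogi"])
def test_external_optimizers_torch_optimizer(tmpdir, optim):
    ...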
Example #4
def test_errors_and_exceptions_optimizers_and_schedulers():
    model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10),
                          nn.LogSoftmax(dim=1))

    with pytest.raises(TypeError):
        task = ClassificationTask(model,
                                  optimizer=[1, 2, 3, 4],
                                  lr_scheduler=None)
        task.configure_optimizers()

    with pytest.raises(KeyError):
        task = ClassificationTask(model,
                                  optimizer="not_a_valid_key",
                                  lr_scheduler=None)
        task.configure_optimizers()

    with pytest.raises(TypeError):
        task = ClassificationTask(model,
                                  optimizer=(["not", "a", "valid", "type"],
                                             {"random_kwarg": 10}),
                                  lr_scheduler=None)
        task.configure_optimizers()

    with pytest.raises(TypeError):
        task = ClassificationTask(model,
                                  optimizer=("Adam", ["non", "dict", "type"]),
                                  lr_scheduler=None)
        task.configure_optimizers()

    with pytest.raises(KeyError):
        task = ClassificationTask(model,
                                  optimizer="Adam",
                                  lr_scheduler="not_a_valid_key")
        task.configure_optimizers()

    with pytest.raises(TypeError):
        task = ClassificationTask(model,
                                  optimizer="Adam",
                                  lr_scheduler=["not", "a", "valid", "type"])
        task.configure_optimizers()

    with pytest.raises(TypeError):
        task = ClassificationTask(model,
                                  optimizer="Adam",
                                  lr_scheduler=(["not", "a", "valid", "type"],
                                                {"random_kwarg": 10}))
        task.configure_optimizers()
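For contrast, the failure cases above imply which forms are accepted: a registry key string, and a (key, kwargs) tuple whose second element is a dict. A sketch of configurations that should pass, assuming the same registry keys used elsewhere in these tests:

# Forms the TypeError/KeyError cases above imply are valid (a sketch).
model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10),
                      nn.LogSoftmax(dim=1))

task = ClassificationTask(model, optimizer="Adam", lr_scheduler=None)
task.configure_optimizers()

task = ClassificationTask(model,
                          optimizer=("Adam", {"lr": 1e-3}),
                          lr_scheduler=None)
task.configure_optimizers()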