Code example #1
import pytest
from pytorch_lightning import Trainer
# Test helper from the PyTorch Lightning test suite; the import path is
# assumed here and differs between versions (e.g. tests.base vs tests.helpers).
from tests.base import EvalModelTemplate


# The parametrize values below are assumed for illustration; the original
# decorator is not shown in this snippet.
@pytest.mark.parametrize("optimizer", ["adagrad", "adam"])
def test_call_to_trainer_method(tmpdir, optimizer):
    """Test that directly calling the trainer method works"""

    hparams = EvalModelTemplate.get_default_hparams()
    model = EvalModelTemplate(**hparams)
    if optimizer == "adagrad":
        # swap in the Adagrad variant of configure_optimizers
        model.configure_optimizers = model.configure_optimizers__adagrad

    before_lr = hparams.get("learning_rate")
    # logger file to get meta
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=2)

    # run the learning rate finder and apply its suggestion before tuning
    lrfinder = trainer.tuner.lr_find(model, mode="linear")
    after_lr = lrfinder.suggestion()
    model.learning_rate = after_lr
    trainer.tune(model)

    assert before_lr != after_lr, "Learning rate was not altered after running learning rate finder"
Code example #2
from pytorch_lightning import Trainer
# Test helper from the PyTorch Lightning test suite; import path assumed.
from tests.base import EvalModelTemplate


# Note: this variant uses the older Trainer API
# (default_save_path and trainer.lr_find rather than
# default_root_dir and trainer.tuner.lr_find).
def test_call_to_trainer_method(tmpdir):
    """Test that directly calling the trainer method works"""

    hparams = EvalModelTemplate.get_default_hparams()
    model = EvalModelTemplate(**hparams)

    before_lr = hparams.get('learning_rate')
    # logger file to get meta
    trainer = Trainer(
        default_save_path=tmpdir,
        max_epochs=2,
    )

    # run the learning rate finder and apply its suggestion before fitting
    lrfinder = trainer.lr_find(model, mode='linear')
    after_lr = lrfinder.suggestion()
    model.learning_rate = after_lr
    trainer.fit(model)

    assert before_lr != after_lr, \
        'Learning rate was not altered after running learning rate finder'
Code example #3
from pytorch_lightning import Trainer
# Test helpers from the PyTorch Lightning test suite; import paths assumed.
from tests.base import EvalModelTemplate
from tests.base.datamodules import TrialMNISTDataModule


def test_datamodule_parameter(tmpdir):
    """Test that the datamodule parameter works"""

    # trial datamodule
    dm = TrialMNISTDataModule(tmpdir)

    hparams = EvalModelTemplate.get_default_hparams()
    model = EvalModelTemplate(**hparams)

    before_lr = hparams.get('learning_rate')
    # logger file to get meta
    trainer = Trainer(
        default_root_dir=tmpdir,
        max_epochs=2,
    )

    # run the learning rate finder against the datamodule's train dataloader
    lrfinder = trainer.tuner.lr_find(model, datamodule=dm)
    after_lr = lrfinder.suggestion()
    model.learning_rate = after_lr

    assert before_lr != after_lr, \
        'Learning rate was not altered after running learning rate finder'
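
Outside the test harness, the same pattern applies to any LightningModule. The following is a minimal sketch, not taken from the source: the LitRegressor class, its synthetic data, and the learning_rate attribute name are illustrative assumptions; the lr_find and suggestion calls follow the tuner-based API used in example #1.

import torch
from torch.utils.data import DataLoader, TensorDataset
from pytorch_lightning import LightningModule, Trainer


class LitRegressor(LightningModule):
    def __init__(self, learning_rate=1e-3):
        super().__init__()
        self.learning_rate = learning_rate
        self.layer = torch.nn.Linear(32, 1)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self(x), y)

    def configure_optimizers(self):
        # the optimizer reads learning_rate, which is overwritten below
        # with the finder's suggestion before fitting
        return torch.optim.Adam(self.parameters(), lr=self.learning_rate)

    def train_dataloader(self):
        data = TensorDataset(torch.randn(64, 32), torch.randn(64, 1))
        return DataLoader(data, batch_size=8)


model = LitRegressor()
trainer = Trainer(max_epochs=2)
lr_finder = trainer.tuner.lr_find(model)  # same call pattern as example #1
model.learning_rate = lr_finder.suggestion()
trainer.fit(model)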