Code Example #1
import torch
from copy import deepcopy
from pytorch_lightning import Trainer
# EvalModelTemplate is a helper model from PyTorch Lightning's own test suite;
# the import path below is assumed and may differ between versions.
from tests.base import EvalModelTemplate


def test_model_reset_correctly(tmpdir):
    """Check that model weights are correctly reset after lr_find()."""

    model = EvalModelTemplate()

    # single-epoch trainer that writes its artifacts to the pytest tmpdir
    # (default_save_path is the older name of default_root_dir)
    trainer = Trainer(default_save_path=tmpdir, max_epochs=1)

    # state_dict() returns references to the live parameter tensors, so take a
    # deep copy to get a true snapshot of the initial weights
    before_state_dict = deepcopy(model.state_dict())

    _ = trainer.lr_find(model, num_training=5)

    after_state_dict = model.state_dict()

    for key in before_state_dict.keys():
        assert torch.all(torch.eq(before_state_dict[key], after_state_dict[key])), \
            'Model was not reset correctly after learning rate finder'
Code Example #2
import torch
from copy import deepcopy
from pytorch_lightning import Trainer
# EvalModelTemplate and tutils are helpers from PyTorch Lightning's own test
# suite; the import paths below are assumed and may differ between versions.
from tests.base import EvalModelTemplate
import tests.base.utils as tutils


def test_model_reset_correctly(tmpdir):
    """Check that model weights are correctly reset after scaling batch size."""
    tutils.reset_seed()

    model = EvalModelTemplate()

    # single-epoch trainer that writes its artifacts to the pytest tmpdir
    trainer = Trainer(default_save_path=tmpdir, max_epochs=1)

    # state_dict() returns references to the live parameter tensors, so take a
    # deep copy to get a true snapshot of the initial weights
    before_state_dict = deepcopy(model.state_dict())

    trainer.scale_batch_size(model, max_trials=5)

    after_state_dict = model.state_dict()

    for key in before_state_dict.keys():
        assert torch.all(torch.eq(before_state_dict[key], after_state_dict[key])), \
            'Model was not reset correctly after scaling batch size'
Code Example #3
import os
import torch
from copy import deepcopy
from pytorch_lightning import Trainer
# EvalModelTemplate is a helper model from PyTorch Lightning's own test suite;
# the import path below is assumed and may differ between versions.
from tests.base import EvalModelTemplate


def test_model_reset_correctly(tmpdir):
    """Check that model weights are correctly reset after lr_find()."""

    model = EvalModelTemplate()

    # single-epoch trainer rooted at the pytest tmpdir
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1)

    # deep copy, since state_dict() returns references to the live parameter tensors
    before_state_dict = deepcopy(model.state_dict())

    trainer.tuner.lr_find(model, num_training=5)

    after_state_dict = model.state_dict()

    for key in before_state_dict.keys():
        assert torch.all(
            torch.eq(before_state_dict[key], after_state_dict[key])
        ), "Model was not reset correctly after learning rate finder"

    # the temporary checkpoint written by lr_find should have been cleaned up
    assert not os.path.exists(tmpdir / "lr_find_temp_model.ckpt")
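
For context, the functions exercised by these tests are the Trainer's tuning helpers. Below is a minimal usage sketch against the 1.x-style API shown in code example #3 (trainer.tuner.*); MyLitModel and its batch_size / learning_rate attributes are hypothetical stand-ins for a real LightningModule.

import pytorch_lightning as pl

model = MyLitModel()  # hypothetical LightningModule exposing batch_size and learning_rate
trainer = pl.Trainer(max_epochs=1)

# search for the largest batch size that fits in memory; model weights are
# restored afterwards, which is what the second test above verifies
new_batch_size = trainer.tuner.scale_batch_size(model, max_trials=5)
model.batch_size = new_batch_size

# run the learning-rate range test; weights are likewise reset afterwards,
# which is what the first and third tests above verify
lr_finder = trainer.tuner.lr_find(model, num_training=100)
model.learning_rate = lr_finder.suggestion()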