def test_horovod_transfer_batch_to_gpu(tmpdir):
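    """Ensure Horovod moves each training/validation batch onto the GPU."""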
    class TestTrainingStepModel(EvalModelTemplate):
        def training_step(self, batch, *args, **kwargs):
            x, y = batch
            assert str(x.device) != 'cpu'
            assert str(y.device) != 'cpu'
            return super().training_step(batch, *args, **kwargs)

        def validation_step(self, batch, *args, **kwargs):
            x, y = batch
            assert str(x.device) != 'cpu'
            assert str(y.device) != 'cpu'
            return super().validation_step(batch, *args, **kwargs)

    hparams = EvalModelTemplate.get_default_hparams()
    model = TestTrainingStepModel(hparams)

    # single-GPU run through the Horovod distributed backend
    trainer_options = dict(default_root_dir=str(tmpdir),
                           progress_bar_refresh_rate=0,
                           max_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.2,
                           gpus=1,
                           deterministic=True,
                           distributed_backend='horovod')
    tutils.run_model_test_without_loggers(trainer_options, model)
Example #2
def test_lbfgs_cpu_model(tmpdir):
    """Test each of the trainer options."""
    tutils.reset_seed()

    trainer_options = dict(
        default_root_dir=tmpdir,
        max_epochs=2,
        progress_bar_refresh_rate=0,
        weights_summary='top',
        train_percent_check=1.0,
        val_percent_check=0.2,
    )

    # default test model configured to use the LBFGS optimizer
    model, hparams = tutils.get_default_model(lbfgs=True)
    tutils.run_model_test_without_loggers(trainer_options, model, min_acc=0.5)
Example #3
def test_lbfgs_cpu_model(tmpdir):
    """Test each of the trainer options."""
    trainer_options = dict(
        default_root_dir=tmpdir,
        max_epochs=1,
        progress_bar_refresh_rate=0,
        weights_summary='top',
        train_percent_check=0.2,
        val_percent_check=0.2,
    )

    # configure the template model to use the LBFGS optimizer
    hparams = EvalModelTemplate.get_default_hparams()
    hparams.update(optimizer_name='lbfgs', learning_rate=0.004)
    model = EvalModelTemplate(**hparams)
    model.configure_optimizers = model.configure_optimizers__lbfgs
    tutils.run_model_test_without_loggers(trainer_options, model, min_acc=0.25)

def test_default_logger_callbacks_cpu_model(tmpdir):
    """Test the default logger and callbacks on a CPU model."""
    trainer_options = dict(
        default_root_dir=tmpdir,
        max_epochs=1,
        gradient_clip_val=1.0,
        overfit_pct=0.20,
        progress_bar_refresh_rate=0,
        train_percent_check=0.01,
        val_percent_check=0.01,
    )

    model = EvalModelTemplate()
    tutils.run_model_test_without_loggers(trainer_options, model)

    # test freeze and unfreeze on CPU
    model.freeze()
    model.unfreeze()
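
Note: these examples come from the PyTorch Lightning test suite and rely on test helpers that are not shown here. A minimal sketch of the imports they assume is below; the exact module paths changed between Lightning releases, so treat them as assumptions rather than definitive paths.

# Assumed imports for the examples above (module paths are version-dependent):
import tests.base.utils as tutils           # reset_seed, get_default_model, run_model_test_without_loggers
from tests.base import EvalModelTemplate    # the reference LightningModule used throughout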