def test_multi_gpu_model_dp(tmpdir):
    """
    Make sure DP works
    :return:
    """
    tutils.reset_seed()

    if not tutils.can_run_gpu_test():
        return

    model, hparams = tutils.get_model()
    trainer_options = dict(
        default_save_path=tmpdir,
        show_progress_bar=False,
        distributed_backend='dp',
        max_nb_epochs=1,
        train_percent_check=0.1,
        val_percent_check=0.1,
        gpus='-1'
    )

    tutils.run_model_test(trainer_options, model, hparams)

    # test memory helper functions
    memory.get_memory_profile('min_max')
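
# `memory.get_memory_profile` comes from PyTorch Lightning's memory utilities; in
# the versions these tests target it summarizes per-GPU memory usage. A minimal
# sketch of the idea using only `torch.cuda` (some versions shell out to
# nvidia-smi instead, so treat the names and units below as assumptions):
import torch

def min_max_memory_profile_sketch():
    """Illustrative 'min_max' profile: keep only the extremes of a per-GPU map."""
    memory_map = {
        f'gpu_{i}': torch.cuda.memory_allocated(i) / 1024 ** 2  # MiB in use
        for i in range(torch.cuda.device_count())
    }
    return {
        'min_gpu_mem': min(memory_map.values()),
        'max_gpu_mem': max(memory_map.values()),
    }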
def test_early_stopping_cpu_model(tmpdir):
    """Test each of the trainer options."""
    tutils.reset_seed()

    stopping = EarlyStopping(monitor='val_loss', min_delta=0.1)
    trainer_options = dict(
        default_save_path=tmpdir,
        min_epochs=2,
        early_stop_callback=stopping,
        gradient_clip_val=1.0,
        overfit_pct=0.20,
        track_grad_norm=2,
        print_nan_grads=True,
        show_progress_bar=True,
        logger=tutils.get_test_tube_logger(tmpdir),
        train_percent_check=0.1,
        val_percent_check=0.1,
    )

    model, hparams = tutils.get_model()
    tutils.run_model_test(trainer_options,
                          model,
                          on_gpu=False,
                          early_stop=True)

    # test freeze on cpu
    model.freeze()
    model.unfreeze()
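
# On a LightningModule, `freeze()` disables gradient tracking for every parameter
# and switches the module to eval mode; `unfreeze()` reverses both. A minimal
# sketch of that behavior on a plain nn.Module, showing what the two calls above
# exercise:
import torch.nn as nn

def freeze_sketch(module: nn.Module):
    """Disable gradients for all parameters and switch to eval mode."""
    for param in module.parameters():
        param.requires_grad = False
    module.eval()

def unfreeze_sketch(module: nn.Module):
    """Re-enable gradients and switch back to train mode."""
    for param in module.parameters():
        param.requires_grad = True
    module.train()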
def test_cpu_model(tmpdir):
    """Make sure model trains on CPU."""
    tutils.reset_seed()

    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=False,
                           logger=tutils.get_test_tube_logger(tmpdir),
                           max_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.4)

    model, hparams = tutils.get_model()

    tutils.run_model_test(trainer_options, model, on_gpu=False)
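
# `tutils.run_model_test` itself is not shown on this page. Judging from the call
# sites above, it presumably builds a Trainer from the option dict, fits the
# model, and asserts success; the sketch below is an assumed shape, not the
# actual helper (in these Lightning versions, fit() returned 1 on success):
from pytorch_lightning import Trainer

def run_model_test_sketch(trainer_options, model):
    """Illustrative stand-in for tutils.run_model_test (assumed behavior)."""
    trainer = Trainer(**trainer_options)
    result = trainer.fit(model)
    assert result == 1, 'training failed to complete'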
def test_cpu_model():
    """
    Make sure model trains on CPU
    :return:
    """
    tutils.reset_seed()

    trainer_options = dict(show_progress_bar=False,
                           logger=tutils.get_test_tube_logger(),
                           max_nb_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.4)

    model, hparams = tutils.get_model()

    tutils.run_model_test(trainer_options, model, hparams, on_gpu=False)
def test_cpu_model_with_amp(tmpdir):
    """Make sure model trains on CPU."""
    tutils.reset_seed()

    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=False,
                           logger=tutils.get_test_tube_logger(tmpdir),
                           max_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.4,
                           use_amp=True)

    model, hparams = tutils.get_model()

    with pytest.raises((MisconfigurationException, ModuleNotFoundError)):
        tutils.run_model_test(trainer_options, model, on_gpu=False)
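
# The raise above is expected: in these Lightning versions AMP required CUDA plus
# NVIDIA's apex package, so a CPU run fails during configuration. A sketch of the
# kind of guard involved (the exact check inside Lightning may differ):
import torch

def check_amp_sketch(use_amp: bool, on_gpu: bool):
    """Illustrative guard: AMP needs a GPU and the apex package."""
    if not use_amp:
        return
    if not (on_gpu and torch.cuda.is_available()):
        # MisconfigurationException as already imported by this test module
        raise MisconfigurationException('amp requires CUDA-capable GPUs')
    import apex  # noqa: F401 -- raises ModuleNotFoundError when apex is absent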
def test_amp_gpu_dp(tmpdir):
    """Make sure DP + AMP work."""
    tutils.reset_seed()

    if not tutils.can_run_gpu_test():
        return

    model, hparams = tutils.get_model()
    trainer_options = dict(
        default_save_path=tmpdir,
        max_num_epochs=1,
        gpus='0, 1',  # test init with gpu string
        distributed_backend='dp',
        use_amp=True)
    with pytest.raises(MisconfigurationException):
        tutils.run_model_test(trainer_options, model, hparams)
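
# As the inline comment notes, the Trainer accepts GPU indices as a
# comma-separated string ('0, 1') as well as an int or a list. A hypothetical
# sketch of how such a string could be normalized (not Lightning's actual parser):
import torch

def parse_gpu_string_sketch(gpus: str):
    """Turn '0, 1' into [0, 1]; '-1' selects all visible GPUs (assumed convention)."""
    ids = [int(part) for part in gpus.split(',') if part.strip()]
    if ids == [-1]:
        return list(range(torch.cuda.device_count()))
    return ids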
def test_multi_gpu_none_backend(tmpdir):
    """Make sure when using multiple GPUs the user can't use `distributed_backend = None`."""
    tutils.reset_seed()

    if not tutils.can_run_gpu_test():
        return

    model, hparams = tutils.get_model()
    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=False,
                           max_num_epochs=1,
                           train_percent_check=0.1,
                           val_percent_check=0.1,
                           gpus='-1')

    with pytest.raises(MisconfigurationException):
        tutils.run_model_test(trainer_options, model)
def test_single_gpu_model(tmpdir):
    """Make sure single GPU works (DP mode)."""
    tutils.reset_seed()

    if not torch.cuda.is_available():
        warnings.warn('test_single_gpu_model cannot run.'
                      ' Rerun on a GPU node to run this test')
        return
    model, hparams = tutils.get_model()

    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=False,
                           max_epochs=1,
                           train_percent_check=0.1,
                           val_percent_check=0.1,
                           gpus=1)

    tutils.run_model_test(trainer_options, model)
def test_all_features_cpu_model(tmpdir):
    """Test each of the trainer options."""
    tutils.reset_seed()

    trainer_options = dict(default_save_path=tmpdir,
                           gradient_clip_val=1.0,
                           overfit_pct=0.20,
                           track_grad_norm=2,
                           print_nan_grads=True,
                           show_progress_bar=False,
                           logger=tutils.get_test_tube_logger(tmpdir),
                           accumulate_grad_batches=2,
                           max_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.4)

    model, hparams = tutils.get_model()
    tutils.run_model_test(trainer_options, model, on_gpu=False)
def test_amp_single_gpu(tmpdir):
    """Make sure DDP + AMP work."""
    tutils.reset_seed()

    if not tutils.can_run_gpu_test():
        return

    hparams = tutils.get_hparams()
    model = LightningTestModel(hparams)

    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=True,
                           max_epochs=1,
                           gpus=1,
                           distributed_backend='ddp',
                           use_amp=True)

    tutils.run_model_test(trainer_options, model)
def test_multi_gpu_model_ddp(tmpdir):
    """Make sure DDP works."""
    if not tutils.can_run_gpu_test():
        return

    tutils.reset_seed()
    tutils.set_random_master_port()

    model, hparams = tutils.get_model()
    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=False,
                           max_num_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.2,
                           gpus=[0, 1],
                           distributed_backend='ddp')

    tutils.run_model_test(trainer_options, model)
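
# `set_random_master_port` matters for DDP: workers rendezvous over TCP, and a
# randomized MASTER_PORT keeps parallel test runs from colliding on one machine.
# A sketch of what such a helper typically does (assumed implementation):
import os
import random

def set_random_master_port_sketch():
    """Point torch.distributed at a randomly chosen port (illustrative)."""
    os.environ['MASTER_PORT'] = str(random.randint(12000, 19000))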
def test_no_amp_single_gpu(tmpdir):
    """Make sure DDP + AMP work."""
    tutils.reset_seed()

    if not tutils.can_run_gpu_test():
        return

    hparams = tutils.get_hparams()
    model = LightningTestModel(hparams)

    trainer_options = dict(default_save_path=tmpdir,
                           show_progress_bar=True,
                           max_num_epochs=1,
                           gpus=1,
                           distributed_backend='dp',
                           use_amp=True)

    with pytest.raises((MisconfigurationException, ModuleNotFoundError)):
        tutils.run_model_test(trainer_options, model)
def test_amp_gpu_ddp():
    """
    Make sure DDP + AMP work
    :return:
    """
    if not tutils.can_run_gpu_test():
        return

    tutils.reset_seed()
    tutils.set_random_master_port()

    hparams = tutils.get_hparams()
    model = LightningTestModel(hparams)

    trainer_options = dict(show_progress_bar=True,
                           max_nb_epochs=1,
                           gpus=2,
                           distributed_backend='ddp',
                           use_amp=True)

    tutils.run_model_test(trainer_options, model, hparams)
def test_multi_gpu_model_ddp2():
    """
    Make sure DDP2 works
    :return:
    """
    if not tutils.can_run_gpu_test():
        return

    tutils.reset_seed()
    tutils.set_random_master_port()

    model, hparams = tutils.get_model()
    trainer_options = dict(show_progress_bar=True,
                           max_nb_epochs=1,
                           train_percent_check=0.4,
                           val_percent_check=0.2,
                           gpus=2,
                           weights_summary=None,
                           distributed_backend='ddp2')

    tutils.run_model_test(trainer_options, model, hparams)
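
# Every GPU example above gates on `tutils.can_run_gpu_test()` and returns early
# on CPU-only machines. A plausible sketch of that guard (the real helper may
# check a different device count or warn with different wording):
import warnings
import torch

def can_run_gpu_test_sketch(min_gpus: int = 2) -> bool:
    """Return True only when enough CUDA devices are visible (illustrative)."""
    if torch.cuda.device_count() < min_gpus:
        warnings.warn(f'requires {min_gpus}+ GPUs; skipping on this machine')
        return False
    return True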