def test_hyperparameter_optimization_integration(dataloaders_with_coveratiates,
                                                 tmp_path):
    """Smoke-test the Optuna hyperparameter search end to end.

    Runs a tiny study (3 trials, 1 epoch, ``fast_dev_run``) and always
    removes the temporary output directory afterwards.
    """
    # NOTE(review): fixture name looks like a typo of
    # "dataloaders_with_covariates" — confirm against conftest.py before renaming.
    loaders = dataloaders_with_coveratiates
    try:
        optimize_hyperparameters(
            train_dataloader=loaders["train"],
            val_dataloader=loaders["val"],
            model_path=tmp_path,
            max_epochs=1,
            n_trials=3,
            log_dir=tmp_path,
            trainer_kwargs={"fast_dev_run": True, "limit_train_batches": 5},
        )
    finally:
        # clean up trial artifacts even if the search raises
        shutil.rmtree(tmp_path, ignore_errors=True)
# Ejemplo n.º 2 (scraped example separator; vote count: 0)
def test_hyperparameter_optimization_integration(dataloaders_with_covariates, tmp_path, use_learning_rate_finder):
    """Smoke-test the Optuna search with and without the learning-rate finder.

    A minimal study (3 trials, 1 epoch, ``fast_dev_run``) is executed;
    the temporary directory is removed in all cases.
    """
    loaders = dataloaders_with_covariates
    try:
        optimize_hyperparameters(
            train_dataloader=loaders["train"],
            val_dataloader=loaders["val"],
            model_path=tmp_path,
            max_epochs=1,
            n_trials=3,
            log_dir=tmp_path,
            trainer_kwargs={
                "fast_dev_run": True,
                "limit_train_batches": 5,
                # overwrite default trainer kwargs
                "progress_bar_refresh_rate": 20,
            },
            use_learning_rate_finder=use_learning_rate_finder,
        )
    finally:
        # clean up trial artifacts regardless of outcome
        shutil.rmtree(tmp_path, ignore_errors=True)
# Ejemplo n.º 3 (scraped example separator; vote count: 0)
#     val_dataloaders=val_dataloader,
# )

# # make a prediction on entire validation set
# preds, index = tft.predict(val_dataloader, return_index=True, fast_dev_run=True)


# tune
# Run a full Optuna study over the TFT hyperparameter space.
# Assumes `train_dataloader` / `val_dataloader` are defined earlier in the
# script — TODO confirm against the full file.
study = optimize_hyperparameters(
    train_dataloader,
    val_dataloader,
    model_path="optuna_test",  # per-trial checkpoints/logs are written here
    n_trials=200,
    max_epochs=50,
    # search ranges passed straight through to the tuner
    gradient_clip_val_range=(0.01, 1.0),
    hidden_size_range=(8, 128),
    hidden_continuous_size_range=(8, 128),
    attention_head_size_range=(1, 4),
    learning_rate_range=(0.001, 0.1),
    dropout_range=(0.1, 0.3),
    trainer_kwargs=dict(limit_train_batches=30),  # cap batches per epoch
    reduce_on_plateau_patience=4,
    use_learning_rate_finder=False,  # use the Optuna-suggested LR instead
)
# Persist the finished study so results survive the process.
with open("test_study.pkl", "wb") as fout:
    pickle.dump(study, fout)


# profile speed
# profile(
#     trainer.fit,
#     profile_fname="profile.prof",