# Example 1 (scraped snippet — the opening lines of this example are missing)
        optimizer,
        criterion,
        lr_scheduler=scheduler,
        metrics={
            "acc2": MoseiAcc2(exclude_neutral=True),
            "acc2_zero": MoseiAcc2(exclude_neutral=False),
            "acc5": MoseiAcc5(),
            "acc7": MoseiAcc7(),
            "mae": torchmetrics.MeanAbsoluteError(),
        },
    )

    # Build a Lightning trainer for this experiment.
    # patience=10 presumably configures early stopping inside make_trainer,
    # and save_top_k=1 keeps only the single best checkpoint — confirm in
    # make_trainer's definition.
    trainer = make_trainer(
        EXPERIMENT_NAME,
        max_epochs=100,
        patience=10,
        gpus=1,
        save_top_k=1,
    )
    # watch_model(trainer, model)

    # Train; `ldm` supplies the train/val dataloaders via the datamodule API.
    trainer.fit(lm, datamodule=ldm)
    # trainer.test(ckpt_path="best", test_dataloaders=ldm.test_dataloader())

    # Reload the best checkpoint (path tracked by the trainer's checkpoint
    # callback) into a fresh LightningModule wrapper. The non-checkpointed
    # constructor arguments (model, optimizer, criterion, scheduler) must be
    # passed back in explicitly.
    best_model = RnnPLModule.load_from_checkpoint(
        trainer.checkpoint_callback.best_model_path,
        model=model,
        optimizer=optimizer,
        criterion=criterion,
        lr_scheduler=scheduler,
    )
# Example 2 (scraped snippet — the opening lines of this example are missing)
    # Lightning module bundling model/optimizer/criterion together with the
    # CMU-MOSEI sentiment metrics: binary accuracy (with and without the
    # neutral class), 5-/7-class accuracy, F1 variants, and MAE.
    lm = MultimodalTransformerClassificationPLModule(
        model,
        optimizer,
        criterion,
        metrics={
            "acc2": MoseiAcc2(exclude_neutral=True),
            "acc2_zero": MoseiAcc2(exclude_neutral=False),
            "acc5": MoseiAcc5(),
            "acc7": MoseiAcc7(),
            "f1": MoseiF1(exclude_neutral=True),
            "f1_zero": MoseiF1(exclude_neutral=False),
            "mae": torchmetrics.MeanAbsoluteError(),
        },
    )

    # All trainer options come from the experiment config object.
    trainer = make_trainer(**config.trainer)
    watch_model(trainer, model)

    trainer.fit(lm, datamodule=ldm)

    # Evaluate the best checkpoint on the test split.
    # NOTE(review): `test_dataloaders` was renamed to `dataloaders` in newer
    # pytorch-lightning releases — confirm against the pinned PL version.
    results = trainer.test(ckpt_path="best",
                           test_dataloaders=ldm.test_dataloader())

    # NOTE(review): imports mid-script — conventionally these belong at the
    # top of the file.
    import csv
    import os

    # Results CSV goes under <experiments_folder>/<experiment_name>/results_csv.
    # NOTE(review): no os.makedirs(csv_folder_path) is visible in this span —
    # presumably the directory is created before the file is written; verify.
    csv_folder_path = os.path.join(config.trainer.experiments_folder,
                                   config.trainer.experiment_name,
                                   "results_csv")

    csv_name = os.path.join(csv_folder_path, "results.csv")

if __name__ == "__main__":

    EXPERIMENT_NAME = "mnist-autoencoder"

    # Route logs for this run into a per-experiment directory.
    configure_logging(f"logs/{EXPERIMENT_NAME}")

    # Autoencoder, its optimizer, and a reconstruction (MSE) loss.
    net = Net()
    opt = Adam(net.parameters(), lr=1e-3)
    loss_fn = nn.MSELoss()

    # Wrap the raw train/test datasets in a Lightning datamodule, with a
    # larger batch size for evaluation than for training.
    train_set, test_set = get_data()
    datamodule = PLDataModuleFromDatasets(
        train_set, test=test_set, batch_size=128, batch_size_eval=256
    )

    pl_module = AutoEncoderPLModule(net, opt, loss_fn)

    pl_trainer = make_trainer(
        EXPERIMENT_NAME, max_epochs=5, gpus=1, wandb_project="testpl"
    )
    watch_model(pl_trainer, net)

    # Train, then evaluate the best checkpoint on the test set.
    pl_trainer.fit(pl_module, datamodule=datamodule)

    pl_trainer.test(ckpt_path="best", test_dataloaders=datamodule.test_dataloader())
# Example 4 (scraped snippet — the opening lines of this example are missing)
        num_layers=2,
        num_heads=2,
        dropout=0.3,
        hidden_size=100,
        inner_size=200,
        prenorm=True,
        scalenorm=True,
    )

    # Optimize only trainable parameters (requires_grad=True), e.g. when
    # parts of the model are frozen.
    optimizer = AdamW([p for p in model.parameters() if p.requires_grad],
                      lr=1e-4)
    criterion = nn.MSELoss()

    # Lightning wrapper around the model/optimizer/criterion triple;
    # metrics are disabled (commented out) for this run.
    lm = MultimodalTransformerClassificationPLModule(
        model,
        optimizer,
        criterion,
        # metrics={"acc": FromLogits(pl.metrics.classification.Accuracy())},
    )

    # Single-GPU trainer keeping only the best checkpoint; gradient clipping
    # at 1.0 guards against exploding gradients.
    trainer = make_trainer(EXPERIMENT_NAME,
                           max_epochs=100,
                           gpus=1,
                           save_top_k=1,
                           gradient_clip_val=1.0)
    watch_model(trainer, model)

    trainer.fit(lm, datamodule=ldm)

    # Evaluate the best checkpoint on the test split.
    # NOTE(review): `test_dataloaders` was renamed to `dataloaders` in newer
    # pytorch-lightning releases — confirm against the pinned PL version.
    trainer.test(ckpt_path="best", test_dataloaders=ldm.test_dataloader())