Example 1
    "num_classes": 4,
    "activation": "relu",
    "normalization": "group_normalization",
    "mode": "trilinear",
    "with_vae": True,
    "debug": False
}
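# Resume from the saved checkpoint if it exists, otherwise build a fresh model.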
if os.path.isfile(trained_model):
    nvnet = NvNetSegmenter(nvnet_kwargs,
                           optimizer_name="Adam",
                           learning_rate=1e-4,
                           weight_decay=1e-5,
                           loss=my_loss,
                           pretrained=trained_model,
                           use_cuda=True)
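    # Reload the training/validation histories saved with this checkpoint
    # (fold 0, epoch 9).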
    train_history = History.load(os.path.join(outdir, "train_0_epoch_9.pkl"))
    valid_history = History.load(
        os.path.join(outdir, "validation_0_epoch_9.pkl"))
else:
    nvnet = NvNetSegmenter(nvnet_kwargs,
                           optimizer_name="Adam",
                           learning_rate=1e-4,
                           weight_decay=1e-5,
                           loss=my_loss,
                           use_cuda=True)
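    # Halve the learning rate when the validation loss stops improving
    # for 5 consecutive epochs.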
    scheduler = lr_scheduler.ReduceLROnPlateau(optimizer=nvnet.optimizer,
                                               mode="min",
                                               factor=0.5,
                                               patience=5)
    train_history, valid_history = nvnet.training(
        manager=manager,
        # (remaining arguments of the call are truncated in the source)
        )
Example 2
# Fragment: the beginning of this snippet is missing; it resumes inside a
# nested call that formats per-fold checkpoint paths, within a list
# comprehension over folds.
                                            pb=pb,
                                            hyper=hyper,
                                            n_finetuning=n_finetuning,
                                            block=b,
                                            f=fold,
                                            e=e))) for fold in range(folds)
                ]
            else:
                if CV:
                    if pb == 'Alzheimer':
                        filename = "Validation_DenseNet_{pb}_{db}_CV_%i_epoch_{e}.pkl"
                    else:
                        filename = "Validation_DenseNet_{pb}_{db}_%i_epoch_{e}.pkl"
                    results[db][name][n_finetuning] = History.load(
                        os.path.join(root, path.format(n=N_pretraining,
                                                       n_finetune=n_finetuning, pb=pb), filename.
                                     format(db=db, pb=pb, e=e)), folds=list(range(folds))). \
                        to_dict(patterns_to_del=patterns_to_del)
                else:
                    filename = "Test_DenseNet_{pb}_{db}_fold{f}_epoch{e}.pkl"
                    results[db][name][n_finetuning] = [
                        get_pickle_obj(
                            os.path.join(
                                root,
                                path.format(n=N_pretraining,
                                            n_finetune=n_finetuning,
                                            pb=pb),
                                filename.format(db=db, pb=pb, f=fold, e=e)))
                        for fold in range(folds)
                    ]
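
# The fragment above relies on a get_pickle_obj helper that is not shown.
# Assuming it simply deserializes a pickle file (an assumption, not a
# documented pynet API), a minimal sketch would be:

import pickle

def get_pickle_obj(path):
    # Assumed behaviour: return the object stored in the given pickle file.
    with open(path, "rb") as handle:
        return pickle.load(handle)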
Example 3
                model=net,
                metrics=["accuracy"])
test_history, train_history = cl.training(manager=manager,
                                          nb_epochs=3,
                                          checkpointdir="/tmp/pynet",
                                          fold_index=0,
                                          with_validation=True)

#############################################################################
# You can reload the optimization history at any time and any step.

from pprint import pprint
from pynet.history import History
from pynet.plotting import plot_history

history = History.load("/tmp/pynet/train_0_epoch_2.pkl")
print(history)
plot_history(history)

#############################################################################
# And now predict the labels on the test set.

import numpy as np
from sklearn.metrics import classification_report
from pynet.plotting import plot_data

y_pred, X, y_true, loss, values = cl.testing(manager=manager,
                                             with_logit=True,
                                             predict=True)
pprint(data.labels)
print(classification_report(y_true, y_pred, target_names=data.labels.values()))
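
#############################################################################
# As a complement (not part of the original snippet), scikit-learn's
# confusion matrix summarizes the same predictions: rows are true labels,
# columns are predicted labels.

from sklearn.metrics import confusion_matrix

print(confusion_matrix(y_true, y_pred))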
Example 4
test_history, train_history, valid_history = training(net=net,
                                                      dataset=dataset,
                                                      optimizer=optim.Adam(
                                                          net.parameters(),
                                                          lr=0.01),
                                                      criterion=my_loss,
                                                      nb_epochs=3,
                                                      metrics={"mse": my_loss},
                                                      use_cuda=False,
                                                      outdir="/tmp/pynet",
                                                      verbose=1)

#############################################################################
# You can reload the optimization history at any time and any step.

from pprint import pprint
from pynet.history import History

valid_history = History.load("/tmp/pynet/history/valid_1_epoch_3.pkl")
pprint(valid_history.history)
pprint(valid_history["loss"])

#############################################################################
# Finally, you can display the optimization cost.

from pynet.plotting import plot_data

x, y = valid_history["loss"]
plot_data(y)
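
#############################################################################
# Equivalently (not part of the original snippet), the same curve can be
# drawn with plain matplotlib, assuming x holds the recorded steps and y the
# corresponding loss values from valid_history["loss"].

import matplotlib.pyplot as plt

plt.plot(x, y)
plt.xlabel("step")
plt.ylabel("loss")
plt.show()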