Example 1
import torch

import loss   # project-local module providing BinomialCrossEntropyWithLogits (assumed)
import train  # project-local module providing the training loop (assumed)
import utils  # project-local helpers: create_network, LearningMonitor, AsyncSampler, load_chkpt (assumed)


def start_training(model_class, model_args, model_kwargs, chkpt_num, lr,
                   train_sets, val_sets, data_dir, **params):

    # Build the PyTorch model and a monitor for learning statistics
    net = utils.create_network(model_class, model_args, model_kwargs)
    monitor = utils.LearningMonitor()

    # Load a model checkpoint when resuming (chkpt_num != 0)
    if chkpt_num != 0:
        utils.load_chkpt(net, monitor, chkpt_num, params["model_dir"],
                         params["log_dir"])

    # DataProvider samplers, wrapped for asynchronous sampling
    Sampler = params["sampler_class"]
    train_sampler = utils.AsyncSampler(
        Sampler(data_dir,
                dsets=train_sets,
                mode="train",
                resize=params["resize"]))

    val_sampler = utils.AsyncSampler(
        Sampler(data_dir, dsets=val_sets, mode="val", resize=params["resize"]))

    loss_fn = loss.BinomialCrossEntropyWithLogits()
    optimizer = torch.optim.Adam(net.parameters(), lr=lr)

    train.train(net,
                loss_fn,
                optimizer,
                train_sampler,
                val_sampler,
                last_iter=chkpt_num,
                monitor=monitor,
                **params)
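
A minimal usage sketch for the example above, assuming a hypothetical model class MyModel and sampler class MySampler; the paths, dataset names, and hyperparameter values below are placeholders, not settings from the original repository. It also illustrates that sampler_class, resize, model_dir, and log_dir arrive through **params and are forwarded on to train.train.

# Hypothetical call; MyModel and MySampler stand in for real project
# classes and are not defined in this snippet.
start_training(MyModel, (), {},          # model_class, model_args, model_kwargs
               chkpt_num=0,              # 0 = start from scratch (no checkpoint load)
               lr=1e-4,
               train_sets=["vol_a", "vol_b"],
               val_sets=["vol_c"],
               data_dir="data/",
               # extra keyword arguments collected by **params:
               sampler_class=MySampler,
               resize=1,
               model_dir="models/",
               log_dir="logs/")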
Example 2
import tensorboardX
import torch

import loss   # project-local module providing BinomialCrossEntropyWithLogits (assumed)
import train  # project-local module providing the training loop (assumed)
import utils  # project-local helpers: create_network, LearningMonitor, AsyncSampler, load_chkpt (assumed)


def start_training(model_class, model_args, model_kwargs, sampler_class,
                   sampler_spec, augmentor_constr, chkpt_num, lr, train_sets,
                   val_sets, data_dir, model_dir, log_dir, tb_train, tb_val,
                   **params):

    # Build the PyTorch model, TensorBoard writers, and a learning monitor
    net = utils.create_network(model_class, model_args, model_kwargs)
    train_writer = tensorboardX.SummaryWriter(tb_train)
    val_writer = tensorboardX.SummaryWriter(tb_val)
    monitor = utils.LearningMonitor()

    # Load a model checkpoint when resuming (chkpt_num != 0)
    if chkpt_num != 0:
        utils.load_chkpt(net, monitor, chkpt_num, model_dir, log_dir)

    # DataProvider samplers and data augmentation
    train_aug = augmentor_constr(True)
    train_sampler = utils.AsyncSampler(
        sampler_class(data_dir,
                      sampler_spec,
                      vols=train_sets,
                      mode="train",
                      aug=train_aug))

    val_aug = augmentor_constr(False)
    val_sampler = utils.AsyncSampler(
        sampler_class(data_dir,
                      sampler_spec,
                      vols=val_sets,
                      mode="val",
                      aug=val_aug))

    loss_fn = loss.BinomialCrossEntropyWithLogits()
    optimizer = torch.optim.Adam(net.parameters(), lr=lr)

    train.train(net,
                loss_fn,
                optimizer,
                train_sampler,
                val_sampler,
                train_writer=train_writer,
                val_writer=val_writer,
                last_iter=chkpt_num,
                model_dir=model_dir,
                log_dir=log_dir,
                monitor=monitor,
                **params)
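
A comparable hypothetical invocation for this variant, wiring in TensorBoard log directories and an augmentor constructor. The function calls augmentor_constr with a single boolean (True for training, False for validation), so any callable honoring that contract works; MyModel, MySampler, Augmentor, the sampler_spec contents, and max_iter are illustrative assumptions only.

# Hypothetical call; MyModel, MySampler, and Augmentor are stand-ins for
# real project classes and are not defined in this snippet.
start_training(MyModel, (), {},
               sampler_class=MySampler,
               sampler_spec={"patch_shape": (18, 160, 160)},  # assumed spec format
               augmentor_constr=lambda is_train: Augmentor(flip=is_train),
               chkpt_num=0,
               lr=1e-4,
               train_sets=["vol_a", "vol_b"],
               val_sets=["vol_c"],
               data_dir="data/",
               model_dir="models/",
               log_dir="logs/",
               tb_train="tb/train",
               tb_val="tb/val",
               max_iter=100000)  # hypothetical extra kwarg forwarded to train.train via **params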