Example #1
0
def main():
    """Entry point: build the model, loss, optimizer and data loader, then train.

    All configuration is read from the command-line options object returned
    by ``parse_opts()``.
    """
    # settings
    sets = parse_opts()
    model, parameters = generate_model(sets)

    # Loss criterion.
    criterion = CrossEntropyLoss()

    # Optimizer: the randomly-initialized ("new") layers get a learning rate
    # 100x larger than the pre-trained base layers, so fine-tuning only
    # gently perturbs the pre-trained weights.
    params = [{
        'params': parameters['base_parameters'],
        'lr': sets.learning_rate
    }, {
        'params': parameters['new_parameters'],
        'lr': sets.learning_rate * 100
    }]

    # NOTE(review): both parameter groups above carry their own 'lr', so
    # Adam's top-level learning rate is only a fallback and is never used;
    # the previous hard-coded lr=0.1 here was dead and misleading, and has
    # been removed (behavior is unchanged).
    optimizer = optim.Adam(params)

    # Training data loader.
    # NOTE(review): num_workers now honors sets.num_workers when the option
    # exists (the CI configuration sets it to 0), falling back to the
    # previous hard-coded value of 1. batch_size stays at 8 as before.
    train_dataset = ABIDE1(sets, train=True)
    train_loader = DataLoader(train_dataset,
                              batch_size=8,
                              shuffle=True,
                              num_workers=getattr(sets, 'num_workers', 1),
                              drop_last=True)

    train(train_loader, model, optimizer, criterion)
Example #2
0
                    # Persist a resumable checkpoint: epoch/batch position plus
                    # model and optimizer state.
                    # NOTE(review): 'ecpoch' is a typo for 'epoch'. Any code
                    # that reloads this checkpoint must read the same
                    # misspelled key, so fix both writer and reader together
                    # before renaming it.
                    torch.save(
                        {
                            'ecpoch': epoch,
                            'batch_id': batch_id,
                            'state_dict': model.state_dict(),
                            'optimizer': optimizer.state_dict()
                        }, model_save_path)

    print('Finished training')
    # In CI mode, stop immediately after the smoke-test run completes.
    if sets.ci_test:
        exit()


if __name__ == '__main__':
    # settings
    sets = parse_opts()
    # CI smoke-test mode: override the parsed options with a tiny toy
    # configuration (toy data, 1 epoch, CPU-only, no pretraining) so the
    # pipeline can be exercised quickly.
    if sets.ci_test:
        sets.img_list = './toy_data/test_ci.txt'
        sets.n_epochs = 1
        sets.no_cuda = True
        sets.data_root = './toy_data'
        sets.pretrain_path = ''
        sets.num_workers = 0
        sets.model_depth = 10
        sets.resnet_shortcut = 'A'
        # Toy input volume dimensions — presumably depth x height x width;
        # TODO confirm against the dataset/transform code.
        sets.input_D = 14
        sets.input_H = 28
        sets.input_W = 28

    # getting model
    # Seed the PyTorch RNG for reproducible initialization/shuffling.
    torch.manual_seed(sets.manual_seed)
Example #3
0
            # Per-sample Dice score for every segmentation class.
            dices[idx, :] = seg_eval(pred_total[idx], y_total[idx],
                                     range(self.hparams.n_seg_classes))
        print("avg_loss(val) = ", avg_loss)
        print("dice_each_class(val) = ", dices.mean(axis=0))
        # Record the per-class mean Dice as text in the experiment logger,
        # tagged with the current epoch.
        self.logger.experiment.add_text("dice_each_class",
                                        str(dices.mean(axis=0)),
                                        self.current_epoch)
        # print(self.trainer.lr_schedulers.get_lr())
        # Mean Dice over all samples and classes — used as the checkpoint
        # selection metric elsewhere (monitor='mDice').
        mDice = dices.mean()
        logs = {"val_loss": avg_loss, "mDice": torch.tensor(mDice)}
        # Lightning forwards everything under 'log' to the logger backend.
        return {'log': logs}


if __name__ == '__main__':
    # settings
    hparams = parse_opts()

    # Keep only the single best checkpoint, ranked by validation mean Dice
    # (higher is better, hence mode='max'); filepath=None uses the default
    # checkpoint directory.
    checkpoint_callback = ModelCheckpoint(filepath=None,
                                          monitor='mDice',
                                          save_top_k=1,
                                          verbose=True,
                                          mode='max')

    # Callback that logs learning-rate values — presumably attached to the
    # Trainer later in this script; confirm where it is registered.
    lr_logger = LearningRateLogger()

    if hparams.phase == "test":
        pretrained_model = BiMaskSeg.load_from_checkpoint(
            checkpoint_path=
            "trails/logs/train_lightning/lightning_logs/version_0/checkpoints/epoch=29.ckpt",
            hparams_file=
            "trails/logs/train_lightning/lightning_logs/version_0/hparams.yaml"
Example #4
0
                # Persist a resumable checkpoint: epoch/batch position plus
                # model and optimizer state.
                log.info('Save checkpoints: epoch = {}, batch_id = {}'.format(epoch, batch_id))
                torch.save({
                    'epoch': epoch,
                    'batch_id': batch_id,
                    'state_dict': model.state_dict(),
                    'optimizer': optimizer.state_dict()},
                    model_save_path)

        # Advance the learning-rate scheduler once per epoch.
        scheduler.step()

    print('Finished training')


if __name__ == '__main__':
    opt = parse_opts()

    # Parse the comma-separated GPU id string (e.g. "0,1") into a list of
    # non-negative ints; negative ids act as "disabled" and are dropped.
    str_ids = opt.gpu_ids.split(',')
    opt.gpu_ids = []
    for str_id in str_ids:
        idx = int(str_id)
        if idx >= 0:
            opt.gpu_ids.append(idx)

    # Seed the PyTorch RNG for reproducibility.
    torch.manual_seed(opt.seed)

    # Project-local loader/model — note this DataLoader takes the options
    # object, so it is not torch.utils.data.DataLoader.
    dataloader = DataLoader(opt)
    model = RegresserModel(opt)

    if not opt.no_cuda:
        if len(opt.gpu_ids) > 1: