Example #1

# NOTE: these excerpts omit their import blocks; names such as ModelBuilder,
# SegmentationModule, AC17, load2D, the augmentation classes, and `data`
# (torch.utils.data) come from the surrounding project.
def main(args):
    # Network Builders
    builder = ModelBuilder()

    unet = builder.build_unet(num_class=args.num_class,
        arch=args.unet_arch,
        weights=args.weights_unet)

    print("Froze the following layers: ")
    for name, p in unet.named_parameters():
        if not p.requires_grad:
            print(name)
    print()

    crit = DualLoss(mode="train")

    segmentation_module = SegmentationModule(crit, unet)

    train_augs = Compose([
        PaddingCenterCrop(256),
        RandomHorizontallyFlip(),
        RandomVerticallyFlip(),
        RandomRotate(180)
    ])
    test_augs = Compose([PaddingCenterCrop(256)])

    # Dataset and Loader
    dataset_train = AC17(  # loads 3D volumes
            root=args.data_root,
            split='train',
            k_split=args.k_split,
            augmentations=train_augs,
            img_norm=args.img_norm)
    ac17_train = load2D(dataset_train, split='train', deform=True)  # 2D-slice dataset; wraps the 3D loader

    loader_train = data.DataLoader(
        ac17_train,
        batch_size=args.batch_size_per_gpu,
        shuffle=True,
        num_workers=int(args.workers),
        drop_last=True,
        pin_memory=True)

    dataset_val = AC17(
            root=args.data_root,
            split='val',
            k_split=args.k_split,
            augmentations=test_augs,
            img_norm=args.img_norm)

    ac17_val = load2D(dataset_val, split='val', deform=False)

    loader_val = data.DataLoader(
        ac17_val,
        batch_size=1,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=5,
        drop_last=True)

    # load nets into gpu
    if len(args.gpus) > 1:
        segmentation_module = UserScatteredDataParallel(
            segmentation_module,
            device_ids=args.gpus)
        # For sync bn
        patch_replication_callback(segmentation_module)
    segmentation_module.cuda()

    # Set up optimizers. This variant always builds a U-Net, so only the
    # U-Net and the criterion carry trainable state here.
    nets = (unet, crit)
    optimizers = create_optimizers(nets, args)

    # Main loop
    history = {'train': {'epoch': [], 'loss': [], 'acc': [], 'jaccard': []}}
    # Best per-class (1-3) and mean validation mIoU, with the epoch of each.
    best_val = {'epoch_1': 0, 'mIoU_1': 0,
                'epoch_2': 0, 'mIoU_2': 0,
                'epoch_3': 0, 'mIoU_3': 0,
                'epoch': 0, 'mIoU': 0}

    for epoch in range(args.start_epoch, args.num_epoch + 1):
        train(segmentation_module, loader_train, optimizers, history, epoch, args)
        iou, loss = eval(loader_val, segmentation_module, args, crit)
        # Checkpointing: flag the epoch if validation loss drops below a fixed
        # threshold (0.215, presumably hand-tuned) or any IoU metric improves.
        ckpted = False
        if loss < 0.215:
            ckpted = True
        # Track the best epoch for each of the three foreground classes.
        for c in range(3):
            if iou[c] > best_val[f'mIoU_{c + 1}']:
                best_val[f'epoch_{c + 1}'] = epoch
                best_val[f'mIoU_{c + 1}'] = iou[c]
                ckpted = True

        mean_iou = (iou[0] + iou[1] + iou[2]) / 3
        if mean_iou > best_val['mIoU']:
            best_val['epoch'] = epoch
            best_val['mIoU'] = mean_iou
            ckpted = True

        # Always checkpoint every 50th epoch and the final epoch; otherwise
        # checkpoint only improved models, skipping the first 14 warm-up epochs.
        if epoch % 50 == 0 or epoch == args.num_epoch:
            checkpoint(nets, history, args, epoch)
        elif epoch >= 15 and ckpted:
            checkpoint(nets, history, args, epoch)

    print('Training Done!')
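
# A minimal sketch of how this main(args) could be driven from the command
# line. The flag names mirror the attributes read above; the defaults are
# illustrative assumptions, not values from the original project.
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='AC17 segmentation training')
    parser.add_argument('--data_root', required=True)
    parser.add_argument('--num_class', type=int, default=4)
    parser.add_argument('--unet_arch', default='AlbuNet')
    parser.add_argument('--weights_unet', default='')
    parser.add_argument('--k_split', type=int, default=1)
    parser.add_argument('--img_norm', action='store_true')
    parser.add_argument('--batch_size_per_gpu', type=int, default=4)
    parser.add_argument('--workers', type=int, default=5)
    parser.add_argument('--gpus', type=int, nargs='+', default=[0])
    parser.add_argument('--start_epoch', type=int, default=1)
    parser.add_argument('--num_epoch', type=int, default=100)
    main(parser.parse_args())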

Example #2

# Orphaned fragment, apparently the tail of a dataset's __getitem__: it
# replicates a single-channel array to three channels and converts the
# image/mask pair to torch tensors. It is wrapped in a helper here so it
# parses; the name `to_tensor_pair` is illustrative, not from the source.
def to_tensor_pair(img, mask):
    if img.ndim == 3:
        img = np.expand_dims(img, axis=0)              # add a channel axis
        img = np.concatenate((img, img, img), axis=0)  # replicate to 3 channels
    img = torch.from_numpy(img).float()
    mask = torch.from_numpy(mask).long()
    return img, mask
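
# Quick check of the helper above on synthetic data; the shapes are
# illustrative assumptions, not taken from the original dataset.
import numpy as np
import torch

_img, _mask = to_tensor_pair(np.zeros((10, 256, 256)),
                             np.zeros((256, 256), dtype=np.int64))
assert _img.shape == (3, 10, 256, 256)
assert _mask.dtype == torch.int64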


if __name__ == "__main__":
    from pprint import pprint

    train_augs = Compose([
        PaddingCenterCrop(256),
        RandomHorizontallyFlip(),
        RandomVerticallyFlip(),
        RandomRotate(180)
    ])

    # root = "/home/hao/Downloads/COCO2CULane"
    #
    #
    # dataset_train = SideWalkData(
    #     root=root,
    #     split='train',
    #     k_split=1,
    #     augmentations=train_augs
    # )
    #
    # img = dataset_train[0]
    # # print(img)
    #
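
    # A hedged usage sketch: assuming pytorch-semseg style joint augmentations,
    # a Compose is called with an (image, mask) pair of PIL Images. If the
    # project's classes use a different interface, adapt accordingly.
    from PIL import Image

    demo_img = Image.new('L', (300, 300))
    demo_mask = Image.new('L', (300, 300))
    demo_img, demo_mask = train_augs(demo_img, demo_mask)
    print(demo_img.size)  # (256, 256) if PaddingCenterCrop crops to 256x256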

Example #3

def main(args):
    # Network Builders
    builder = ModelBuilder()
    net_encoder = None
    net_decoder = None
    unet = None

    if not args.unet:
        net_encoder = builder.build_encoder(
            arch=args.arch_encoder,
            fc_dim=args.fc_dim,
            weights=args.weights_encoder)
        net_decoder = builder.build_decoder(
            arch=args.arch_decoder,
            fc_dim=args.fc_dim,
            num_class=args.num_class,
            weights=args.weights_decoder)
    else:
        unet = builder.build_unet(num_class=args.num_class, 
            arch=args.unet_arch,
            weights=args.weights_unet)

        print("Froze the following layers: ")
        for name, p in unet.named_parameters():
            if not p.requires_grad:
                print(name)
        print()
    
    crit = ACLoss(mode="train")
    # Alternative criteria tried during development:
    # crit = nn.CrossEntropyLoss().cuda()
    # crit = nn.BCEWithLogitsLoss(pos_weight=torch.tensor(50))
    # crit = nn.BCELoss()

    if args.arch_decoder.endswith('deepsup') and not args.unet:
        segmentation_module = SegmentationModule(
            net_encoder, net_decoder, crit, args.deep_sup_scale)
    else:
        segmentation_module = SegmentationModule(
            net_encoder, net_decoder, crit, is_unet=args.unet, unet=unet)

    train_augs = Compose([
        PaddingCenterCrop(256),
        RandomHorizontallyFlip(),
        RandomVerticallyFlip(),
        RandomRotate(180)
    ])
    test_augs = Compose([PaddingCenterCrop(256)])
    # Dataset and Loader
    dataset_train = AC17(
            root=args.data_root,
            split='train',
            k_split=args.k_split,
            augmentations=train_augs,
            img_norm=args.img_norm)
    ac17_train = load2D(dataset_train, split='train', deform=True)
    
    loader_train = data.DataLoader(
        ac17_train,
        batch_size=args.batch_size_per_gpu,  # we have modified data_parallel
        shuffle=True, 
        num_workers=int(args.workers),
        drop_last=True,
        pin_memory=True)
    dataset_val = AC17(
            root=args.data_root,
            split='val',
            k_split=args.k_split,
            augmentations=test_augs,
            img_norm=args.img_norm)
    ac17_val = load2D(dataset_val, split='val', deform=False)
    loader_val = data.DataLoader(
        ac17_val,
        batch_size=1,
        shuffle=False,
        collate_fn=user_scattered_collate,
        num_workers=5,
        drop_last=True)

    # load nets into gpu
    if len(args.gpus) > 1:
        segmentation_module = UserScatteredDataParallel(
            segmentation_module,
            device_ids=args.gpus)
        # For sync bn
        patch_replication_callback(segmentation_module)
    segmentation_module.cuda()
    
    # Set up optimizers
    nets = (unet, crit) if args.unet else (net_encoder, net_decoder, crit)
    optimizers = create_optimizers(nets, args)

    # Main loop
    history = {'train': {'epoch': [], 'loss': [], 'acc': [], 'jaccard': []}}
    # Best per-class (1-3) and mean validation mIoU, with the epoch of each.
    best_val = {'epoch_1': 0, 'mIoU_1': 0,
                'epoch_2': 0, 'mIoU_2': 0,
                'epoch_3': 0, 'mIoU_3': 0,
                'epoch': 0, 'mIoU': 0}

    for epoch in range(args.start_epoch, args.num_epoch + 1):
        train(segmentation_module, loader_train, optimizers, history, epoch, args)
        iou, loss = eval(loader_val, segmentation_module, args, crit)
        # Checkpointing: flag the epoch if validation loss drops below a fixed
        # threshold (0.215, presumably hand-tuned) or any IoU metric improves.
        ckpted = False
        if loss < 0.215:
            ckpted = True
        # Track the best epoch for each of the three foreground classes.
        for c in range(3):
            if iou[c] > best_val[f'mIoU_{c + 1}']:
                best_val[f'epoch_{c + 1}'] = epoch
                best_val[f'mIoU_{c + 1}'] = iou[c]
                ckpted = True

        mean_iou = (iou[0] + iou[1] + iou[2]) / 3
        if mean_iou > best_val['mIoU']:
            best_val['epoch'] = epoch
            best_val['mIoU'] = mean_iou
            ckpted = True
        
        # Always checkpoint every 50th epoch and the final epoch; otherwise
        # checkpoint only improved models, skipping the first 14 warm-up epochs.
        if epoch % 50 == 0 or epoch == args.num_epoch:
            checkpoint(nets, history, args, epoch)
        elif epoch >= 15 and ckpted:
            checkpoint(nets, history, args, epoch)
    
    #print("[Val] Class 1: Epoch " + str(best_val['epoch_1']) + " had the best mIoU of " + str(best_val['mIoU_1']) + ".")
    #print("[Val] Class 2: Epoch " + str(best_val['epoch_2']) + " had the best mIoU of " + str(best_val['mIoU_2']) + ".")
    #print("[Val] Class 3: Epoch " + str(best_val['epoch_3']) + " had the best mIoU of " + str(best_val['mIoU_3']) + ".")
    print('Training Done!')
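
# For orientation: one plausible shape for the create_optimizers helper under
# the calling convention used above (one optimizer per trainable net). This is
# a sketch, not the project's actual implementation; `args.lr` and
# `args.weight_decay` are assumed attribute names.
import torch.optim as optim

def create_optimizers_sketch(nets, args):
    optimizers = []
    for net in nets:
        if net is None:
            continue
        params = [p for p in net.parameters() if p.requires_grad]
        if params:  # skip parameter-free criteria
            optimizers.append(optim.SGD(params, lr=args.lr, momentum=0.9,
                                        weight_decay=args.weight_decay))
    return optimizers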

Example #4

def run_model(space):
    # Network Builders
    builder = ModelBuilder()
    net_encoder = None
    net_decoder = None

    unet = builder.build_unet(num_class=4, arch='AlbuNet', weights='')

    crit = ACLoss()

    segmentation_module = SegmentationModule(net_encoder,
                                             net_decoder,
                                             crit,
                                             is_unet=True,
                                             unet=unet)

    train_augs = Compose([
        PaddingCenterCrop(224),
        RandomHorizontallyFlip(),
        RandomVerticallyFlip(),
        RandomRotate(180)
    ])
    test_augs = Compose([PaddingCenterCrop(224)])
    # Dataset and Loader
    dataset_train = AC17(root=os.getenv('DATA_ROOT',
                                        '/home/rexma/Desktop/MRI_Images/AC17'),
                         split='train',
                         k_split=1,
                         augmentations=train_augs,
                         img_norm=True)
    ac17_train = load2D(dataset_train, split='train', deform=False)

    loader_train = data.DataLoader(
        ac17_train,
        batch_size=4,  # we have modified data_parallel
        shuffle=True,
        num_workers=5,
        drop_last=True,
        pin_memory=True)

    dataset_val = AC17(root=os.getenv('DATA_ROOT',
                                      '/home/rexma/Desktop/MRI_Images/AC17'),
                       split='val',
                       k_split=1,
                       augmentations=test_augs,
                       img_norm=True)
    ac17_val = load2D(dataset_val, split='val', deform=False)
    loader_val = data.DataLoader(ac17_val,
                                 batch_size=1,
                                 shuffle=False,
                                 collate_fn=user_scattered_collate,
                                 num_workers=5,
                                 drop_last=True)

    segmentation_module.cuda()

    # Set up optimizers
    nets = (unet, crit)
    optimizers = create_optimizers(nets, space)

    val_losses = []
    train_losses = []
    status = STATUS_OK
    print("Searching " + "lr: " + str(space['lr'])
          )  #+ " b1: " + str(space['b1']) + "b2: " + str(space['b2']))
    # Main loop: a short, fixed 30-epoch budget per hyperparameter trial.
    for epoch in range(1, 31):
        t_iou = train(segmentation_module, loader_train, optimizers, epoch,
                      space)
        v_iou = eval(loader_val, segmentation_module, crit)
        train_losses.append(t_iou)
        val_losses.append(v_iou)
        # Prune trials that clearly fail to converge by epoch 3.
        if epoch == 3 and v_iou >= 1.0:
            status = STATUS_FAIL
            break

    # Values returned to the hyperopt driver.
    opt_name = 'lr' + str(space['lr'])
    # opt_name += "_b1" + str(space['b1']) + "_b2" + str(space['b2'])
    model_dict = {'loss': min(val_losses), 'status': status, 'name': opt_name}
    return model_dict
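

# run_model already returns the {'loss', 'status', ...} dict that hyperopt
# expects, so a search could be driven like this; the search-space bounds are
# illustrative assumptions.
from hyperopt import fmin, tpe, hp, Trials

space = {'lr': hp.loguniform('lr', -12, -4)}  # roughly 6e-6 to 1.8e-2
trials = Trials()
best = fmin(fn=run_model, space=space, algo=tpe.suggest,
            max_evals=20, trials=trials)
print(best)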