# Ejemplo n.º 1 (Example no. 1) — snippet score: 0
def test_hyp_sel_hdr(paths, images_path, use_log=False):
    """Run the HypNet/SelUnet hypothesis-selection pipeline over the CRF
    real-world test images and visualize every prediction.

    :param paths: checkpoint paths — paths[0] restores HypNet,
        paths[1] restores the SelUnet's inner model
    :param images_path: special image sub-folder handed to MIDataset
    :param use_log: work with 2 channels instead of 3 when True
    """
    channels = 2 if use_log else 3

    test_set = MIDataset(datatype='test',
                         folder='dataset_crf/realworld',
                         special_folder=images_path,
                         transforms=get_validation_augmentation(),
                         use_mask=True)
    test_loader = DataLoader(test_set, batch_size=1, shuffle=True, num_workers=0)

    # Hypothesis network, restored from the first checkpoint.
    hyp_net = HypNet.HypNet(patch_height=44,
                            patch_width=44,
                            in_channels=channels,
                            out_channels=channels)
    hyp_net.cuda(0)
    hyp_net.load_state_dict(torch.load(paths[0]))

    # Selection network, restored from the second checkpoint.
    sel_net = HypNet.SelUnet()
    sel_net.cuda(0)
    sel_net.model.load_state_dict(torch.load(paths[1]))

    # Patch size expressed as a fraction of assumed full-image dimensions.
    width_ratio = 100. / 640
    height_ratio = 100. / 320

    for data, mask, gt in test_loader:
        gt = gt / 255.
        data = data / 255.
        for img, gti in zip(data, gt):
            # Collapse the ground-truth map to per-channel means.
            mean_gt = gti.mean(1).mean(1)
            prediction = sel_net.test(hyp_net, img, mean_gt, height_ratio,
                                      width_ratio)
            visualize_tensor(img.cpu(), gti, prediction.cpu())
        torch.cuda.empty_cache()
# Ejemplo n.º 2 (Example no. 2) — snippet score: 0
    # NOTE(review): fragment — the enclosing function's header (which defines
    # out_channels, known_ills and use_log) lies above this chunk and is not
    # visible here.
    # When illuminants are known the input channel count is tripled,
    # presumably to concatenate extra illuminant maps — TODO confirm
    # against the caller.
    in_channels = out_channels * 3 if known_ills else out_channels

    # U-Net with a pretrained encoder, placed on GPU 0.
    model = Unet(in_channels, pretrain=True)
    model.cuda(0)

    num_workers = 0
    bs = 4

    # Training/validation splits of the cube dataset; `log_transform`
    # optionally switches inputs to log space.
    train_dataset = CubeDataset(
        datatype='train',
        transforms=get_training_augmentation(),
        log_transform=use_log
    )  # , preprocessing=get_preprocessing(preprocessing_fn))
    valid_dataset = CubeDataset(
        datatype='valid',
        transforms=get_validation_augmentation(),
        log_transform=use_log
    )  # , preprocessing=get_preprocessing(preprocessing_fn))

    # Shuffle only the training split.
    train_loader = DataLoader(train_dataset,
                              batch_size=bs,
                              shuffle=True,
                              num_workers=num_workers)
    valid_loader = DataLoader(valid_dataset,
                              batch_size=bs,
                              shuffle=False,
                              num_workers=num_workers)

    # model, criterion, optimizer
    optimizer = torch.optim.Adam(params=model.parameters(), lr=1e-2)
    criterion1 = torch.nn.MSELoss()
# Ejemplo n.º 3 (Example no. 3) — snippet score: 0
def test(model, dataset, images_path, preproc, use_log, use_corrected, path,
         custom, reg):
    """Run `model` over a chosen dataset and evaluate/plot its mask output.

    The dataset is selected by the `dataset` string: 'crf' (real-world CRF
    images), 'test' (whatsapp test images), 'projector_test' (hard-coded
    local ambient-light folder), anything else -> relighted validation set.
    For test-type data the predicted masks are saved and plotted; otherwise
    Dice scores against the ground-truth masks are accumulated and printed
    (unless `reg` is set, in which case only regression plots are produced).

    :param model: returns (mask, label) per batch, or just the mask when
        `custom` is truthy
    :param dataset: selector string; rebound below to the dataset object
    :param images_path: special sub-folder forwarded to MIDataset
    :param preproc: preprocessing callable passed to MIDataset
    :param use_log: selects validation vs. test augmentation and log inputs
    :param use_corrected: use color-corrected images; also disables the
        "best of mask / inverted mask" Dice fallback
    :param path: NOTE(review): immediately overwritten below — the passed
        value is never used
    :param custom: model has no label head
    :param reg: regression mode — no Dice statistics are computed
    """
    datatype = dataset
    dt = 'valid'
    folder = None
    path = './data'  # NOTE(review): unconditionally clobbers the parameter
    aug = get_validation_augmentation() if use_log else get_test_augmentation()
    if dataset == 'crf':
        folder = 'dataset_crf/realworld'
        dataset = MIDataset(datatype=dt,
                            folder=folder,
                            special_folder=images_path,
                            transforms=aug,
                            preprocessing=preproc,
                            use_mask=False,
                            use_corrected=use_corrected,
                            dataset='crf',
                            log_transform=use_log)

    elif dataset == 'test':
        folder = 'test/whatsapp'
        dataset = MIDataset(datatype='test',
                            folder=folder,
                            special_folder=images_path,
                            transforms=aug,
                            preprocessing=preproc,
                            use_mask=False,
                            use_corrected=use_corrected,
                            dataset='test',
                            log_transform=use_log)

    elif dataset == 'projector_test':
        folder = 'both'
        # Hard-coded local Windows path for the projector experiment.
        path = 'G:\\fax\\diplomski\\Datasets\\third\\ambient'
        dataset = MIDataset(path=path,
                            datatype='test',
                            folder=folder,
                            special_folder=images_path,
                            transforms=aug,
                            preprocessing=preproc,
                            use_mask=False,
                            use_corrected=use_corrected,
                            dataset='test',
                            log_transform=use_log)

    else:
        folder = 'dataset_relighted/valid'
        dataset = MIDataset(datatype=dt,
                            folder='dataset_relighted/valid',
                            special_folder=images_path,
                            transforms=get_validation_augmentation(),
                            preprocessing=preproc,
                            use_mask=False,
                            use_corrected=use_corrected,
                            dataset='cube',
                            log_transform=use_log)
    # One image per batch, in a fixed order.
    loader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)

    dl = ls.DiceLoss()

    def dice(x, y):
        # Dice similarity coefficient = 1 - Dice loss.
        return 1 - dl(x, y)

    def sigmoid(x):
        # Element-wise logistic function (hand-rolled torch.sigmoid).
        return 1 / (1 + torch.exp(-x))

    def save_mask(name, mask):
        # Reload the original (un-augmented) image to recover the source
        # resolution, resize the predicted mask back to it, and write the
        # result under a pmasks6* folder whose suffix encodes the eval flags.
        image, _, _ = load_img_and_gt_crf_dataset(name,
                                                  path=path,
                                                  folder=folder,
                                                  dataset=datatype,
                                                  use_corrected=use_corrected,
                                                  rotate=False,
                                                  use_mask=False)
        # `mask` indexing implies a 4-D tensor; dims 2/3 are treated as
        # spatial (NCHW presumed — TODO confirm).
        # Portrait images use swapped scale factors.
        if image.shape[0] > image.shape[1]:
            fx = image.shape[0] / mask.shape[3]
            fy = image.shape[1] / mask.shape[2]
        else:
            fx = image.shape[1] / mask.shape[3]
            fy = image.shape[0] / mask.shape[2]
        rot_mask = cv2.resize(mask_to_image(to_np_img(mask)), (0, 0),
                              fx=fx,
                              fy=fy)
        if image.shape[0] > image.shape[1]:
            # NOTE(review): flip(-1) + flip(0) + ROTATE_180 presumably undoes
            # the orientation applied at load time for portrait images —
            # confirm against the dataset loader.
            rot_mask = cv2.flip(rot_mask, -1)
            rot_mask = cv2.flip(rot_mask, 0)
            rot_mask = cv2.rotate(rot_mask, cv2.ROTATE_180)
        fld = f'{path}/{folder}/pmasks6{"-cor" if use_corrected else ""}{"-custom" if custom else ""}{"-reg" if reg else ""}'
        if not os.path.exists(fld):
            os.mkdir(fld)
        rot_mask = cv2.cvtColor(rot_mask, cv2.COLOR_RGB2BGR)
        cv2.imwrite(f'{fld}/{name}', rot_mask)

    # Test-type data carries no ground-truth masks: just save and plot.
    if datatype == 'test' or dt == 'test':
        for idx, (name, data, gs) in enumerate(loader):
            if not custom:
                p_mask, label = model(data)
            else:
                p_mask = model(data)
            # Near-binary outputs are clamped to [0, 1]; larger raw logits
            # (max >= 3) are left untouched — TODO confirm intent.
            if p_mask.max() < 3:
                p_mask = torch.clamp(p_mask, 0, 1)
            sig_mask = sigmoid(p_mask)
            save_mask(name[0], p_mask)
            plot(data, gs, sig_mask, p_mask, use_log, reg, use_mixture=True)
            torch.cuda.empty_cache()
        return
    # Labeled data: accumulate Dice on the clamped and sigmoided masks.
    dices = []
    sig_dices = []
    for batch_idx, (data, mask, gt) in enumerate(loader):
        # The first element bundles the image with an extra tensor `gs`
        # (presumably a grayscale/shading map — TODO confirm).
        data, gs = data
        if not custom:
            p_mask, label = model(data)
        else:
            p_mask = model(data)
        # save_mask(str(batch_idx), sig_mask)
        if not reg:
            p_mask_clamp = torch.clamp(p_mask, 0, 1)
            sig_mask = sigmoid(p_mask)
            plot(data, gs, mask, sig_mask, use_log, reg, use_mixture=True)
            # Without corrected GT the mask polarity is ambiguous, so score
            # the better of the mask and its complement.
            dc = dice(mask, p_mask_clamp) if use_corrected else max(
                dice(mask, p_mask_clamp), dice(1 - mask, p_mask_clamp))
            dices.append(dc.item())
            dc_sig = dice(mask, sig_mask) if use_corrected else max(
                dice(mask, sig_mask), dice(1 - mask, sig_mask))
            sig_dices.append(dc_sig.item())
        else:
            plot(data, gs, p_mask, gt, use_log, reg, use_mixture=True)
        # print(dc)
    if not reg:
        # Summary statistics for clamped and sigmoided predictions.
        print(folder, path, use_corrected)
        print(
            f'Mean: {np.array(dices).mean()}\t Trimean: {stats.trimean(dices)}\t Median: {stats.median(dices)}'
        )
        print(
            f'Mean: {np.array(sig_dices).mean()}\t Trimean: {stats.trimean(sig_dices)}\t Median: {stats.median(sig_dices)}'
        )
        print(
            '--------------------------------------------------------------------------------'
        )
# Ejemplo n.º 4 (Example no. 4) — snippet score: 0
# Patch-level data loading for the relighted "cube" dataset.
num_workers = 0
bs = 64

# Training split over the base relighted images.
train_dataset = MIPatchedDataset(folder='dataset_relighted',
                                 datatype='train',
                                 dataset='cube',
                                 transforms=get_training_augmentation(patch_size, patch_size),
                                 use_mask=False,
                                 log_transform=use_log)
# Validation split over the held-out relighted images.
valid_dataset = MIPatchedDataset(folder="dataset_relighted/valid",
                                 datatype='valid',
                                 dataset='cube',
                                 transforms=get_validation_augmentation(patch_size, patch_size),
                                 use_mask=False,
                                 log_transform=use_log)

# Batched loaders; only the training data is shuffled.
train_loader = DataLoader(train_dataset, batch_size=bs, shuffle=True,
                          num_workers=num_workers)
valid_loader = DataLoader(valid_dataset, batch_size=bs, shuffle=False,
                          num_workers=num_workers)

loaders = dict(train=train_loader, valid=valid_loader)