Example 1
def make_dataloaders(cfg, train_folds, test_fold, batch_size):

    _, transf_normal = im_utils.make_data_aug(cfg)

    dls_train = []
    for train_fold in train_folds:
        dl = BaseDataset(train_fold['in_path'], normalization=transf_normal)
        if (train_fold['run_path'] is not None):
            segs = np.load(pjoin(train_fold['run_path'],
                                 'results.npz'))['ksp_scores_mat']
            dl.truths = [segs[..., i] for i in range(segs.shape[-1])]
        dls_train.append(dl)

    dl_train_val = ConcatDataset(dls_train)

    # keep 5% of training frames as validation
    idx_train_val = np.random.permutation(len(dl_train_val))
    n_train = int(len(idx_train_val) * 0.95)
    idx_train = idx_train_val[:n_train]
    idx_val = idx_train_val[n_train:]

    collate_fn = dls_train[0].collate_fn
    dl_train = DataLoader(dl_train_val,
                          batch_size=batch_size,
                          sampler=SubsetRandomSampler(idx_train),
                          collate_fn=collate_fn,
                          drop_last=True)
    dl_val = DataLoader(dl_train_val,
                        batch_size=batch_size,
                        sampler=SubsetRandomSampler(idx_val),
                        collate_fn=collate_fn,
                        drop_last=True)
    dl_test = DataLoader(BaseDataset(test_fold['in_path'],
                                     normalization=transf_normal),
                         collate_fn=collate_fn)
    dl_prev = DataLoader(BaseDataset(test_fold['in_path']),
                         collate_fn=collate_fn,
                         sampler=SubsetRandomSampler(
                             np.random.choice(len(dl_test), 5)),
                         batch_size=1)

    dataloaders = {
        'train': dl_train,
        'val': dl_val,
        'test': dl_test,
        'prev': dl_prev
    }

    return dataloaders
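
A standalone sketch of the 95%/5% split and SubsetRandomSampler pattern used above, with a toy TensorDataset standing in for the concatenated training folds; the dataset size and batch size are arbitrary, not values taken from the project.

import numpy as np
import torch
from torch.utils.data import DataLoader, TensorDataset, SubsetRandomSampler

# Toy dataset standing in for ConcatDataset(dls_train).
dset = TensorDataset(torch.arange(200).float().unsqueeze(1))

# Random permutation, then a 95% / 5% split by slicing.
idx = np.random.permutation(len(dset))
n_train = int(len(idx) * 0.95)
idx_train, idx_val = idx[:n_train], idx[n_train:]

dl_train = DataLoader(dset, batch_size=8,
                      sampler=SubsetRandomSampler(idx_train), drop_last=True)
dl_val = DataLoader(dset, batch_size=8,
                    sampler=SubsetRandomSampler(idx_val), drop_last=True)
print(len(dl_train), len(dl_val))  # number of batches in each split
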
Example 2
    def extract(self,
                root_path,
                save_path):

        flows_bvx = []
        flows_bvy = []
        flows_fvx = []
        flows_fvy = []


        if(os.path.isfile(save_path)):
            self.logger.info("Output file {} exists.".format(save_path))
        else:
            dset = BaseDataset(root_path)
            self.logger.info('Precomputing the optical flows...')
            for f in np.arange(1, len(dset)):
                self.logger.info('{}/{}'.format(f, len(dset)))
                im1 = dset[f-1]['image'] / 255.
                im2 = dset[f]['image'] / 255.
                fvx, fvy, _ = pyflow.coarse2fine_flow(im1,
                                                      im2,
                                                      self.alpha,
                                                      self.ratio,
                                                      self.minWidth,
                                                      self.nOuterFPIterations,
                                                      self.nInnerFPIterations,
                                                      self.nSORIterations,
                                                      0)
                bvx, bvy, _ = pyflow.coarse2fine_flow(im2,
                                                      im1,
                                                      self.alpha,
                                                      self.ratio,
                                                      self.minWidth,
                                                      self.nOuterFPIterations,
                                                      self.nInnerFPIterations,
                                                      self.nSORIterations,
                                                      0)
                flows_bvx.append(bvx.astype(np.float32))
                flows_bvy.append(bvy.astype(np.float32))
                flows_fvx.append(fvx.astype(np.float32))
                flows_fvy.append(fvy.astype(np.float32))

            flows_bvx = np.asarray(flows_bvx).transpose(1, 2, 0)
            flows_bvy = np.asarray(flows_bvy).transpose(1, 2, 0)
            flows_fvx = np.asarray(flows_fvx).transpose(1, 2, 0)
            flows_fvy = np.asarray(flows_fvy).transpose(1, 2, 0)
            self.logger.info('Optical flow calculations done')

            self.logger.info('Saving optical flows to {}'.format(save_path))

            data = dict()
            data['bvx'] = flows_bvx
            data['bvy'] = flows_bvy
            data['fvx'] = flows_fvx
            data['fvy'] = flows_fvy
            np.savez(save_path, **data)

            self.logger.info('Done.')
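
For reference, the file written above holds four float32 stacks of shape (H, W, n_frames - 1) under the keys 'fvx', 'fvy', 'bvx', 'bvy' (forward/backward flow, x/y components). A minimal round-trip sketch with dummy data and an in-memory buffer; the shapes are made up:

import io
import numpy as np

# Dummy flows with the same layout as the extractor's output.
dummy = {k: np.zeros((8, 8, 3), dtype=np.float32)
         for k in ('fvx', 'fvy', 'bvx', 'bvy')}

buf = io.BytesIO()
np.savez(buf, **dummy)
buf.seek(0)

flows = np.load(buf)
print(flows['fvx'].shape)  # (8, 8, 3): x component of the forward flow
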
Example 3
    def run(self, do_save=True):
        """
        Makes centroids and contours
        """

        dset = BaseDataset(self.root_path)
        if (not os.path.exists(pjoin(self.desc_path, 'sp_labels.npy'))):

            print('Running SLIC on {} images with {} labels'.format(
                len(dset), self.n_segments))
            labels = np.array([
                segmentation.slic(s['image'],
                                  n_segments=self.n_segments,
                                  compactness=self.compactness,
                                  start_label=0) for s in dset
            ]).astype(np.uint16)
            print('Saving labels to {}'.format(self.desc_path))
            np.save(os.path.join(self.desc_path, 'sp_labels.npy'), labels)

        if (not os.path.exists(pjoin(self.desc_path,
                                     'sp_labels_contours.npz'))):
            self.labels_contours_ = list()
            print("Generating label contour maps")

            for l in self.labels:
                # Label values are not always contiguous (some values may be skipped).
                self.labels_contours_.append(
                    segmentation.find_boundaries(l).astype(bool))

            self.labels_contours_ = np.array(self.labels_contours_)
            print("Saving labels")
            data = dict()
            data['labels_contours'] = self.labels_contours
            np.savez(os.path.join(self.desc_path, 'sp_labels_contours.npz'),
                     **data)

        if (not os.path.exists(pjoin(self.desc_path, 'centroids_loc_df.p'))):
            print('Getting centroids...')
            self.centroids_loc_ = sputls.getLabelCentroids(self.labels)

            self.centroids_loc_.to_pickle(
                os.path.join(self.desc_path, 'centroids_loc_df.p'))

        if (do_save and
                not os.path.exists(pjoin(self.desc_path, 'spix_previews'))):
            print('Saving slic previews to {}'.format(
                pjoin(self.desc_path, 'spix_previews')))
            previews_dir = os.path.join(self.desc_path, 'spix_previews')
            if (not os.path.exists(previews_dir)):
                os.makedirs(previews_dir)
            for i, sample in enumerate(dset):
                fname = os.path.join(previews_dir,
                                     'frame_{0:04d}.png'.format(i))

                im = sputls.drawLabelContourMask(sample['image'],
                                                 self.labels[i, ...])
                io.imsave(fname, im)
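
The per-frame work above boils down to two scikit-image calls, segmentation.slic followed by segmentation.find_boundaries. A self-contained single-image sketch, assuming a scikit-image version that accepts start_label; the parameter values are arbitrary:

from skimage import data, segmentation

# One frame: SLIC superpixels, then a boolean contour map of their boundaries.
im = data.astronaut()
labels = segmentation.slic(im, n_segments=300, compactness=10, start_label=0)
contours = segmentation.find_boundaries(labels).astype(bool)
print(labels.shape, int(labels.max()) + 1, int(contours.sum()))
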
Example 4
    def calc_superpix(self, compactness, n_segments, do_save=True):
        """
        Makes centroids and contours
        """

        if (not os.path.exists(pjoin(self.desc_path, 'sp_labels.npz'))):
            dset = BaseDataset(self.root_path, got_labels=False)

            self.logger.info('Running SLIC on {} images with {} labels'.format(
                len(dset), n_segments))
            labels = np.array([
                segmentation.slic(s['image'],
                                  n_segments=n_segments,
                                  compactness=compactness) for s in dset
            ])
            labels = np.rollaxis(labels, 0, 3)
            self.logger.info('Saving labels to {}'.format(self.desc_path))
            np.savez(os.path.join(self.desc_path, 'sp_labels.npz'),
                     **{'sp_labels': labels})

            self.labels_contours_ = list()
            self.logger.info("Generating label contour maps")

            for im in range(self.labels.shape[2]):
                # Label values are not always contiguous (some values may be skipped).
                self.labels_contours_.append(
                    segmentation.find_boundaries(self.labels[:, :, im]))

            self.labels_contours_ = np.array(self.labels_contours_)
            self.logger.info("Saving labels")
            data = dict()
            data['labels_contours'] = self.labels_contours
            np.savez(os.path.join(self.desc_path, 'sp_labels_contours.npz'),
                     **data)

            if (do_save):
                self.logger.info('Saving slic previews to {}'.format(
                    pjoin(self.desc_path, 'spix_previews')))
                previews_dir = os.path.join(self.desc_path, 'spix_previews')
                if (not os.path.exists(previews_dir)):
                    os.makedirs(previews_dir)
                for i, sample in enumerate(dset):
                    fname = os.path.join(previews_dir,
                                         'frame_{0:04d}.png'.format(i))

                    im = sputls.drawLabelContourMask(sample['image'],
                                                     self.labels[..., i])
                    io.imsave(fname, im)

            self.logger.info('Getting centroids...')
            self.centroids_loc_ = sputls.getLabelCentroids(self.labels)

            self.centroids_loc_.to_pickle(
                os.path.join(self.desc_path, 'centroids_loc_df.p'))
        else:
            self.logger.info(
                "Superpixels were already computed. Delete to re-run.")
Example 5
    def extract(self, root_path, save_path):

        flows_bvx = []
        flows_bvy = []
        flows_fvx = []
        flows_fvy = []

        paths = [
            os.path.join(save_path, 'flows_{}.npy'.format(f))
            for f in ['fvx', 'fvy', 'bvx', 'bvy']
        ]
        exists = [os.path.exists(p) for p in paths]

        if (np.sum(exists) == 4):
            self.logger.info("Flows are already computed.")
        else:
            dset = BaseDataset(root_path)
            self.logger.info('Precomputing the optical flows...')
            for f in np.arange(1, len(dset)):
                self.logger.info('{}/{}'.format(f, len(dset)))
                im1 = dset[f - 1]['image'] / 255.
                im2 = dset[f]['image'] / 255.
                fvx, fvy, _ = pyflow.coarse2fine_flow(im1, im2, self.alpha,
                                                      self.ratio,
                                                      self.minWidth,
                                                      self.nOuterFPIterations,
                                                      self.nInnerFPIterations,
                                                      self.nSORIterations, 0)
                bvx, bvy, _ = pyflow.coarse2fine_flow(im2, im1, self.alpha,
                                                      self.ratio,
                                                      self.minWidth,
                                                      self.nOuterFPIterations,
                                                      self.nInnerFPIterations,
                                                      self.nSORIterations, 0)
                flows_bvx.append(bvx.astype(np.float32))
                flows_bvy.append(bvy.astype(np.float32))
                flows_fvx.append(fvx.astype(np.float32))
                flows_fvy.append(fvy.astype(np.float32))

            flows_bvx = np.asarray(flows_bvx).transpose(1, 2, 0)
            flows_bvy = np.asarray(flows_bvy).transpose(1, 2, 0)
            flows_fvx = np.asarray(flows_fvx).transpose(1, 2, 0)
            flows_fvy = np.asarray(flows_fvy).transpose(1, 2, 0)
            self.logger.info('Optical flow calculations done')

            self.logger.info('Saving optical flows to {}'.format(save_path))

            np.save(os.path.join(save_path, 'flows_fvx.npy'), flows_fvx)
            np.save(os.path.join(save_path, 'flows_fvy.npy'), flows_fvy)
            np.save(os.path.join(save_path, 'flows_bvx.npy'), flows_bvx)
            np.save(os.path.join(save_path, 'flows_bvy.npy'), flows_bvy)

            self.logger.info('Done.')
Example 6
def main(cfg):
    for idx in cfg.dset_idx:
        run_path = pjoin(cfg.out_root, 'Dataset' + idx)
        if (not os.path.exists(run_path)):
            os.makedirs(run_path)

        df_path = pjoin(run_path, 'scores.csv')

        print('run_path: {}'.format(run_path))

        if (not os.path.exists(df_path)):
            truths = []
            truths_sp = []
            in_path = pjoin(cfg.in_root, 'Dataset' + idx)
            dl = BaseDataset(in_path)
            pbar = tqdm.tqdm(total=len(dl))
            for s in dl:
                labels = s['labels'][..., 0]
                truth = s['label/segmentation'][..., 0]
                regions = regionprops(labels + 1, intensity_image=truth)
                pos = np.array([p['mean_intensity'] > 0.5
                                for p in regions])[..., None]
                mapping = np.concatenate((np.unique(labels)[..., None], pos),
                                         axis=1)

                _, ind = np.unique(labels, return_inverse=True)
                truth_sp = mapping[ind, 1:].reshape(labels.shape)
                truths.append(truth)
                truths_sp.append(truth_sp)
                pbar.update(1)
            pbar.close()

            print('computing f1 to {}'.format(df_path))

            f1 = f1_score(
                np.array(truths).ravel(),
                np.array(truths_sp).ravel())
            data = {'f1': f1}
            df = pd.Series(data)
            df.to_csv(df_path)
        else:
            print('score file {} exists'.format(df_path))
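
The loop above assigns each superpixel a binary label by majority vote over the ground truth it covers. A self-contained sketch of that step on a toy 4x4 frame, using the same regionprops-based computation:

import numpy as np
from skimage.measure import regionprops

# Toy 4x4 frame: two superpixels (0 and 1) and a binary ground truth.
labels = np.array([[0, 0, 1, 1],
                   [0, 0, 1, 1],
                   [0, 0, 1, 1],
                   [0, 0, 1, 1]])
truth = np.array([[0, 0, 1, 1],
                  [0, 0, 1, 0],
                  [0, 0, 1, 1],
                  [0, 0, 0, 1]], dtype=float)

# A superpixel becomes positive when more than half of its pixels are
# foreground in the ground truth (regionprops needs labels >= 1).
regions = regionprops(labels + 1, intensity_image=truth)
pos = np.array([r.mean_intensity > 0.5 for r in regions])

# Broadcast the per-superpixel vote back to pixel resolution.
_, inv = np.unique(labels, return_inverse=True)
truth_sp = pos[inv].reshape(labels.shape)
print(truth_sp.astype(int))
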
Example 7
def main(cfg):

    logger = logging.getLogger('comp_ksp')

    out_path = pjoin(cfg.out_path, cfg.exp_name)
    logger.info('Writing scores to: ' + out_path)

    res = np.load(os.path.join(out_path, 'results.npz'))

    dset = BaseDataset(cfg.in_path, resize_shape=512)

    truths = np.array([s['label/segmentation'] for s in dset])

    fpr, tpr, _ = roc_curve(truths.ravel(), res['ksp_scores_mat'].ravel())
    precision, recall, _ = precision_recall_curve(
        truths.ravel(), res['ksp_scores_mat'].ravel())
    f1 = f1_score(truths.ravel(), res['ksp_scores_mat'].ravel())

    data = {
        'f1_ksp': f1,
        'fpr_ksp': fpr[1],
        'tpr_ksp': tpr[1],
        'pr_ksp': precision[1],
        'rc_ksp': recall[1]
    }

    precision, recall, _ = precision_recall_curve(truths.ravel(),
                                                  res['pm_scores_mat'].ravel())
    fpr, tpr, _ = roc_curve(truths.ravel(), res['pm_scores_mat'].ravel())
    # Best F1 over all thresholds; nanmax guards against 0/0 points on the curve.
    f1 = np.nanmax(2 * (precision * recall) / (precision + recall))
    auc_ = auc(fpr, tpr)

    data.update({'f1_pm': f1, 'auc_pm': auc_})

    df = pd.Series(data)
    df.to_csv(pjoin(out_path, 'scores.csv'))

    data = {'pr_pm': precision, 'rc_pm': recall, 'tpr_pm': tpr, 'fpr_pm': fpr}

    np.savez(pjoin(out_path, 'scores_curves.npz'), **data)
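
The probability-map score here is the best F1 over all thresholds of the precision-recall curve, i.e. the maximum of 2PR/(P+R). A toy sketch of that computation with made-up labels and scores:

import numpy as np
from sklearn.metrics import auc, precision_recall_curve, roc_curve

y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0])
scores = np.array([0.1, 0.4, 0.35, 0.8, 0.7, 0.2, 0.9, 0.6])

precision, recall, _ = precision_recall_curve(y_true, scores)
fpr, tpr, _ = roc_curve(y_true, scores)

# Best achievable F1 over all operating points, plus the ROC AUC.
f1_pm = np.nanmax(2 * precision * recall / (precision + recall))
auc_pm = auc(fpr, tpr)
print(f1_pm, auc_pm)
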
Example 8
import numpy as np
import matplotlib.pyplot as plt
from os.path import join as pjoin
from ksptrack.utils.base_dataset import BaseDataset

root_path = '/home/ubelix/lejeune/data/medical-labeling/'
dset_dir = 'Dataset00'
desc_dir = 'precomp_desc'
frame = 91

dset = BaseDataset(pjoin(root_path, dset_dir))

labels = np.load(pjoin(root_path, dset_dir, desc_dir,
                       'sp_labels.npz'))['sp_labels']
plt.subplot(121)
plt.imshow(labels[..., frame] == 406)
plt.subplot(122)
plt.imshow(dset[frame]['image'])
plt.show()
Example 9
    return im, labels, label


def get_hoof(df, f, l, dir_):
    df = df.loc[(df['frame'] == f) & (df['label'] == l)]['hoof_' + dir_]
    return df.to_numpy()[0]


root_path = '/home/ubelix/lejeune/data/medical-labeling/Dataset00'

dm = DataManager(root_path)
hoof_extr = HOOFExtractor(root_path, dm.desc_dir, dm.labels)
hoof_extr.make_hoof()

dset = BaseDataset(root_path)

dir_ = 'backward'
f0 = 118
f1 = f0 - 1
r, c = 200, 550

im0, labels0, l0 = make_image(dset[f0], r, c)
im1, labels1, l1 = make_image(dset[f1], r, c)
h0 = get_hoof(hoof_extr.hoof, f0, l0, dir_)
h1 = get_hoof(hoof_extr.hoof, f1, l1, dir_)

plt.subplot(221)
plt.imshow(im0)
plt.subplot(222)
plt.imshow(im1)
Example 10
    p = params.get_params()

    p.add('--out-root', required=True)
    p.add('--in-root', required=True)
    p.add('--dsets-idx', nargs='+', required=True)

    cfg = p.parse_args()

    dset_dirs = ['Dataset' + idx for idx in cfg.dsets_idx]

    types = {'0': 'Tweezer', '1': 'Cochlea', '2': 'Slitlamp', '3': 'Brain'}
    colors = {'0': True, '1': False, '2': True, '3': False}
    idx = ['A', 'B', 'C', 'D']

    loaders = [BaseDataset(pjoin(cfg.in_root, dir_)) for dir_ in dset_dirs]

    fields = []

    for l in loaders:
        prefix = os.path.split(l.root_path)[-1][-2]
        f = {
            # 'Type': types[prefix],
            # 'Name': idx[int(os.path.split(l.root_path)[-1][-2])],
            'Height': l[0]['image'].shape[0],
            'Width': l[0]['image'].shape[1],
            'Color': colors[prefix]
        }

        fields.append(f)