def _fix_over_under_segmentation_from_nuclei():
    args = parse()
    scaling = np.array(args.scaling)

    cell_seg, voxel_size = smart_load(args.seg_path, key='segmentation')
    if abs(np.prod(scaling) - 1) > 1e-8:
        print(" -scaling boundary predictions")
        cell_seg = zoom(cell_seg, scaling, order=0)
        # the voxel size changes inversely with the zoom factor
        voxel_size = voxel_size / scaling
    cell_seg_shape = np.array(cell_seg.shape)

    nuclei_seg, _ = smart_load(args.nuclei_seg_path)
    nuclei_seg_shape = np.array(nuclei_seg.shape)
    if not np.allclose(nuclei_seg_shape, cell_seg_shape):
        print(
            f" -resampling nuclei segmentation from shape {nuclei_seg_shape} to the cell segmentation shape {cell_seg_shape}"
        )
        nuclei_seg = zoom(nuclei_seg,
                          cell_seg_shape / nuclei_seg_shape,
                          order=0)

    if args.boundaries_path is not None:
        boundaries, _ = smart_load(args.boundaries_path, key='predictions')
        boundaries = boundaries[0] if boundaries.ndim == 4 else boundaries
        boundaries_shape = np.array(boundaries.shape)
        if not np.allclose(boundaries_shape, cell_seg_shape):
            # fix boundary shape if necessary
            print(
                f" -resampling boundaries from shape {boundaries_shape} to the cell segmentation shape {cell_seg_shape}"
            )
            boundaries = zoom(boundaries,
                              cell_seg_shape / boundaries_shape,
                              order=1)

    else:
        boundaries = None

    # merge over-segmented cells and split under-segmented ones, guided by the nuclei
    fix_cell_seg = fix_over_under_segmentation_from_nuclei(
        cell_seg,
        nuclei_seg,
        threshold_merge=args.t_merge,
        threshold_split=args.t_split,
        quantiles_nuclei=(args.quantiles[0], args.quantiles[1]),
        boundary=boundaries)

    base, ext = os.path.splitext(args.seg_path)
    out_path = f'{base}_nuclei_fixed'
    if args.export_h5:
        out_path += ".h5"
        create_h5(out_path,
                  fix_cell_seg,
                  key='segmentation',
                  voxel_size=voxel_size)
    else:
        out_path += ".tiff"
        create_tiff(out_path, fix_cell_seg, voxel_size=voxel_size)
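

# The resampling pattern used throughout these scripts: compute per-axis zoom
# factors from the source and target shapes and interpolate with
# scipy.ndimage.zoom, using order=0 for label volumes (nearest neighbour, so no
# new labels are invented) and order=1 for boundary/probability maps. Below is a
# minimal, self-contained sketch of that pattern; the helper name is hypothetical.
import numpy as np
from scipy.ndimage import zoom


def match_shape(stack, target_shape, is_labels=True):
    """Resample `stack` so that its shape matches `target_shape`."""
    factors = np.array(target_shape) / np.array(stack.shape)
    return zoom(stack, factors, order=0 if is_labels else 1)
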
def _seeded_ws_from_nuclei_seg():
    args = parse()
    scaling_pre = np.array(args.scaling)

    boundaries, voxel_size = smart_load(args.boundaries_path, key='predictions')
    assert boundaries.ndim in {3, 4}
    boundaries = boundaries[0] if boundaries.ndim == 4 else boundaries

    if abs(np.prod(scaling_pre) - 1) > 1e-8:
        print(" -scaling boundary predictions")
        boundaries = zoom(boundaries, scaling_pre, order=1)
        # the voxel size changes inversely with the zoom factor
        voxel_size = voxel_size / scaling_pre
    boundaries_shape = np.array(boundaries.shape)

    nuclei_seg, _ = smart_load(args.nuclei_path)
    assert nuclei_seg.ndim in {3, 4}
    nuclei_seg = nuclei_seg[0] if nuclei_seg.ndim == 4 else nuclei_seg
    nuclei_pmap_shape = np.array(nuclei_seg.shape)

    if not np.allclose(nuclei_pmap_shape, boundaries_shape):
        print(f" -fix nuclei predictions shape {nuclei_pmap_shape} to boundary predictions size, {boundaries_shape}")
        nuclei_seg = zoom(nuclei_seg, boundaries_shape / nuclei_pmap_shape, order=0)

    # normalise boundary predictions to [0, 1]
    boundaries = boundaries.astype(np.float32)
    boundaries = boundaries / np.max(boundaries)

    # run the seeded watershed using the nuclei labels as seeds
    nuclei_seg = nuclei_seg.astype('uint32')
    cell_seg, _ = seeded_dt_ws(boundaries, 0.5, nuclei_seg)
    nuclei_seg = nuclei_seg.astype('uint16')
    cell_seg = cell_seg.astype('uint16')

    base, _ = os.path.splitext(args.boundaries_path)
    base_dir, filename = os.path.split(base)

    res_dir = f'{base_dir}/seeded_ws'
    print(f" -preparing all results in {res_dir}")
    os.makedirs(res_dir, exist_ok=True)
    boundaries = boundaries[None, ...] if boundaries.ndim == 3 else boundaries
    boundaries_path = f'{res_dir}/{filename}_boundaries_predictions.h5'
    create_h5(boundaries_path, boundaries, key='predictions', voxel_size=voxel_size, mode='w')

    nuclei_path = f'{res_dir}/{filename}_nuclei_predictions.h5'
    create_h5(nuclei_path, nuclei_seg, key='segmentation', voxel_size=voxel_size, mode='w')

    cell_path = f'{res_dir}/{filename}_seeded_ws_cell_segmentation.h5'
    create_h5(cell_path, cell_seg, key='segmentation', voxel_size=voxel_size, mode='w')
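

# `seeded_dt_ws` above is the packaged seeded (distance-transform) watershed. The
# sketch below shows the same idea with scikit-image only: the nuclei labels are
# grown along the boundary probability map. It is an illustrative assumption, not
# the implementation used above (in particular it skips the distance transform
# and the 0.5 threshold argument).
import numpy as np
from skimage.segmentation import watershed


def seeded_ws_sketch(boundary_pmap, nuclei_seeds):
    # each nucleus label floods outwards and stops where the boundary probability is high
    return watershed(boundary_pmap.astype(np.float32), markers=nuclei_seeds.astype(np.uint32))
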
def _fix_mgx_seg_export():
    args = parse()
    segmentation, voxel_size = smart_load(args.seg_path, key=args.seg_dataset)
    # mirror along axis 1 to undo the axis flip introduced by the MorphoGraphX export
    segmentation = segmentation[:, ::-1]

    out_path = os.path.splitext(args.seg_path)[0]
    if args.export_h5:
        out_path += ".h5"
        create_h5(out_path,
                  segmentation,
                  key=args.seg_dataset,
                  voxel_size=voxel_size)
    else:
        out_path += ".tiff"
        create_tiff(out_path, segmentation, voxel_size=voxel_size)
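

# `create_h5` is provided by the surrounding package; below is a minimal
# h5py-based sketch of what such a writer typically does. The dataset layout and
# the voxel-size attribute name ('element_size_um') are assumptions, not the
# packaged implementation.
import h5py


def create_h5_sketch(path, stack, key, voxel_size=None, mode='a'):
    with h5py.File(path, mode) as f:
        if key in f:
            del f[key]  # overwrite an existing dataset with the same key
        dataset = f.create_dataset(key, data=stack, compression='gzip')
        if voxel_size is not None:
            dataset.attrs['element_size_um'] = voxel_size
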
def main():
    args = parse()
    seg_array, _ = smart_load(args.path_seg, key=args.dataset_seg)
    gt_array, _ = smart_load(args.path_gt, key=args.dataset_gt)
    _ = run_evaluation(gt_array, seg_array, remove_background=True)


def format_out_name(path, base, ext):
    # build the output name under `base`, keeping the input file name and swapping its extension
    # (the name derivation is an assumption: the original header of this helper was truncated)
    name = os.path.splitext(os.path.basename(path))[0]
    out_path = os.path.join(base, f'{name}{ext}')
    return out_path


if __name__ == '__main__':
    args = parse()
    path = args.path
    base = args.new_base
    to_tiff = args.to_tiff
    to_h5 = args.to_h5
    h5_dataset = args.h5_dataset
    crop = args.crop

    paths = glob.glob(path)
    for _path in paths:
        stack, voxel_size = smart_load(_path, key=h5_dataset)
        stack = crop_image(stack, start=crop[:3], end=crop[3:])
        if to_h5:
            out_path = format_out_name(_path, base, ext='.h5')
            create_h5(out_path, stack, h5_dataset, voxel_size)

        elif to_tiff:
            out_path = format_out_name(_path, base, ext='.tiff')
            create_tiff(out_path, stack, voxel_size)

        else:
            print(
                'please specify the conversion direction by passing either the to_tiff or the to_h5 flag'
            )
            raise NotImplementedError
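

# `crop_image` above receives six numbers from the CLI (start and end corners of
# the crop). A minimal slicing-based sketch of that behaviour; the name and the
# exact semantics of the packaged helper are assumptions.
def crop_image_sketch(stack, start=(0, 0, 0), end=(None, None, None)):
    z0, y0, x0 = start
    z1, y1, x1 = end
    return stack[z0:z1, y0:y1, x0:x1]
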
def main(args=None):

    if args is None:
        args = parse()

    # copy stain
    raw, voxel_size = smart_load(args.stain)
    raw_shape = raw.shape
    create_h5(args.out,
              raw.astype('float32'),
              'nuclei_stain',
              voxel_size=voxel_size,
              mode='w')

    if args.reporter is not None:
        stack, _ = smart_load(args.reporter)
        scale = np.array(raw_shape) / np.array(stack.shape)
        if abs(np.prod(scale) - 1) > 1e-8:
            stack = zoom(stack, scale, order=0)
        create_h5(args.out,
                  stack.astype('float32'),
                  'nuclei_reporter',
                  voxel_size=voxel_size,
                  mode='a')
        stack = np.stack([raw, stack], axis=3)
        create_h5(args.out,
                  stack.astype('float32'),
                  'raw_merged',
                  voxel_size=voxel_size,
                  mode='a')

    if args.seg is not None:
        stack, _ = smart_load(args.seg)

        if args.flip:
            stack = stack[:, ::-1, :]

        scale = np.array(raw_shape) / np.array(stack.shape)
        if abs(np.prod(scale) - 1) > 1e-8:
            stack = zoom(stack, scale, order=0)

        # set the boundary voxels to background so that neighbouring cells do not touch
        boundary = find_boundaries(stack)
        stack[boundary] = 0

        # foreground mask: every voxel that still belongs to a cell
        mask = stack != 0

        create_h5(args.out,
                  stack.astype('uint32'),
                  'segmentation',
                  voxel_size=voxel_size,
                  mode='a')
        create_h5(args.out,
                  mask.astype('uint32'),
                  'segmentation_mask',
                  voxel_size=voxel_size,
                  mode='a')

        if args.csv is not None:
            csv_ids, csv_labels = import_labels_csv(args.csv)
            atlas = map_cell_features2segmentation(stack, csv_ids, csv_labels)
            create_h5(args.out,
                      atlas.astype('uint32'),
                      'atlas',
                      voxel_size=voxel_size,
                      mode='a')
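

# `map_cell_features2segmentation` paints a per-cell value (here the class label
# read from the csv) onto every voxel of the corresponding cell. A minimal numpy
# lookup-table sketch of that idea; this is an assumption, not the packaged
# function.
import numpy as np


def map_labels_sketch(segmentation, cell_ids, cell_values):
    # lut[cell_id] -> value; the background (label 0) stays 0
    lut = np.zeros(int(segmentation.max()) + 1, dtype=np.uint32)
    lut[np.asarray(cell_ids, dtype=np.int64)] = cell_values
    return lut[segmentation]
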
def _prepare_stack_for_lmc():
    args = parse()
    assert args.seg_mode != args.seg2pmap, "seg-mode and seg2pmaps are incompatible"
    scaling_pre = np.array(args.scaling)

    boundaries, voxel_size = smart_load(args.boundaries_path, key='predictions')
    assert boundaries.ndim in {3, 4}
    boundaries = boundaries[0] if boundaries.ndim == 4 else boundaries

    if abs(np.prod(scaling_pre) - 1) > 1e-8:
        print(" -scaling boundary predictions")
        boundaries = zoom(boundaries, scaling_pre, order=1)
        # the voxel size changes inversely with the zoom factor
        voxel_size = voxel_size / scaling_pre
    boundaries_shape = np.array(boundaries.shape)

    nuclei_pmap, _ = smart_load(args.nuclei_path)
    assert nuclei_pmap.ndim in {3, 4}
    nuclei_pmap = nuclei_pmap[0] if nuclei_pmap.ndim == 4 else nuclei_pmap
    nuclei_pmap_shape = np.array(nuclei_pmap.shape)

    if not np.allclose(nuclei_pmap_shape, boundaries_shape):
        print(f" -fix nuclei predictions shape {nuclei_pmap_shape} to boundary predictions size, {boundaries_shape}")
        order = 0 if args.seg2pmap or args.seg_mode else 1
        nuclei_pmap = zoom(nuclei_pmap, boundaries_shape / nuclei_pmap_shape, order=order)

    if args.seg2pmap:
        print(' -transforming nuclei segmentation into pmaps')
        nuclei_pmap = seg2pmap(nuclei_pmap)

    boundaries = boundaries.astype(np.float32)
    boundaries = boundaries / np.max(boundaries)
    boundaries = boundaries[None, ...] if boundaries.ndim == 3 else boundaries

    if args.seg_mode:
        nuclei_pmap = nuclei_pmap.astype('uint16')
        nuclei_key = 'segmentation'
    else:
        nuclei_pmap = nuclei_pmap.astype(np.float32)
        nuclei_pmap = nuclei_pmap / np.max(nuclei_pmap)
        nuclei_key = 'predictions'
    nuclei_pmap = nuclei_pmap[None, ...] if nuclei_pmap.ndim == 3 else nuclei_pmap
    base, _ = os.path.splitext(args.boundaries_path)
    base_dir, filename = os.path.split(base)

    res_dir = f'{base_dir}/lmc_base'
    print(f" -preparing all results in {res_dir}")
    os.makedirs(res_dir, exist_ok=True)

    boundaries_path = f'{res_dir}/{filename}_boundaries_predictions.h5'
    create_h5(boundaries_path, boundaries, key='predictions', voxel_size=voxel_size, mode='w')

    nuclei_path = f'{res_dir}/{filename}_nuclei_predictions.h5'
    create_h5(nuclei_path, nuclei_pmap, key=nuclei_key, voxel_size=voxel_size, mode='w')

    config = load_config()
    config['path'] = boundaries_path
    config['segmentation']['nuclei_predictions_path'] = nuclei_path
    config['segmentation']['is_segmentation'] = args.seg_mode
    out_config = f'{res_dir}/config_lmc.yaml'
    with open(out_config, 'w') as f:
        yaml.dump(config, f)
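

# `seg2pmap` above turns a nuclei segmentation into a pseudo probability map so it
# can be consumed like a network prediction. A minimal sketch of that idea based on
# a smoothed foreground mask; this is an assumption about what the packaged
# function does.
import numpy as np
from scipy.ndimage import gaussian_filter


def seg2pmap_sketch(segmentation, sigma=1.0):
    foreground = (segmentation > 0).astype(np.float32)
    pmap = gaussian_filter(foreground, sigma=sigma)
    return pmap / max(float(pmap.max()), 1e-8)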