Example #1
0
def prepare_table(make_full_table=False):
    """Create the initial bouton annotation table.

    Reads the bouton segmentation at scale 1, extracts the bouton ids and
    writes a tab-separated table with columns 'label_id' and 'annotation'.

    Parameters:
        make_full_table [bool] - if True, take the label ids from the
            existing default table and mark only bouton ids as "unlabeled"
            (everything else gets "None"); otherwise list just the bouton
            ids, all marked "unlabeled" (default: False)
    """
    path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-boutons.n5'
    bb = get_bounding_box(scale=1)

    # this might be an issue if we get too large halos
    f = z5py.File(path, 'r')
    ds = f['setup0/timepoint0/s0']
    ds.n_threads = 8
    seg = ds[bb]

    # drop the first unique value, which is the background id
    bouton_ids = np.unique(seg)[1:]

    # make the annotation table
    bouton_table_dir = '/g/rompani/lgn-em-datasets/data/0.0.0/tables/sbem-adult-1-lgn-boutons'
    if make_full_table:
        default_table = os.path.join(bouton_table_dir, 'default.csv')
        default_table = pd.read_csv(default_table, sep='\t')

        label_ids = default_table['label_id'].values

        n_labels = len(label_ids)
        # use object dtype: np.array(n_labels * ["None"]) would be fixed-width
        # '<U4' and silently truncate the later "unlabeled" assignment to "unla"
        annotations = np.full(n_labels, "None", dtype=object)
        # NOTE(review): this uses bouton_ids as positional indices, which
        # assumes label_ids == arange(n_labels) in the default table — confirm
        annotations[bouton_ids] = "unlabeled"

    else:
        label_ids = bouton_ids
        annotations = np.full(len(label_ids), "unlabeled", dtype=object)

    out_table = np.concatenate([label_ids[:, None], annotations[:, None]],
                               axis=1)
    out_table = pd.DataFrame(out_table, columns=['label_id', 'annotation'])

    out_table_path = os.path.join(bouton_table_dir,
                                  'bouton_annotations_v1.csv')
    out_table.to_csv(out_table_path, sep='\t', index=False)
def label_vesicles():
    """Threshold the vesicle predictions and run connected components
    into 'segmentation/vesicles'."""

    # TODO bounding box is too aggressive, but predictions is
    # also not good enough yet, need better training data
    threshold = 250

    path = './data.n5'
    with z5py.File(path, 'a') as f:
        # wrap the prediction dataset so reads come back thresholded
        ds = ThresholdWrapper(f['vesicles'], threshold)

        out_key = 'segmentation/vesicles'
        ds_out = f.require_dataset(out_key,
                                   shape=ds.shape,
                                   chunks=ds.chunks,
                                   dtype='uint64',
                                   compression='gzip')

        bb = get_bounding_box(intersect_with_blocking=True)
        label(ds, ds_out,
              with_background=True,
              roi=bb,
              verbose=True,
              n_threads=32)
def write_proofread_boutons(annotation_table, keep_annotations):
    """Write a bouton segmentation restricted to proofread annotations.

    Boutons whose annotation is not in keep_annotations are zeroed out;
    the result is stored under 'boutons_proofread'. A no-op if the
    output dataset already exists.
    """
    out_key = 'boutons_proofread'
    f_out = z5py.File(PATH, 'a')
    if out_key in f_out:
        return

    all_ids, all_annotations = load_bouton_annotations(annotation_table)
    keep_mask = np.isin(all_annotations, keep_annotations)
    bouton_ids = all_ids[keep_mask].astype('uint64')

    bb = get_bounding_box(scale=1, intersect_with_blocking=True)

    path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-boutons.n5'
    f = z5py.File(path, 'r')
    ds = f['setup0/timepoint0/s0']
    ds.n_threads = 16

    # NOTE: this might not work for larger cutouts; then we would need
    # to switch to cluster tools
    seg = ds[bb]
    # zero out everything that is not a kept bouton id
    seg[~np.isin(seg, bouton_ids)] = 0

    ds_out = f_out.create_dataset(out_key,
                                  shape=ds.shape,
                                  chunks=ds.chunks,
                                  compression='gzip',
                                  dtype=seg.dtype)
    ds_out[bb] = seg
Example #4
0
def segment_boutons():
    """Compute the bouton segmentation by intersecting the neuron
    (multicut) and vesicle segmentations blockwise."""
    path = './data.n5'
    with z5py.File(path, 'a') as f:
        neurons = f['segmentation/multicut']
        vesicles = f['segmentation/vesicles']

        ds_out = f.require_dataset('segmentation/boutons',
                                   shape=vesicles.shape,
                                   chunks=tuple(BLOCK_SHAPE),
                                   dtype='uint64',
                                   compression='gzip')

        roi_begin, roi_end = get_bounding_box(intersect_with_blocking=True,
                                              return_as_lists=True)
        blocking = nt.blocking(roi_begin, roi_end, BLOCK_SHAPE)
        n_blocks = blocking.numberOfBlocks

        # one task per block: intersect the two segmentations
        block_job = partial(intersect_segmentation,
                            seg_a=neurons,
                            seg_b=vesicles,
                            seg_out=ds_out,
                            blocking=blocking)

        with futures.ThreadPoolExecutor(32) as pool:
            list(tqdm(pool.map(block_job, range(n_blocks)), total=n_blocks))
Example #5
0
def check_mc():
    """Visually inspect the multicut segmentation on top of the raw data
    in napari."""
    import napari
    bb = get_bounding_box(scale=0)

    raw_path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-raw.n5'
    raw_file = z5py.File(raw_path, 'r')
    raw_ds = raw_file['setup0/timepoint0/s0']
    raw_ds.n_threads = 8
    raw = raw_ds[bb]

    seg_file = z5py.File('./data.n5', 'r')
    seg_ds = seg_file['segmentation/multicut']
    seg_ds.n_threads = 8
    seg = seg_ds[bb]

    # f = z5py.File(path, 'r')
    # ds = f['boutons_proofread']
    # ds.n_threads = 8
    # bb_boutons = get_bounding_box(scale=1)
    # boutons = ds[bb_boutons]

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(raw)
        viewer.add_labels(seg)
def predict_boundaries(target, gpus, threads_per_job=6):
    """Run affinity prediction via the cluster-tools inference task.

    Parameters:
        target [str] - 'local' to run on this machine, anything else
            submits via slurm
        gpus [list] - gpu ids to use (one job per gpu)
        threads_per_job [int] - cpu threads per inference job (default: 6)
    """
    task = InferenceLocal if target == 'local' else InferenceSlurm
    halo = [8, 64, 64]

    # map the output name to the channel range of the prediction
    output_key = {'affinities': [0, 3]}

    tmp_folder = './tmp_prediction'
    config_dir = os.path.join(tmp_folder, 'configs')
    os.makedirs(config_dir, exist_ok=True)

    # write the global config: block shape and region of interest
    roi_begin, roi_end = get_bounding_box(return_as_lists=True)
    global_conf = task.default_global_config()
    global_conf['block_shape'] = BLOCK_SHAPE
    global_conf['roi_begin'] = roi_begin
    global_conf['roi_end'] = roi_end
    with open(os.path.join(config_dir, 'global.config'), 'w') as f:
        json.dump(global_conf, f)

    # for local execution we need to map job ids to gpu ids
    device_mapping = dict(enumerate(gpus)) if target == 'local' else None

    task_conf = task.default_task_config()
    task_conf['dtype'] = 'uint8'
    task_conf['device_mapping'] = device_mapping
    task_conf['threads_per_job'] = threads_per_job
    with open(os.path.join(config_dir, 'inference.config'), 'w') as f:
        json.dump(task_conf, f)

    input_path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-raw.n5'
    input_key = 'setup0/timepoint0/s0'

    # TODO for larger outputs we should put this on scratch
    output_path = './data.n5'

    t = task(tmp_folder=tmp_folder,
             config_dir=config_dir,
             max_jobs=len(gpus),
             input_path=input_path,
             input_key=input_key,
             output_path=output_path,
             output_key=output_key,
             checkpoint_path=MODEL_PATH,
             halo=halo,
             framework='inferno')
    assert luigi.build([t], local_scheduler=True)
def _make_global_config(configs):
    """Write the global cluster-tools config (block shape and ROI)
    to CONFIG_FOLDER."""
    os.makedirs(CONFIG_FOLDER, exist_ok=True)

    roi_begin, roi_end = get_bounding_box(return_as_lists=True)

    global_conf = configs['global']
    global_conf['block_shape'] = BLOCK_SHAPE
    global_conf['roi_begin'] = roi_begin
    global_conf['roi_end'] = roi_end

    config_path = os.path.join(CONFIG_FOLDER, 'global.config')
    with open(config_path, 'w') as f:
        json.dump(global_conf, f)
Example #8
0
def check_boutons():
    """Visually inspect the bouton segmentation on top of the raw data
    (both at scale 1) in napari."""
    import napari
    bb = get_bounding_box(scale=1)

    seg_path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-boutons.n5'
    seg_file = z5py.File(seg_path, 'r')
    seg_ds = seg_file['setup0/timepoint0/s0']
    seg_ds.n_threads = 8
    seg = seg_ds[bb]

    raw_path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-raw.n5'
    raw_file = z5py.File(raw_path, 'r')
    raw_ds = raw_file['setup0/timepoint0/s1']
    raw_ds.n_threads = 8
    raw = raw_ds[bb]

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(raw)
        viewer.add_labels(seg)
def check_predictions():
    """Visually inspect the vesicle predictions on top of the raw data
    in napari."""
    import napari
    bb = get_bounding_box(scale=0)

    raw_path = '/g/rompani/lgn-em-datasets/data/0.0.0/images/local/sbem-adult-1-lgn-raw.n5'
    raw_file = z5py.File(raw_path, 'r')
    raw_ds = raw_file['setup0/timepoint0/s0']
    raw_ds.n_threads = 8
    raw = raw_ds[bb]

    pred_file = z5py.File('./data.n5', 'r')
    pred_ds = pred_file['vesicles']
    pred_ds.n_threads = 8
    pred = pred_ds[bb]

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(raw)
        viewer.add_image(pred)