Example #1
def to_knossos_dataset(kd_p,
                       kd_pred_p,
                       cd_p,
                       model_p,
                       imposed_patch_size,
                       mfp_active=False):
    """

    Parameters
    ----------
    kd_p : str
    kd_pred_p : str
    cd_p : str
    model_p :
    imposed_patch_size :
    mfp_active :

    Returns
    -------

    """
    from elektronn2.neuromancer.model import modelload

    kd = KnossosDataset()
    kd.initialize_from_knossos_path(kd_p, fixed_mag=1)
    kd_pred = KnossosDataset()
    m = modelload(model_p,
                  imposed_patch_size=list(imposed_patch_size) if isinstance(
                      imposed_patch_size, tuple) else imposed_patch_size,
                  override_mfp_to_active=mfp_active,
                  imposed_batch_size=1)
    original_do_rates = m.dropout_rates
    m.dropout_rates = [0.0] * len(original_do_rates)  # disable dropout for inference
    offset = m.target_node.shape.offsets
    # reorder model offsets from (Z, X, Y) to (X, Y, Z)
    offset = np.array([offset[1], offset[2], offset[0]], dtype=int)
    cd = ChunkDataset()
    cd.initialize(kd,
                  kd.boundary, [512, 512, 256],
                  cd_p,
                  overlap=offset,
                  box_coords=np.zeros(3),
                  fit_box_size=True)
    kd_pred.initialize_without_conf(kd_pred_p,
                                    kd.boundary,
                                    kd.scale,
                                    kd.experiment_name,
                                    mags=[1, 2, 4, 8])
    cd.export_cset_to_kd(kd_pred,
                         "pred", ["pred"], [4, 4],
                         as_raw=True,
                         stride=[256, 256, 256])
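
A minimal usage sketch for this export helper; every path below is a placeholder and the (Z, X, Y) patch size is only an illustrative value, not one taken from the project.

# Hypothetical call; all paths and the patch size are placeholders.
# imposed_patch_size is expected in (Z, X, Y) order.
to_knossos_dataset(kd_p="/data/example/knossosdatasets/raw/",
                   kd_pred_p="/data/example/knossosdatasets/pred/",
                   cd_p="/data/example/chunkdatasets/pred/",
                   model_p="/data/example/models/barrier.mdl",
                   imposed_patch_size=(20, 128, 128),
                   mfp_active=False)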
Example #2
def overlaycubes2kzip(dest_p, vol, offset, kd_path):
    """
    Writes segmentation volume to kzip.

    Parameters
    ----------
    dest_p : str
        path to k.zip
    vol : np.array [X, Y, Z]
        Segmentation or prediction (uint)
    offset : np.array
    kd_path : str
        path to the underlying KNOSSOS dataset
    """
    kd = KnossosDataset()
    kd.initialize_from_knossos_path(kd_path)
    kd.from_matrix_to_cubes(offset=offset,
                            kzip_path=dest_p,
                            mags=[1],
                            data=vol)
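
A short, hypothetical usage sketch: write a toy uint segmentation cube into a k.zip overlay. The paths are placeholders; KnossosDataset is assumed to come from knossos_utils, as in the snippets above.

import numpy as np

# Toy segmentation volume in (X, Y, Z) order with a single object of ID 1.
seg = np.zeros((256, 256, 128), dtype=np.uint32)
seg[64:192, 64:192, 32:96] = 1
overlaycubes2kzip(dest_p="/tmp/example_overlay.k.zip",
                  vol=seg,
                  offset=np.array([0, 0, 0]),
                  kd_path="/data/example/knossosdatasets/raw/")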
Example #3
File: mapping.py Project: russell0/SyConn
def map_glia_fraction(so, box_size=None, min_frag_size=10, overwrite=True):
    """
    Map glia properties within subvolume to SegmentationObject (cs). Requires
    attribute 'neuron_partners'.

    Parameters
    ----------
    so : SegmentationObject
    box_size : np.array
        size in voxels (XYZ), default: (300, 300, 150)
    min_frag_size : int
    overwrite : bool
    """
    if not overwrite:
        so.load_attr_dict()
        if "glia_vol_frac" in so.attr_dict.keys():
            return
    if box_size is None:
        box_size = np.array([300, 300, 150])
    kd = KnossosDataset()
    # TODO: Hack
    kd.initialize_from_knossos_path(
        so.working_dir + "knossosdatasets/j0126_realigned_v4b_cbs_ext0_fix/")
    bndry = np.array(kd.boundary)
    if np.any(so.rep_coord >= bndry) or np.any(
            so.rep_coord < np.zeros_like(bndry)):
        log_proc.warning("rep_coord {} of object {} lies outside the "
                         "dataset boundary.".format(so.rep_coord, so.id))
        so.save_attributes(
            ["glia_vol_frac", "glia_sv_ids", "glia_cov_frac", "glia_cov"],
            [-1, -1, -1, -1])
        return
    c = so.rep_coord - (box_size // 2)
    c, box_size = crop_box_to_bndry(c, box_size, bndry)
    seg = kd.from_overlaycubes_to_matrix(box_size, c, show_progress=False)
    ids, cnts = np.unique(seg, return_counts=True)
    sv_ds = SegmentationDataset("sv", working_dir=so.working_dir)
    # remove small fragments, but include background label 0 in
    # cnts for proper volume estimation
    ids = ids[cnts >= min_frag_size]
    cnts = cnts[cnts >= min_frag_size]
    glia_vx = 0
    glia_sv_ids = []
    for ix, cnt in zip(ids, cnts):
        if ix == 0:  # ignore ECS
            continue
        sv = sv_ds.get_segmentation_object(ix)
        if sv.glia_pred():
            glia_vx += cnt
            glia_sv_ids.append(ix)
    nb_box_vx = np.sum(cnts)
    glia_vol_frac = glia_vx / float(nb_box_vx)

    # get glia coverage
    neuron_ids = so.attr_dict["neuron_partners"]
    sso = SuperSegmentationObject(neuron_ids[0],
                                  working_dir=so.working_dir,
                                  create=False)
    sso.load_attr_dict()
    neuron_sv_ids = list(sso.sv_ids)
    sso = SuperSegmentationObject(neuron_ids[1],
                                  working_dir=so.working_dir,
                                  create=False)
    sso.load_attr_dict()
    neuron_sv_ids += list(sso.sv_ids)
    sv_ids_in_seg = np.array([ix in ids for ix in neuron_sv_ids], dtype=bool)
    assert np.sum(sv_ids_in_seg) >= 2
    nb_cov_vx, frac_cov_vx = get_glia_coverage(seg, neuron_sv_ids, glia_sv_ids,
                                               300, kd.scale)

    so.save_attributes(
        ["glia_vol_frac", "glia_sv_ids", "glia_cov_frac", "glia_cov"],
        [glia_vol_frac, glia_sv_ids, frac_cov_vx, nb_cov_vx])
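
The function above relies on crop_box_to_bndry, which is not part of this snippet. A minimal sketch of the behaviour it needs, clamping an (offset, size) box into [0, boundary), could look like the following; the actual SyConn helper may differ:

import numpy as np

def crop_box_to_bndry(offset, box_size, bndry):
    """Clamp a box given by (offset, box_size) into [0, bndry) (sketch)."""
    offset = np.array(offset)
    box_size = np.array(box_size)
    # move negative offsets up to 0 and shrink the box by the cut-off part
    underflow = np.minimum(offset, 0)
    offset = offset - underflow
    box_size = box_size + underflow
    # shrink the box where it reaches beyond the dataset boundary
    overflow = np.maximum(offset + box_size - np.array(bndry), 0)
    box_size = box_size - overflow
    return offset, box_size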
Example #4
def _pred_dataset(kd_p,
                  kd_pred_p,
                  cd_p,
                  model_p,
                  imposed_patch_size=None,
                  mfp_active=False,
                  gpu_id=0,
                  overwrite=False,
                  i=None,
                  n=None):
    """
    Helper function for dataset prediction. Runs prediction on a whole or
    partial KNOSSOS dataset. Imposed patch size has to be given in Z, X, Y!

    Parameters
    ----------
    kd_p : str
        path to knossos dataset .conf file
    kd_pred_p : str
        path to the knossos dataset head folder which will contain the prediction
    cd_p : str
        destination folder for chunk dataset containing prediction
    model_p : str
        path to ELEKTRONN2 model
    imposed_patch_size : tuple or None
        patch size (Z, X, Y) of the model
    mfp_active : bool
        activate max-fragment pooling (might be necessary to change patch_size)
    gpu_id : int
        the GPU used
    overwrite : bool
        True: start fresh predictions; False: continue from earlier predictions
    i : int or None
        index of this worker; together with `n`, selects every `n`-th chunk
        starting at chunk `i` (for processing on multiple GPUs)
    n : int or None
        total number of workers; if None, the chunk dataset is additionally
        exported to the prediction KNOSSOS dataset at the end
    """

    initgpu(gpu_id)
    from elektronn2.neuromancer.model import modelload
    kd = KnossosDataset()
    kd.initialize_from_knossos_path(kd_p, fixed_mag=1)

    m = modelload(model_p,
                  imposed_patch_size=list(imposed_patch_size) if isinstance(
                      imposed_patch_size, tuple) else imposed_patch_size,
                  override_mfp_to_active=mfp_active,
                  imposed_batch_size=1)
    original_do_rates = m.dropout_rates
    m.dropout_rates = [0.0] * len(original_do_rates)  # disable dropout for inference
    offset = m.target_node.shape.offsets
    # reorder model offsets from (Z, X, Y) to (X, Y, Z)
    offset = np.array([offset[1], offset[2], offset[0]], dtype=int)
    cd = ChunkDataset()
    cd.initialize(kd,
                  kd.boundary, [512, 512, 256],
                  cd_p,
                  overlap=offset,
                  box_coords=np.zeros(3),
                  fit_box_size=True)

    ch_dc = cd.chunk_dict
    print('Total number of chunks for GPU/GPUs:', len(ch_dc))

    if i is not None and n is not None:
        # Python 3: dict views are not subscriptable, materialise them first
        chunks = list(ch_dc.values())[i::n]
    else:
        chunks = list(ch_dc.values())
    print("Starting prediction of %d chunks in gpu %d\n" %
          (len(chunks), gpu_id))

    if not overwrite:
        for chunk in chunks:
            try:
                _ = chunk.load_chunk("pred")[0]
            except Exception:  # no stored prediction yet, compute it
                chunk_pred(chunk, m)
    else:
        for chunk in chunks:
            try:
                chunk_pred(chunk, m)
            except KeyboardInterrupt as e:
                print("Exiting out from chunk prediction: ", str(e))
                return
    save_dataset(cd)

    # single gpu processing also exports the cset to kd
    if n is None:
        kd_pred = KnossosDataset()
        kd_pred.initialize_without_conf(kd_pred_p,
                                        kd.boundary,
                                        kd.scale,
                                        kd.experiment_name,
                                        mags=[1, 2, 4, 8])
        cd.export_cset_to_kd(kd_pred,
                             "pred", ["pred"], [4, 4],
                             as_raw=True,
                             stride=[256, 256, 256])
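
A hedged sketch of the intended i/n partitioning: worker i of n processes every n-th chunk, and because n is not None the export step is skipped, so a final export (as in example #1) has to follow. All paths are placeholders, and the sequential loop stands in for separate processes on separate GPUs.

# Hypothetical three-worker split; in practice each call would run in its
# own process with its own GPU.
for worker_id in range(3):
    _pred_dataset(kd_p="/data/example/raw/",
                  kd_pred_p="/data/example/pred/",
                  cd_p="/data/example/cd_pred/",
                  model_p="/data/example/models/model.mdl",
                  imposed_patch_size=(20, 128, 128),
                  gpu_id=worker_id,
                  i=worker_id,
                  n=3)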
Example #5
def load_gt_from_kzip(zip_fname, kd_p, raw_data_offset=75, verbose=False):
    """
    Loads ground truth from a zip file generated with KNOSSOS. The
    corresponding dataset config file is located at kd_p.

    Parameters
    ----------
    zip_fname : str
    kd_p : str or List[str]
    raw_data_offset : int or np.array
        number of voxels used as additional raw offset, i.e. the offset of the
        raw data will be label_offset - raw_data_offset, while the raw data
        volume will be label_volume + 2 * raw_data_offset. If a scalar is
        given, 'kd.scale' is used to account for dataset anisotropy; otherwise
        a list of length 3 has to be provided for a custom x, y, z offset.
    verbose : bool

    Returns
    -------
    np.array, np.array
        raw data (float32) (multiplied with 1/255.), label data (uint16)
    """
    if isinstance(kd_p, (str, bytes)):
        kd_p = [kd_p]
    bb = parse_movement_area_from_zip(zip_fname)
    offset, size = bb[0], bb[1] - bb[0]
    raw_data = []
    label_data = []
    for curr_p in kd_p:
        kd = KnossosDataset()
        kd.initialize_from_knossos_path(curr_p)
        scaling = np.array(kd.scale, dtype=int)
        if np.isscalar(raw_data_offset):
            raw_data_offset = np.array(scaling[0] * raw_data_offset / scaling,
                                       dtype=int)
            if verbose:
                print('Using scale adapted raw offset:', raw_data_offset)
        elif len(raw_data_offset) != 3:
            raise ValueError("Offset for raw cubes has to have length 3.")
        else:
            raw_data_offset = np.array(raw_data_offset)
        raw = kd.from_raw_cubes_to_matrix(size + 2 * raw_data_offset,
                                          offset - raw_data_offset,
                                          nb_threads=2,
                                          mag=1,
                                          show_progress=False)
        raw_data.append(raw[..., None])
        label = kd.from_kzip_to_matrix(zip_fname,
                                       size,
                                       offset,
                                       mag=1,
                                       verbose=False,
                                       show_progress=False)
        label = label.astype(np.uint16)
        label_data.append(label[..., None])
    raw = np.concatenate(raw_data, axis=-1).astype(np.float32)
    label = np.concatenate(label_data, axis=-1).astype(np.uint16)
    try:
        _ = parse_cc_dict_from_kzip(zip_fname)
    except Exception:  # mergelist.txt does not exist
        label = np.zeros(size).astype(np.uint16)
        return raw.astype(np.float32) / 255., label
    return raw.astype(np.float32) / 255., label
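
A hypothetical usage sketch; the annotation file and dataset path are placeholders. With a scalar raw_data_offset, the raw cube is larger than the label cube by twice the scale-adapted offset per axis.

raw, label = load_gt_from_kzip("/data/example/gt/cube1.k.zip",
                               kd_p="/data/example/knossosdatasets/raw/",
                               raw_data_offset=75)
print(raw.shape, raw.dtype)      # label shape + 2 * offset per axis, float32
print(label.shape, label.dtype)  # label volume, uint16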
Example #6
def make_shotgun_data_z(cube_shape, save_name, z_skip=5):
    barr_thresh = 0.7
    z_lookaround = 5
    max_footprint_S = ball_generator(9)
    max_footprint_L = ball_generator(21)
    max_footprint_L = max_footprint_L[3:-3]

    peak_thresh = 3.0
    peak_thresh_L = 9

    kds_barr = KnossosDataset()
    data_prefix = os.path.expanduser("~/lustre/sdorkenw/j0126_")
    kds_barr.initialize_from_knossos_path(data_prefix + '161012_barrier/')
    pred = kds_barr.from_raw_cubes_to_matrix(cube_shape.shape,
                                             cube_shape.offset,
                                             show_progress=False,
                                             zyx_mode=True,
                                             datatype=np.float32)
    pred /= 255

    mem_high = np.invert(pred > barr_thresh)
    seeds = []
    seed_values = []
    noise = np.random.rand(*mem_high[:z_lookaround].shape) * 1e-3
    running_sum = 0
    for z in range(0, mem_high.shape[0] - z_lookaround, z_skip):
        try:
            dt = ndimage.distance_transform_edt(mem_high[z:z + z_lookaround],
                                                sampling=[2, 1, 1])
            dt = ndimage.filters.gaussian_filter(dt, (1.0, 2.0, 2.0))
            dt += noise

            z_peaks_S = ndimage.maximum_filter(dt,
                                               footprint=max_footprint_S,
                                               mode='constant')
            z_peaks_L = ndimage.maximum_filter(dt,
                                               footprint=max_footprint_L,
                                               mode='constant')

            z_peaks_small = (z_peaks_S == dt) * ((peak_thresh_L > dt) &
                                                 (dt > peak_thresh))
            z_peaks_large = (z_peaks_L == dt) * ((peak_thresh_L <= dt))
            z_peaks = z_peaks_large + z_peaks_small
            z_peaks *= (pred[z:z + z_lookaround] < 0.5)
            seeds_z = np.array(z_peaks.nonzero()).T
            seeds_z[:, 0] += z
        except KeyboardInterrupt:
            break
        else:
            # only record seeds of fully processed slabs; on an interrupt
            # the variables above may be stale or undefined
            seeds.append(seeds_z)
            seed_values.append(dt[z_peaks])
            running_sum += z_peaks.sum()
            print(z, running_sum, z_peaks_small.sum(), z_peaks_large.sum(),
                  z_peaks.sum())

    seeds = np.concatenate(seeds, axis=0)
    seed_values = np.concatenate(seed_values, axis=0)
    seeds, index = unique_rows(seeds)
    seed_values = seed_values[index]

    lar = np.array([4, 8, 8])
    lari = lar * [2, 1, 1]
    sz = lar * 2 + 1
    szi = lari * 2 + 1

    pred2 = kds_barr.from_raw_cubes_to_matrix(cube_shape.shape + 2 * lar,
                                              cube_shape.offset - lar,
                                              show_progress=False,
                                              zyx_mode=True,
                                              datatype=np.float32)
    pred2 /= 255

    mem_high2 = np.invert(pred2 > barr_thresh)
    dt = ndimage.distance_transform_edt(mem_high2, sampling=[2, 1, 1])
    local_grid = np.vstack([x - x.mean() for x in np.ones(szi).nonzero()])
    directions = np.zeros((len(seeds), 3))
    perm = np.random.permutation(local_grid.shape[1])[:400]
    for i, (s, v) in enumerate(zip(seeds, seed_values)):
        z, y, x = s  # np.round(s).astype(np.int)
        cavity = dt[z:z + sz[0], y:y + sz[1], x:x + sz[2]]
        cavity = ndimage.zoom(cavity, [float(sz[1]) / sz[0], 1, 1])
        s_val = dt[z + lar[0], y + lar[1], x + lar[2]]
        diff = np.abs(cavity - s_val)
        d_m = diff.mean()
        mask = (diff < d_m)
        d_max = diff[mask].max()
        um = np.zeros_like(diff)
        um[mask] = d_max - diff[mask]
        um = um.ravel()
        uu, dd, vv = np.linalg.svd((um * local_grid).T[perm])
        direc_iso = vv[0]  # first right-singular vector (largest singular value)
        direc_iso /= np.linalg.norm(direc_iso, axis=0)  # normalise
        directions[i] = direc_iso


#    local_grid = np.mgrid[-2:2:5j, -2:2:5j,-2:2:5j]
#    local_grid = np.vstack([g.ravel() for g in local_grid])
#    directions = np.zeros((len(seeds), 3))
#    for i, s in enumerate(seeds):
#        z, y, x = s  # np.round(s).astype(np.int)
#        cavity = pred2[z:z + 3, y:y + 5, x:x + 5]
#        cavity = ndimage.zoom(cavity, [5.0/3.0,1,1] )
#        cavity = (1 - cavity).ravel()
#        uu, dd, vv = np.linalg.svd((cavity * local_grid).T)
#        direc_iso = vv[0]  # take largest eigenvector
#        direc_iso /= np.linalg.norm(direc_iso, axis=0)  # normalise
#        directions[i] = direc_iso

    seeds += cube_shape.offset
    utils.picklesave([seeds, directions, seed_values], save_name)

    # Create skeleton with seeds and directions
    skel_obj = knossos_skeleton.Skeleton()
    anno = knossos_skeleton.SkeletonAnnotation()
    anno.scaling = (9.0, 9.0, 20.0)
    skel_obj.add_annotation(anno)

    def add_node(s, r=5):
        z, y, x = s
        new_node = knossos_skeleton.SkeletonNode()
        new_node.from_scratch(anno, x, y, z, radius=r)
        anno.addNode(new_node)
        return new_node

    for s, direc, v in zip(seeds, directions, seed_values):
        n = add_node(s, r=4)
        n.appendComment("%.1f" % v)
        direc = direc.copy()
        direc[0] /= 2  # halve the anisotropic Z component
        n1 = add_node((s + 11 * direc), r=1)
        n2 = add_node((s - 11 * direc), r=1)
        n1.addChild(n)
        n2.addChild(n)

    return seeds, directions, seed_values, skel_obj
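
ball_generator and unique_rows are used above but not defined in this snippet. Minimal sketches of the behaviour the code needs, a boolean spherical footprint for ndimage.maximum_filter and row de-duplication that also returns first-occurrence indices, might look like this; the original helpers may differ:

import numpy as np

def ball_generator(diameter):
    """Boolean ball-shaped 3D footprint of the given diameter (sketch)."""
    r = (diameter - 1) / 2.0
    z, y, x = np.mgrid[:diameter, :diameter, :diameter]
    return (z - r) ** 2 + (y - r) ** 2 + (x - r) ** 2 <= r ** 2

def unique_rows(a):
    """Unique rows of a 2D array and their first-occurrence indices (sketch)."""
    uniq, index = np.unique(a, axis=0, return_index=True)
    return uniq, index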