Example #1
    def execute(self):
        vol = CloudVolume(
            self.cloudpath,
            mip=self.mip,
            info=self.info,
            cdn_cache=False,
            parallel=self.parallel,
            fill_missing=self.fill_missing,
        )
        bbox = Bbox.clamp(self.bounds, vol.bounds)
        index_bbox = Bbox.clamp(self.index_bounds, vol.bounds)

        path = skeldir(self.cloudpath)
        path = os.path.join(self.cloudpath, path)

        all_labels = vol[bbox.to_slices()]
        all_labels = all_labels[:, :, :, 0]

        if self.mask_ids:
            all_labels = fastremap.mask(all_labels, self.mask_ids)

        extra_targets_after = {}
        if self.synapses:
            extra_targets_after = kimimaro.synapses_to_targets(
                all_labels, self.synapses)

        skeletons = kimimaro.skeletonize(
            all_labels,
            self.teasar_params,
            object_ids=self.object_ids,
            anisotropy=vol.resolution,
            dust_threshold=self.dust_threshold,
            progress=self.progress,
            fix_branching=self.fix_branching,
            fix_borders=self.fix_borders,
            fix_avocados=self.fix_avocados,
            parallel=self.parallel,
            extra_targets_after=extra_targets_after.keys(),
        )

        for segid, skel in six.iteritems(skeletons):
            skel.vertices[:] += bbox.minpt * vol.resolution

        if self.synapses:
            for segid, skel in six.iteritems(skeletons):
                terminal_nodes = skel.vertices[skel.terminals()]

                for i, vert in enumerate(terminal_nodes):
                    vert = vert / vol.resolution - self.bounds.minpt
                    vert = tuple(np.round(vert).astype(int))
                    if vert in extra_targets_after.keys():
                        skel.vertex_types[i] = extra_targets_after[vert]

        if self.sharded:
            self.upload_batch(vol, path, index_bbox, skeletons)
        else:
            self.upload_individuals(vol, path, bbox, skeletons)

        if self.spatial_index:
            self.upload_spatial_index(vol, path, index_bbox, skeletons)
Example #2
  def execute(self):
    vol = CloudVolume(
      self.cloudpath, mip=self.mip, 
      info=self.info, cdn_cache=False,
      parallel=self.parallel
    )
    bbox = Bbox.clamp(self.bounds, vol.bounds)

    path = skeldir(self.cloudpath)
    path = os.path.join(self.cloudpath, path)

    all_labels = vol[ bbox.to_slices() ]
    all_labels = all_labels[:,:,:,0]

    if self.mask_ids:
      all_labels = fastremap.mask(all_labels, self.mask_ids)

    skeletons = kimimaro.skeletonize(
      all_labels, self.teasar_params, 
      object_ids=self.object_ids, anisotropy=vol.resolution,
      dust_threshold=self.dust_threshold, cc_safety_factor=0.25,
      progress=self.progress, 
      fix_branching=self.fix_branching,
      fix_borders=self.fix_borders,
      parallel=self.parallel,
    )

    for segid, skel in six.iteritems(skeletons):
      skel.vertices[:] += bbox.minpt * vol.resolution
      
    self.upload(vol, path, bbox, skeletons.values())
Example #3
    def mask_fragments(self, voxel_num_threshold: int):
        uniq, counts = fastremap.unique(self.array, return_counts=True)
        fragment_ids = uniq[counts <= voxel_num_threshold]
        logging.info(
            f'masking out {len(fragment_ids)} fragments out of {len(uniq)} objects '
            f'({len(fragment_ids) / len(uniq):.2%})'
        )
        self.array = fastremap.mask(self.array, fragment_ids)
Example #4
  def _remove_dust(self, data, dust_threshold):
    if dust_threshold:
      segids, pxct = fastremap.unique(data, return_counts=True)
      dust_segids = [ sid for sid, ct in zip(segids, pxct) if ct < int(dust_threshold) ]
      data = fastremap.mask(data, dust_segids, in_place=True)

    return data
Example #5
def test_mask():
    for dtype in DTYPES:
        for in_place in (True, False):
            print(dtype)
            data = np.arange(100, dtype=dtype)
            data = fastremap.mask(data, [5, 10, 15, 20], in_place=in_place)

            labels, cts = np.unique(data, return_counts=True)
            assert cts[0] == 5
            assert labels[0] == 0
            assert np.all(cts[1:] == 1)
            assert len(labels) == 96  # label 0 plus the 95 unmasked nonzero values
Example #6
    def _remove_dust(self, seg: np.ndarray):
        """
        this function is adopted from igneous.
        """
        if self.verbose:
            print('remove dust segments')

        if self.dust_size_threshold or self.ids:
            segids, voxel_nums = np.unique(seg, return_counts=True)
            dust_segids = [sid for sid, ct in
                           zip(segids, voxel_nums) 
                           if ct < self.dust_size_threshold]

            seg = fastremap.mask(seg, dust_segids, in_place=True)
        return seg
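
The two `_remove_dust` helpers above (Examples #4 and #6) share one pattern: count voxels per label with `fastremap.unique`, collect the labels whose count falls below a threshold, and zero them out with `fastremap.mask`. The following is a minimal, self-contained sketch of that pattern; the toy array and the 3-voxel threshold are made up for illustration.

import numpy as np
import fastremap

# Toy labeled volume: label 1 fills most of the block, labels 2 and 3 are tiny "dust".
seg = np.ones((4, 4, 4), dtype=np.uint32)
seg[0, 0, 0] = 2       # 1 voxel
seg[0, 0, 1:3] = 3     # 2 voxels

dust_threshold = 3     # labels with fewer voxels than this are removed

segids, counts = fastremap.unique(seg, return_counts=True)
dust_segids = [sid for sid, ct in zip(segids, counts) if ct < dust_threshold]

seg = fastremap.mask(seg, dust_segids, in_place=True)  # dust labels become 0
print(fastremap.unique(seg, return_counts=True))       # only labels 0 and 1 remain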
Example #7
def get_masks(p, iscell=None, rpad=20, flows=None, threshold=0.4, use_gpu=False, device=None):
    """ create masks using pixel convergence after running dynamics
    
    Makes a histogram of final pixel locations p, initializes masks 
    at peaks of histogram and extends the masks from the peaks so that
    they include all pixels with more than 2 final pixels p. Discards 
    masks with flow errors greater than the threshold. 
    Parameters
    ----------------
    p: float32, 3D or 4D array
        final locations of each pixel after dynamics,
        size [axis x Ly x Lx] or [axis x Lz x Ly x Lx].
    iscell: bool, 2D or 3D array
        if iscell is not None, set pixels that are 
        iscell False to stay in their original location.
    rpad: int (optional, default 20)
        histogram edge padding
    threshold: float (optional, default 0.4)
        masks with flow error greater than threshold are discarded 
        (if flows is not None)
    flows: float, 3D or 4D array (optional, default None)
        flows [axis x Ly x Lx] or [axis x Lz x Ly x Lx]. If flows
        is not None, then masks with inconsistent flows are removed using 
        `remove_bad_flow_masks`.
    Returns
    ---------------
    M0: int, 2D or 3D array
        masks with inconsistent flow masks removed, 
        0=NO masks; 1,2,...=mask labels,
        size [Ly x Lx] or [Lz x Ly x Lx]
    
    """
    
    pflows = []
    edges = []
    shape0 = p.shape[1:]
    dims = len(p)
    if iscell is not None:
        if dims==3:
            inds = np.meshgrid(np.arange(shape0[0]), np.arange(shape0[1]),
                np.arange(shape0[2]), indexing='ij')
        elif dims==2:
            inds = np.meshgrid(np.arange(shape0[0]), np.arange(shape0[1]),
                     indexing='ij')
        for i in range(dims):
            p[i, ~iscell] = inds[i][~iscell]

    for i in range(dims):
        pflows.append(p[i].flatten().astype('int32'))
        edges.append(np.arange(-.5-rpad, shape0[i]+.5+rpad, 1))

    h,_ = np.histogramdd(tuple(pflows), bins=edges)
    hmax = h.copy()
    for i in range(dims):
        hmax = maximum_filter1d(hmax, 5, axis=i)

    seeds = np.nonzero(np.logical_and(h-hmax>-1e-6, h>10))
    Nmax = h[seeds]
    isort = np.argsort(Nmax)[::-1]
    # sort the seed coordinates by descending peak height so that, when masks
    # are painted below, larger peaks take precedence over smaller ones
    seeds = tuple(s[isort] for s in seeds)

    pix = list(np.array(seeds).T)

    shape = h.shape
    if dims==3:
        expand = np.nonzero(np.ones((3,3,3)))
    else:
        expand = np.nonzero(np.ones((3,3)))
    # `expand` holds the 3x3(x3) neighborhood offsets; they are broadcast
    # against the current pixel set via e[:, np.newaxis] in the loop below

    for iter in range(5):
        for k in range(len(pix)):
            if iter==0:
                pix[k] = list(pix[k])
            newpix = []
            iin = []
            for i,e in enumerate(expand):
                epix = e[:,np.newaxis] + np.expand_dims(pix[k][i], 0) - 1
                epix = epix.flatten()
                iin.append(np.logical_and(epix>=0, epix<shape[i]))
                newpix.append(epix)
            iin = np.all(tuple(iin), axis=0)
            # keep only the neighbor coordinates that fall inside the histogram
            newpix = [npix[iin] for npix in newpix]
            newpix = tuple(newpix)
            igood = h[newpix]>2
            for i in range(dims):
                pix[k][i] = newpix[i][igood]
            if iter==4:
                pix[k] = tuple(pix[k])
    
    M = np.zeros(h.shape, np.uint32)
    for k in range(len(pix)):
        M[pix[k]] = 1+k
        
    for i in range(dims):
        pflows[i] = pflows[i] + rpad
    M0 = M[tuple(pflows)]

    # remove big masks
    uniq, counts = fastremap.unique(M0, return_counts=True)
    big = np.prod(shape0) * 0.4
    bigc = uniq[counts > big]
    if len(bigc) > 0 and (len(bigc)>1 or bigc[0]!=0):
        M0 = fastremap.mask(M0, bigc)
    fastremap.renumber(M0, in_place=True) #convenient to guarantee non-skipped labels
    M0 = np.reshape(M0, shape0)
    return M0
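
The tail of Example #7 ("remove big masks") shows the complementary use of `fastremap.mask`: labels covering more than a set fraction of the image are treated as spurious and dropped, and the survivors are compacted with `fastremap.renumber`. The sketch below isolates that step; the toy array and the 0.4 area fraction are illustrative values only.

import numpy as np
import fastremap

M0 = np.zeros((10, 10), dtype=np.uint32)
M0[:9, :] = 1    # label 1 covers 90% of the image
M0[9, :5] = 2    # label 2 is a normally sized mask

big = np.prod(M0.shape) * 0.4               # size cutoff, as in Example #7
uniq, counts = fastremap.unique(M0, return_counts=True)
bigc = uniq[counts > big]
if len(bigc) > 0 and (len(bigc) > 1 or bigc[0] != 0):
    M0 = fastremap.mask(M0, bigc)           # oversized labels become background (0)

M0, remapping = fastremap.renumber(M0, in_place=True)  # relabel to consecutive 1..N, keeping 0 as background
print(fastremap.unique(M0, return_counts=True))        # labels {0, 1} with counts {95, 5}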