def __call__(self, prefix: str):
        logging.info(f'aggregate skeletons with prefix of {prefix}')

        id2filenames = defaultdict(list)
        for filename in self.fragments_storage.list_files(prefix=prefix):
            filename = os.path.basename(filename)
            # `re.match` anchors at the start of the string; `re.search` scans anywhere in it.
            matches = re.search(r'(\d+):', filename)

            if not matches:
                continue

            # skeleton ID is the captured digit group
            skl_id = int(matches.group(1))
            id2filenames[skl_id].append(filename)

        for skl_id, filenames in id2filenames.items():
            logging.info(f'skeleton id: {skl_id}')
            frags = self.fragments_storage.get(filenames)
            frags = [
                PrecomputedSkeleton.from_precomputed(x['content'])
                for x in frags
            ]
            skel = PrecomputedSkeleton.simple_merge(frags).consolidate()
            skel = kimimaro.postprocess(skel,
                                        dust_threshold=1000,
                                        tick_threshold=3500)
            self.output_storage.put(
                file_path=str(skl_id),
                content=skel.to_precomputed(),
            )
            # the last few hundred files will not be uploaded without sleeping!
            sleep(0.01)
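
A quick check of the filename parsing above (a standalone sketch; the '123:0' fragment naming scheme is an assumption inferred from the regex):

import re

for filename in ['123:0:0-512_0-512_0-64', '456:0:0-512_0-512_0-64', 'info']:
    matches = re.search(r'(\d+):', filename)
    if matches:
        print(filename, '->', int(matches.group(1)))  # captured digits = skeleton ID
    else:
        print(filename, '-> skipped (no fragment ID)')
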
Example #2
def test_caching():
    vol = CloudVolume('file:///tmp/cloudvolume/test-skeletons',
                      info=info,
                      cache=True)

    vol.cache.flush()

    skel = PrecomputedSkeleton(
        [
            (0, 0, 0),
            (1, 0, 0),
            (2, 0, 0),
            (0, 1, 0),
            (0, 2, 0),
            (0, 3, 0),
        ],
        edges=[(0, 1), (1, 2), (3, 4), (4, 5), (3, 5)],
        segid=666,
    )

    vol.skeleton.upload(skel)

    assert vol.cache.list_skeletons() == ['666.gz']

    skel.id = 1
    with open(os.path.join(vol.cache.path, 'skeletons/1'), 'wb') as f:
        f.write(skel.encode())

    cached_skel = vol.skeleton.get(1)

    assert cached_skel == skel

    vol.cache.flush()
Example #3
def test_consolidate():
    skel = PrecomputedSkeleton(
        vertices=np.array([
            (0, 0, 0),
            (1, 0, 0),
            (2, 0, 0),
            (0, 0, 0),
            (2, 1, 0),
            (2, 2, 0),
            (2, 2, 1),
            (2, 2, 2),
        ], dtype=np.float32),
        edges=np.array([
            [0, 1],
            [1, 2],
            [2, 3],
            [3, 4],
            [4, 5],
            [5, 6],
            [6, 7],
        ], dtype=np.uint32),
        radii=np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=np.float32),
        vertex_types=np.array([0, 1, 2, 3, 4, 5, 6, 7], dtype=np.uint8),
    )

    correct_skel = PrecomputedSkeleton(
        vertices=np.array([
            (0, 0, 0),
            (1, 0, 0),
            (2, 0, 0),
            (2, 1, 0),
            (2, 2, 0),
            (2, 2, 1),
            (2, 2, 2),
        ], dtype=np.float32),
        edges=np.array([
            [0, 1],
            [0, 2],
            [0, 3],
            [1, 2],
            [3, 4],
            [4, 5],
            [5, 6],
        ], dtype=np.uint32),
        radii=np.array([0, 1, 2, 4, 5, 6, 7], dtype=np.float32),
        vertex_types=np.array([0, 1, 2, 4, 5, 6, 7], dtype=np.uint8),
    )

    consolidated = skel.consolidate()

    assert np.all(consolidated.vertices == correct_skel.vertices)
    assert np.all(consolidated.edges == correct_skel.edges)
    assert np.all(consolidated.radii == correct_skel.radii)
    assert np.all(consolidated.vertex_types == correct_skel.vertex_types)
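
The test hinges on consolidate() merging the duplicate vertex (0, 0, 0) (indices 0 and 3) and remapping edges to the surviving index. A minimal numpy sketch of that deduplication (not the library's actual implementation):

import numpy as np

vertices = np.array([(0, 0, 0), (1, 0, 0), (2, 0, 0), (0, 0, 0)], dtype=np.float32)
edges = np.array([[0, 1], [1, 2], [2, 3]], dtype=np.uint32)

# unique rows; `inverse` maps each old vertex index to its deduplicated index
unique_verts, inverse = np.unique(vertices, axis=0, return_inverse=True)
inverse = inverse.ravel()  # inverse shape varies across numpy versions
remapped = inverse[edges]  # edge (2, 3) becomes (2, 0)
print(unique_verts)
print(remapped)
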
Example #4
def test_cable_length():
  skel = PrecomputedSkeleton([ 
      (0,0,0), (1,0,0), (2,0,0), (3,0,0), (4,0,0), (5,0,0)
    ], 
    edges=[ (1,0), (1,2), (2,3), (3,4), (5,4) ],
    radii=[ 1, 2, 3, 4, 5, 6 ],
    vertex_types=[1, 2, 3, 4, 5, 6]
  )

  assert skel.cable_length() == (skel.vertices.shape[0] - 1)

  skel = PrecomputedSkeleton([ 
      (2,0,0), (1,0,0), (0,0,0), (0,5,0), (0,6,0), (0,7,0)
    ], 
    edges=[ (1,0), (1,2), (2,3), (3,4), (5,4) ],
    radii=[ 1, 2, 3, 4, 5, 6 ],
    vertex_types=[1, 2, 3, 4, 5, 6]
  )
  assert skel.cable_length() == 9

  skel = PrecomputedSkeleton([ 
      (1,1,1), (0,0,0), (1,0,0)
    ], 
    edges=[ (1,0), (1,2) ],
    radii=[ 1, 2, 3],
    vertex_types=[1, 2, 3]
  )
  assert abs(skel.cable_length() - (math.sqrt(3) + 1)) < 1e-6
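
cable_length() is the sum of euclidean edge lengths; verifying the sqrt(3) + 1 case above by hand (a sketch, not the library internals):

import math
import numpy as np

vertices = np.array([(1, 1, 1), (0, 0, 0), (1, 0, 0)], dtype=np.float32)
edges = np.array([(1, 0), (1, 2)])

# |(0,0,0)-(1,1,1)| + |(0,0,0)-(1,0,0)| = sqrt(3) + 1
deltas = vertices[edges[:, 0]] - vertices[edges[:, 1]]
print(np.sum(np.linalg.norm(deltas, axis=1)), math.sqrt(3) + 1)
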
Example #5
def test_read_swc():

  # From http://research.mssm.edu/cnic/swc.html
  test_file = """# ORIGINAL_SOURCE NeuronStudio 0.8.80
# CREATURE
# REGION
# FIELD/LAYER
# TYPE
# CONTRIBUTOR
# REFERENCE
# RAW
# EXTRAS
# SOMA_AREA
# SHINKAGE_CORRECTION 1.0 1.0 1.0
# VERSION_NUMBER 1.0
# VERSION_DATE 2007-07-24
# SCALE 1.0 1.0 1.0
1 1 14.566132 34.873772 7.857000 0.717830 -1
2 0 16.022520 33.760513 7.047000 0.463378 1
3 5 17.542000 32.604973 6.885001 0.638007 2
4 0 19.163984 32.022469 5.913000 0.602284 3
5 0 20.448090 30.822802 4.860000 0.436025 4
6 6 21.897903 28.881084 3.402000 0.471886 5
7 0 18.461960 30.289471 8.586000 0.447463 3
8 6 19.420759 28.730757 9.558000 0.496217 7"""

  skel = PrecomputedSkeleton.from_swc(test_file)
  assert skel.vertices.shape[0] == 8
  assert skel.edges.shape[0] == 7

  skel_gt = PrecomputedSkeleton(
    vertices=[
      [14.566132, 34.873772, 7.857000],
      [16.022520, 33.760513, 7.047000],
      [17.542000, 32.604973, 6.885001],
      [19.163984, 32.022469, 5.913000],
      [20.448090, 30.822802, 4.860000],
      [21.897903, 28.881084, 3.402000],
      [18.461960, 30.289471, 8.586000],
      [19.420759, 28.730757, 9.558000]
    ],
    edges=[ (0,1), (1,2), (2,3), (3,4), (4,5), (2,6), (7,6) ],
    radii=[ 
      0.717830, 0.463378, 0.638007, 0.602284, 
      0.436025, 0.471886, 0.447463, 0.496217
    ],
    vertex_types=[
      1, 0, 5, 0, 0, 6, 0, 6
    ],
  )

  assert PrecomputedSkeleton.equivalent(skel, skel_gt)
Example #6
def test_downsample_joints():
  skel = PrecomputedSkeleton([ 
      
                        (2, 3,0), # 0
                        (2, 2,0), # 1
                        (2, 1,0), # 2
      (0,0,0), (1,0,0), (2, 0,0), (3,0,0), (4,0,0), # 3, 4, 5, 6, 7
                        (2,-1,0), # 8
                        (2,-2,0), # 9
                        (2,-3,0), # 10

    ], 
    edges=[ 
                  (0, 1),
                  (1, 2),
                  (2, 5),
        (3,4), (4,5), (5, 6), (6,7),
                  (5, 8),
                  (8, 9),
                  (9,10)
    ],
    radii=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ],
    vertex_types=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ],
    segid=1337,
  )

  ds_skel = skel.downsample(2)
  ds_skel_gt = PrecomputedSkeleton([ 

                        (2, 3,0), # 0
                        
                        (2, 2,0), # 1
      (0,0,0),          (2, 0,0),     (4,0,0), # 2, 3, 4

                        (2,-2,0), # 5                        
                        (2,-3,0), # 6

    ], 
    edges=[ 
                  (0,1),
                  (1,3),
              (2,3),  (3,4), 
                  (3,5),
                  (5,6)
    ],
    radii=[ 0, 1, 3, 5, 7, 9, 10 ],
    vertex_types=[ 0, 1, 3, 5, 7, 9, 10 ],
    segid=1337,
  )

  assert PrecomputedSkeleton.equivalent(ds_skel, ds_skel_gt)
Example #7
  def fuse_skeletons(self, skels):
    if len(skels) == 0:
      return PrecomputedSkeleton()

    bbxs = [ item[0] for item in skels ]
    skeletons = [ item[1] for item in skels ]

    skeletons = self.crop_skels(bbxs, skeletons)
    skeletons = [ s for s in skeletons if not s.empty() ]

    if len(skeletons) == 0:
      return PrecomputedSkeleton()

    return PrecomputedSkeleton.simple_merge(skeletons).consolidate()
Example #8
    def process_skeletons(self, unfused_skeletons, in_place=False):
        skeletons = {}
        if in_place:
            skeletons = unfused_skeletons

        for label in tqdm(unfused_skeletons.keys(),
                          desc="Postprocessing",
                          disable=(not self.progress)):
            skels = unfused_skeletons[label]
            skel = PrecomputedSkeleton.simple_merge(skels)
            skel.id = label
            skel.extra_attributes = [
              attr for attr in skel.extra_attributes \
              if attr['data_type'] == 'float32'
            ]
            # skeletons longer than max_cable_length skip the expensive postprocessing
            if (self.max_cable_length is not None
                    and skel.cable_length() > self.max_cable_length):
                skeletons[label] = skel.to_precomputed()
            else:
                skeletons[label] = kimimaro.postprocess(
                    skel,
                    dust_threshold=self.dust_threshold,  # voxels 
                    tick_threshold=self.tick_threshold,  # nm
                ).to_precomputed()

        return skeletons
Example #9
def remove_ticks(skeleton, threshold):
  """
  Simple merging of individual TEASAR cubes results in lots of little 
  ticks due to the edge effect. We can remove them by thresholding
  the path length from a given branch to the "main body" of the neurite. 
  We successively remove paths from shortest to longest until no branches
  below threshold remain.

  If TEASAR parameters were chosen such that they allowed for spines to
  be traced, this is also an opportunity to correct for that.

  This algorithm is O(N^2) in the number of terminal nodes.

  Parameters:
    threshold: The maximum length in nanometers that may be culled.

  Returns: tick free skeleton
  """
  if skeleton.empty():
    return skeleton

  skels = []
  for component in skeleton.components():
    skels.append(_remove_ticks(component, threshold))

  return PrecomputedSkeleton.simple_merge(skels).consolidate()
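
A hedged usage sketch: a 100 nm spur off a 2000 nm backbone should fall below an illustrative 500 nm threshold and be culled (assumes PrecomputedSkeleton is imported from cloudvolume, as elsewhere in these examples):

skel = PrecomputedSkeleton(
    [(0, 0, 0), (1000, 0, 0), (2000, 0, 0), (1000, 100, 0)],
    edges=[(0, 1), (1, 2), (1, 3)],  # vertex 3 is the short tick
)
skel = remove_ticks(skel, threshold=500)  # the (1, 3) branch should be removed
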
Example #10
def readSkelFromFile(fname, iz, iy, ix, bsize, anisotropy):

    print("Reading from file: " + fname)

    with open(fname, 'r') as f:
        inp_swc = f.read()

    skel_read = PrecomputedSkeleton.from_swc(inp_swc)

    # swap axis order from x y z to z y x
    vertices_copy = skel_read.vertices.copy()
    skel_read.vertices[:, 0] = vertices_copy[:, 2]
    skel_read.vertices[:, 2] = vertices_copy[:, 0]

    x_offset = ix * bsize[OR_X] * anisotropy[OR_X]
    y_offset = iy * bsize[OR_Y] * anisotropy[OR_Y]
    z_offset = iz * bsize[OR_Z] * anisotropy[OR_Z]

    # identity rotation; the per-axis offsets go in the last column.
    # float dtype so non-integer offsets are not silently truncated.
    transform_ = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]],
                          dtype=np.float32)

    transform_[0, 3] = z_offset
    transform_[1, 3] = y_offset
    transform_[2, 3] = x_offset

    skel_read.transform = transform_

    skel_read.apply_transform()

    # reset to identity so the offset is not applied a second time
    skel_read.transform = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])

    return skel_read
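
For reference, apply_transform with a 3x4 matrix acts as the usual affine map on the Nx3 vertex array; a standalone numpy sketch of that action (assumed equivalent to the library call):

import numpy as np

vertices = np.array([(1., 2., 3.)])
transform = np.array([[1, 0, 0, 10],
                      [0, 1, 0, 20],
                      [0, 0, 1, 30]], dtype=np.float32)

# rotate/scale by the 3x3 block, then translate by the last column
print(vertices @ transform[:3, :3].T + transform[:3, 3])  # [[11. 22. 33.]]
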
Example #11
 def execute(self):
     corgie_logger.info(f"Merging skeletons at {self.dst_path}")
     fragment_filenames = self.cf.list(prefix=self.prefix, flat=True)
     skeleton_files = self.cf.get(fragment_filenames)
     skeletons = defaultdict(list)
     for skeleton_file in skeleton_files:
         try:
             colon_index = skeleton_file["path"].index(":")
         except ValueError:
             # File is full skeleton, not fragment
             continue
         seg_id = skeleton_file["path"][0:colon_index]
         skeleton_fragment = pickle.loads(skeleton_file["content"])
         if not skeleton_fragment.empty():
             skeletons[seg_id].append(skeleton_fragment)
     for seg_id, skeleton_fragments in skeletons.items():
         skeleton = PrecomputedSkeleton.simple_merge(
             skeleton_fragments).consolidate()
         skeleton = kimimaro.postprocess(skeleton, self.dust_threshold,
                                         self.tick_threshold)
         skeleton.id = int(seg_id)
         self.cf.put(path=seg_id,
                     content=skeleton.to_precomputed(),
                     compress="gzip")
         corgie_logger.info(f"Finished skeleton {seg_id}")
Example #12
def merge(skeletons):
    merged_skels = {}
    for segid, skels in skeletons.items():
        skel = PrecomputedSkeleton.simple_merge(skels)
        merged_skels[segid] = skel.consolidate()

    return merged_skels
Example #13
def test_components():
    skel = PrecomputedSkeleton(
        [
            (0, 0, 0),
            (1, 0, 0),
            (2, 0, 0),
            (0, 1, 0),
            (0, 2, 0),
            (0, 3, 0),
        ],
        edges=[(0, 1), (1, 2), (3, 4), (4, 5), (3, 5)],
        segid=666,
    )

    components = skel.components()
    assert len(components) == 2
    assert components[0].vertices.shape[0] == 3
    assert components[1].vertices.shape[0] == 3
    assert components[0].edges.shape[0] == 2
    assert components[1].edges.shape[0] == 3

    skel1_gt = PrecomputedSkeleton(
        [(0, 0, 0), (1, 0, 0), (2, 0, 0)],
        [(0, 1), (1, 2)])
    skel2_gt = PrecomputedSkeleton(
        [(0, 1, 0), (0, 2, 0), (0, 3, 0)],
        [(0, 1), (0, 2), (1, 2)])

    assert PrecomputedSkeleton.equivalent(components[0], skel1_gt)
    assert PrecomputedSkeleton.equivalent(components[1], skel2_gt)
Example #14
def remove_loops(skeleton):
  if skeleton.empty():
    return skeleton

  skels = []
  for component in skeleton.components():
    skels.append(_remove_loops(component))

  return PrecomputedSkeleton.simple_merge(skels).consolidate()
Example #15
def test_remove_disconnected_vertices():
  skel = PrecomputedSkeleton(
    [ 
      (0,0,0), (1,0,0), (2,0,0),
      (0,1,0), (0,2,0), (0,3,0),
      (-1, -1, -1)
    ], 
    edges=[ 
      (0,1), (1,2), 
      (3,4), (4,5), (3,5)
    ],
    segid=666,
  )

  res = skel.remove_disconnected_vertices()
  assert res.vertices.shape[0] == 6
  assert res.edges.shape[0] == 5 
  assert res.radii.shape[0] == 6
  assert res.vertex_types.shape[0] == 6
  assert res.id == 666
Example #16
def remove_dust(skeleton, dust_threshold):
  """Dust threshold in physical cable length."""
  
  if skeleton.empty():
    return skeleton

  skels = [] 
  for skel in skeleton.components():
    if skel.cable_length() > dust_threshold:
      skels.append(skel)

  skeleton = PrecomputedSkeleton.simple_merge(skels)
  return skeleton.consolidate()
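
A quick sketch of remove_dust: the threshold shares units with the vertex coordinates (e.g. nm), and the values below are illustrative:

# two components: a 2000-unit path and a 1-unit speck
skel = PrecomputedSkeleton(
    [(0, 0, 0), (1000, 0, 0), (2000, 0, 0), (5000, 0, 0), (5001, 0, 0)],
    edges=[(0, 1), (1, 2), (3, 4)],
)
skel = remove_dust(skel, dust_threshold=1000)  # only the 2000-unit component survives
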
Example #17
  def process_skeletons(self, locations, cv):
    skeletons = {}
    for label, locs in locations.items():
      skel = PrecomputedSkeleton.simple_merge(
        self.get_unfused(label, locs, cv)
      )
      skel.id = label
      skel.extra_attributes = [ 
        attr for attr in skel.extra_attributes \
        if attr['data_type'] == 'float32' 
      ]      
      skeletons[label] = kimimaro.postprocess(
        skel, 
        dust_threshold=self.dust_threshold, # voxels 
        tick_threshold=self.tick_threshold, # nm
      ).to_precomputed()

    return skeletons
Example #18
def load_raw_skeletons():
    print("Downloading list of files...")
    print(cv.skeleton.meta.layerpath)
    with Storage(cv.skeleton.meta.layerpath, progress=True) as stor:
        all_files = list(stor.list_files())

    all_files = [
        fname for fname in all_files if os.path.splitext(fname)[1] == '.frags'
    ]

    print("Downloading files...")
    with Storage(cv.skeleton.meta.layerpath, progress=True) as stor:
        all_files = stor.get_files(all_files)

    # CHECKPOINT?

    for i, res in enumerate(tqdm(all_files, desc='Unpickling')):
        all_files[i] = pickle.loads(res['content'])

    # group by segid

    unfused_skeletons = defaultdict(list)
    while all_files:
        fragment = all_files.pop()
        for label, skel_frag in fragment.items():
            unfused_skeletons[label].append(skel_frag)

    # CHECKPOINT?

    skeletons = crt_dict()
    labels = list(unfused_skeletons.keys())
    for label in tqdm(labels, desc='Simple Merging'):
        skels = unfused_skeletons[label]
        skeleton = PrecomputedSkeleton.simple_merge(skels)
        skeleton.id = label
        skeleton.extra_attributes = [
          attr for attr in skeleton.extra_attributes \
          if attr['data_type'] == 'float32'
        ]
        skeletons[label] = skeleton
        del unfused_skeletons[label]

    return skeletons
Example #19
    def process_skeletons(self, locations, cv):
        filenames = set(itertools.chain(*locations.values()))
        labels = set(locations.keys())
        unfused_skeletons = self.get_unfused(labels, filenames, cv)

        skeletons = {}
        for label, skels in tqdm(unfused_skeletons.items(),
                                 desc="Postprocessing",
                                 disable=(not self.progress)):
            skel = PrecomputedSkeleton.simple_merge(skels)
            skel.id = label
            skel.extra_attributes = [
              attr for attr in skel.extra_attributes \
              if attr['data_type'] == 'float32'
            ]
            skeletons[label] = kimimaro.postprocess(
                skel,
                dust_threshold=self.dust_threshold,  # voxels 
                tick_threshold=self.tick_threshold,  # nm
            ).to_precomputed()

        return skeletons
Example #20
def readSkelFromFile(fname, bz, by, bx, bsize, anisotropy):

    print("Reading from file: " + fname)

    with open(fname, 'r') as f:
        inp_swc = f.read()

    skel_read = PrecomputedSkeleton.from_swc(inp_swc)

    x_offset = bx*bsize[0]*anisotropy[0]
    y_offset = by*bsize[1]*anisotropy[1]
    z_offset = bz*bsize[2]*anisotropy[2]

    print("Block: " + str((bx,by,bz)))
    print("Offset: "+ str((x_offset,y_offset,z_offset)))

    skel_read.transform = np.array([[1,0,0,x_offset],
                                    [0,1,0,y_offset],
                                    [0,0,1,z_offset]])
    print("transform: " + str(skel_read.transform))

    # print("xmin, xmax: " + str(np.min(skel_read.vertices[:,0])) + ", " + str(np.max(skel_read.vertices[:,0])))
    # print("ymin, ymax: " + str(np.min(skel_read.vertices[:,1])) + ", " + str(np.max(skel_read.vertices[:,1])))
    # print("zmin, zmax: " + str(np.min(skel_read.vertices[:,2])) + ", " + str(np.max(skel_read.vertices[:,2])))
    # print("----------------------------")
    #
    skel_read.apply_transform()
    #
    # print("xmin, xmax: " + str(np.min(skel_read.vertices[:,0])) + ", " + str(np.max(skel_read.vertices[:,0])))
    # print("ymin, ymax: " + str(np.min(skel_read.vertices[:,1])) + ", " + str(np.max(skel_read.vertices[:,1])))
    # print("zmin, zmax: " + str(np.min(skel_read.vertices[:,2])) + ", " + str(np.max(skel_read.vertices[:,2])))

    # reset to identity so the offset is not applied a second time
    skel_read.transform = np.array([[1, 0, 0, 0],
                                    [0, 1, 0, 0],
                                    [0, 0, 1, 0]])

    return skel_read
Example #21
def trace(
    labels,
    DBF,
    scale=10,
    const=10,
    anisotropy=(1, 1, 1),
    soma_detection_threshold=1100,
    soma_acceptance_threshold=4000,
    pdrf_scale=5000,
    pdrf_exponent=16,
    soma_invalidation_scale=0.5,
    soma_invalidation_const=0,
    fix_branching=True,
    manual_targets_before=[],
    manual_targets_after=[],
    root=None,
    max_paths=None,
    voxel_graph=None,
):
    """
  Given the euclidean distance transform of a label ("Distance to Boundary Function"), 
  convert it into a skeleton using an algorithm based on TEASAR. 

  DBF: Result of the euclidean distance transform. Must represent a single label,
       assumed to be expressed in chosen physical units (i.e. nm)
  scale: during the "rolling ball" invalidation phase, multiply the DBF value by this.
  const: during the "rolling ball" invalidation phase, this is the minimum radius in chosen physical units (i.e. nm).
  anisotropy: (x,y,z) conversion factor for voxels to chosen physical units (i.e. nm)
  soma_detection_threshold: if object has a DBF value larger than this, 
    root will be placed at largest DBF value and special one time invalidation
    will be run over that root location (see soma_invalidation scale)
    expressed in chosen physical units (i.e. nm) 
  pdrf_scale: scale factor in front of dbf, used to weight dbf over euclidean distance (higher to pay more attention to dbf) (default 5000)
  pdrf_exponent: exponent in dbf formula on distance from edge, faster if factor of 2 (default 16)
  soma_invalidation_scale: the 'scale' factor used in the one time soma root invalidation (default .5)
  soma_invalidation_const: the 'const' factor used in the one time soma root invalidation (default 0)
                           (units in chosen physical units (i.e. nm))
  fix_branching: When enabled, zero out the graph edge weights traversed by 
    previously found paths. This causes branch points to occur closer to 
    the actual path divergence. However, there is a large performance penalty
    associated with this, as dijkstra's algorithm is computed once per path
    rather than once per skeleton.
  manual_targets_before: list of (x,y,z) that correspond to locations that must 
    have paths drawn to. Used for specifying root and border targets for
    merging adjacent chunks out-of-core. Targets are applied before ordinary
    target selection.
  manual_targets_after: Same as manual_targets_before but the additional 
    targets are applied after the usual algorithm runs. The current 
    invalidation status of the shape makes no difference.
  max_paths: If a label requires drawing this number of paths or more,
    abort and move onto the next label.
  root: If you want to force the root to be a particular voxel, you can
    specify it here.
  voxel_graph: a connection graph that defines permissible 
    directions of motion between voxels. This is useful for
    dealing with self-touches. The graph is defined by the
    conventions used in cc3d.voxel_connectivity_graph 
    (https://github.com/seung-lab/connected-components-3d/blob/3.2.0/cc3d_graphs.hpp#L73-L92)

  Based on the algorithm by:

  M. Sato, I. Bitter, M. Bender, A. Kaufman, and M. Nakajima. 
  "TEASAR: tree-structure extraction algorithm for accurate and robust skeletons"  
    Proc. the Eighth Pacific Conference on Computer Graphics and Applications. Oct. 2000.
    doi:10.1109/PCCGA.2000.883951 (https://ieeexplore.ieee.org/document/883951/)

  Returns: Skeleton object
  """
    dbf_max = np.max(DBF)
    labels = np.asfortranarray(labels)
    DBF = np.asfortranarray(DBF)

    soma_mode = False
    # > 5000 nm, gonna be a soma or blood vessel
    # For somata: specially handle the root by
    # placing it at the approximate center of the soma
    if dbf_max > soma_detection_threshold:
        labels, num_voxels_filled = fill_voids.fill(labels,
                                                    in_place=True,
                                                    return_fill_count=True)
        if num_voxels_filled > 0:
            del DBF
            DBF = edt.edt(labels,
                          anisotropy=anisotropy,
                          order='F',
                          black_border=np.all(labels))
        dbf_max = np.max(DBF)
        soma_mode = dbf_max > soma_acceptance_threshold

    soma_radius = 0.0

    if soma_mode:
        if root is not None:
            manual_targets_before.insert(0, root)
        root = find_soma_root(DBF, dbf_max)
        soma_radius = dbf_max * soma_invalidation_scale + soma_invalidation_const
    elif root is None:
        root = find_root(labels, anisotropy)

    if root is None:
        return PrecomputedSkeleton()

    free_space_radius = 0 if not soma_mode else DBF[root]
    # DBF: Distance to Boundary Field
    # DAF: Distance from any voxel Field (distance from root field)
    # PDRF: Penalized Distance from Root Field
    DBF = kimimaro.skeletontricks.zero2inf(DBF)  # DBF[ DBF == 0 ] = np.inf
    DAF, target = dijkstra3d.euclidean_distance_field(
        labels,
        root,
        anisotropy=anisotropy,
        free_space_radius=free_space_radius,
        voxel_graph=voxel_graph,
        return_max_location=True,
    )
    DAF = kimimaro.skeletontricks.inf2zero(DAF)  # DAF[ DAF == np.inf ] = 0
    PDRF = compute_pdrf(dbf_max, pdrf_scale, pdrf_exponent, DBF, DAF)

    # Use dijkstra propagation w/o a target to generate a field of
    # pointers from each voxel to its parent. Then we can rapidly
    # compute multiple paths by simply hopping pointers using path_from_parents
    if not fix_branching:
        parents = dijkstra3d.parental_field(PDRF, root, voxel_graph)
        del PDRF
    else:
        parents = PDRF

    if soma_mode:
        invalidated, labels = kimimaro.skeletontricks.roll_invalidation_ball(
            labels,
            DBF,
            np.array([root], dtype=np.uint32),
            scale=soma_invalidation_scale,
            const=soma_invalidation_const,
            anisotropy=anisotropy)
    # This target is only valid if no
    # invalidations have occurred yet.
    elif len(manual_targets_before) == 0:
        manual_targets_before.append(target)

    # delete reference to DAF and place it in
    # a list where we can delete it later and
    # free that memory.
    DAF = [DAF]

    paths = compute_paths(root, labels, DBF, DAF, parents, scale, const,
                          anisotropy, soma_mode, soma_radius, fix_branching,
                          manual_targets_before, manual_targets_after,
                          max_paths, voxel_graph)

    skel = PrecomputedSkeleton.simple_merge([
        PrecomputedSkeleton.from_path(path) for path in paths if len(path) > 0
    ]).consolidate()

    verts = skel.vertices.flatten().astype(np.uint32)
    skel.radii = DBF[verts[::3], verts[1::3], verts[2::3]]

    return skel
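
A hedged end-to-end sketch of calling trace: compute the distance transform with the edt package over a binary object mask (all parameter values below are illustrative):

import numpy as np
import edt

labels = np.zeros((64, 64, 64), dtype=bool)
labels[16:48, 30:34, 30:34] = True  # a simple bar-shaped object

anisotropy = (16, 16, 40)  # nm per voxel in x, y, z
DBF = edt.edt(labels, anisotropy=anisotropy)

skel = trace(labels, DBF, scale=10, const=50, anisotropy=anisotropy)
print(skel.vertices.shape, skel.edges.shape)
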
Example #22
def test_equivalent():
    assert PrecomputedSkeleton.equivalent(PrecomputedSkeleton(),
                                          PrecomputedSkeleton())

    identity = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0)], [(0, 1)])
    assert PrecomputedSkeleton.equivalent(identity, identity)

    diffvertex = PrecomputedSkeleton([(0, 0, 0), (0, 1, 0)], [(0, 1)])
    assert not PrecomputedSkeleton.equivalent(identity, diffvertex)

    single1 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0)], edges=[(1, 0)])
    single2 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0)], edges=[(0, 1)])
    assert PrecomputedSkeleton.equivalent(single1, single2)

    double1 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0)], edges=[(1, 0)])
    double2 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0)], edges=[(0, 1)])
    assert PrecomputedSkeleton.equivalent(double1, double2)

    double1 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0), (1, 1, 0)],
                                  edges=[(1, 0), (1, 2)])
    double2 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0), (1, 1, 0)],
                                  edges=[(2, 1), (0, 1)])
    assert PrecomputedSkeleton.equivalent(double1, double2)

    double1 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0), (1, 1, 0), (1, 1, 3)],
                                  edges=[(1, 0), (1, 2), (1, 3)])
    double2 = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0), (1, 1, 0), (1, 1, 3)],
                                  edges=[(3, 1), (2, 1), (0, 1)])
    assert PrecomputedSkeleton.equivalent(double1, double2)
Example #23
def trace(
    labels,
    DBF,
    scale=10,
    const=10,
    anisotropy=(1, 1, 1),
    soma_detection_threshold=1100,
    soma_acceptance_threshold=4000,
    pdrf_scale=5000,
    pdrf_exponent=16,
    soma_invalidation_scale=0.5,
    soma_invalidation_const=0,
    fix_branching=True,
):
    """
  Given the euclidean distance transform of a label ("Distance to Boundary Function"), 
  convert it into a skeleton using an algorithm based on TEASAR. 

  DBF: Result of the euclidean distance transform. Must represent a single label,
       assumed to be expressed in chosen physical units (i.e. nm)
  scale: during the "rolling ball" invalidation phase, multiply the DBF value by this.
  const: during the "rolling ball" invalidation phase, this is the minimum radius in chosen physical units (i.e. nm).
  anisotropy: (x,y,z) conversion factor for voxels to chosen physical units (i.e. nm)
  soma_detection_threshold: if object has a DBF value larger than this, 
    root will be placed at largest DBF value and special one time invalidation
    will be run over that root location (see soma_invalidation scale)
    expressed in chosen physical units (i.e. nm) 
  pdrf_scale: scale factor in front of dbf, used to weight dbf over euclidean distance (higher to pay more attention to dbf) (default 5000)
  pdrf_exponent: exponent in dbf formula on distance from edge, faster if factor of 2 (default 16)
  soma_invalidation_scale: the 'scale' factor used in the one time soma root invalidation (default .5)
  soma_invalidation_const: the 'const' factor used in the one time soma root invalidation (default 0)
                           (units in chosen physical units (i.e. nm))
  fix_branching: When enabled, zero out the graph edge weights traversed by 
    previously found paths. This causes branch points to occur closer to 
    the actual path divergence. However, there is a large performance penalty
    associated with this, as dijkstra's algorithm is computed once per path
    rather than once per skeleton.
  
  Based on the algorithm by:

  M. Sato, I. Bitter, M. Bender, A. Kaufman, and M. Nakajima. 
  "TEASAR: tree-structure extraction algorithm for accurate and robust skeletons"  
    Proc. the Eighth Pacific Conference on Computer Graphics and Applications. Oct. 2000.
    doi:10.1109/PCCGA.2000.883951 (https://ieeexplore.ieee.org/document/883951/)

  Returns: Skeleton object
  """
    dbf_max = np.max(DBF)
    labels = np.asfortranarray(labels)
    DBF = np.asfortranarray(DBF)

    soma_mode = False
    # > 5000 nm, gonna be a soma or blood vessel
    # For somata: specially handle the root by
    # placing it at the approximate center of the soma
    if dbf_max > soma_detection_threshold:
        del DBF
        labels = ndimage.binary_fill_holes(labels)
        labels = np.asfortranarray(labels)
        DBF = edt.edt(labels, anisotropy=anisotropy, order='F')
        dbf_max = np.max(DBF)
        soma_mode = dbf_max > soma_acceptance_threshold

    if soma_mode:
        root = np.unravel_index(np.argmax(DBF), DBF.shape)
        soma_radius = dbf_max * soma_invalidation_scale + soma_invalidation_const
    else:
        root = find_root(labels, anisotropy)
        soma_radius = 0.0

    if root is None:
        return PrecomputedSkeleton()

    # DBF: Distance to Boundary Field
    # DAF: Distance from any voxel Field (distance from root field)
    # PDRF: Penalized Distance from Root Field
    DBF = kimimaro.skeletontricks.zero2inf(DBF)  # DBF[ DBF == 0 ] = np.inf
    DAF = dijkstra3d.euclidean_distance_field(labels,
                                              root,
                                              anisotropy=anisotropy)
    DAF = kimimaro.skeletontricks.inf2zero(DAF)  # DAF[ DAF == np.inf ] = 0
    PDRF = compute_pdrf(dbf_max, pdrf_scale, pdrf_exponent, DBF, DAF)

    # Use dijkstra propagation w/o a target to generate a field of
    # pointers from each voxel to its parent. Then we can rapidly
    # compute multiple paths by simply hopping pointers using path_from_parents
    if not fix_branching:
        parents = dijkstra3d.parental_field(PDRF, root)
        del PDRF
    else:
        parents = PDRF

    if soma_mode:
        invalidated, labels = kimimaro.skeletontricks.roll_invalidation_ball(
            labels,
            DBF,
            np.array([root], dtype=np.uint32),
            scale=soma_invalidation_scale,
            const=soma_invalidation_const,
            anisotropy=anisotropy)

    paths = compute_paths(root, labels, DBF, DAF, parents, scale, const,
                          anisotropy, soma_mode, soma_radius, fix_branching)

    skel = PrecomputedSkeleton.simple_merge(
        [PrecomputedSkeleton.from_path(path) for path in paths]).consolidate()

    verts = skel.vertices.flatten().astype(np.uint32)
    skel.radii = DBF[verts[::3], verts[1::3], verts[2::3]]

    return skel
Example #24
def test_downsample():
    skel = PrecomputedSkeleton(
        [(0, 0, 0), (1, 0, 0), (1, 1, 0), (1, 1, 3), (2, 1, 3), (2, 2, 3)],
        edges=[(1, 0), (1, 2), (2, 3), (3, 4), (5, 4)],
        radii=[1, 2, 3, 4, 5, 6],
        vertex_types=[1, 2, 3, 4, 5, 6],
        segid=1337,
    )

    def should_error(x):
        try:
            skel.downsample(x)
            assert False
        except ValueError:
            pass

    should_error(-1)
    should_error(0)
    should_error(.5)
    should_error(2.00000000000001)

    dskel = skel.downsample(1)
    assert PrecomputedSkeleton.equivalent(dskel, skel)
    assert dskel.id == skel.id
    assert dskel.id == 1337

    dskel = skel.downsample(2)
    dskel_gt = PrecomputedSkeleton([(0, 0, 0), (1, 1, 0), (2, 1, 3),
                                    (2, 2, 3)],
                                   edges=[(1, 0), (1, 2), (2, 3)],
                                   radii=[1, 3, 5, 6],
                                   vertex_types=[1, 3, 5, 6])
    assert PrecomputedSkeleton.equivalent(dskel, dskel_gt)

    dskel = skel.downsample(3)
    dskel_gt = PrecomputedSkeleton(
        [(0, 0, 0), (1, 1, 3), (2, 2, 3)],
        edges=[(1, 0), (1, 2)],
        radii=[1, 4, 6],
        vertex_types=[1, 4, 6],
    )
    assert PrecomputedSkeleton.equivalent(dskel, dskel_gt)

    skel = PrecomputedSkeleton([(0, 0, 0), (1, 0, 0), (1, 1, 0), (1, 1, 3),
                                (2, 1, 3), (2, 2, 3)],
                               edges=[(1, 0), (1, 2), (3, 4), (5, 4)],
                               radii=[1, 2, 3, 4, 5, 6],
                               vertex_types=[1, 2, 3, 4, 5, 6])
    dskel = skel.downsample(2)
    dskel_gt = PrecomputedSkeleton([(0, 0, 0), (1, 1, 0), (1, 1, 3),
                                    (2, 2, 3)],
                                   edges=[(1, 0), (2, 3)],
                                   radii=[1, 3, 4, 6],
                                   vertex_types=[1, 3, 4, 6])
    assert PrecomputedSkeleton.equivalent(dskel, dskel_gt)