def test_square():
    labels = np.ones((1000, 1000), dtype=np.uint8)
    labels[-1, 0] = 0
    labels[0, -1] = 0

    skels = kimimaro.skeletonize(labels)
    assert len(skels) == 1

    skel = skels[1]
    assert skel.vertices.shape[0] == 1000
    assert skel.edges.shape[0] == 999
    assert abs(skel.cable_length() - 999 * np.sqrt(2)) < 0.001

    labels = np.ones((1000, 1000), dtype=np.uint8)
    labels[0, 0] = 0
    labels[-1, -1] = 0

    skels = kimimaro.skeletonize(labels)
    assert len(skels) == 1

    skel = skels[1]
    assert skel.vertices.shape[0] == 1000
    assert skel.edges.shape[0] == 999
    assert abs(skel.cable_length() - 999 * np.sqrt(2)) < 0.001
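kimimaro.skeletonize returns a dictionary mapping each label id to a Skeleton object, which is why the test indexes skels[1] for an image whose only label is 1. A minimal interactive sketch of the same idea on a smaller grid (default anisotropy of (1, 1, 1), so cable length is in voxel units):

import numpy as np
import kimimaro

labels = np.ones((64, 64), dtype=np.uint8)
labels[0, -1] = 0  # knock out two opposite corners so the
labels[-1, 0] = 0  # skeleton traces the main diagonal
skels = kimimaro.skeletonize(labels)  # {1: Skeleton}
skel = skels[1]
print(skel.vertices.shape[0])  # expect one vertex per diagonal voxel (64)
print(skel.cable_length())     # expect ~63 * sqrt(2)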
def skeletonize(self, voxel_size):
    skels = kimimaro.skeletonize(
        self.array,
        anisotropy=voxel_size,
        parallel=multiprocessing.cpu_count() // 2,
    )
    return skels
def skel_cal(fov_seg_array, dust_thres=2, nums_cpu=1, anisotropy=(200, 200, 1000)):
    fov_seg_array = fov_seg_array.astype(int)
    skels = kimimaro.skeletonize(
        fov_seg_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=dust_thres,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )
    print(skels.keys())
    skel = skels[1]
    fov_ins_skel_array = np.zeros_like(fov_seg_array)
    # Vertices are in physical units; divide by the anisotropy to recover voxel indices.
    coords = (skel.vertices / np.array(anisotropy)).astype(int)
    fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = 1
    return fov_ins_skel_array, skel
def execute(self):
    corgie_logger.info(
        f"Skeletonizing {self.seg_layer} at MIP{self.mip}, region: {self.bcube}"
    )
    seg_data = self.seg_layer.read(
        bcube=self.bcube, mip=self.mip, timestamp=self.timestamp
    )
    resolution = self.seg_layer.cv[self.mip].resolution
    skeletons = kimimaro.skeletonize(
        seg_data,
        self.teasar_params,
        object_ids=self.object_ids,
        anisotropy=resolution,
        dust_threshold=self.dust_threshold,
        progress=False,
        fix_branching=self.fix_branching,
        fix_borders=self.fix_borders,
        fix_avocados=self.fix_avocados,
    ).values()

    # Translate vertices from chunk-local to global physical coordinates.
    minpt = self.bcube.minpt(self.mip)
    for skel in skeletons:
        skel.vertices[:] += minpt * resolution

    cf = CloudFiles(self.dst_path)
    for skel in skeletons:
        path = "{}:{}".format(skel.id, self.bcube.to_filename(self.mip))
        cf.put(
            path=path,
            content=pickle.dumps(skel),
            compress="gzip",
            content_type="application/python-pickle",
            cache_control=False,
        )
def getSkeleton(labels):
    return kimimaro.skeletonize(
        labels,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': 50,  # default None
        },
        # object_ids=[ ... ],  # process only the specified labels
        # extra_targets_before=[ (27,33,100), (44,45,46) ],  # target points in voxels
        # extra_targets_after=[ (27,33,100), (44,45,46) ],  # target points in voxels
        dust_threshold=1000,  # skip connected components with fewer than this many voxels
        anisotropy=(18, 18, 20),
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=True,  # default False, show progress bar
        parallel=1,  # <= 0 all cpu, 1 single process, 2+ multiprocess
        parallel_chunk_size=100,  # how many skeletons to process before updating progress bar
    )
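The returned dictionary can be serialized directly; Skeleton.to_swc() (also used by the forge example later in this section) renders each skeleton as SWC text. A short follow-up sketch, given the same labels volume:

skels = getSkeleton(labels)
for label, skel in skels.items():
    with open(f"{label}.swc", "wt") as f:
        f.write(skel.to_swc())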
def execute(self):
    vol = CloudVolume(self.cloudpath, mip=self.mip, info=self.info, cdn_cache=False)
    bbox = Bbox.clamp(self.bounds, vol.bounds)

    path = skeldir(self.cloudpath)
    path = os.path.join(self.cloudpath, path)

    all_labels = vol[bbox.to_slices()]
    all_labels = all_labels[:, :, :, 0]

    skeletons = kimimaro.skeletonize(
        all_labels, self.teasar_params,
        object_ids=self.object_ids,
        anisotropy=vol.resolution,
        dust_threshold=1000,
        cc_safety_factor=0.25,
        progress=False,
        fix_branching=self.fix_branching,
    )

    for segid, skel in six.iteritems(skeletons):
        skel.vertices[:, 0] += bbox.minpt.x * vol.resolution.x
        skel.vertices[:, 1] += bbox.minpt.y * vol.resolution.y
        skel.vertices[:, 2] += bbox.minpt.z * vol.resolution.z

    self.upload(vol, path, bbox, skeletons.values())
def execute(self):
    vol = CloudVolume(
        self.cloudpath, mip=self.mip,
        info=self.info, cdn_cache=False,
        parallel=self.parallel,
    )
    bbox = Bbox.clamp(self.bounds, vol.bounds)

    path = skeldir(self.cloudpath)
    path = os.path.join(self.cloudpath, path)

    all_labels = vol[bbox.to_slices()]
    all_labels = all_labels[:, :, :, 0]

    if self.mask_ids:
        all_labels = fastremap.mask(all_labels, self.mask_ids)

    skeletons = kimimaro.skeletonize(
        all_labels, self.teasar_params,
        object_ids=self.object_ids,
        anisotropy=vol.resolution,
        dust_threshold=self.dust_threshold,
        cc_safety_factor=0.25,
        progress=self.progress,
        fix_branching=self.fix_branching,
        fix_borders=self.fix_borders,
        parallel=self.parallel,
    )

    for segid, skel in six.iteritems(skeletons):
        skel.vertices[:] += bbox.minpt * vol.resolution

    self.upload(vol, path, bbox, skeletons.values())
def execute(self):
    vol = CloudVolume(
        self.cloudpath,
        mip=self.mip,
        info=self.info,
        cdn_cache=False,
        parallel=self.parallel,
        fill_missing=self.fill_missing,
    )
    bbox = Bbox.clamp(self.bounds, vol.bounds)
    index_bbox = Bbox.clamp(self.index_bounds, vol.bounds)

    path = skeldir(self.cloudpath)
    path = os.path.join(self.cloudpath, path)

    all_labels = vol[bbox.to_slices()]
    all_labels = all_labels[:, :, :, 0]

    if self.mask_ids:
        all_labels = fastremap.mask(all_labels, self.mask_ids)

    extra_targets_after = {}
    if self.synapses:
        extra_targets_after = kimimaro.synapses_to_targets(
            all_labels, self.synapses
        )

    skeletons = kimimaro.skeletonize(
        all_labels,
        self.teasar_params,
        object_ids=self.object_ids,
        anisotropy=vol.resolution,
        dust_threshold=self.dust_threshold,
        progress=self.progress,
        fix_branching=self.fix_branching,
        fix_borders=self.fix_borders,
        fix_avocados=self.fix_avocados,
        parallel=self.parallel,
        extra_targets_after=extra_targets_after.keys(),
    )

    for segid, skel in six.iteritems(skeletons):
        skel.vertices[:] += bbox.minpt * vol.resolution

    if self.synapses:
        for segid, skel in six.iteritems(skeletons):
            terminal_nodes = skel.vertices[skel.terminals()]
            for i, vert in enumerate(terminal_nodes):
                vert = vert / vol.resolution - self.bounds.minpt
                vert = tuple(np.round(vert).astype(int))
                if vert in extra_targets_after.keys():
                    skel.vertex_types[i] = extra_targets_after[vert]

    if self.sharded:
        self.upload_batch(vol, path, index_bbox, skeletons)
    else:
        self.upload_individuals(vol, path, bbox, skeletons)

    if self.spatial_index:
        self.upload_spatial_index(vol, path, index_bbox, skeletons)
def skels_cal(fov_ins_array, nums_cpu=1, anisotropy=(200, 200, 1000)):
    skels = kimimaro.skeletonize(
        fov_ins_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=0,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )

    fov_ins_skel_array = np.zeros_like(fov_ins_array)
    fov_ins_ends_array = np.zeros_like(fov_ins_array)
    fov_ins_cross_array = np.zeros_like(fov_ins_array)
    directs_mask = np.zeros([*(fov_ins_array.shape), 3])
    direct_vis = np.zeros_like(fov_ins_array)
    cross_5_mask = np.zeros_like(fov_ins_array)
    ends_dict = {}

    for label in skels:
        skel = skels[label]
        # ends, vecs = ends_cal(skel, anisotropy)
        # ends, vecs = find_ends_angles_cross(skel, anisotropy)
        direct_mask, coords, ends, cross = skeleon2direct(skel, None, min_length=15)
        fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label
        fov_ins_ends_array[ends[:, 0], ends[:, 1], ends[:, 2]] = label
        ends_dict[label] = ends
        if len(direct_mask) != 0:
            directs_mask[direct_mask[:, 3].astype(int), direct_mask[:, 4].astype(int),
                         direct_mask[:, 5].astype(int), ...] = direct_mask[:, :3]
            direct_vis[direct_mask[:, 3].astype(int), direct_mask[:, 4].astype(int),
                       direct_mask[:, 5].astype(int), ...] = label
        if len(cross) != 0:
            fov_ins_cross_array[cross[:, 0], cross[:, 1], cross[:, 2]] = label
            for c in cross:
                cross_5_mask[c[0] - 2:c[0] + 3, c[1] - 2:c[1] + 3, c[2] - 2:c[2] + 3] = 1

    direct_vis[cross_5_mask == 1] = 0
    return ends_dict, fov_ins_skel_array, fov_ins_ends_array, fov_ins_cross_array, directs_mask, direct_vis
def test_cube():
    labels = np.ones((256, 256, 256), dtype=np.uint8)
    labels[0, 0, 0] = 0
    labels[-1, -1, -1] = 0

    skels = kimimaro.skeletonize(labels)
    assert len(skels) == 1

    skel = skels[1]
    assert skel.vertices.shape[0] == 256
    assert skel.edges.shape[0] == 255
    assert abs(skel.cable_length() - 255 * np.sqrt(3)) < 0.001
def skels_cal(fov_ins_array, nums_cpu=1, anisotropy=(200, 200, 1000)):
    skels = kimimaro.skeletonize(
        fov_ins_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=0,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )

    fov_ins_skel_array = np.zeros_like(fov_ins_array)
    fov_ins_ends_array = np.zeros_like(fov_ins_array)
    fov_ins_cross_array = np.zeros_like(fov_ins_array)
    # direct_mask = np.zeros([*(fov_ins_array.shape), 3])

    for label in skels:
        skel = skels[label]
        _, coords, ends, cross = skeleton2endcross(skel, anisotropy)
        fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label
        fov_ins_ends_array[ends[:, 0], ends[:, 1], ends[:, 2]] = label
        # if len(direct) != 0:
        #     direct_mask[direct[:, 3].astype(int), direct[:, 4].astype(int),
        #                 direct[:, 5].astype(int), ...] = direct[:, :3]
        if len(cross) != 0:
            fov_ins_cross_array[cross[:, 0], cross[:, 1], cross[:, 2]] = label

    return fov_ins_skel_array, fov_ins_ends_array, fov_ins_cross_array
def skeleton_func(ins_mask, nums_cpu=1, anisotropy=(200, 200, 1000), return_arr=False):
    skels = kimimaro.skeletonize(
        ins_mask,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=0,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )
    # for i in skels:
    #     skels[i].vertices = skels[i].vertices / np.array(anisotropy)

    ins_array = np.zeros_like(ins_mask)
    if return_arr:
        for label in skels:
            skel = skels[label]
            # ends, vecs = ends_cal(skel, anisotropy)
            coords = (skel.vertices / np.array(anisotropy)).astype(int)
            ins_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label
        return skels, ins_array
    else:
        return skels
def forge(src, scale, const, pdrf_scale, pdrf_exponent,
          soma_detect, soma_accept, soma_scale, soma_const,
          anisotropy, dust, progress, fill_holes, fix_avocados,
          fix_branches, fix_borders, parallel, max_paths, outdir):
    """Skeletonize an input image and write out SWCs."""
    labels = np.load(src)
    skels = kimimaro.skeletonize(
        labels,
        teasar_params={
            "scale": scale,
            "const": const,
            "pdrf_scale": pdrf_scale,
            "pdrf_exponent": pdrf_exponent,
            "soma_detection_threshold": soma_detect,
            "soma_acceptance_threshold": soma_accept,
            "soma_invalidation_scale": soma_scale,
            "soma_invalidation_const": soma_const,
            "max_paths": max_paths,
        },
        anisotropy=anisotropy,
        dust_threshold=dust,
        progress=progress,
        fill_holes=fill_holes,
        fix_avocados=fix_avocados,
        fix_branching=fix_branches,
        fix_borders=fix_borders,
        parallel=parallel,
    )

    directory = mkdir(outdir)
    for label, skel in skels.items():
        fname = os.path.join(directory, f"{label}.swc")
        with open(fname, "wt") as f:
            f.write(skel.to_swc())

    if progress:
        print(f"kimimaro: wrote {len(skels)} skeletons to {directory}")
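The SWCs written by forge can be loaded back into Skeleton objects via cloud-volume's Skeleton.from_swc. A sketch, assuming cloud-volume is installed alongside kimimaro and the file path is hypothetical:

from cloudvolume import Skeleton

with open("out/1.swc", "rt") as f:  # hypothetical output path
    skel = Skeleton.from_swc(f.read())
print(skel.cable_length())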
def skels_cal(fov_ins_array, nums_cpu=1, anisotropy=(200, 200, 1000)):
    skels = kimimaro.skeletonize(
        fov_ins_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=0,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )

    fov_ins_skel_array = np.zeros_like(fov_ins_array)
    fov_ins_ends_array = np.zeros_like(fov_ins_array)
    ends_dict = {}
    vecs_dict = {}

    for label in skels:
        skel = skels[label]
        ends, vecs = ends_cal(skel, anisotropy)
        # ends, vecs = find_ends_angles_cross(skel, anisotropy)
        coords = (skel.vertices / np.array(anisotropy)).astype(int)
        fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label
        fov_ins_ends_array[ends[:, 0], ends[:, 1], ends[:, 2]] = label
        ends_dict[label] = ends
        vecs_dict[label] = vecs

    return ends_dict, vecs_dict, fov_ins_skel_array, fov_ins_ends_array, skels
def kimimaro_func(fov_ins_array, nums_cpu=1, anisotropy=(200, 200, 1000)):
    skels = kimimaro.skeletonize(
        fov_ins_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=0,
        anisotropy=anisotropy,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=False,  # default False
        parallel=nums_cpu,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )
    return skels
def skeletonize_seg(seg, segid, kimi_params):
    return kimimaro.skeletonize(seg, object_ids=[segid], **kimi_params)[segid]
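A hypothetical invocation of skeletonize_seg, given a labeled volume seg and a label id segid. The kimi_params keys simply mirror the keyword arguments of kimimaro.skeletonize used throughout this section; the values are illustrative, not recommendations:

kimi_params = {
    "teasar_params": {"scale": 4, "const": 500},
    "anisotropy": (18, 18, 20),
    "dust_threshold": 1000,
    "fix_branching": True,
    "parallel": 1,
}
skel = skeletonize_seg(seg, segid, kimi_params)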
nib.save(nimg, path + fname + '_thres.nii.gz')

'''veins from swi'''
fname = 'swi_brain_susan'
swi = nib.load(path + fname + '.nii.gz')
img = swi.get_fdata()
mask = nib.load(path + 'swi_brain_mask.nii.gz')
msk = mask.get_fdata()

res = (img.max() - img) * msk  # invert intensities inside the brain mask
nimg = nib.Nifti1Image(res, swi.affine)
nib.save(nimg, path + fname + '_inv.nii.gz')

res[res > 110] = 1  # threshold to a binary vein mask
res[res != 1] = 0
nimg = nib.Nifti1Image(res, swi.affine)
nib.save(nimg, path + fname + '_thres.nii.gz')

'''kimimaro skeletonization'''
import kimimaro

skels = kimimaro.skeletonize(img)  # caution: kimimaro expects an integer label volume, not raw intensities
skel = skels[1]
skel.viewer()

skels = kimimaro.skeletonize(res)  # the binarized mask is the sensible input
skel = skels[1]
skel.viewer()
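Since kimimaro expects an integer label volume, a cleaner version casts the thresholded res before skeletonizing, and passes the NIfTI voxel sizes as the anisotropy (nibabel's header.get_zooms() returns them, typically in mm). A sketch under those assumptions:

labels = res.astype(np.uint8)       # binary vein mask -> integer labels
zooms = swi.header.get_zooms()[:3]  # voxel size per axis
skels = kimimaro.skeletonize(labels, anisotropy=zooms)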
def __teasar_reconstruct(image, spacing, core_count=0, debug=False):
    if kimimaro is None:
        raise ImportError("kimimaro is not installed (pip -U install kimimaro)")

    label_im, nb_labels = ndimage.label(image)

    teasar_params = {}
    for key in ['scale', 'const', 'pdrf_exponent', 'pdrf_scale',
                'soma_detection_threshold', 'soma_acceptance_threshold',
                'soma_invalidation_scale', 'soma_invalidation_const', 'max_paths']:
        teasar_params[key] = getattr(config.extract.teasar, key)

    skeleton = kimimaro.skeletonize(
        label_im,
        teasar_params=teasar_params,
        # skip connected components with fewer than this many voxels
        dust_threshold=config.extract.teasar.dust_threshold // np.prod(spacing),
        anisotropy=spacing,
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        fill_holes=False,  # default False
        fix_avocados=False,  # default False
        progress=debug,  # default False, show progress bar
        parallel=core_count,  # <= 0 all cpu, 1 single process, 2+ multiprocess
        parallel_chunk_size=50,  # how many skeletons to process before updating progress bar
    )

    segment_data = {}
    seg_id = 0
    for network in skeleton.values():
        segments = []
        start_points = None
        stop_points = []

        # Rebuild the skeleton as an undirected graph and walk it to split
        # the network into unbranched segments.
        network_graph = nx.Graph()
        network_graph.add_edges_from(network.edges)

        # Prefer starting from a degree-1 node (an endpoint).
        for node in network_graph:
            if len(network_graph.adj[node]) == 1:
                start_points = [(node,)]
                break
        if start_points is None:
            # Pure cycle: fall back to an arbitrary node.
            start_points = [(next(iter(network_graph.nodes)),)]

        while start_points:
            start = start_points.pop()
            if len(start) == 1:
                new_segment = [start[0]]
                start = start[0]
            else:
                if not network_graph.has_edge(start[0], start[1]):
                    continue
                new_segment = [start[0], start[1]]
                network_graph.remove_edge(start[0], start[1])
                start = start[1]
            for f, t in nx.dfs_successors(network_graph, start).items():
                if len(t) == 1:
                    new_segment.append(t[0])
                    network_graph.remove_edge(f, t[0])
                    if t[0] in stop_points:
                        break
                elif len(t) > 1:
                    # Branch point: queue each outgoing path as its own segment.
                    for paths in t:
                        start_points.append((f, paths))
                    stop_points.append(f)
                    break
            segments.append(new_segment)

        for seg in segments:
            points = np.array([(round(x, 4), round(y, 4), round(z, 4))
                               for (z, y, x) in network.vertices[seg]])
            segment_data[seg_id] = dict(
                points=points,
                radius=network.radius[seg],
                seg_id=seg_id,
            )
            seg_id += 1

    return segment_data
def fov_connect(fov_ins_array):
    def parent(edges, i):
        coords = np.where(edges == i)
        edge = edges[coords[0][0]]
        if edge[0] == i:
            return edge[1] + 1
        return edge[0] + 1

    skels = kimimaro.skeletonize(
        fov_ins_array,
        teasar_params={
            'scale': 4,
            'const': 500,  # physical units
            'pdrf_exponent': 4,
            'pdrf_scale': 100000,
            'soma_detection_threshold': 1100,  # physical units
            'soma_acceptance_threshold': 3500,  # physical units
            'soma_invalidation_scale': 1.0,
            'soma_invalidation_const': 300,  # physical units
            'max_paths': None,  # default None
        },
        dust_threshold=50,
        anisotropy=(200, 200, 1000),
        fix_branching=True,  # default True
        fix_borders=True,  # default True
        progress=True,  # default False
        parallel=2,  # <= 0 all cpu, 1 single process, 2+ multiprocess
    )

    ends_dict = {}
    fov_ins_skel_array = np.zeros_like(fov_ins_array)
    ends_array = np.zeros_like(fov_ins_array)

    for label_ in skels:
        skel = skels[label_]
        # Vertices are in physical units; divide by the anisotropy to recover voxel indices.
        coords = (skel.vertices / np.array([200, 200, 1000])).astype(int)
        fov_ins_skel_array[coords[:, 0], coords[:, 1], coords[:, 2]] = label_

        coords = coords.tolist()
        edges = skel.edges.tolist()

        # Rebuild the edge list as a treelib Tree, expanding breadth-first from
        # the first edge's endpoint, so the tree's leaves are skeleton endpoints.
        ftree = Tree()
        cur_ = edges[0][0]
        ftree.create_node(cur_, cur_, data=coords[0])
        cur_list = [cur_]
        while len(edges) > 0 and len(cur_list) > 0:
            _cur_list = []
            edges_ = edges[:]
            # print(cur_list)
            for cur_ in cur_list:
                next_inds = np.where(np.array(edges_) == cur_)[0]
                if len(next_inds) == 0:
                    continue
                for next_ind in next_inds:
                    edge_ = edges_[next_ind]
                    edges.remove(edge_)
                    # print(cur_, edge_)
                    if edge_[0] == cur_:
                        next_ = edge_[-1]
                    else:
                        next_ = edge_[0]
                    _cur_list.append(next_)
                    ftree.create_node(next_, next_, data=coords[next_], parent=cur_)
                edges_ = edges[:]
            cur_list = _cur_list

        ends = [x.data for x in ftree.leaves()]
        ends.append(coords[0])
        ends_dict[label_] = ends
        ends_ = np.array(ends)
        ends_array[ends_[:, 0], ends_[:, 1], ends_[:, 2]] = 1
    # ends_array = dilation(ends_array, ball(1))
    return fov_ins_skel_array, ends_array, ends_dict
import numpy as np
import tifffile  # skimage.external.tifffile was removed from scikit-image; use the tifffile package directly
import kimimaro

labels = tifffile.imread("./ins_pred.tif")

skels = kimimaro.skeletonize(
    labels,
    teasar_params={
        'scale': 4,
        'const': 500,  # physical units
        'pdrf_exponent': 4,
        'pdrf_scale': 100000,
        'soma_detection_threshold': 1100,  # physical units
        'soma_acceptance_threshold': 3500,  # physical units
        'soma_invalidation_scale': 1.0,
        'soma_invalidation_const': 300,  # physical units
        'max_paths': 15,  # default None
    },
    dust_threshold=50,
    anisotropy=(200, 200, 1000),
    fix_branching=True,  # default True
    fix_borders=True,  # default True
    progress=True,  # default False
    parallel=1,  # <= 0 all cpu, 1 single process, 2+ multiprocess
)

print(np.unique(labels))
print(skels.keys())
skel = skels[115]
# print(skel.edges)
# print(skel.vertices)
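Hard-coding skels[115] raises a KeyError if that label was removed by dust_threshold or is absent from the volume; iterating the returned dictionary is more robust:

for label, skel in skels.items():
    print(label, skel.vertices.shape[0], skel.cable_length())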