def getSomaPositionSingleSeed(filename_swc_or_tree):
    """Return the integer (x, y, z) positions of all root nodes (SWC parent == -1)."""
    if isinstance(filename_swc_or_tree, str):
        tree = utility.readSWC(filename_swc_or_tree)
    else:
        tree = filename_swc_or_tree
    roots = tree[np.where(tree[:, 6] == -1)]
    seeds = roots[:, 2:5].astype(int).tolist()
    return seeds
def calculateKinkDensityAroundNeuriteoutgrowths(annotated_mainbranch='data/trees/annbranch.swc',
                                                window_size=4.0,
                                                scale=(0.223, 0.223, 0.3)):
    """
    Calculate the kink density around neurite outgrowths.

    Parameters
    ----------
    annotated_mainbranch : np.array or str
        An annotated mainbranch in .swc format where the neurite outgrowth branching
        points are annotated with TYPE 1 and the kinks are annotated with RADIUS 3.
    window_size : float
        Size of the window which is used for the density calculation, in um.
    scale : tuple
        (x, y, z) scales of the original images [um/px].

    Returns
    -------
    density_around_outgrowths : float
        The local kink density in a window around neurite outgrowths.
    density_total : float
        The kink density over the whole mainbranch.
    """
    if isinstance(annotated_mainbranch, str):
        annotated_mainbranch = utility.readSWC(annotated_mainbranch)

    # Nodes with TYPE 1 mark the branching points of neurite outgrowths
    branching_nodes = annotated_mainbranch[np.where(annotated_mainbranch[:, 1] == 1)]
    n_kinks_local = np.zeros(len(branching_nodes))
    window_sizes = np.zeros(len(branching_nodes))
    for i in range(len(branching_nodes)):
        window = utility.findWindow(branching_nodes[i], annotated_mainbranch,
                                    window_size=window_size, scale=scale)
        n_kinks_local[i] = (window[:, 5] == 3).sum()  # kinks are marked with RADIUS 3
        window_sizes[i] = utility.calculateDistancesTree(window, scale=scale)

    n_kinks_sum = n_kinks_local.sum()
    total_window_size = window_sizes.sum()
    density_around_outgrowths = n_kinks_sum / total_window_size

    n_kinks_total = (annotated_mainbranch[:, 5] == 3).sum()
    length_total = utility.calculateDistancesTree(annotated_mainbranch, scale=scale)
    density_total = n_kinks_total / length_total
    return density_around_outgrowths, density_total
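# Usage sketch (assumption: an annotated mainbranch .swc produced by the pipeline exists at the
# default path; the call below is illustrative, not part of the original project):
#
#   density_local, density_total = calculateKinkDensityAroundNeuriteoutgrowths(
#       'data/trees/annbranch.swc', window_size=4.0, scale=(0.223, 0.223, 0.3))
#   print(density_local, density_total)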
    # Mask out nodes that already belong to annotated structures (kinks, outgrowths, ...)
    mask_existing_structures = (annotations[:, 5] == 3) | (annotations[:, 1] != 0)
    it = int(np.floor(suppression_window / 2 / mean_distance))
    mask_existing_structures = utility.dilate_array(mask_existing_structures, it)
    realThickness = realThickness * np.invert(mask_existing_structures)

    # Greedy peak picking: mark the thickest node as a bead, then suppress a window of
    # `suppression_window` um around it, until no node exceeds mean + 3*std
    i = 0
    while realThickness.max() > (mean_thickness + 3 * std_thickness):
        mainbranch_beads[realThickness.argmax(), 1] = 1
        mainbranch_beads[realThickness.argmax(), 5] *= 2
        lower = int(realThickness.argmax() - np.floor(suppression_window / 2 / mean_distance))
        upper = int(realThickness.argmax() + np.floor(suppression_window / 2 / mean_distance)) + 1
        if lower < 0:
            realThickness[0:upper] = 0
        elif upper > len(realThickness):
            realThickness[lower:] = 0
        else:
            realThickness[lower:upper] = 0
        i += 1

    bead_count = sum(mainbranch_beads[:, 1])
    return mainbranch_beads, bead_count


if __name__ == '__main__':
    mainbranch = utility.readSWC('mainbranch.swc')
    ann = utility.readSWC('ann.swc')
    m, c = countBeads(mainbranch, ann)
    print(c)
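# Minimal, self-contained sketch (illustrative only, uses the module's numpy import `np`) of the
# peak-picking scheme used in countBeads() above: pick the thickest sample, zero out a window of
# `suppression_window` um around it, and repeat until nothing exceeds mean + 3*std. The function
# name and the simplified threshold are assumptions for illustration, not project API.
def _sketch_pick_beads(thickness, mean_distance=0.3, suppression_window=4.0):
    """Return the indices of detected 'beads' in a 1-D thickness profile (sketch)."""
    thickness = np.array(thickness, dtype=float)
    half = int(np.floor(suppression_window / 2 / mean_distance))
    threshold = thickness.mean() + 3 * thickness.std()
    picked = []
    while thickness.max() > threshold:
        peak = int(thickness.argmax())
        picked.append(peak)
        # Suppress the window around the picked peak so it is not selected again
        thickness[max(peak - half, 0):peak + half + 1] = 0
    return picked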
def getSomaPosition(filename_swc):
    """Return the integer (x, y, z) positions of all nodes with TYPE 0 (soma) of a tree."""
    tree = utility.readSWC(filename_swc)
    roots = tree[np.where(tree[:, 1] == 0)]
    seeds = roots[:, 2:5].astype(int).tolist()
    return seeds
def cleanup(infilename='data/trees/plm.swc',
            outfilename='data/trees/plm_clean.swc',
            neurontype='PLM',
            scale=(0.223, 0.223, 0.3),
            visualize=True):
    # Accept either a path to an .swc file or an already loaded tree array
    if isinstance(infilename, str):
        tree = utility.readSWC(infilename)
    else:
        tree = np.array(infilename)
    endpoints = utility.findEndpoints(tree)

    # For ALM neurons detect the soma_nodes, i.e. all nodes connected to the root that are above a threshold
    if neurontype == 'ALM':
        soma_nodes = utility.findSomaNodes(tree, scale=scale).tolist()
    else:
        soma_nodes = []

    # Trace from every endpoint to a root and save the corresponding branches, select the longest as mainbranch
    branches = []
    lengths = np.zeros(len(endpoints))
    for i in range(len(endpoints)):
        branch, length = traceBranch(endpoints[i], tree, soma_nodes=soma_nodes, scale=scale)
        branches.append(branch)
        lengths[i] = length
    mainbranch = branches[lengths.argmax()]
    # The last node is part of the soma and its radius gets subtracted from the final length
    mainbranch_length = lengths.max() - mainbranch[-1][5] * scale[0]

    # Trace from every endpoint to a node on the mainbranch to find sidebranches
    side_branches = []
    side_lengths = np.zeros(len(endpoints))
    for i in range(len(endpoints)):
        branch, length = traceBranch(endpoints[i], tree, main_nodes=mainbranch,
                                     soma_nodes=soma_nodes, scale=scale)
        side_branches.append(np.flip(branch, axis=0))
        side_lengths[i] = length - branch[-1][5] * scale[0]

    # Check if sidebranches are close and parallel to the mainbranch
    if visualize:
        fig, axes = plt.subplots(2, 1, sharex='col')
    all_distances = []
    all_slopes = []
    clean_side_branches = []
    windows = []
    for side_branch in side_branches:
        root = utility.findRoots(side_branch, return_node=True)[0]
        if root.tolist() in soma_nodes:
            # Set the searching window to the root node in case of an ALM soma-outgrowth side branch
            window = [root]
        else:
            window = utility.findWindow(root, mainbranch, window_size=40, scale=scale)
        windows.append(window)

        # Minimal distance of every side-branch node to the mainbranch window
        min_distance_from_mainbranch = []
        for node in side_branch:
            distances = []
            for main_node in window:
                distances.append(utility.dist3D(node, main_node, scale=scale))
            min_distance_from_mainbranch.append(min(distances))
        #all_distances.append(min_distance_from_mainbranch)
        min_distance_from_mainbranch = min_distance_from_mainbranch[5:]
        if visualize:
            axes[0].plot(min_distance_from_mainbranch)
        all_distances.append(min_distance_from_mainbranch)

        # Local slope of the distance profile over a sliding window of n nodes
        n = 4
        out = np.zeros(n).tolist()
        x = np.arange(n)
        for i in range(len(min_distance_from_mainbranch) - n):
            data = min_distance_from_mainbranch[i:i + n]
            try:
                slope, intercept, r_value, p_value, std_err = linregress(x, data)
            except ValueError:
                break
            out.append(slope)
        if visualize:
            axes[1].plot(out, '.')
        #out.insert(0, np.zeros(n).tolist())
        all_slopes.append(out)
    if visualize:
        plt.show()

    # The side branch "starts" where it begins to move away from the mainbranch
    start_node_index = np.zeros(len(all_slopes))
    for i in range(len(all_slopes)):
        if len(all_slopes[i]) == len(all_distances[i]):
            for j in range(len(all_slopes[i])):
                if all_slopes[i][j] > 0.02 or all_distances[i][j] > 0.5:
                    start_node_index[i] = j
                    break

    for i in range(len(start_node_index)):
        if start_node_index[i] == 0:
            clean_side_branches.append(side_branches[i])
        else:
            new_side_branch = side_branches[i]
            new_side_branch = new_side_branch[int(start_node_index[i]):]
            window = windows[i]
            distances = np.zeros(len(window))
            for j in range(len(window)):
                distances[j] = utility.dist3D(new_side_branch[0], window[j])
            # Reconnect the trimmed side branch to the closest mainbranch node
            connection_node = window[distances.argmin()]
            new_side_branch[0][6] = connection_node[0]
            clean_side_branches.append(new_side_branch)

    # Connect everything again and save the clean .swc file
    full_clean_tree = []
    for node in mainbranch:
        full_clean_tree.append(node)
    for node in soma_nodes:
        full_clean_tree.append(node)
    for clean_side_branch in clean_side_branches:
        for node in clean_side_branch:
            full_clean_tree.append(node)
    full_clean_tree = np.array(full_clean_tree)
    full_clean = utility.removeDoubleNodes(full_clean_tree)
    utility.saveSWC(outfilename, full_clean)
    # Also return the cleaned tree so callers (e.g. the batch script below) can use it directly
    return full_clean
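# Usage sketch (hypothetical file names; neurontype and scale mirror the defaults above):
# run cleanup() on a single trace outside of the batch script further below.
def _example_cleanup(infile='data/trees/plm.swc', outfile='data/trees/plm_clean.swc'):
    """Minimal sketch: clean one raw trace and write the result to disk."""
    return cleanup(infile, outfilename=outfile, neurontype='PLM',
                   scale=(0.223, 0.223, 0.3), visualize=False)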
def classify(infilename='data/trees/plm.swc',
             outfilename_tree='data/trees/plm_classified.swc',
             outfilename_mainbranch='data/trees/mainbranch.swc',
             neurontype='PLM',
             length_threshold=3,
             scale=(0.223, 0.223, 0.3),
             debug=False):
    '''
    Classify the side trees of a traced neuron (PMV branch, soma outgrowth, blob,
    neurite outgrowth) and return length and radius measurements per class together
    with the mainbranch and an annotated mainbranch.
    '''
    # Accept either a path to an .swc file or an already loaded tree array
    if isinstance(infilename, str):
        tree = utility.readSWC(infilename)
    else:
        tree = np.array(infilename)
    tree = utility.removeDoubleNodes(tree)
    endpoints = utility.findEndpoints(tree)

    # For ALM neurons detect the soma_nodes, i.e. all nodes connected to the root that are above a threshold
    if neurontype == 'ALM':
        soma_nodes = utility.findSomaNodes(tree, scale=scale).tolist()
    else:
        soma_nodes = []

    # Trace from every endpoint to a root and save the corresponding branches, select the longest as mainbranch
    branches = []
    lengths = np.zeros(len(endpoints))
    for i in range(len(endpoints)):
        branch, length = traceBranch(endpoints[i], tree, soma_nodes=soma_nodes, scale=scale)
        branches.append(branch)
        lengths[i] = length
    mainbranch = branches[lengths.argmax()]
    # The last node is part of the soma and its radius gets subtracted from the final length
    mainbranch_length = lengths.max() - mainbranch[-1][5] * scale[0]

    # Trace from every endpoint to a node on the mainbranch to find sidebranches
    side_branches = []
    side_lengths = np.zeros(len(endpoints))
    for i in range(len(endpoints)):
        branch, length = traceBranch(endpoints[i], tree, main_nodes=mainbranch,
                                     soma_nodes=soma_nodes, scale=scale)
        side_branches.append(np.flip(branch, axis=0))
        side_lengths[i] = length - branch[-1][5] * scale[0]

    # Concatenate side branches that end in the same final_node to side_trees
    final_nodes = []
    for side_branch in side_branches:
        final_nodes.append(side_branch[-1].tolist())
    final_nodes.sort()
    final_nodes = list(final_nodes for final_nodes, _ in itertools.groupby(final_nodes))
    side_trees = []
    for final_node in final_nodes:
        side_tree = []
        for side_branch in side_branches:
            if side_branch[-1].tolist() == final_node:
                side_tree.append(side_branch)
        side_tree_noduplicates = [node.tolist() for branch in side_tree for node in branch]
        side_tree_noduplicates.sort()
        side_tree_noduplicates = list(node for node, _ in itertools.groupby(side_tree_noduplicates))
        side_trees.append(side_tree_noduplicates)
    side_trees_array = [np.array(side_tree) for side_tree in side_trees]

    # Find the PMV branch in PLM neurons
    pmv_nodes = []
    if neurontype == 'PLM':
        for i in range(len(side_trees_array)):
            maximum = side_trees_array[i][:, 5].max()
            mean = side_trees_array[i][:, 5].mean()
            length = utility.calculateDistancesTree(side_trees_array[i], scale=scale)
            string = '{0:.3f} {1:.3f} {2:.3f}'.format(maximum, mean, length)
            if maximum > 5 and length > 20:
                pmv_node = utility.findRoots(side_trees_array[i], return_node=True)[0]
                try:
                    pmv_nodes = utility.findWindow(pmv_node, mainbranch,
                                                   window_size=7, scale=scale).tolist()
                except NameError:
                    pass
            else:
                pmv_nodes = []

    # Classify the side_trees according to their final node
    # (pmv_nodes -> pmv-branch, soma_nodes -> soma-outgrowth, main_nodes -> neurite-outgrowth)
    annotated_mainbranch = mainbranch.copy()
    annotated_mainbranch[:, 1] = 0
    annotated_mainbranch[:, 5] = 1
    main_nodes = mainbranch.tolist()
    side_category = np.zeros(len(side_trees_array))
    last_nodes = []
    tree_lengths = []
    tree_classes = []
    tree_mean_radii = []
    tree_max_radii = []
    tree_orders = []
    side_trees_clean = []
    for i in range(len(side_trees_array)):
        tree_length = utility.calculateDistancesTree(side_trees[i], scale=scale, return_sum=True)
        tree_mean_radius = side_trees_array[i][:, 5].mean()
        tree_max_radius = side_trees_array[i][:, 5].max()
        if tree_length > length_threshold:
            root = utility.findRoots(side_trees[i], return_node=True)[0].tolist()
            last_nodes.append(root)
            tree_lengths.append(tree_length)
            tree_mean_radii.append(tree_mean_radius)
            tree_max_radii.append(tree_max_radius)
            if root in pmv_nodes:
                side_trees_array[i][:, 1] = 6
                side_category[i] = 6
                tree_classes.append('PMV')
            elif root in soma_nodes:
                side_trees_array[i][:, 1] = 3
                side_category[i] = 3
                tree_classes.append('SomaOutgrowth')
            #elif root in main_nodes and side_trees_array[i][:,5].max()>3:
                #side_trees_array[i][:,1]=5
                #side_category[i] = 5
            elif root in main_nodes and side_trees_array[i][:, 5].mean() > 2 and tree_length < 5:
                side_trees_array[i][:, 1] = 5
                side_category[i] = 5
                tree_classes.append('Blob')
            elif root in main_nodes:
                side_trees_array[i][:, 1] = 2
                side_category[i] = 2
                tree_classes.append('NeuriteOutgrowth')
                annotated_mainbranch[np.argwhere(annotated_mainbranch[:, 0] == root[0])[0][0], 1] = 1
            else:
                side_trees_array[i][:, 1] = 9
                side_category[i] = 9
                tree_classes.append('Unknown')
            side_trees_clean.append(side_trees_array[i])

    # Save the final classified tree
    full_classified_tree = []
    mainbranch[:, 1] = 1
    main_nodes = mainbranch.tolist()
    soma_nodes = np.array(soma_nodes)
    try:
        soma_nodes[:, 1] = 0
    except IndexError:
        pass
    soma_nodes = soma_nodes.tolist()
    for node in main_nodes:
        full_classified_tree.append(node)
    for tree in side_trees_clean:
        for node in tree:
            full_classified_tree.append(node.tolist())
    for node in soma_nodes:
        full_classified_tree.append(node)
    full_classified_tree_array = np.array(full_classified_tree)
    utility.saveSWC(outfilename_tree, full_classified_tree_array)
    utility.saveSWC(outfilename_mainbranch, mainbranch)

    tree_lengths.append(mainbranch_length)
    tree_classes.append('MainBranch')
    tree_mean_radii.append(mainbranch[:, 5].mean())
    tree_max_radii.append(mainbranch[:, 5].max())
    if debug:
        for i in range(len(tree_lengths)):
            print('Class: {0:16} Length: {1:>6.2f} Max_r: {2:>4.2f} Mean_r: {3:>4.2f}'.format(
                tree_classes[i], tree_lengths[i], tree_max_radii[i], tree_mean_radii[i]))

    return tree_lengths, tree_classes, tree_mean_radii, tree_max_radii, mainbranch, annotated_mainbranch
def wavyness(infilename_or_mainbranch='data/trees/annotated_mainbranch.swc',
             outfilename_tree='data/trees/wavytree.swc',
             outfilename_kinks='data/trees/kinks.swc',
             angle_threshold=145,
             window_size_linear_regression=4.0,
             window_size_maximum_supression=4.0,
             n_colors=10,
             scale=(0.223, 0.223, 0.3),
             fix_node=False,
             plot_cdf=False):
    if plot_cdf:
        fig, ax = plt.subplots(figsize=(8, 4))
        ax.grid(True)
        ax.set_title('Cumulative distribution')
        ax.set_xlabel('Angle (deg)')
        ax.set_ylabel('Percentage')
        #ax.axis([50,cutoff_angle,0,1])

    # Accept either a path to an .swc file or an already loaded mainbranch
    if isinstance(infilename_or_mainbranch, str):
        tree = utility.readSWC(infilename_or_mainbranch)
    elif isinstance(infilename_or_mainbranch, (list, np.ndarray)):
        tree = np.array(infilename_or_mainbranch)
    tree[:, 5] = 0.5
    annotated_mainbranch = tree.copy()

    # Estimate the local bending angle at every node via linear regression over a window
    angles = np.zeros(len(tree))
    for i in range(len(tree)):
        angles[i] = calculateAnglesWithLinearRegression(tree[i], tree,
                                                        window_size=window_size_linear_regression,
                                                        visualize=False,
                                                        fixed_node=fix_node)
    angles = np.reshape(angles, (len(angles), 1))
    angles[np.where(np.isnan(angles))] = 180
    # Set the first and last 20 nodes to 180 as they generally don't correspond to real kinks
    angles[:20] = 180
    angles[-20:] = 180
    data = np.concatenate((tree, angles), axis=1)
    sample_numbers = data[:, 0]

    # Greedily pick the sharpest kink below the threshold and suppress a window around it
    kinks_count = 0
    kinks = []
    while min(data[:, 7]) < angle_threshold:
        annotated_mainbranch[np.argwhere(
            annotated_mainbranch[:, 0] == data[data[:, 7].argmin()][0])[0][0], 5] = 3
        kinks.append(data[data[:, 7].argmin()].tolist())
        kinks_count += 1
        w = utility.findWindow(data[data[:, 7].argmin()][:7], tree,
                               window_size=window_size_maximum_supression, scale=scale)
        indices = np.argwhere(np.isin(data[:, 0], w[:, 0])).reshape(len(w))
        for index in indices:
            data[index, 7] = 180

    kinks = np.array(kinks)
    try:
        kinks[:, 5] = 3
        utility.saveSWC(outfilename_kinks, kinks)
    except IndexError:
        pass

    if plot_cdf:
        n, bins, patches = ax.hist(kinks[:, 7], bins=10000, density=True,
                                   histtype='step', cumulative=True, label='neuronname')
        patches[0].set_xy(patches[0].get_xy()[:-1])
        ax.legend(loc='center left')

    print(angles.min())
    # Map the angles to n_colors discrete values and store them in the TYPE column for visualization
    m = interpolate.interp1d([0, 180], [1, n_colors])
    normalized_angles = np.round(m(angles)).reshape(len(angles))
    tree[:, 1] = normalized_angles
    utility.saveSWC(outfilename_tree, tree)
    if plot_cdf:
        plt.show()
    return kinks_count, angle_threshold, annotated_mainbranch
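# Usage sketch (assumption: wavyness() and calculateKinkDensityAroundNeuriteoutgrowths() are
# available in the same namespace; the input path is a hypothetical placeholder): detect kinks
# on an annotated mainbranch, then measure the kink density around neurite outgrowths.
def _example_wavyness_to_density(annotated_swc='data/trees/annotated_mainbranch.swc'):
    """Minimal sketch: chain kink detection and kink-density measurement."""
    kinks_count, used_threshold, annotated = wavyness(annotated_swc)
    density_local, density_total = calculateKinkDensityAroundNeuriteoutgrowths(annotated)
    print('{0} kinks below {1} deg, local density {2:.4f} /um, total density {3:.4f} /um'.format(
        kinks_count, used_threshold, density_local, density_total))
    return density_local, density_total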
files = os.listdir(n_infolder)  # List all the files in `n_infolder`
# Only keep .swc files that are not "commented out" (starting with #)
files = [file for file in files if file.endswith('.swc') and file[0] != '#']

for file in tqdm(files):
    # Generate the .tif file name and get metadata (like strain, age, etc.) from the name
    file_tif = file[:-4] + '.tif'
    string = file[:-4]
    dat = string.split('_')
    strain = dat[0]
    series = dat[2]
    age = dat[1]
    name = dat[3]

    # Load the .swc file
    raw_tree = utility.readSWC(n_infolder + file)

    # Run the cleanup function
    if cleanup_tree:
        clean_tree = cleanup.cleanup(raw_tree, neurontype=neurontype,
                                     scale=scale, visualize=False)
    else:
        clean_tree = raw_tree

    # Classify the tree and get length measurements of outgrowth events
    tree_lengths, tree_classes, tree_mean_radii, tree_max_radii, mainbranch, annotated_mainbranch, classified_tree = classify.classify(
        clean_tree,
        neurontype=neurontype,
        length_threshold=length_threshold,
    soma_nodes = np.array(soma_nodes)
    try:
        soma_nodes[:, 1] = 0
    except IndexError:
        pass
    soma_nodes = soma_nodes.tolist()
    for node in main_nodes:
        full_classified_tree.append(node)
    for tree in side_trees_clean:
        for node in tree:
            full_classified_tree.append(node.tolist())
    for node in soma_nodes:
        full_classified_tree.append(node)
    full_classified_tree_array = np.array(full_classified_tree)

    tree_lengths.append(mainbranch_length)
    tree_classes.append('MainBranch')
    tree_mean_radii.append(mainbranch[:, 5].mean())
    tree_max_radii.append(mainbranch[:, 5].max())
    if debug:
        for i in range(len(tree_lengths)):
            print('Class: {0:16} Length: {1:>6.2f} Max_r: {2:>4.2f} Mean_r: {3:>4.2f}'.format(
                tree_classes[i], tree_lengths[i], tree_max_radii[i], tree_mean_radii[i]))

    return tree_lengths, tree_classes, tree_mean_radii, tree_max_radii, mainbranch, annotated_mainbranch, full_classified_tree_array


if __name__ == '__main__':
    tree = utility.readSWC(r'E:\debug_data\ALM\traces_manually\COL10_D21_S4_ALM08_gs0-7.swc')
    tree_lengths, tree_classes, tree_mean_radii, tree_max_radii, mainbranch, annotated_mainbranch, full_classified_tree_array = classify(
        tree, neurontype='ALM', debug=True)
    a = 3