def extract_lr_rat(imgtck, ratio=2.0):
    """
    extract lr ratio fiber

    Keeps fibers whose left/right point-count ratio is below `ratio`.

    :param imgtck: input wholeBrain fiber (nibtck.TckFile or nibas.ArraySequence)
    :param ratio: maximum allowed hemisphere imbalance (>= 1 after folding)
    :return: (kept, rejected) pair of ArraySequence
    """
    L_temp_need = nibas.ArraySequence()
    L_temp_n = nibas.ArraySequence()

    def _balanced(xs):
        # Points with x == 0 count toward both hemispheres, as before.
        left = len(xs[xs <= 0])
        right = len(xs[xs >= 0])
        # Bug fix: the original divided the raw counts, so a one-sided fiber
        # raised ZeroDivisionError, and under Python 2 the integer division
        # made rat == 0 (then 1 / rat crashed) whenever left < right.
        if left == 0 or right == 0:
            return False  # entirely one-sided: treat the ratio as infinite
        rat = float(left) / right
        if rat < 1:
            rat = 1 / rat
        return rat < ratio

    if isinstance(imgtck, nibtck.TckFile):
        for sl in imgtck.streamlines:
            (L_temp_need if _balanced(sl[:, 0]) else L_temp_n).append(sl)
    if isinstance(imgtck, nibas.ArraySequence):
        for sl in imgtck:
            (L_temp_need if _balanced(sl[:, 0]) else L_temp_n).append(sl)
    return L_temp_need, L_temp_n
def extract_multi_node(imgtck):
    """
    extract multi-nodes fiber

    :param imgtck: wholeBrain fiber (nibtck.TckFile or nibas.ArraySequence)
    :return: (only-node fiber, multi-nodes fiber) pair of ArraySequence
    """
    L_temp_noly_node = nibas.ArraySequence()
    L_temp_multi_node = nibas.ArraySequence()

    def _single_node(xs):
        # Product of each x with its circular successor: a negative value
        # marks a sign change.  The wrap-around (last, first) pair of an
        # endpoint-dissimilar fiber always contributes one extra sign
        # change, so exactly 2 negative products (or 2 zero products)
        # means a single true midline node.
        prods = xs * np.roll(xs, -1)
        return len(np.argwhere(prods < 0)) == 2 \
            or len(np.argwhere(prods == 0)) == 2

    # Duplicated branch logic and the commented-out per-point counting
    # variant were consolidated into the helper above.
    if isinstance(imgtck, nibtck.TckFile):
        for sl in imgtck.streamlines:
            (L_temp_noly_node if _single_node(sl[:, 0])
             else L_temp_multi_node).append(sl)
    if isinstance(imgtck, nibas.ArraySequence):
        for sl in imgtck:
            (L_temp_noly_node if _single_node(sl[:, 0])
             else L_temp_multi_node).append(sl)
    return L_temp_noly_node, L_temp_multi_node
def separation_fib_to_hemi(self, data=None):
    """Separating a bundle fiber to both hemispheres"""
    streamlines = self._data if data is None else data
    streamlines = self.sort_streamlines(streamlines)
    fib_lh = nibas.ArraySequence()
    fib_rh = nibas.ArraySequence()
    for sl in streamlines:
        xs = sl[:, 0]
        succ = list(xs[:])
        succ.append(succ.pop(0))  # circular successor of every x
        products = np.array([xs, succ]).prod(axis=0)
        crossings = np.argwhere(products <= 0)
        if len(crossings) != 0:
            # split at whichever node of the first crossing pair is
            # closer to the mid-sagittal plane (x == 0)
            first = crossings[0][0]
            offset = np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
            split_at = first + offset
            fib_lh.append(sl[:split_at + 1])
            fib_rh.append(sl[split_at:])
        elif sl[0][0] <= 0:
            fib_lh.append(sl)
        else:
            fib_rh.append(sl)
    return fib_lh, fib_rh
def extract_cc_step(imgtck):
    '''
    extract cc fiber
    :param imgtck: input wholeBrain fiber (nibtck.TckFile or nibAS.ArraySequence)
    :return: ArraySequence: extract cc fiber
    '''
    L_temp_need = nibAS.ArraySequence()
    L_temp_n = nibAS.ArraySequence()
    if isinstance(imgtck, nibtck.TckFile):
        for i in range(len(imgtck.streamlines)):
            # only endpoint-dissimilar fibers (endpoints on opposite sides of x == 0)
            if imgtck.streamlines[i][0][0] * imgtck.streamlines[i][-1][0] < 0:
                # NOTE(review): a fiber with several midline crossings is
                # appended once per crossing, so the outputs can contain
                # duplicates — confirm that is intended.
                for j in range(len(imgtck.streamlines[i]) - 1):
                    if imgtck.streamlines[i][j][0] * imgtck.streamlines[i][
                            j + 1][0] < 0:
                        # keep only fibers still on opposite sides 20 steps
                        # before and after the crossing; the range() checks
                        # guard against running off either end of the fiber
                        if (j - 20) in range(len(imgtck.streamlines[i])) \
                                and (j + 20) in range(len(imgtck.streamlines[i])) \
                                and imgtck.streamlines[i][j - 20][0] * imgtck.streamlines[i][j + 20][0] < 0:
                            L_temp_need.append(imgtck.streamlines[i])
                        else:
                            L_temp_n.append(imgtck.streamlines[i])
                    elif imgtck.streamlines[i][j][0] == 0:
                        # a node lying exactly on x == 0 also counts as a crossing
                        if (j - 20) in range(len(imgtck.streamlines[i])) \
                                and (j + 20) in range(len(imgtck.streamlines[i])) \
                                and imgtck.streamlines[i][j - 20][0] * imgtck.streamlines[i][j + 20][0] < 0:
                            L_temp_need.append(imgtck.streamlines[i])
                        else:
                            L_temp_n.append(imgtck.streamlines[i])
    if isinstance(imgtck, nibAS.ArraySequence):
        # same classification for a raw ArraySequence input
        for i in range(len(imgtck)):
            if imgtck[i][0][0] * imgtck[i][-1][0] < 0:
                for j in range(len(imgtck[i]) - 1):
                    if imgtck[i][j][0] * imgtck[i][j + 1][0] < 0:
                        if (j - 20) in range(len(imgtck[i])) \
                                and (j + 20) in range(len(imgtck[i])) \
                                and imgtck[i][j - 20][0] * imgtck[i][j + 20][0] < 0:
                            L_temp_need.append(imgtck[i])
                        else:
                            L_temp_n.append(imgtck[i])
                    elif imgtck[i][j][0] == 0:
                        if (j - 20) in range(len(imgtck[i])) \
                                and (j + 20) in range(len(imgtck[i])) \
                                and imgtck[i][j - 20][0] * imgtck[i][j + 20][0] < 0:
                            L_temp_need.append(imgtck[i])
                        else:
                            L_temp_n.append(imgtck[i])
    return L_temp_need, L_temp_n
def hemi_fib_separation(self, data=None):
    """Separation of streamlines that have different hemispheres as seeds."""
    source = self._data if data is None else data
    fib_lh = nibas.ArraySequence()
    fib_rh = nibas.ArraySequence()
    for fib in source:
        seed_x = fib[0][0]
        if seed_x < 0:
            fib_lh.append(fib)
        elif seed_x > 0:
            fib_rh.append(fib)
        # fibers seeded exactly on the midline (x == 0) match neither branch
    return fib_lh, fib_rh
def bundle_centroids(self, streamlines=None, cluster_thre=10, dist_thre=10.0, pts=12):
    """
    QuickBundles-based segmentation

    Parameters
    ----------
    streamlines: streamline data
    cluster_thre: remove small cluster
    dist_thre: clustering threshold (distance mm)
    pts: each streamlines are divided into sections

    Return
    ------
    centroids: cluster's centroids
    """
    if streamlines is None:
        streamlines = self._fasciculus.get_data()
    qb = QuickBundles(streamlines, dist_thre, pts)
    qb.remove_small_clusters(cluster_thre)
    return nibas.ArraySequence(qb.centroids)
def endpoints_seg(self, streamlines=None, temp_clusters=None, thre=2.0, mode='lh'):
    """
    Endpoints-based clustering fibers

    Parameters
    ----------
    streamlines: streamline data
    temp_clusters: the number of k-means iterations
        (the first step to use k-means when data set is too big)
    thre: hierarchical/agglomerative clustering threshold (distance mm)
    mode: 'lh', 'rh', 'lh-rh' (left endpoints, right endpoints or
        left-right endpoints)

    Return
    ------
    labels: label of each streamline
    """
    if streamlines is None:
        streamlines = self._fasciculus.get_data()
    if temp_clusters is None:
        temp_clusters = len(streamlines)
    streamlines = self._fasciculus.sort_streamlines(streamlines)
    left_ends = nibas.ArraySequence([fib[0] for fib in streamlines])
    right_ends = nibas.ArraySequence([fib[-1] for fib in streamlines])
    # pick the point set to cluster, then run a single clustering pass
    if mode == 'lh':
        points = left_ends
    elif mode == 'rh':
        points = right_ends
    elif mode == 'lh-rh':
        points = nibas.ArraySequence(np.hstack((left_ends, right_ends)))
    else:
        raise ValueError("Without this mode!")
    nc = NodeClustering(points)
    return nc.hiera_single_clust(temp_clusters=temp_clusters, t=thre)
def _sort_streamlines(fasciculus_data):
    """Store order of streamline is from left to right."""
    ordered = nibas.ArraySequence()
    for fib in fasciculus_data:
        # reverse any fiber whose first node is not in the left hemisphere
        ordered.append(fib if fib[0][0] < 0 else fib[::-1])
    return ordered
def extract_up_z(img_cc, z_value=-10):
    """
    extract fibers whose midline node lies above z_value

    The hard-coded threshold of the original was generalized into the
    backward-compatible `z_value` parameter (default keeps old behavior).

    :param img_cc: input fiber object exposing `.streamlines`
    :param z_value: z threshold (default -10)
    :return: ArraySequence of kept fibers
    """
    L_temp = nibAS.ArraySequence()
    for sl in img_cc.streamlines:
        # node closest to the mid-sagittal plane; vectorized |x| replaces
        # the original per-point Python loop
        x_min_index = np.argmin(np.abs(sl[:, 0]))
        if sl[x_min_index][2] > z_value:
            L_temp.append(sl)
    return L_temp
def length_seg(self):
    """
    Length-based segmentation

    Return
    ------
    labels: label of each streamline (1-based bin index, None if unbinned)
    length_clusters: cluster data, one entry per length bin
    """
    # consecutive (min, max) length bins; bin i gets label i + 1 —
    # replaces ten copy-pasted stanzas of the original
    bounds = [(20, 50), (50, 65), (65, 80), (80, 95), (95, 110),
              (110, 130), (130, 150), (150, 175), (175, 200), (200, 250)]
    length_clusters = nibas.ArraySequence()
    labels = np.array(len(self._fasciculus.get_data()) * [None])
    for label, (lo, hi) in enumerate(bounds, start=1):
        index, cluster = self._length_seg(lo, hi)
        length_clusters.append(cluster)
        labels[index] = label
    return labels, length_clusters
def fib_merge(self, data1, data2):
    """
    Merge data2 into data1, skipping streamlines already present in data1.

    :param data1: base streamline collection (all kept)
    :param data2: streamlines to add if not already in data1
    :return: merged ArraySequence
    """
    fib = nibas.ArraySequence()
    # Hash each data1 streamline by (shape, dtype, raw bytes) so duplicate
    # detection is O(n + m) instead of the original O(n * m) full scan.
    # NOTE: byte equality matches elementwise equality for ordinary float
    # coordinates (NaN / -0.0 edge cases aside, which tractography data
    # does not contain).
    seen = set()
    for sl in data1:
        fib.append(sl)
        a = np.asarray(sl)
        seen.add((a.shape, a.dtype.str, a.tobytes()))
    for sl in data2:
        a = np.asarray(sl)
        if (a.shape, a.dtype.str, a.tobytes()) in seen:
            continue
        fib.append(sl)
    return fib
def extract_multi_node(imgtck):
    '''
    extract multi-nodes fiber
    :param imgtck: wholeBrain fiber (nibtck.TckFile or nibAS.ArraySequence)
    :return: only node fiber and multi-nodes fiber
    '''
    L_temp_noly_node = nibAS.ArraySequence()
    L_temp_multi_node = nibAS.ArraySequence()
    if isinstance(imgtck, nibtck.TckFile):
        for i in range(len(imgtck.streamlines)):
            count = 0
            # NOTE(review): fibers whose endpoints lie on the same side of
            # x == 0 match neither output and are silently dropped —
            # confirm that is intended.
            if imgtck.streamlines[i][0][0] * imgtck.streamlines[i][-1][0] < 0:
                for j in range(len(imgtck.streamlines[i]) - 1):
                    # a sign change between consecutive nodes, or a node
                    # exactly on x == 0, counts as one midline crossing
                    if imgtck.streamlines[i][j][0] * imgtck.streamlines[i][
                            j + 1][0] < 0:
                        count += 1
                    elif imgtck.streamlines[i][j][0] == 0:
                        count += 1
                # exactly one crossing -> single-node fiber
                if count == 1:
                    L_temp_noly_node.append(imgtck.streamlines[i])
                else:
                    L_temp_multi_node.append(imgtck.streamlines[i])
    if isinstance(imgtck, nibAS.ArraySequence):
        # same classification for a raw ArraySequence input
        for i in range(len(imgtck)):
            count = 0
            if imgtck[i][0][0] * imgtck[i][-1][0] < 0:
                for j in range(len(imgtck[i]) - 1):
                    if imgtck[i][j][0] * imgtck[i][j + 1][0] < 0:
                        count += 1
                    elif imgtck[i][j][0] == 0:
                        count += 1
                if count == 1:
                    L_temp_noly_node.append(imgtck[i])
                else:
                    L_temp_multi_node.append(imgtck[i])
    return L_temp_noly_node, L_temp_multi_node
def sort_streamlines(self, data=None):
    """Store order of streamline is from left to right."""
    fasciculus_data = self._data if data is None else data
    ordered = nibas.ArraySequence()
    for fib in fasciculus_data:
        # flip only fibers that start on the right and end on the left;
        # everything else keeps its stored orientation
        starts_right = fib[0][0] > 0
        ends_left = fib[-1][0] < 0
        ordered.append(fib[::-1] if starts_right and ends_left else fib)
    return ordered
def extract_up_z(img_cc, z_value=-10):
    """
    extract z > z_value fiber
    :param img_cc: input fiber
    :param z_value: z thresh
    :return: z > z_value fiber
    """
    L_temp = nibas.ArraySequence()
    for streamline in img_cc.streamlines:
        # locate the node nearest the mid-sagittal plane (minimum |x|)
        abs_x = [np.abs(point[0]) for point in streamline]
        nearest = np.argmin(abs_x)
        # keep the fiber when that node lies above the z threshold
        if streamline[nearest][2] > z_value:
            L_temp.append(streamline)
    return L_temp
def xmin_nodes(self, data=None):
    """Extract node that has the minimum |x|."""
    if data is not None:
        self._data = data  # NOTE: also replaces the stored data, as before
    xmin_nodes = nibas.ArraySequence()
    for sl in self._data:
        xs = sl[:, 0]
        succ = list(xs[:])
        succ.append(succ.pop(0))  # circular successor of every x
        products = np.array([xs, succ]).prod(axis=0)
        crossings = np.argwhere(products <= 0)
        # assumes every streamline crosses x == 0 — an all-one-sided fiber
        # makes crossings empty and this raises IndexError (as the
        # original did); TODO confirm inputs are pre-filtered
        first = crossings[0][0]
        offset = np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
        xmin_nodes.append(sl[first + offset])
    return xmin_nodes
def extract_cc(imgtck):
    '''
    extract cc fiber
    :param imgtck: input wholeBrain fiber
    :return: ArraySequence: extract cc fiber
    '''
    L_temp = nibAS.ArraySequence()
    # a fiber belongs to the CC candidate set when its two endpoints lie
    # on opposite sides of the mid-sagittal plane (x-product negative)
    if isinstance(imgtck, nibtck.TckFile):
        for sl in imgtck.streamlines:
            if sl[0][0] * sl[-1][0] < 0:
                L_temp.append(sl)
    if isinstance(imgtck, nibAS.ArraySequence):
        for sl in imgtck:
            if sl[0][0] * sl[-1][0] < 0:
                L_temp.append(sl)
    return L_temp
def extract_endpoint_dissimilar(imgtck):
    """
    extract endpoint dissimilar fiber
    :param imgtck: input wholeBrain fiber
    :return: ArraySequence: extract endpoint dissimilar fiber
    """
    L_temp = nibas.ArraySequence()
    # normalize the two accepted input types to one iterable of streamlines
    if isinstance(imgtck, nibtck.TckFile):
        source = imgtck.streamlines
    elif isinstance(imgtck, nibas.ArraySequence):
        source = imgtck
    else:
        source = []
    for fib in source:
        # endpoints on opposite sides of x == 0 -> negative product
        if fib[0][0] * fib[-1][0] < 0:
            L_temp.append(fib)
    return L_temp
def _sort_streamlines(fasciculus_data):
    """
    Store order of streamline is from left to right.

    Parameters
    ----------
    fasciculus_data: streamlines data

    Return
    ------
    sorted streamlines
    """
    ordered = nibas.ArraySequence()
    for fib in fasciculus_data:
        # only fibers running right-to-left are flipped; fibers already
        # starting on the left (or ambiguous ones) are kept as stored
        needs_flip = fib[0][0] > 0 and fib[-1][0] < 0
        ordered.append(fib[::-1] if needs_flip else fib)
    return ordered
def muti_bundle_registration(paths_file, pts=12):
    """
    muti-bundle registration and consolidation

    Parameters
    ----------
    paths_file: list; muti_bundle file path
    pts: each streamline is divided into sections

    Return
    ------
    new header: include id of each streamline that comes from different subjects
    registration and consolidation bundle
    """
    # the first subject is the fixed reference for all registrations
    fas = Fasciculus(paths_file[0])
    # print fas.get_header()
    bundle_header = {'fasciculus_id': None}
    sub1 = fas.get_data()
    # NOTE(review): the subject id is assumed to sit at path component 9
    # ('/.../<subject_id>/...') — verify against the actual directory layout.
    bundle_header['fasciculus_id'] = len(sub1) * [
        int(paths_file[0].split('/')[9])
    ]
    sub2 = Fasciculus(paths_file[1]).get_data()
    # align subject 2 onto subject 1, then merge (fib_merge skips duplicates,
    # so the id list is extended by the number of fibers actually added)
    subj2_aligned = bundle_registration(sub1, sub2, pts=pts)
    bundle = fas.fib_merge(sub1, subj2_aligned)
    bundle_header['fasciculus_id'] += (
        len(bundle) - len(sub1)) * [int(paths_file[1].split('/')[9])]
    # print bundle_header
    # print len(bundle)
    for index in range(len(paths_file))[2:]:
        # print paths_file[index]
        sub = Fasciculus(paths_file[index]).get_data()
        # each remaining subject is registered to subject 1, not to the
        # growing merged bundle
        sub_aligned = bundle_registration(sub1, sub, pts=pts)
        lenth = len(bundle)
        bundle = fas.fib_merge(bundle, sub_aligned)
        bundle_header['fasciculus_id'] += (
            len(bundle) - lenth) * [int(paths_file[index].split('/')[9])]
    # store the per-streamline subject ids and the merged data on the
    # reference Fasciculus object, which is returned
    fas.update_header(bundle_header)
    fas.set_data(nibas.ArraySequence(bundle))
    return fas
def hemisphere_cc(self, streamlines=None, hemi='lh'):
    """
    Select a particular hemisphere streamlines to display

    Parameters
    ----------
    streamlines: streamline data
    hemi: 'lh', 'rh', 'both'

    Return
    ------
    hemi_fib: particular hemisphere streamlines
    """
    if streamlines is None:
        streamlines = self._fasciculus.get_data()
    streamlines = self._fasciculus.sort_streamlines(streamlines)
    hemi_fib = nibas.ArraySequence()
    for sl in streamlines:
        xs = sl[:, 0]
        succ = list(xs[:])
        succ.append(succ.pop(0))  # circular successor of every x
        products = np.array([xs, succ]).prod(axis=0)
        crossings = np.argwhere(products <= 0)
        # split at whichever node of the first crossing pair is closer to x == 0
        first = crossings[0][0]
        offset = np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
        split_at = first + offset
        if hemi == 'lh':
            hemi_fib.append(sl[:split_at + 1])
        elif hemi == 'rh':
            hemi_fib.append(sl[split_at:])
        elif hemi == 'both':
            hemi_fib.append(sl)
        else:
            raise ValueError("Without this mode!")
    return hemi_fib
def xmin_extract(streams):
    """
    extract node according to x_min

    For each streamline, returns the node of the first midline crossing
    that is closer to the plane x == 0.

    :param streams: streamlines img (nibtck.TckFile or nibas.ArraySequence)
    :return: extracted nodes, one per streamline
    """
    Ls_temp = nibas.ArraySequence()
    # normalize the two accepted input types to one iterable
    if isinstance(streams, nibtck.TckFile):
        source = streams.streamlines
    elif isinstance(streams, nibas.ArraySequence):
        source = streams
    else:
        source = []
    for sl in source:
        xs = sl[:, 0]
        succ = list(xs[:])
        succ.append(succ.pop(0))  # circular successor of every x
        products = np.array([xs, succ]).prod(axis=0)
        index0 = np.argwhere(products <= 0)
        # Bug fix: the original indexed with the size-1 array index0[0],
        # so argmin compared whole 3-D points (flattened over 6 values,
        # offsets up to 5 and possible index overflow).  Use the scalar
        # index and compare only the two |x| values, as xmin_nodes does.
        first = index0[0][0]
        offset = np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
        Ls_temp.append(sl[first + offset])
    return Ls_temp
def fib_ac_oc(self):
    """
    Extract anterior commissure and optic chiasma fiber
    The function is implemented after function step/lr/gradient

    Returns (node_total, fib_total), ordered [oc, ac, other, cc] or
    [oc, ac, cc] when no middle clusters remain.
    """
    fasciculus_data = self._fasciculus.get_data()
    labels = self._fasciculus.get_labes()
    xmin = self._fasciculus.xmin_nodes()
    # group midline nodes and fibers by cluster label
    node_clusters = []
    fib_clusters = []
    for label in set(labels):
        index_i = np.argwhere(labels == label)
        node_clusters.append(xmin[index_i])
        fib_clusters.append(fasciculus_data[index_i])
    # discard clusters that are both low (z) and posterior (y)
    clusters_z_mean = [n_c[:, 2].mean() for n_c in node_clusters]
    clusters_y_mean = [n_c[:, 1].mean() for n_c in node_clusters]
    index_z = np.array(clusters_z_mean) < 1.50
    index_y = np.array(clusters_y_mean) < -10.50
    remain_clusters = []
    remain_fib_clusters = []
    for i in range(len(node_clusters)):
        if not index_z[i] or not index_y[i]:
            remain_clusters.append(node_clusters[i])
            remain_fib_clusters.append(fib_clusters[i])
    # rank remaining clusters by their maximum z
    clusters_z_max = [k[:, 2].max() for k in remain_clusters]
    # Bug fix: wrap zip in list() — on Python 3, np.array(zip(...)) would
    # build a useless 0-d object array because zip is an iterator.
    node = np.array(list(zip(range(len(clusters_z_max)), clusters_z_max)))
    node_sort = node[np.lexsort(node.T)]
    # nodes: two lowest clusters, then pooled middle ones, then the highest
    node_total = []
    other_node = []
    for d in node_sort[2:-1]:
        for n in remain_clusters[int(d[0])]:
            other_node.append(list(n))
    node_total.append(remain_clusters[int(node_sort[0][0])])
    node_total.append(remain_clusters[int(node_sort[1][0])])
    if len(other_node) == 0:
        pass
    else:
        node_total.append(np.array(other_node))
    node_total.append(remain_clusters[int(node_sort[-1][0])])
    # fibers: same ordering as the nodes above
    fib_total = []
    other = nibas.ArraySequence()
    for d in node_sort[2:-1]:
        for f in remain_fib_clusters[int(d[0])]:
            other.append(f)
    fib_total.append(remain_fib_clusters[int(node_sort[0][0])])
    fib_total.append(remain_fib_clusters[int(node_sort[1][0])])
    if len(other) == 0:
        pass
    else:
        fib_total.append(other)
    fib_total.append(remain_fib_clusters[int(node_sort[-1][0])])
    # return [oc ac other cc] or [oc ac cc]
    return node_total, fib_total
from pyfat.algorithm.hierarchical_clustering import hierarchical_clust
import nibabel.streamlines.array_sequence as nibAS
from sklearn.neighbors import kneighbors_graph

# NOTE(review): Python-2-era script (`print` statement; the zip() result is
# indexed below, which only works where zip returns a list).
input_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
             'response_dhollander/100206/result/CC_fib.tck'
# load_tck / xmin_extract are defined elsewhere in the project
img_cc = load_tck(input_path)
# one midline node per streamline
Ls_temp = xmin_extract(img_cc)
# connectivity = kneighbors_graph(Ls_temp, n_neighbors=10, mode='connectivity', include_self=True)
# connectivity = kneighbors_graph(Ls_temp, n_neighbors=10, include_self=False)
# cluster the midline nodes into 4 groups
labels = hierarchical_clust(Ls_temp, 4, linkage='complete')
print len(labels)
d = zip(labels, Ls_temp)
L_temp_0 = nibAS.ArraySequence()
L_temp_1 = nibAS.ArraySequence()
L_temp_2 = nibAS.ArraySequence()
L_temp_3 = nibAS.ArraySequence()
# route each streamline to the sequence matching its cluster label
for k in range(len(d)):
    if d[k][0] == 0:
        L_temp_0.append(img_cc.streamlines[k])
    if d[k][0] == 1:
        L_temp_1.append(img_cc.streamlines[k])
    if d[k][0] == 2:
        L_temp_2.append(img_cc.streamlines[k])
    if d[k][0] == 3:
        L_temp_3.append(img_cc.streamlines[k])
out_put = '/home/brain/workingdir/data/dwi/hcp/preprocessed/response_dhollander/100206/result/CC_fib_only3_0.tck'
def extract_lr_step(imgtck, n=20):
    """
    extract lr n steps fiber

    Keeps fibers that still have n points on each side of their midline
    (x == 0) crossing; all others go to the second sequence.

    :param imgtck: input wholeBrain fiber (nibtck.TckFile or nibas.ArraySequence)
    :param n: number of steps required on each side of the crossing
    :return: (kept, rejected) pair of ArraySequence
    """
    L_temp_need = nibas.ArraySequence()
    L_temp_n = nibas.ArraySequence()

    def _has_margin(sl):
        # Pair each x with its circular successor; a non-positive product
        # marks a crossing of (or touch on) the midline.
        l = sl[:, 0]
        l_ahead = list(l[:])
        l_ahead.append(l_ahead.pop(0))
        x_stemp_index = np.array([l, l_ahead]).prod(axis=0)
        # Bug fix: the original indexed with the size-1 array index0[0],
        # so argmin compared whole 3-D points (offsets up to 5) and the
        # resulting index was itself an array; use the scalar index and
        # compare only the two |x| values, as the other variants do.
        first = np.argwhere(x_stemp_index <= 0)[0][0]
        index = first + np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
        return (index - n) in range(len(l)) and (index + n) in range(len(l))

    # commented-out legacy variants of this check were removed
    if isinstance(imgtck, nibtck.TckFile):
        for sl in imgtck.streamlines:
            (L_temp_need if _has_margin(sl) else L_temp_n).append(sl)
    if isinstance(imgtck, nibas.ArraySequence):
        for sl in imgtck:
            (L_temp_need if _has_margin(sl) else L_temp_n).append(sl)
    return L_temp_need, L_temp_n
# NOTE(review): Python-2 script fragment — `node_total`, `img`, `node_sort`
# and `remain_fib_clusters` must be defined earlier in the full script.
print node_total[2]
fig, ax = plt.subplots()
# mid-sagittal slice as background (Python 2 integer division keeps the index an int)
slice = img.get_data()[img.shape[0] / 2, :, :]
ax.imshow(slice.T, cmap='gray', origin='lower')
# one color per node cluster
color = plt.cm.spectral(np.linspace(0, 1, len(node_total)))
for index in range(len(node_total)):
    ax.plot(node_total[index][:, 1], node_total[index][:, 2], 'o', color=color[index])
# plt.show()
fib_total = []
other = nibas.ArraySequence()
# clusters ranked by max z: the two lowest first, middle clusters pooled
# into `other`, the highest cluster last
for d in node_sort[2:-1]:
    for f in remain_fib_clusters[int(d[0])]:
        other.append(f)
fib_total.append(remain_fib_clusters[int(node_sort[0][0])])
fib_total.append(remain_fib_clusters[int(node_sort[1][0])])
if len(other) == 0:
    pass
else:
    fib_total.append(other)
fib_total.append(remain_fib_clusters[int(node_sort[-1][0])])
# %s is filled with the cluster name when each group is saved
out_path = '/home/brain/workingdir/data/dwi/hcp/' \
           'preprocessed/response_dhollander/100408/result/' \
           'result20vs45/cc_20fib_lr1.5_01_new_hierarchical_single_%s.tck'
def extract_xyz_gradient(imgtck, n=None):
    """
    extract fiber by dominant gradient orientation

    :param imgtck: input wholeBrain fiber (nibtck.TckFile or nibas.ArraySequence)
    :param n: half-window (in points) around the midline crossing;
        None uses the whole fiber
    :return: ALS: [AP, LR, SI] orientation fiber sequences
    """
    AP = nibas.ArraySequence()
    LR = nibas.ArraySequence()
    SI = nibas.ArraySequence()
    ALS = [AP, LR, SI]

    def _dominant(points):
        # summed per-axis gradient; argmax over (y, x, z) maps to (AP, LR, SI)
        grad = np.gradient(points)
        x_grad = grad[0][:, 0].sum()
        y_grad = grad[0][:, 1].sum()
        z_grad = grad[0][:, 2].sum()
        return np.array([y_grad, x_grad, z_grad]).argmax()

    # normalize the two accepted input types to one iterable
    if isinstance(imgtck, nibtck.TckFile):
        streamlines = imgtck.streamlines
    elif isinstance(imgtck, nibas.ArraySequence):
        # Bug fix: the original ArraySequence branch referenced
        # imgtck.streamlines[i], which does not exist on an ArraySequence.
        streamlines = imgtck
    else:
        streamlines = []

    if n is None:
        for sl in streamlines:
            ALS[_dominant(sl)].append(sl)
    else:
        for sl in streamlines:
            xs = sl[:, 0]
            succ = list(xs[:])
            succ.append(succ.pop(0))  # circular successor of every x
            products = np.array([xs, succ]).prod(axis=0)
            index0 = np.argwhere(products <= 0)
            # Bug fix: use the scalar crossing index and compare only the
            # two |x| values (the original argmin ran over whole 3-D points
            # via the size-1 array index0[0]).
            first = index0[0][0]
            offset = np.argmin((abs(sl[first][0]), abs(sl[first + 1][0])))
            index = first + offset
            if (index - n) in range(len(xs)) and (index + n) in range(len(xs)):
                # Bug fix: the second branch classified the whole fiber;
                # both branches now gradient only the ±n window around the
                # crossing, as the TckFile branch intended.
                ALS[_dominant(sl[index - n:index + n, :])].append(sl)
            # fibers without an n-point margin are dropped, as before
    return ALS