def spherical_rois(fdpy, fsr, sq_radius=4):
    R = atlantic_points()
    dpr = Dpy(fdpy, 'r')
    T = dpr.read_tracks()
    dpr.close()
    # fref (reference volume filename) is expected to be defined at module level
    refimg = nib.load(fref)
    aff = refimg.get_affine()
    SR = {}
    for key in R:
        center = R[key]
        # back to world space
        centerw = np.dot(aff, np.array(center + (1,)))[:3]
        centerw.shape = (1,) + centerw.shape
        centerw = centerw.astype(np.float32)
        res = [track_roi_intersection_check(t, centerw, sq_radius) for t in T]
        res = np.array(res, dtype=np.int)
        ind = np.where(res > 0)[0]
        SR[key] = {}
        SR[key]['center'] = center
        SR[key]['centerw'] = tuple(np.squeeze(centerw))
        SR[key]['radiusw'] = np.sqrt(sq_radius)
        SR[key]['indices'] = ind
    save_pickle(fsr, SR)
def warp_tracks_linearly(flirt_filename, fa_filename,
                         tracks_filename, linear_filename):
    import nibabel as nib
    from dipy.external.fsl import flirt2aff
    fsl_ref = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz'
    img_fa = nib.load(fa_filename)
    flirt_affine = np.loadtxt(flirt_filename)
    img_ref = nib.load(fsl_ref)
    # create affine matrix from flirt
    mat = flirt2aff(flirt_affine, img_fa, img_ref)
    # read tracks
    tensor_tracks = load_whole_tract(tracks_filename)
    # linear transform for tractography
    tracks_warped_linear = transform_tracks(tensor_tracks, mat)
    # save tracks_warped_linear
    dpr_linear = Dpy(linear_filename, 'w')
    dpr_linear.write_tracks(tracks_warped_linear)
    dpr_linear.close()
def skeletonize(fdpy, flsc, points=3):
    dpr = Dpy(fdpy, 'r')
    T = dpr.read_tracks()
    dpr.close()
    print len(T)
    Td = [downsample(t, points) for t in T]
    C = local_skeleton_clustering(Td, d_thr=10., points=points)
    #Tobject=np.array(T,dtype=np.object)
    #r=fvtk.ren()
    skeleton = []
    for c in C:
        #color=np.random.rand(3)
        if C[c]['N'] > 0:
            Ttmp = []
            for i in C[c]['indices']:
                Ttmp.append(T[i])
            si, s = most_similar_track_mam(Ttmp, 'avg')
            print si, C[c]['N']
            C[c]['most'] = Ttmp[si]
            #fvtk.add(r,fvtk.line(Ttmp[si],color))
    print len(skeleton)
    #r=fos.ren()
    #fos.add(r,fos.line(skeleton,color))
    #fos.add(r,fos.line(T,fos.red))
    #fvtk.show(r)
    save_pickle(flsc, C)
def tracking_prob(dir_src, dir_out, verbose=False):
    wm_name = 'wm_mask_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    wm_mask, affine = load_nifti(pjoin(dir_src, wm_name), verbose)
    sh_name = 'sh_' + par_b_tag + '_' + par_dim_tag + '.nii.gz'
    sh, _ = load_nifti(pjoin(dir_src, sh_name), verbose)
    sphere = get_sphere('symmetric724')
    # alternative, threshold-based classifier:
    # classifier = ThresholdTissueClassifier(wm_mask.astype('f8'), .5)
    classifier = BinaryTissueClassifier(wm_mask)
    max_dg = ProbabilisticDirectionGetter.from_shcoeff(
        sh, max_angle=par_trk_max_angle, sphere=sphere)
    seeds = utils.seeds_from_mask(wm_mask, density=2, affine=affine)
    streamlines = LocalTracking(max_dg, classifier, seeds, affine,
                                step_size=par_trk_step_size)
    streamlines = list(streamlines)
    trk_name = ('tractogram_' + par_b_tag + '_' + par_dim_tag + '_' +
                par_trk_prob_tag + '.trk')
    trk_out = os.path.join(dir_out, trk_name)
    save_trk(trk_out, streamlines, affine, wm_mask.shape)
    dpy_out = trk_out.replace('.trk', '.dpy')
    dpy = Dpy(dpy_out, 'w')
    dpy.write_tracks(streamlines)
    dpy.close()
def tracking_eudx(dir_src, dir_out, verbose=False):
    # Loading FA and evecs data
    fa_name = 'data_' + par_b_tag + '_' + par_dim_tag + '_FA.nii.gz'
    FA, affine = load_nifti(pjoin(dir_src, fa_name), verbose)
    evecs_name = 'data_' + par_b_tag + '_' + par_dim_tag + '_EV.nii.gz'
    evecs, _ = load_nifti(pjoin(dir_src, evecs_name), verbose)

    # Computation of streamlines
    sphere = get_sphere('symmetric724')
    peak_indices = quantize_evecs(evecs, sphere.vertices)
    streamlines = EuDX(FA.astype('f8'),
                       ind=peak_indices,
                       seeds=par_eudx_seeds,
                       odf_vertices=sphere.vertices,
                       a_low=par_eudx_threshold)

    # Saving tractography
    voxel_size = (par_dim_vox,) * 3
    dims = FA.shape[:3]
    hdr = nib.trackvis.empty_header()
    hdr['voxel_size'] = voxel_size
    hdr['voxel_order'] = 'LAS'
    hdr['dim'] = dims
    hdr['vox_to_ras'] = affine
    strm = ((sl, None, None) for sl in streamlines)
    trk_name = ('tractogram_' + par_b_tag + '_' + par_dim_tag + '_' +
                par_rec_tag + '_' + par_eudx_tag + '.trk')
    trk_out = os.path.join(dir_out, trk_name)
    nib.trackvis.write(trk_out, strm, hdr, points_space='voxel')
    dpy_out = trk_out.replace('.trk', '.dpy')
    dpy = Dpy(dpy_out, 'w')
    dpy.write_tracks(streamlines)
    dpy.close()
def test_picking_trajectories():
    # alternative random test data (overridden by the fixed curves below):
    # curves = [100*np.random.rand(10, 3),
    #           100*np.random.rand(5, 3),
    #           100*np.random.rand(3, 3)]
    curves = [100*np.array([[0, 0, 0], [1, 0, 0]]),
              100*np.array([[0, 1, 0], [0, 1, 3]]),
              100*np.array([[0, 2, 0], [0, 2, 3]])]
    '''
    from nibabel import trackvis as tv
    #fname='/home/eg309/Data/PROC_MR10032/subj_01/101/1312211075232351192010091419011391228126452ep2dadvdiffDSI25x25x25b4000s003a001_FA_warp.trk'
    fname='/home/eg309/Data/fibers.trk'
    streams,hdr=tv.read(fname)
    T=[s[0] for s in streams]
    curves=T[:200000]
    '''
    fname = '/home/eg309/Data/PROC_MR10032/subj_02/101/1312211075232351192010091708112071055601107ep2dadvdiffDSI10125x25x25STs002a001_QA_native.dpy'
    from dipy.io.dpy import Dpy
    dpr = Dpy(fname, 'r')
    T = dpr.read_indexed(range(20000))
    from dipy.core.track_metrics import length
    curves = [t for t in T if length(t) > 20]
    dpr.close()
    #colors=np.random.rand(len(curves),4).astype('f4')
    colors = 0.5 * np.ones((len(curves), 4)).astype('f4')
    for (i, c) in enumerate(curves):
        orient = c[0] - c[-1]
        orient = np.abs(orient / np.linalg.norm(orient))
        colors[i, :3] = orient
    c = InteractiveCurves(curves, colors=colors)
    w = World()
    w.add(c)
    wi = Window()
    wi.attach(w)
def warp_tracks(input_tracks_filename, input_flirt_fmatrix, input_fa_filename,
                output_filename=None,
                input_ref='/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz'):
    print 'Loading fa, flirt matrix ...'
    img_fa = nib.load(input_fa_filename)
    flirt_affine = np.loadtxt(input_flirt_fmatrix)
    img_ref = nib.load(input_ref)
    # create affine matrix from flirt
    mat = flirt2aff(flirt_affine, img_fa, img_ref)
    # read tracks
    print 'Loading tracks ...'
    tensor_tracks = load_tracks(input_tracks_filename)
    # linear transform for tractography
    tracks_warped_linear = transform_tracks(tensor_tracks, mat)
    if output_filename is None:
        filename_save = input_tracks_filename.split('.')[0] + '_linear.dpy'
    else:
        filename_save = os.path.abspath(output_filename)
    # save tracks_warped_linear
    print 'Saving warped tracks into :', filename_save
    dpr_linear = Dpy(filename_save, 'w')
    dpr_linear.write_tracks(tracks_warped_linear)
    dpr_linear.close()
    return filename_save
def loading_full_tractograpy(self, tracpath=None):
    """
    Load the full tractography and create a StreamlineLabeler to
    show it all.
    """
    # load the tracks registered in MNI space
    self.tracpath = tracpath
    basename = os.path.basename(self.tracpath)
    tracks_basename, tracks_format = os.path.splitext(basename)

    if tracks_format == '.dpy':
        dpr = Dpy(self.tracpath, 'r')
        print "Loading", self.tracpath
        self.T = dpr.read_tracks()
        dpr.close()
        self.T = np.array(self.T, dtype=np.object)

    elif tracks_format == '.trk':
        print "Loading", self.tracpath
        # Old nibabel API:
        # streams, self.hdr = nib.trackvis.read(self.tracpath,
        #                                       points_space='voxel')
        # self.T = np.array([s[0] for s in streams], dtype=np.object)
        # New nibabel API:
        tmp = nib.streamlines.load(self.tracpath)
        streams = tmp.tractogram.apply_affine(
            np.linalg.inv(tmp.affine)).streamlines
        self.header = tmp.header
        self.T = np.array(streams, dtype=np.object)

    # The following code has been commented out to avoid
    # misalignment between original streamlines IDs and final IDs.
    # print "Removing short streamlines"
    # self.T = np.array([t for t in self.T if length(t) >= 15],
    #                   dtype=np.object)

    tracks_directoryname = os.path.dirname(self.tracpath) + '/.temp/'
    general_info_filename = tracks_directoryname + tracks_basename + '.spa'

    # Check if there is the .spa file that contains all the
    # computed information from the tractography anyway and try to
    # load it
    try:
        print "Looking for general information file"
        self.load_info(general_info_filename)
    except (IOError, KeyError):
        print "General information not found, recomputing buffers"
        self.update_info(general_info_filename)

    # create the interaction system for tracks
    self.streamlab = StreamlineLabeler(
        'Bundle Picker',
        self.buffers, self.clusters,
        vol_shape=self.dims,
        affine=np.copy(self.affine),
        clustering_parameter=len(self.clusters),
        clustering_parameter_max=len(self.clusters),
        full_dissimilarity_matrix=self.full_dissimilarity_matrix)

    self.scene.add_actor(self.streamlab)
def load_tractogram(filename, lazy_load=False):
    """ Loads tractogram files (*.trk or *.tck or *.dpy)

    Parameters
    ----------
    filename : str
        input trk/tck/dpy filename
    lazy_load : {False, True}, optional
        If True, load streamlines in a lazy manner, i.e. they will not be
        kept in memory and only be loaded when needed.
        Otherwise, load all streamlines in memory.

    Returns
    -------
    streamlines : list of 2D arrays
        Each 2D array represents a sequence of 3D points (points, 3).
    hdr : dict
        header from a trk file (an empty dict for .dpy input)
    """
    if 'dpy' in os.path.splitext(filename)[1].lower():
        dpw = Dpy(filename, 'r')
        streamlines = dpw.read_tracks()
        dpw.close()
        return streamlines, {}

    trk_file = nib.streamlines.load(filename, lazy_load)
    return trk_file.streamlines, trk_file.header
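# A minimal usage sketch for load_tractogram above; 'bundle.trk' is a
# hypothetical filename used only for illustration. For a .dpy input the
# header comes back as an empty dict.
streamlines, hdr = load_tractogram('bundle.trk')
print(len(streamlines))

# Lazy loading defers reading until the streamlines are iterated:
lazy_streamlines, _ = load_tractogram('bundle.trk', lazy_load=True)
for s in lazy_streamlines:
    pass  # each s is an array of shape (points, 3)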
def write_tracks(fdpy, scalar, indices, seed_no=10**6, a_thr=.2,
                 compression=1):
    eudx = EuDX(scalar, indices, seed_no=seed_no, a_low=a_thr)
    # pass the compression argument through (it was hardcoded before)
    dpw = Dpy(fdpy, 'w', compression=compression)
    for track in eudx:
        dpw.write_track(track.astype(np.float32))
    dpw.close()
def save_dpy(streamlines, filename):
    ''' Save tractography to a .dpy file '''
    print "Save tracks as .dpy"
    tracks = [track for track in streamlines]
    dpw = Dpy(filename, 'w')
    dpw.write_tracks(tracks)
    dpw.close()
def load_cst(tracks_filename, cst_index_file, ext):
    from dipy.io.dpy import Dpy
    from dipy.io.pickles import load_pickle
    dpr_tracks = Dpy(tracks_filename, 'r')
    all_tracks = dpr_tracks.read_tracks()
    dpr_tracks.close()
    tracks_id = load_pickle(cst_index_file)
    cst = [all_tracks[i] for i in tracks_id]
    cst_ext = [all_tracks[i] for i in tracks_id]
    medoid_cst = []
    #len_dis = 250
    if ext:
        k = int(np.round(len(cst) * 1.2))  # cast to int for indexing below
        not_cst_fil = []
        min_len = min(len(i) for i in cst)
        #print 'min_len of cst', min_len
        min_len = min_len * 2.2 / 3  # 2./3.2 - 20
        for i in np.arange(len(all_tracks)):
            if (i not in tracks_id) and (length(all_tracks[i]) > min_len):
                not_cst_fil.append(all_tracks[i])
        #for st in all_tracks:
        #    if (length(st) >= min_len) and (st not in cst):
        #        not_cst_fil.append(st)

        from dipy.segment.quickbundles import QuickBundles
        qb = QuickBundles(cst, 200, 18)
        medoid_cst = qb.centroids[0]

        med_notcst_dm = bundles_distances_mam([medoid_cst], not_cst_fil)
        med_cst_dm = bundles_distances_mam([medoid_cst], cst)

        cst_rad = med_cst_dm[0][np.argmax(med_cst_dm[0])]
        len_dis = cst_rad * 2.8 / 2.

        # keep the k indices closest to the medoid
        sort = np.argsort(med_notcst_dm, axis=1)[0]
        while k > 0 and med_notcst_dm[0][sort[k]] >= len_dis:
            k = k - 1

        close_indices = sort[0:k]
        for idx in close_indices:
            cst_ext.append(not_cst_fil[idx])

        return cst, cst_ext, medoid_cst
    return cst
def roi_track_counts(fdpy, fref, fatlas, roi_no, dist_transf=True, fres=None):
    dpr = Dpy(fdpy, 'r')
    T = dpr.read_tracks()
    dpr.close()
    img = nib.load(fref)
    affine = img.get_affine()
    zooms = img.get_header().get_zooms()
    iaffine = np.linalg.inv(affine)
    # go back to volume space
    T2 = []
    for t in T:
        T2.append(np.dot(t, iaffine[:3, :3].T) + iaffine[:3, 3])
    del T
    tcs, tes = track_counts(T2, img.get_shape(), zooms, True)
    atlas_img = nib.load(fatlas)
    atlas = atlas_img.get_data()
    roi = atlas.copy()
    roi[atlas != roi_no] = 0
    if dist_transf:
        roi2 = distance_transform_cdt(roi)
        roi[roi2 != roi2.max()] = 0
        I = np.array(np.where(roi == roi_no)).T
    else:
        I = np.array(np.where(roi == roi_no)).T
    """
    if erosion_level > 0:
        roi2 = binary_erosion(roi, cross, erosion_level)
        I = np.array(np.where(roi2 == True)).T
    else:
        roi2 = distance_transform_cdt(roi)
        I = np.array(np.where(roi == roi_no)).T
    """
    #print I.shape
    #nib.save(nib.Nifti1Image(roi2, affine), '/tmp/test.nii.gz')
    Ttes = []
    for iroi in I:
        try:
            Ttes.append(tes[tuple(iroi)])
        except KeyError:
            pass
    Ttes = list(set(list(chain.from_iterable(Ttes))))
    T2n = np.array(T2, dtype=np.object)
    res = list(T2n[Ttes])
    # back to world space
    res2 = []
    for t in res:
        res2.append(np.dot(t, affine[:3, :3].T) + affine[:3, 3])
    np.save(fres, np.array(res2, dtype=np.object))
def loading_full_tractograpy(self, tracpath=None):
    """
    Load the full tractography and create a StreamlineLabeler to
    show it all.
    """
    # load the tracks registered in MNI space
    self.tracpath = tracpath
    basename = os.path.basename(self.tracpath)
    tracks_basename, tracks_format = os.path.splitext(basename)

    if tracks_format == '.dpy':
        dpr = Dpy(self.tracpath, 'r')
        print "Loading", self.tracpath
        self.T = dpr.read_tracks()
        dpr.close()
        self.T = np.array(self.T, dtype=np.object)

    elif tracks_format == '.trk':
        streams, self.hdr = nib.trackvis.read(self.tracpath,
                                              points_space='voxel')
        print "Loading", self.tracpath
        self.T = np.array([s[0] for s in streams], dtype=np.object)

    print "Removing short streamlines"
    self.T = np.array([t for t in self.T if length(t) >= 15],
                      dtype=np.object)

    tracks_directoryname = os.path.dirname(self.tracpath) + '/.temp/'
    general_info_filename = tracks_directoryname + tracks_basename + '.spa'

    # Check if there is the .spa file that contains all the
    # computed information from the tractography anyway and try to
    # load it
    try:
        print "Looking for general information file"
        self.load_info(general_info_filename)
    except (IOError, KeyError):
        print "General information not found, recomputing buffers"
        self.update_info(general_info_filename)

    # create the interaction system for tracks
    self.streamlab = StreamlineLabeler(
        'Bundle Picker',
        self.buffers, self.clusters,
        vol_shape=self.dims,
        affine=np.copy(self.affine),
        clustering_parameter=len(self.clusters),
        clustering_parameter_max=len(self.clusters),
        full_dissimilarity_matrix=self.full_dissimilarity_matrix)

    self.scene.add_actor(self.streamlab)
def create_save_tracks(anisotropy, indices, seeds, low_thresh, filename):
    euler = EuDX(anisotropy,
                 ind=indices,
                 odf_vertices=get_sphere("symmetric362").vertices,
                 seeds=seeds, a_low=low_thresh)
    tensor_tracks_old = [track for track in euler]
    tracks = [track for track in tensor_tracks_old if track.shape[0] > 1]
    dpw = Dpy(filename, "w")
    dpw.write_tracks(tracks)
    dpw.close()
def create_save_tracks(anisotropy, indices, seeds, low_thresh, filename):
    euler = EuDX(anisotropy,
                 ind=indices,
                 # odf_vertices=get_sphere('symmetric362').vertices,
                 seeds=seeds, a_low=low_thresh)
    tracks = [track for track in euler]
    dpw = Dpy(filename, 'w')
    dpw.write_tracks(tracks)
    dpw.close()
def save_tractogram(fname, streamlines, affine, vox_size=None, shape=None,
                    header=None, reduce_memory_usage=False,
                    tractogram_file=None):
    """ Saves tractogram files (*.trk or *.tck or *.dpy)

    Parameters
    ----------
    fname : str
        output trk filename
    streamlines : list of 2D arrays, generator or ArraySequence
        Each 2D array represents a sequence of 3D points (points, 3).
    affine : array_like (4, 4)
        The mapping from voxel coordinates to streamline points.
    vox_size : array_like (3,), optional
        The sizes of the voxels in the reference image (default: None)
    shape : array, shape (dim,), optional
        The shape of the reference image (default: None)
    header : dict, optional
        Metadata associated to the tractogram file (*.trk). (default: None)
    reduce_memory_usage : {False, True}, optional
        If True, save streamlines in a lazy manner, i.e. they will not be
        kept in memory. Otherwise, keep all streamlines in memory until
        saving.
    tractogram_file : class TractogramFile, optional
        Define tractogram class type (TrkFile vs TckFile)
        Default is None which means auto detect format
    """
    if 'dpy' in os.path.splitext(fname)[1].lower():
        dpw = Dpy(fname, 'w')
        dpw.write_tracks(Streamlines(streamlines))
        dpw.close()
        return

    tractogram_file = tractogram_file or detect_format(fname)
    if tractogram_file is None:
        raise ValueError("Unknown format for 'fname': {}".format(fname))

    if vox_size is not None and shape is not None:
        if not isinstance(header, dict):
            header = {}
        header[Field.VOXEL_TO_RASMM] = affine.copy()
        header[Field.VOXEL_SIZES] = vox_size
        header[Field.DIMENSIONS] = shape
        header[Field.VOXEL_ORDER] = "".join(aff2axcodes(affine))

    if reduce_memory_usage and not callable(streamlines):
        sg = lambda: (s for s in streamlines)
    else:
        sg = streamlines

    tractogram_loader = LazyTractogram if reduce_memory_usage else Tractogram
    tractogram = tractogram_loader(sg)
    tractogram.affine_to_rasmm = affine
    track_file = tractogram_file(tractogram, header=header)
    nib.streamlines.save(track_file, fname)
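# A minimal usage sketch for save_tractogram above (hypothetical data;
# assumes numpy is imported as np). vox_size and shape are needed to build
# a complete .trk header; the .dpy branch ignores the header fields.
streamlines = [np.random.rand(10, 3).astype('f4') for _ in range(5)]
affine = np.eye(4)
save_tractogram('out.trk', streamlines, affine,
                vox_size=(2., 2., 2.), shape=(96, 96, 55))
save_tractogram('out.dpy', streamlines, affine)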
def sums_length(dname, type='64'):
    for root, dirs, files in os.walk(dname):
        if root.endswith(type):
            for file in files:
                if file.endswith('_warp.dpy'):
                    fname = os.path.join(root, file)
                    dpr = Dpy(fname, 'r')
                    total = 0  # avoid shadowing the builtin sum
                    for i in range(dpr.track_no):
                        total += length(dpr.read_track())
                    dpr.close()
                    print fname, total
def get_luigi_SLFI():
    fseg1 = "/home/eg309/Devel/segmented_bundles/luigi_s1_for_eleftherios/S1_SLFI"
    #fseg2 = "/home/eg309/Devel/segmented_bundles/luigi_s1_for_eleftherios/S1_SLFII"
    subject = "01"
    fdpyw = "data/subj_" + subject + "/101_32/DTI/tracks_gqi_3M_linear.dpy"
    dpr = Dpy(fdpyw, "r")
    T = dpr.read_tracks()
    dpr.close()
    seg_inds = load_pickle(fseg1)
    T1 = [T[i] for i in seg_inds]
    #seg_inds = load_pickle(fseg2)
    #T2 = [T[i] for i in seg_inds]
    return T1
def generate_lengths():
    # dname (root directory to walk) is expected to be defined at module level
    for root, dirs, files in os.walk(dname):
        for file in files:
            if file.endswith('_warp.dpy'):
                fname = os.path.join(root, file)
                dpr = Dpy(fname, 'r')
                lengths = np.zeros((dpr.track_no,))
                for i in range(dpr.track_no):
                    lengths[i] = length(dpr.read_track())
                dpr.close()
                fname2 = fname.split('_warp.dpy')[0] + '_warp_lengths.npy'
                print fname2
                np.save(fname2, lengths)
def create_save_tracks(anisotropy, indices, seeds, low_thresh, filename):
    # this is a new feature in new dipy - current 121011 0.6.0.dev
    # alternative call with explicit odf_vertices:
    # euler = EuDX(anisotropy,
    #              ind=indices,
    #              odf_vertices=get_sphere('symmetric362').vertices,
    #              seeds=seeds, a_low=low_thresh)
    euler = EuDX(anisotropy, ind=indices, seeds=seeds, a_low=low_thresh)
    tracks = [track for track in euler]
    dpw = Dpy(filename, 'w')
    dpw.write_tracks(tracks)
    dpw.close()
def load_data(figure, data_id):
    if figure == "small_dataset":
        filename = ("ALS_Data/" + str(data_id) +
                    "/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_10K.dpy")
    elif figure == "median_dataset":
        filename = ("ALS_Data/" + str(data_id) +
                    "/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_1M.dpy")
    elif figure == "big_dataset":
        filename = ("ALS_Data/" + str(data_id) +
                    "/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_3M.dpy")
    print "Loading tracks."
    dpr = Dpy(filename, "r")
    tracks = dpr.read_tracks()
    dpr.close()
    tracks = np.array(tracks, dtype=np.object)
    return tracks
def load_whole_tract(tracks_filename):
    from dipy.io.pickles import load_pickle
    if tracks_filename[-3:] == 'dpy':
        from dipy.io.dpy import Dpy
        dpr_tracks = Dpy(tracks_filename, 'r')
        all_tracks = dpr_tracks.read_tracks()
        dpr_tracks.close()
    else:
        import nibabel as nib
        streams, hdr = nib.trackvis.read(tracks_filename,
                                         points_space='voxel')
        all_tracks = np.array([s[0] for s in streams], dtype=np.object)
    all_tracks = np.array(all_tracks, dtype=np.object)
    return all_tracks
def load_data(figure, data_id):
    if figure == 'small_dataset':
        filename = ('ALS_Data/' + str(data_id) +
                    '/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_10K.dpy')
    elif figure == 'median_dataset':
        filename = ('ALS_Data/' + str(data_id) +
                    '/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_1M.dpy')
    elif figure == 'big_dataset':
        filename = ('ALS_Data/' + str(data_id) +
                    '/DIFF2DEPI_EKJ_64dirs_14/DTI/tracks_dti_3M.dpy')
    print "Loading tracks."
    dpr = Dpy(filename, 'r')
    tracks = dpr.read_tracks()
    dpr.close()
    tracks = np.array(tracks, dtype=np.object)
    return tracks
def create_dataset_from_tractography(size1, size2, same=True):
    if same:
        assert(size2 >= size1)
    filename = 'data/tracks_dti_10K_linear.dpy'
    print "Loading", filename
    dpr = Dpy(filename, 'r')
    tractography = dpr.read_tracks()
    dpr.close()
    print len(tractography), "streamlines"
    print "Removing streamlines that are too short"
    tractography = filter(lambda x: len(x) > 20, tractography)
    print len(tractography), "streamlines"
    tractography = np.array(tractography, dtype=np.object)

    print "Creating two simulated tractographies of sizes", size1, "and", size2
    if same:
        ids = fft(tractography, k=max([size1, size2]),
                  distance=bundles_distances_mam)
        tractography1 = tractography[ids[:size1]]
    else:
        # ids1 = np.random.permutation(len(tractography))[:size1]
        # ids1 = sff(tractography, k=size1, distance=bundles_distances_mam)
        ids1 = fft(tractography, k=size1, distance=bundles_distances_mam)
        tractography1 = tractography[ids1[:size1]]

    if same:
        tractography2 = tractography[ids[:size2]]
    else:
        # ids2 = np.random.permutation(len(tractography))[:size2]
        # ids2 = sff(tractography, k=size2, distance=bundles_distances_mam)
        ids2 = fft(tractography, k=size2, distance=bundles_distances_mam)
        tractography2 = tractography[ids2]

    print "Done."

    print "Computing the distance matrices for each tractography."
    dm1 = bundles_distances_mam(tractography1, tractography1)
    dm2 = bundles_distances_mam(tractography2, tractography2)

    print("Computing similarity matrices.")
    sigma2 = np.mean([np.median(dm1), np.median(dm2)]) ** 2.0
    print("sigma2 = %f" % sigma2)
    A = np.exp(-dm1 * dm1 / sigma2)
    B = np.exp(-dm2 * dm2 / sigma2)

    # Note: the optimization works even using distance instead of
    # similarity:
    # A = dm1
    # B = dm2
    return A, B
def tracks_to_fmrib58(subj, fnames, data_type='fa'):
    # affine transformation matrix (from flirt before nonlinear registration)
    faff = dname + fnames[subj] + '_affine_transf.mat'
    # nonlinear displacements (see fa_to_fmrib58)
    fdis = dname + fnames[subj] + '_nonlin_displacements.nii.gz'
    if data_type == 'qa':
        fqa_warp = dname + fnames[subj] + '_QA_warp.dpy'
        fwarp = fqa_warp
    if data_type == 'fa':
        ffa_warp = dname + fnames[subj] + '_FA_warp.dpy'
        fwarp = ffa_warp
    # fa
    ffa = dname + fnames[subj] + '_bet_FA.nii.gz'
    ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz'
    print faff
    print ffa
    print fdis
    im2im = flirt2aff_files(faff, ffa, ref_fname)
    dimg = ni.load(fdis)
    daff = dimg.get_affine()
    ddata = dimg.get_data()
    di = ddata[:, :, :, 0]  # displacements for i
    dj = ddata[:, :, :, 1]  # displacements for j
    dk = ddata[:, :, :, 2]  # displacements for k
    # warp tracks in blocks
    print fwarp
    dprw = Dpy(fwarp, 'r+')
    rows = len(dprw.f.root.streamlines.tracks)
    blocks = np.round(np.linspace(0, rows, 20)).astype(int)  # work in blocks
    print rows
    for i in range(len(blocks) - 1):
        print blocks[i], blocks[i + 1]
        caboodle = dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]]
        # from image voxel space to mni image voxel space
        ntrack = np.dot(caboodle, im2im[:3, :3].T) + im2im[:3, 3]
        mci = mc(di, ntrack.T, order=1)  # mapping for i
        mcj = mc(dj, ntrack.T, order=1)  # mapping for j
        mck = mc(dk, ntrack.T, order=1)  # mapping for k
        wtrack = ntrack + np.vstack((mci, mcj, mck)).T
        caboodlew = np.dot(wtrack, daff[:3, :3].T) + daff[:3, 3]
        dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]] = \
            caboodlew.astype('f4')
    dprw.close()
def load_tracks(method="pmt"): from nibabel import trackvis as tv dname = "/home/eg309/Data/orbital_phantoms/dwi_dir/subject1/" if method == "pmt": fname = "/home/eg309/Data/orbital_phantoms/dwi_dir/workflow/tractography/_subject_id_subject1/cam2trk_pico_twoten/data_fit_pdfs_tracked.trk" streams, hdr = tv.read(fname, points_space="voxel") tracks = [s[0] for s in streams] if method == "dti": fname = dname + "dti_tracks.dpy" if method == "dsi": fname = dname + "dsi_tracks.dpy" if method == "gqs": fname = dname + "gqi_tracks.dpy" if method == "eit": fname = dname + "eit_tracks.dpy" if method in ["dti", "dsi", "gqs", "eit"]: dpr_linear = Dpy(fname, "r") tracks = dpr_linear.read_tracks() dpr_linear.close() if method != "pmt": tracks = [t - np.array([96 / 2.0, 96 / 2.0, 55 / 2.0]) for t in tracks if track_range(t, 100 / 2.5, 150 / 2.5)] tracks = [t for t in tracks if track_range(t, 100 / 2.5, 150 / 2.5)] print "final no of tracks ", len(tracks) qb = QuickBundles(tracks, 25.0 / 2.5, 18) # from dipy.viz import fvtk # r=fvtk.ren() # fvtk.add(r,fvtk.line(qb.virtuals(),fvtk.red)) # fvtk.show(r) # show_tracks(tracks)#qb.exemplars()[0]) # qb.remove_small_clusters(40) del tracks # load tl = TrackLabeler(qb, qb.downsampled_tracks(), vol_shape=None, tracks_line_width=3.0, tracks_alpha=1) # return tracks w = World() w.add(tl) # create window wi = Window(caption="Fos", bgcolor=(1.0, 1.0, 1.0, 1.0), width=1600, height=900) wi.attach(w) # create window manager wm = WindowManager() wm.add(wi) wm.run()
def create_save_tracks(anisotropy, indices, vertices, seeds, low_thresh,
                       filename):
    from dipy.tracking.eudx import EuDX
    # track using the function arguments (anisotropy volume, peak indices)
    eu = EuDX(anisotropy,
              indices,
              odf_vertices=vertices,
              seeds=seeds,
              a_low=low_thresh)
    tensor_tracks_old = [streamline for streamline in eu]
    # keep only streamlines with more than one point
    tracks = [track for track in tensor_tracks_old if track.shape[0] > 1]
    dpw = Dpy(filename, "w")
    dpw.write_tracks(tracks)
    dpw.close()
def runStream(csd_peaks, roi_file, roi_label=1, ang_thr=45., a_low=0.2,
              step_size=0.1, seeds_per_voxel=30, out_name=None):

    img = nib.load(roi_file)
    affine = img.get_affine()
    mask_data = img.get_data()
    p = np.asarray(np.where(mask_data == roi_label))
    p = p.transpose()

    # seed_points = None
    # for i in p:
    #     points = np.random.uniform(size=[seeds_per_voxel, 3]) + (i - 0.5)
    #     if seed_points is None:
    #         seed_points = points
    #     else:
    #         seed_points = np.concatenate([seed_points, points], axis=0)

    import dipy.tracking.utils as utils
    seeds = utils.seeds_from_mask(mask_data == 1, density=seeds_per_voxel)
    print '# of seeds: ', len(seeds)

    sphere = get_sphere('symmetric724')
    print "seed eudx tractography"
    eu = EuDX(csd_peaks.peak_values,
              csd_peaks.peak_indices,
              odf_vertices=sphere.vertices,
              step_sz=step_size,
              seeds=seeds,
              ang_thr=ang_thr,
              a_low=a_low)
    csa_streamlines_mult_peaks = [streamline for streamline in eu]

    out_file = 'tracts.dipy'
    if out_name:
        out_file = out_name + '_' + out_file

    from dipy.io.trackvis import save_trk
    save_trk(out_file, csa_streamlines_mult_peaks, affine, mask_data.shape)

    # note: this Dpy write overwrites the trackvis file saved just above,
    # since both use out_file
    dpw = Dpy(out_file, 'w')
    dpw.write_tracks(csa_streamlines_mult_peaks)
    dpw.close()
    print 'write tracts to %s' % out_file
    return (csa_streamlines_mult_peaks, out_file)
def see_tracks(fdpy, N=2000):
    dpr = Dpy(fdpy, 'r')
    #T=dpr.read_tracksi(range(N))
    T = dpr.read_tracks()
    dpr.close()
    T = [downsample(t, 5) for t in T]
    r = fvtk.ren()
    colors = np.ones((len(T), 3)).astype('f4')
    for (i, c) in enumerate(T):
        orient = c[0] - c[-1]
        orient = np.abs(orient / np.linalg.norm(orient))
        colors[i, :3] = orient
    fvtk.add(r, fvtk.line(T, colors, opacity=0.5))
    #fos.add(r,fos.sphere((0,0,0),10))
    fvtk.show(r)
def see_spherical_intersections(fdpy, fsr):
    dpr = Dpy(fdpy, 'r')
    T = dpr.read_tracks()
    dpr.close()
    SR = load_pickle(fsr)
    r = fvtk.ren()
    for key in SR:
        ind = SR[key]['indices']
        intersT = [T[i] for i in ind]
        fvtk.add(r, fvtk.line(intersT, np.random.rand(3)))
        centerw = SR[key]['centerw']
        radius = SR[key]['radiusw']
        fvtk.add(r, fvtk.sphere(position=centerw, radius=radius))
    fvtk.show(r)
def compute_tracking(src_dti_dir, out_trk_dir, subj_name):
    # Loading FA and evecs data
    src_fa_file = os.path.join(src_dti_dir, subj_name + par_fa_suffix)
    fa_img = nib.load(src_fa_file)
    FA = fa_img.get_data()

    src_evecs_file = os.path.join(src_dti_dir, subj_name + par_evecs_suffix)
    evecs_img = nib.load(src_evecs_file)
    evecs = evecs_img.get_data()

    # Computation of streamlines
    sphere = get_sphere('symmetric724')
    peak_indices = dti.quantize_evecs(evecs, sphere.vertices)
    streamlines = EuDX(FA.astype('f8'),
                       ind=peak_indices,
                       seeds=par_eudx_seeds,
                       odf_vertices=sphere.vertices,
                       a_low=par_eudx_threshold)

    # Saving tractography
    voxel_size = fa_img.get_header().get_zooms()[:3]
    dims = FA.shape[:3]
    seed = par_eudx_seeds
    seed = "_%d%s" % (seed / 10**6 if seed > 10**5 else seed / 10**3,
                      'K' if seed < 1000000 else 'M')

    hdr = nib.trackvis.empty_header()
    hdr['voxel_size'] = voxel_size
    hdr['voxel_order'] = 'LAS'
    hdr['dim'] = dims

    strm = ((sl, None, None) for sl in streamlines
            if length(sl) > par_trk_min and length(sl) < par_trk_max)
    out_trk_file = os.path.join(out_trk_dir, subj_name + seed + par_trk_suffix)
    nib.trackvis.write(out_trk_file, strm, hdr, points_space='voxel')

    tracks = [track for track in streamlines]
    out_dipy_file = os.path.join(out_trk_dir,
                                 subj_name + seed + par_dipy_suffix)
    dpw = Dpy(out_dipy_file, 'w')
    dpw.write_tracks(tracks)
    dpw.close()
def save_tractogram(sft, filename, bbox_valid_check=True):
    """ Save the stateful tractogram in any format (trk, tck, vtk, fib, dpy)

    Parameters
    ----------
    sft : StatefulTractogram
        The stateful tractogram to save
    filename : string
        Filename with valid extension
    bbox_valid_check : bool
        Verification for negative voxel coordinates or values above the
        volume dimensions. Default is True, to enforce valid file.

    Returns
    -------
    output : bool
        True if the saving operation was successful
    """
    _, extension = os.path.splitext(filename)
    if extension not in ['.trk', '.tck', '.vtk', '.fib', '.dpy']:
        raise TypeError('Output filename is not one of the supported format')

    if bbox_valid_check and not sft.is_bbox_in_vox_valid():
        raise ValueError('Bounding box is not valid in voxel space, cannot '
                         'load a valid file if some coordinates are '
                         'invalid. Please use the function '
                         'remove_invalid_streamlines to discard invalid '
                         'streamlines or set bbox_valid_check to False')

    old_space = deepcopy(sft.space)
    old_shift = deepcopy(sft.shifted_origin)

    sft.to_rasmm()
    sft.to_center()

    timer = time.time()
    if extension in ['.trk', '.tck']:
        tractogram_type = detect_format(filename)
        header = create_tractogram_header(tractogram_type,
                                          *sft.space_attributes)
        new_tractogram = Tractogram(sft.streamlines,
                                    affine_to_rasmm=np.eye(4))

        if extension == '.trk':
            new_tractogram.data_per_point = sft.data_per_point
            new_tractogram.data_per_streamline = sft.data_per_streamline

        fileobj = tractogram_type(new_tractogram, header=header)
        nib.streamlines.save(fileobj, filename)

    elif extension in ['.vtk', '.fib']:
        save_vtk_streamlines(sft.streamlines, filename, binary=True)

    elif extension in ['.dpy']:
        dpy_obj = Dpy(filename, mode='w')
        dpy_obj.write_tracks(sft.streamlines)
        dpy_obj.close()

    logging.debug('Save %s with %s streamlines in %s seconds',
                  filename, len(sft), round(time.time() - timer, 3))

    if old_space == Space.VOX:
        sft.to_vox()
    elif old_space == Space.VOXMM:
        sft.to_voxmm()

    if old_shift:
        sft.to_corner()

    return True
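# A minimal sketch of saving with the stateful API above; 'dwi.nii.gz' is a
# hypothetical reference image and the random streamlines are illustration
# only (assumes numpy as np, StatefulTractogram and Space are imported from
# dipy.io.stateful_tractogram).
streamlines = [np.random.rand(12, 3).astype('f4') * 10 for _ in range(3)]
sft = StatefulTractogram(streamlines, 'dwi.nii.gz', Space.RASMM)
save_tractogram(sft, 'example.trk')
save_tractogram(sft, 'example.dpy')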
def load_tractogram(filename, reference, to_space=Space.RASMM,
                    shifted_origin=False, bbox_valid_check=True,
                    trk_header_check=True):
    """ Load the stateful tractogram from any format (trk, tck, vtk, fib, dpy)

    Parameters
    ----------
    filename : string
        Filename with valid extension
    reference : Nifti or Trk filename, Nifti1Image or TrkFile,
        Nifti1Header or trk.header (dict), or 'same' if the input is a trk
        file. Reference that provides the spatial attribute.
        Typically a nifti-related object from the native diffusion used for
        streamlines generation
    to_space : Enum (dipy.io.stateful_tractogram.Space)
        Space to which the streamlines will be transformed after loading.
    shifted_origin : bool
        Information on the position of the origin,
        False is Trackvis standard, default (center of the voxel)
        True is NIFTI standard (corner of the voxel)
    bbox_valid_check : bool
        Verification for negative voxel coordinates or values above the
        volume dimensions. Default is True, to enforce valid file.
    trk_header_check : bool
        Verification that the reference has the same header as the spatial
        attributes as the input tractogram when a Trk is loaded

    Returns
    -------
    output : StatefulTractogram
        The tractogram to load (must have been saved properly)
    """
    _, extension = os.path.splitext(filename)
    if extension not in ['.trk', '.tck', '.vtk', '.fib', '.dpy']:
        logging.error('Output filename is not one of the supported format')
        return False

    if to_space not in Space:
        logging.error('Space MUST be one of the 3 choices (Enum)')
        return False

    if reference == 'same':
        if extension == '.trk':
            reference = filename
        else:
            logging.error('Reference must be provided, "same" is only '
                          'available for Trk file.')
            return False

    if trk_header_check and extension == '.trk':
        if not is_header_compatible(filename, reference):
            logging.error('Trk file header does not match the provided '
                          'reference')
            return False

    timer = time.time()
    data_per_point = None
    data_per_streamline = None
    if extension in ['.trk', '.tck']:
        tractogram_obj = nib.streamlines.load(filename).tractogram
        streamlines = tractogram_obj.streamlines
        if extension == '.trk':
            data_per_point = tractogram_obj.data_per_point
            data_per_streamline = tractogram_obj.data_per_streamline

    elif extension in ['.vtk', '.fib']:
        streamlines = load_vtk_streamlines(filename)
    elif extension in ['.dpy']:
        dpy_obj = Dpy(filename, mode='r')
        streamlines = list(dpy_obj.read_tracks())
        dpy_obj.close()
    logging.debug('Load %s with %s streamlines in %s seconds',
                  filename, len(streamlines), round(time.time() - timer, 3))

    sft = StatefulTractogram(streamlines, reference, Space.RASMM,
                             shifted_origin=shifted_origin,
                             data_per_point=data_per_point,
                             data_per_streamline=data_per_streamline)

    if to_space == Space.VOX:
        sft.to_vox()
    elif to_space == Space.VOXMM:
        sft.to_voxmm()

    if bbox_valid_check and not sft.is_bbox_in_vox_valid():
        raise ValueError('Bounding box is not valid in voxel space, cannot '
                         'load a valid file if some coordinates are '
                         'invalid. Please set bbox_valid_check to False and '
                         'then use the function remove_invalid_streamlines '
                         'to discard invalid streamlines.')

    return sft
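# A round-trip sketch with the stateful API ('bundle.trk' is a hypothetical
# file): load into voxel space, inspect, save back out. save_tractogram
# transforms to RASMM internally before writing, so the loaded space does
# not need to match the output format.
sft = load_tractogram('bundle.trk', reference='same', to_space=Space.VOX)
print(len(sft.streamlines))
save_tractogram(sft, 'bundle.dpy')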
def save_tractogram(sft, filename, bbox_valid_check=True):
    """ Save the stateful tractogram in any format (trk, tck, vtk, fib, dpy)

    Parameters
    ----------
    sft : StatefulTractogram
        The stateful tractogram to save
    filename : string
        Filename with valid extension

    Returns
    -------
    output : bool
        Did the saving work properly
    """
    _, extension = os.path.splitext(filename)
    if extension not in ['.trk', '.tck', '.vtk', '.fib', '.dpy']:
        raise TypeError('Output filename is not one of the supported format')

    if bbox_valid_check and not sft.is_bbox_in_vox_valid():
        raise ValueError('Bounding box is not valid in voxel space, cannot '
                         'save a valid file if some coordinates are invalid')

    old_space = deepcopy(sft.space)
    old_shift = deepcopy(sft.shifted_origin)

    sft.to_rasmm()
    sft.to_center()

    timer = time.time()
    if extension in ['.trk', '.tck']:
        tractogram_type = detect_format(filename)
        header = create_tractogram_header(tractogram_type,
                                          *sft.space_attribute)
        new_tractogram = Tractogram(sft.streamlines,
                                    affine_to_rasmm=np.eye(4))

        if extension == '.trk':
            new_tractogram.data_per_point = sft.data_per_point
            new_tractogram.data_per_streamline = sft.data_per_streamline

        fileobj = tractogram_type(new_tractogram, header=header)
        nib.streamlines.save(fileobj, filename)

    elif extension in ['.vtk', '.fib']:
        save_vtk_streamlines(sft.streamlines, filename, binary=True)

    elif extension in ['.dpy']:
        dpy_obj = Dpy(filename, mode='w')
        dpy_obj.write_tracks(sft.streamlines)
        dpy_obj.close()

    logging.debug('Save %s with %s streamlines in %s seconds',
                  filename, len(sft), round(time.time() - timer, 3))

    if old_space == Space.VOX:
        sft.to_vox()
    elif old_space == Space.VOXMM:
        sft.to_voxmm()

    if old_shift:
        sft.to_corner()

    return True
def dpy(workingdir, input, compressed=None, restored=None, tol_error=0.01,
        force=False, coords_only=False, verbose=False):
    if not input.endswith('tck') and not input.endswith('trk'):
        # we need to convert
        print('Invalid format')
        return None

    if not compressed:
        compressed = input + '_compressed' + str(tol_error) + '.dpy'
    if not restored:
        restored = input + '_restored' + str(tol_error) + '.tck'

    original = os.path.join(workingdir, input)

    #
    # compression
    #
    c_time = -1
    if not os.path.exists(os.path.join(workingdir, compressed)) or force:
        # compress again!
        t0 = time.time()
        loaded_original = nib.streamlines.load(original, lazy_load=False)
        original_streamlines = loaded_original.streamlines
        # parameters from Presseau15 are 0.01 and inf
        c_streamlines = compress_streamlines(original_streamlines,
                                             tol_error=tol_error,
                                             max_segment_length=np.inf)
        # write dpy file
        # set compression to highest, but it does not have any effect
        dpw = Dpy(os.path.join(workingdir, compressed), mode='w',
                  compression=9)
        for c_s in c_streamlines:
            dpw.write_track(c_s)
        dpw.close()
        c_time = time.time() - t0
        if verbose:
            print('compression done.')

    #
    # restoring
    #
    d_time = -1
    if not os.path.exists(os.path.join(workingdir, restored)) or force:
        # restore again!
        t0 = time.time()
        restored_data = Dpy(os.path.join(workingdir, compressed), mode='r')
        restored_streamlines = restored_data.read_tracks()
        restored_data.close()
        d_time = time.time() - t0
        with open(os.path.join(workingdir, restored), 'w') as f:
            f.write('restoredok.')
        if verbose:
            print('restoring done.')

    #
    # calculate errors
    #
    stats = compressed + '_stats' + str(tol_error) + '.p'
    if not os.path.exists(os.path.join(workingdir, stats)) or force:
        # note: this branch needs original_streamlines and
        # restored_streamlines from the two branches above
        statsdata = Runner.error_per_streamlines(original_streamlines,
                                                 restored_streamlines)
        sizestatsdata = Runner.sizestats(os.path.join(workingdir, original),
                                         os.path.join(workingdir, compressed))
        statsdata = [c_time, d_time, sizestatsdata, statsdata[0],
                     statsdata[1]]
        with open(os.path.join(workingdir, stats), 'wb') as f:
            pickle.dump(statsdata, f)
    else:
        with open(os.path.join(workingdir, stats), 'rb') as f:
            statsdata = pickle.load(f)

    [c_time, d_time, sizestatsdata, (min_e, max_e, mean_e, std_e),
     (end_min_e, end_max_e, end_mean_e, end_std_e)] = statsdata

    if verbose:
        print('Times', c_time, d_time)
        print('Size Stats', sizestatsdata)
        print('Error', min_e, max_e, mean_e, std_e)
        print('Endpoint Error', end_min_e, end_max_e, end_mean_e, end_std_e)

    return statsdata
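# A hedged usage sketch for the dpy benchmark above: compress a hypothetical
# 'fibers.tck' at two tolerances and compare the returned stats
# (compression time, restore time, size stats, point and endpoint errors).
for tol in (0.01, 0.1):
    statsdata = dpy('/tmp/work', 'fibers.tck', tol_error=tol, verbose=True)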
def test_dpy():
    fname = 'test.bin'
    dpw = Dpy(fname, 'w')
    A = np.ones((5, 3))
    B = 2 * A.copy()
    C = 3 * A.copy()
    dpw.write_track(A)
    dpw.write_track(B)
    dpw.write_track(C)
    dpw.write_tracks([C, B, A])
    dpw.close()
    dpr = Dpy(fname, 'r')
    assert_equal(dpr.version() == '0.0.1', True)
    T = dpr.read_tracksi([0, 1, 2, 0, 0, 2])
    T2 = dpr.read_tracks()
    assert_equal(len(T2), 6)
    dpr.close()
    assert_array_equal(A, T[0])
    assert_array_equal(C, T[5])
""" del tracks_np tracks2 = list(np.load('fornix.npy')) """ huge datasets use dipy.io.dpy * direct indexing from the disk * memory usage always low * extendable """ from dipy.io.dpy import Dpy dpw = Dpy('fornix.dpy', 'w') """ write many tracks at once """ dpw.write_tracks(tracks2) """ write one track """ dpw.write_track(tracks2[0] * 6) """ or one track each time """ for t in tracks:
#d101='/home/eg309/Data/TEST_MR10032/subj_10/101/'
d101 = '/home/eg309/Data/PROC_MR10032/subj_10/101/'

ffa = d101 + '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA.nii.gz'
fdis = d101 + '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_nonlin_displacements.nii.gz'
ffareg = d101 + '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_bet_FA_reg.nii.gz'
flirtaff = d101 + '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_affine_transf.mat'
ftrack = d101 + '1312211075232351192010092912092080924175865ep2dadvdiffDSI10125x25x25STs005a001_QA_native.dpy'
froi = '/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI01_GCC.nii'
froi2 = '/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI02_BCC.nii'
#froi3='/home/eg309/Data/PROC_MR10032/NIFTI_ROIs/AnatomicalROIs/ROI03_SCC.nii'
froi3 = '/home/eg309/Downloads/SCC_analyze.nii'
ref_fname = '/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz'

dpr = Dpy(ftrack, 'r')
print dpr.track_no
T = dpr.read_indexed([0, 1, 2, 3, 2000, 1000000])
for t in T:
    print t.shape
dpr.close()

track = T[4]
im2im = flirt2aff_files(flirtaff, ffa, ref_fname)  # ffa to be replaced by ffareg
print im2im
from dipy.core.track_metrics import length
print len(track)
print length(track)
#ntrack=np.dot(im2im[:3,:3],track.T)+im2im[:3,[3]]
def test_dpy():
    fname = 'test.bin'
    with InTemporaryDirectory():
        dpw = Dpy(fname, 'w')
        A = np.ones((5, 3))
        B = 2 * A.copy()
        C = 3 * A.copy()
        dpw.write_track(A)
        dpw.write_track(B)
        dpw.write_track(C)
        dpw.write_tracks(Streamlines([C, B, A]))

        all_tracks = np.ascontiguousarray(np.vstack([A, B, C, C, B, A]))
        npt.assert_array_equal(all_tracks, dpw.tracks[:])
        dpw.close()

        dpr = Dpy(fname, 'r')
        npt.assert_equal(dpr.version() == u'0.0.1', True)
        T = dpr.read_tracksi([0, 1, 2, 0, 0, 2])
        T2 = dpr.read_tracks()
        npt.assert_equal(len(T2), 6)
        dpr.close()

        npt.assert_array_equal(A, T[0])
        npt.assert_array_equal(C, T[5])
        self.b.pack(side=Tkinter.BOTTOM)

    def ok(self):
        self.value = self.s.get()
        self.parent.destroy()


if __name__ == '__main__':

    #load T1 volume registered in MNI space
    #img = nib.load('data/subj_05/MPRAGE_32/T1_flirt_out.nii.gz')
    #data = img.get_data()
    #affine = img.get_affine()

    #load the tracks registered in MNI space
    fdpyw = 'data/subj_05/101_32/DTI/tracks_gqi_1M_linear.dpy'
    dpr = Dpy(fdpyw, 'r')
    T = dpr.read_tracks()
    dpr.close()

    #load initial QuickBundles with threshold 30mm
    fpkl = 'data/subj_05/101_32/DTI/qb_gqi_1M_linear_30.pkl'
    #qb=QuickBundles(T,30.,12)
    #save_pickle(fpkl,qb)
    qb = load_pickle(fpkl)

    #create the interaction system for tracks
    tl = TrackLabeler('Bundle Picker',
                      qb, qb.downsampled_tracks(),
                      vol_shape=(182, 218, 182), tracks_alpha=1)

    #add an interactive slicing/masking tool
np.save('fornix.npy', streamlines_np)
streamlines2 = list(np.load('fornix.npy'))

"""
3. We also work on our HDF5 based file format which can read/write
massive datasets (as big as the size of your free disk space). With
`Dpy` we can support

* direct indexing from the disk
* memory usage always low
* extensions to include different arrays in the same file

Here is a simple example.
"""

from dipy.io.dpy import Dpy
dpw = Dpy('fornix.dpy', 'w')

"""
Write many streamlines at once.
"""

dpw.write_tracks(streamlines2)

"""
Write one track.
"""

dpw.write_track(streamlines2[0])

"""
or one track each time.
"""

for t in streamlines:
    dpw.write_track(t)

dpw.close()
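"""
Reading back uses the same class in read mode; ``read_tracksi`` performs
the direct indexing from disk mentioned in the list above. A minimal
sketch, assuming the ``fornix.dpy`` file written in this example.
"""

dpr = Dpy('fornix.dpy', 'r')
some_streamlines = dpr.read_tracksi([0, 1, 2])
all_streamlines = dpr.read_tracks()
dpr.close()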
def warp_displacements_tracks(fdpy, ffa, fmat, finv, fdis, fdisa, fref,
                              fdpyw):
    """ Warp tracks from native space to the FMRIB58/MNI space

    We use here the fsl displacements. Have a look at create_displacements
    to see an example of how to use these displacements.

    Parameters
    ----------
    fdpy : filename of the .dpy file with the tractography
    ffa : filename of nifti to be warped
    fmat : filename of .mat (flirt)
    finv : filename of invwarp displacements (invwarp)
    fdis : filename of displacements (fnirtfileutils)
    fdisa : filename of displacements (fnirtfileutils + affine)
    fref : filename of reference volume e.g. (FMRIB58_FA_1mm.nii.gz)
    fdpyw : filename of the warped tractography

    See also
    --------
    dipy.external.fsl.create_displacements
    """
    # read the tracks from the image space
    dpr = Dpy(fdpy, 'r')
    T = dpr.read_tracks()
    dpr.close()

    # copy them in a new file
    dpw = Dpy(fdpyw, 'w', compression=1)
    dpw.write_tracks(T)
    dpw.close()

    # from fa index to ref index
    res = flirt2aff_files(fmat, ffa, fref)

    # load the reference img
    imgref = nib.load(fref)
    refaff = imgref.affine

    # load the invwarp displacements
    imginvw = nib.load(finv)
    invwdata = imginvw.get_data()

    # load the forward displacements
    imgdis = nib.load(fdis)
    disdata = imgdis.get_data()

    # load the forward displacements + affine
    imgdis2 = nib.load(fdisa)
    disdata2 = imgdis2.get_data()

    # from their difference create the affine
    disaff = disdata2 - disdata
    del disdata
    del disdata2

    shape = nib.load(ffa).get_data().shape

    # transform the displacements affine back to image space
    disaff0 = affine_transform(disaff[..., 0], res[:3, :3], res[:3, 3],
                               shape, order=1)
    disaff1 = affine_transform(disaff[..., 1], res[:3, :3], res[:3, 3],
                               shape, order=1)
    disaff2 = affine_transform(disaff[..., 2], res[:3, :3], res[:3, 3],
                               shape, order=1)

    # remove the transformed affine from the invwarp displacements
    di = invwdata[:, :, :, 0] + disaff0
    dj = invwdata[:, :, :, 1] + disaff1
    dk = invwdata[:, :, :, 2] + disaff2

    dprw = Dpy(fdpyw, 'r+')
    rows = len(dprw.f.root.streamlines.tracks)
    blocks = np.round(np.linspace(0, rows, 10)).astype(int)  # work in blocks
    for i in range(len(blocks) - 1):
        # copy a lot of tracks together
        caboodle = dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]]
        mci = mc(di, caboodle.T, order=1)  # interpolations for i displacement
        mcj = mc(dj, caboodle.T, order=1)  # interpolations for j displacement
        mck = mc(dk, caboodle.T, order=1)  # interpolations for k displacement
        D = np.vstack((mci, mcj, mck)).T
        # go back to mni image space
        WI2 = np.dot(caboodle, res[:3, :3].T) + res[:3, 3] + D
        # and then to mni world space
        caboodlew = np.dot(WI2, refaff[:3, :3].T) + refaff[:3, 3]
        # write back
        dprw.f.root.streamlines.tracks[blocks[i]:blocks[i + 1]] = (
            caboodlew.astype('f4'))
    dprw.close()
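# Hypothetical invocation of warp_displacements_tracks; every filename
# below is a placeholder for the corresponding output of the FSL pipeline
# (see dipy.external.fsl.create_displacements).
warp_displacements_tracks(fdpy='tracks_native.dpy',
                          ffa='subj_bet_FA.nii.gz',
                          fmat='subj_affine_transf.mat',
                          finv='subj_invwarp.nii.gz',
                          fdis='subj_displacements.nii.gz',
                          fdisa='subj_displacements_affine.nii.gz',
                          fref='/usr/share/fsl/data/standard/FMRIB58_FA_1mm.nii.gz',
                          fdpyw='tracks_warped.dpy')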