def test_approx_ei_traj():
    """Nose-style generator test: a sampled sinusoidal 3D curve should be
    approximated down to 27 points by ``pf.approximate_ei_trajectory``."""
    n_segments = 100
    theta = np.linspace(0, 1.75 * 2 * np.pi, n_segments)
    xs = theta
    ys = 5 * np.sin(5 * theta)
    zs = np.zeros(xs.shape)
    # Stack the coordinates as an (n_segments, 3) array of xyz points.
    curve = np.vstack((xs, ys, zs)).T
    approx = pf.approximate_ei_trajectory(curve)
    yield assert_equal, len(approx), 27
def test_approx_ei_traj():
    """Check that approximate_ei_trajectory reduces a 100-sample sine
    trajectory to exactly 27 points (nose yield-test style)."""
    samples = np.linspace(0, 1.75 * 2 * np.pi, 100)
    # Build the (100, 3) trajectory: x is the parameter, y a sine wave, z flat.
    trajectory = np.vstack(
        (samples, 5 * np.sin(5 * samples), np.zeros(samples.shape))
    ).T
    reduced = pf.approximate_ei_trajectory(trajectory)
    yield assert_equal, len(reduced), 27
def load_reduce_translate(fname, reduction=1):
    """ Loads a trackvis file using DIPY io, only keeping a ratio of
    1/reduction tracks. Performs trajectory approximation to reduce the
    track lengths.

    Parameters
    ----------
    fname : str
        The Trackvis file
    reduction : int, optional
        The reduction factor (keep only 1 line per set of reduction lines)

    Returns
    -------
    tracks : list
        A list of tracks
    """
    # DIPY trackvis reader: `lines` is a sequence of (points, scalars, props)
    # tuples, `hdr` the trackvis header.
    lines, hdr = tv.read(fname)
    print 'loaded,',
    sys.stdout.flush()
    #ras = tv.get_affine(hdr)
    ras = tv.aff_from_hdr(hdr)
    if not ras[:3,-1].any():
        # No translation in the affine: center the volume so that the middle
        # voxel maps to the origin.
        # dot(ras[:3,:3],md_vox) + t = (0,0,0) --> t = -dot(ras[:3,:3],md_vox)
        md_vox = hdr['dim']/2
        t = -np.dot(ras[:3,:3], md_vox)
        ras[:3,-1] = t
    # Keep only every `reduction`-th streamline; drop scalars/properties.
    tracks = [l[0] for l in lines[::reduction]]
    # Free the full dataset early — streamline files can be large.
    del lines
    ras_cmap = ni_api.AffineTransform.from_params('ijk', 'xyz', ras)
    # Flatten all tracks into one point array so the affine can be applied in
    # a single vectorized call, then rebuild the per-track structure.
    flat_tracks, breaks = flatten_tracks(tracks)
    del tracks
    flat_tracks_xyz = ras_cmap(flat_tracks)
    tracks = recombine_flattened_tracks(flat_tracks_xyz, breaks)
    print 'translated,',
    sys.stdout.flush()
    # Polyline simplification: alpha is the angular tolerance (45 degrees).
    tracks_reduced = [tp.approximate_ei_trajectory(line, alpha=np.pi/4)
                      for line in tracks]
    print 'reduced,'
    sys.stdout.flush()
    return tracks_reduced
# Demo script: cluster streamlines and visualize before/after.
# NOTE(review): `streams` and `hdr` are assumed to come from a trackvis load
# earlier in the file (not visible in this chunk) — confirm.
print 'Copying tracks...'
# Keep only the point arrays from the (points, scalars, props) tuples.
T=[i[0] for i in streams]
print 'Representing tracks using only 3 pts...'
# Downsample each track to 3 points — a cheap shape descriptor for clustering.
tracks=[tm.downsample(t,3) for t in T]
print 'Deleting unnecessary data...'
del streams,hdr
print 'Hidden Structure Clustering...'
now=time.clock()
# Cluster the 3-point tracks; d_thr is the distance threshold (units are
# presumably those of the track coordinates — verify).
C=pf.local_skeleton_clustering(tracks,d_thr=20)
print 'Done in', time.clock()-now,'s.'
print 'Reducing the number of points...'
# Simplify the full-resolution tracks for display.
T=[pf.approximate_ei_trajectory(t) for t in T]
print 'Showing initial dataset.'
r=fos.ren()
fos.add(r,fos.line(T,fos.white,opacity=0.1))
fos.show(r)
print 'Showing dataset after clustering.'
fos.clear(r)
# One random RGB color per cluster, assigned to every member track.
colors=np.zeros((len(T),3))
for c in C:
    color=np.random.rand(1,3)
    for i in C[c]['indices']:
        colors[i]=color
fos.add(r,fos.line(T,colors,opacity=1))
fos.show(r)
    # NOTE(review): this chunk begins mid-function — the enclosing `def` (which
    # defines `to_be_deleted`, `C` and `k`) is above the visible region.
    # Remove the clusters flagged for deletion, then return the survivors.
    for i in np.where(to_be_deleted>0)[0]:
        del C[k[i]]
    return C

def most(C):
    # NOTE(review): unfinished stub — the bare string below is a no-op
    # placeholder, apparently a reminder to call pf.most_similar_track_zhang().
    for c in C:
        pass
        'pf.most_similar_track_zhang()'

# Demo script: load pickled tracks, simplify them, and run LARCH clustering.
# NOTE(review): `fname` and `show_rep3` are assumed defined earlier in the
# file (not visible here) — confirm.
T=pkl.load_pickle(fname)
print 'Reducing the number of points...'
T=[pf.approximate_ei_trajectory(t) for t in T]
print 'Reducing further to tracks with 3 pts...'
T2=[tm.downsample(t,3) for t in T]
print 'LARCH ...'
print 'Splitting ...'
t=time.clock()
# Split step of LARCH with a 5.0 threshold (units presumably track
# coordinate distance — verify against pf.larch_3split docs).
C=pf.larch_3split(T2,None,5.)
print time.clock()-t, len(C)
for c in C:
    # Print each cluster's mean 3-point representative.
    print c, C[c]['rep3']/C[c]['N']
r=show_rep3(C)