def test_Fiber_unique_coords():
    """
    Test class method Fiber.unique_coords
    """
    arr = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3]])
    npt.assert_equal(mtf.Fiber(arr).unique_coords, np.array([[1], [2], [3]]))

    arr = np.array([[3, 2, 3], [2, 2, 2], [10, 10, 10]])
    npt.assert_equal(mtf.Fiber(arr).unique_coords,
                     np.array([[3, 2], [2, 2], [10, 10]]))

    for x in range(1000):
        x1 = np.random.randn()
        x2 = np.random.randn()
        y1 = np.random.randn()
        y2 = np.random.randn()
        z1 = np.random.randn()
        z2 = np.random.randn()

        # So the next line isn't too long:
        npta = npt.assert_almost_equal
        npta(mtf.Fiber([[x1, x2, x1],
                        [y1, y2, y1],
                        [z1, z2, z1]]).unique_coords,
             np.array([[x1, x2], [y1, y2], [z1, z2]]),
             decimal=4)

    arr2d = np.array([[1, 2, 1], [3, 4, 3], [5, 6, 5]])
    f1 = mtf.Fiber(arr2d)
    npt.assert_equal(f1.unique_coords, np.array([[1, 2], [3, 4], [5, 6]]))
def test_pdb_from_fg():
    """
    Test writing a fiber-group to file
    """
    coords1 = np.arange(900).reshape(3, 300)
    coords2 = np.arange(900).reshape(3, 300) + 100

    fiber_stats = dict(foo=1, bar=2)
    node_stats = dict(ecc=np.arange(300))

    fg = mtf.FiberGroup([
        mtf.Fiber(coords1, fiber_stats=fiber_stats, node_stats=node_stats),
        mtf.Fiber(coords2, fiber_stats=fiber_stats, node_stats=node_stats)])

    temp_dir = tempfile.gettempdir()
    mio.pdb_from_fg(fg, os.path.join(temp_dir, 'fg.pdb'))

    # Test that the properties are preserved upon reloading:
    fg2 = mio.fg_from_pdb(os.path.join(temp_dir, 'fg.pdb'))
    npt.assert_equal(fg2[0].coords, fg[0].coords)
    npt.assert_equal(fg2[1].coords, fg[1].coords)
    npt.assert_equal(fg2[0].node_stats, fg[0].node_stats)
    npt.assert_equal(fg2[1].node_stats, fg[1].node_stats)
    npt.assert_equal(fg2.fiber_stats, fg.fiber_stats)
def test_Fiber_xform():
    arr2d = np.array([[1, 2], [3, 4], [5, 6]])
    affine1 = np.eye(4)
    f1 = mtf.Fiber(arr2d, affine=affine1)
    f1.xform()
    npt.assert_equal(f1.coords, arr2d)

    f2 = f1.xform(inplace=False)
    npt.assert_equal(f2.coords, f1.coords)

    f1_noaff = mtf.Fiber(arr2d, affine=None)
    f1_noaff_notinplace = f1_noaff.xform(affine=None, inplace=False)
    # Should give you back a different object:
    npt.assert_equal(not f1_noaff_notinplace is f1_noaff, True)
    # But with equal coords:
    npt.assert_equal(f1_noaff_notinplace.coords, f1_noaff.coords)

    # Keep everything the same, but translate the x coords down by 1:
    # http://en.wikipedia.org/wiki/Transformation_matrix#Affine_transformations
    affine2 = np.matrix([[1, 0, 0, -1],
                         [0, 1, 0, 0],
                         [0, 0, 1, 0],
                         [0, 0, 0, 1]])

    f3 = mtf.Fiber(arr2d, affine=affine2)
    f3.xform()
    npt.assert_equal(f3.coords[0], arr2d[0] - 1)

    # This one rotates about the x axis by 90 degrees:
    pi_2 = np.pi / 2
    affine3 = np.matrix([[1, 0, 0, 0],
                         [0, np.cos(pi_2), -np.sin(pi_2), 0],
                         [0, np.sin(pi_2), np.cos(pi_2), 0],
                         [0, 0, 0, 1]])

    f4 = mtf.Fiber([0, 1, 0], affine=affine3)
    f4.xform()
    # Rotating about the x axis should move all of the length of the vector to
    # the z axis:
    npt.assert_almost_equal(f4.coords, [0, 0, 1])

    # The next call to xform should bring you back to where you started:
    f4.xform()
    npt.assert_almost_equal(f4.coords, [0, 1, 0])

    # If you assign into a new fiber:
    f5 = f4.xform(inplace=False)
    # This one should have an affine which is the inverse of your original
    # affine:
    npt.assert_equal(f5.affine, affine3.getI())
    # xform-ing twice gives you back the same thing:
    npt.assert_equal(f5.xform(inplace=False).xform(inplace=False).coords,
                     f5.coords)
    npt.assert_equal(f5.xform(inplace=False).xform(inplace=False).affine,
                     f5.affine)
def test_FiberGroup():
    """
    Testing initialization of the FiberGroup class.
    """
    arr2d = np.array([[1, 2], [3, 4], [5, 6]])
    arr1d = np.array([5, 6, 7])
    f1 = mtf.Fiber(arr2d, fiber_stats=dict(a=1, b=2))
    f2 = mtf.Fiber(arr1d, fiber_stats=dict(a=1))
    fg1 = mtf.FiberGroup([f1, f2])
    npt.assert_equal(fg1.n_fibers, 2)
    # We have to sort, because it could also come out as ['b', 'a']:
    npt.assert_equal(np.sort(list(fg1.fiber_stats.keys())), ['a', 'b'])
    # The number of nodes is just the sum of nodes per fiber:
    npt.assert_equal(fg1.n_nodes, f1.n_nodes + f2.n_nodes)
def fg_from_trk(trk_file, affine=None):
    """
    Read data from a trackvis .trk file and create a FiberGroup object
    according to the information in it.
    """
    # Generate right away, since we're going to do it anyway:
    read_trk = tv.read(trk_file, as_generator=False)
    fibers_trk = read_trk[0]

    # By default, read the affine from the file header:
    if affine is not None:
        aff = affine
    else:
        hdr = read_trk[1]
        aff = tv.aff_from_hdr(hdr)
        # If the header contains a bogus affine, we revert to np.eye(4), so we
        # don't get into trouble later:
        try:
            np.matrix(aff).getI()
        except np.linalg.LinAlgError:
            e_s = "trk file contains bogus header, reverting to np.eye(4)"
            warnings.warn(e_s)
            aff = np.eye(4)

    fibers = []
    for f in fibers_trk:
        fibers.append(ozf.Fiber(np.array(f[0]).T, affine=aff))

    return ozf.FiberGroup(fibers, affine=aff)
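# A minimal usage sketch for fg_from_trk (illustrative only: the file name is
# hypothetical, and this relies on the module-level imports used above, e.g.
# numpy as np):
def _example_fg_from_trk():
    """
    Illustrative only: read a trackvis file into a FiberGroup. Passing an
    affine overrides whatever is stored in the .trk header.
    """
    fg = fg_from_trk('tracks.trk', affine=np.eye(4))
    return fg.n_fibers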
def test_FiberGroup_xform():
    """
    Test affine transformation method of FiberGroup
    """
    # This affine rotates vectors 90 degrees around the x axis:
    pi_2 = np.pi / 2
    affine1 = np.matrix([[1, 0, 0, 0],
                         [0, np.cos(pi_2), -np.sin(pi_2), 0],
                         [0, np.sin(pi_2), np.cos(pi_2), 0],
                         [0, 0, 0, 1]])

    y = [0, 0, 1]
    x = [0, 1, 0]
    f1 = mtf.Fiber(x, affine=affine1)
    f2 = mtf.Fiber(y, affine=affine1)
    fg1 = mtf.FiberGroup([f1, f2])
    fg1.xform()
    # The first fiber's coordinates should now be what the second one's were
    # originally:
    npt.assert_almost_equal(fg1.fibers[0].coords, y)

    f3 = mtf.Fiber(x)
    f4 = mtf.Fiber(y)
    fg2 = mtf.FiberGroup([f3, f4], affine=affine1)
    fg2.xform()
    # Same should be true when the affine is associated with the FiberGroup:
    npt.assert_almost_equal(fg2.fibers[0].coords, y)
    # And the transformation should have mutated the original object:
    npt.assert_almost_equal(f3.coords, y)

    f5 = mtf.Fiber(x)
    f6 = mtf.Fiber(y)
    fg3 = mtf.FiberGroup([f5, f6])
    fg3.xform(affine1)
    # Same should be true when the affine is provided as input:
    npt.assert_almost_equal(fg3.fibers[0].coords, y)
    # And the transformation should have mutated the original object:
    npt.assert_almost_equal(f5.coords, y)
    # This also attaches the inverse of this affine to the original object, so
    # that you can always find your way back:
    npt.assert_almost_equal(f5.affine, affine1.getI())

    f7 = mtf.Fiber(x)
    f8 = mtf.Fiber(y)
    fg4 = mtf.FiberGroup([f7, f8])
    fg4.xform()
    npt.assert_equal(fg4.affine, None)
    # The affine should 'stick':
    fg4.xform(np.eye(4))
    npt.assert_equal(fg4.affine, np.eye(4))
    # Even to the fibers:
    npt.assert_equal(f8.affine, np.eye(4))
def test_FiberGroup_unique_coords():
    """
    Test class method FiberGroup.unique_coords
    """
    for x in range(1000):
        x1 = np.random.randn()
        x2 = np.random.randn()
        y1 = np.random.randn()
        y2 = np.random.randn()
        z1 = np.random.randn()
        z2 = np.random.randn()

        # So the next lines aren't too long:
        npta = npt.assert_almost_equal

        # Should work if both fibers have non-unique coords:
        npta(mtf.FiberGroup(
                 [mtf.Fiber([[x1, x1, x2], [y1, y1, y2], [z1, z1, z2]]),
                  mtf.Fiber([[x1, x1, x2], [y1, y1, y2], [z1, z1, z2]])]
                 ).unique_coords,
             np.array([[x1, x2], [y1, y2], [z1, z2]]),
             decimal=4)

        # And also for extracting across fibers with unique coords:
        npta(mtf.FiberGroup(
                 [mtf.Fiber([[x1], [y1], [z1]]),
                  mtf.Fiber([[x2], [y2], [z2]])]).unique_coords,
             np.array([[x1, x2], [y1, y2], [z1, z2]]),
             decimal=4)

        # And also for extracting across shared coords:
        npta(mtf.FiberGroup(
                 [mtf.Fiber([[x1], [y1], [z1]]),
                  mtf.Fiber([[x2, x1], [y2, y1], [z2, z1]])]).unique_coords,
             np.array([[x1, x2], [y1, y2], [z1, z2]]),
             decimal=4)
def test_Fiber():
    """
    Testing initialization of the Fiber class
    """
    # Providing a list as an input works:
    arr1d = [1, 2, 3]
    # This is the most basic example possible:
    f1 = mtf.Fiber(arr1d)
    # 2D arrays should be 3 by n:
    arr2d = np.array([[1, 2], [3, 4], [5, 6]])
    # So this is OK:
    f2 = mtf.Fiber(arr2d)
    # But this raises a ValueError:
    npt.assert_raises(ValueError, mtf.Fiber, arr2d.T)
    # This should also raise (first dim is 4, rather than 3):
    npt.assert_raises(ValueError, mtf.Fiber, np.empty((4, 10)))
    # This should also raise (funky affine):
    npt.assert_raises(ValueError, mtf.Fiber, np.empty((3, 10)), np.eye(5))
    # This should be OK:
    f3 = mtf.Fiber(np.array(arr2d), affine=np.eye(4), fiber_stats=dict(a=1))
    npt.assert_equal(f3.fiber_stats, {'a': 1})
def test_Fiber_tensors():
    """
    Test generation of tensors from fiber coordinates
    """
    bvecs = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    bvals = [1, 1, 1]
    f1 = mtf.Fiber([[2, 2, 3], [3, 3, 4], [4, 4, 5]])
    # Values for axial and radial diffusivity randomly chosen:
    ad = np.random.rand()
    rd = np.random.rand()
    tensors = f1.tensors(ad, rd)
    npt.assert_equal(tensors[0], np.diag([ad, rd, rd]).ravel())
    npt.assert_equal(len(tensors), len(f1.coords))
def test_Fiber_predicted_signal():
    """
    Test fiber prediction of the signal along its coordinates
    """
    f1 = mtf.Fiber([[2, 2, 3, 5], [3, 3, 4, 6], [4, 4, 5, 7]])
    bvecs = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
    bvals = [1, 1, 1]
    ad = np.random.rand()
    rd = np.random.rand()
    sig = f1.predicted_signal(bvecs, bvals, ad, rd)

    # Or just one number:
    S0 = np.random.rand()
    sig = f1.predicted_signal(bvecs, bvals, ad, rd)
def track(model, data, sphere=None, step_size=1, angle_limit=20, seeds=None,
          mask=None, density=[2, 2, 2], voxel_size=[1, 1, 1]):
    """
    Interface for tracking based on fiber ODF models

    `model` needs to have a `fit` method, such that model.fit(data).odf(sphere)
    is a legitimate ODF (that is, has dimensions (x, y, z, n_vertices), where
    n_vertices refers to the vertices of the provided sphere).

    `mask` is a binary array defining where to seed and track.
    """
    # If no sphere is provided, we will use the dipy symmetrical sphere with
    # 724 vertices. That should be enough.
    if sphere is None:
        sphere = dpd.get_sphere('symmetric724')

    stepper = dpt.FixedSizeStepper(step_size)
    interpolator = dpt.NearestNeighborInterpolator(data, voxel_size)
    if seeds is None:
        seeds = dpu.seeds_from_mask(mask, density, voxel_size)
    pwt = dpt.ProbabilisticOdfWeightedTracker(model, interpolator, mask,
                                              stepper, angle_limit, seeds,
                                              sphere)
    pwt_streamlines = list(pwt)
    fibers = []
    for f in pwt_streamlines:
        fibers.append(ozf.Fiber(f))

    return ozf.FiberGroup(fibers)
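# A minimal usage sketch for track (illustrative only: `model`, `data` and
# `mask` are placeholders for a fitted ODF model, a 4D data array and a binary
# brain mask, none of which are constructed in this module):
def _example_track(model, data, mask):
    """
    Illustrative only: run the probabilistic tracker with two seeds per voxel
    dimension inside the mask and get back a FiberGroup.
    """
    fg = track(model, data, mask=mask, step_size=1, angle_limit=20,
               density=[2, 2, 2], voxel_size=[1, 1, 1])
    return fg.n_fibers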
def fg_from_pdb(file_name, verbose=True):
    """
    Read the definition of a fiber-group from a .pdb file.

    Parameters
    ----------
    file_name: str
       Full path to the .pdb file

    Returns
    -------
    A FiberGroup object

    Note
    ----
    This reads version 2 and version 3 PDB files. For the full file-format
    spec, see the osmosis.io module top-level docstring.
    """
    # Read the file as binary info:
    f_obj = open(file_name, 'rb')
    f_read = f_obj.read()
    f_obj.close()
    # This is an updatable index into this read:
    idx = 0

    # The first part is an int encoding the offset to the fiber part:
    offset, idx = _unpacker(f_read, idx, 1)

    # Next are doubles encoding the xform (4 by 4 = 16 of them):
    xform, idx = _unpacker(f_read, idx, 16, 'double')
    xform = np.reshape(xform, (4, 4))

    # Next is an int encoding the number of stats:
    numstats, idx = _unpacker(f_read, idx, 1)

    # The stats header is a dict with lists holding the stat metadata, one
    # entry per stat:
    stats_header = dict(luminance_encoding=[],  # int => bool
                        computed_per_point=[],  # int => bool
                        viewable=[],  # int => bool
                        agg_name=[],  # char array => string
                        local_name=[],  # char array => string
                        uid=[]  # int
                        )

    # Read the stats header:
    counter = 0
    while counter < numstats:
        counter += 1
        for k in ["luminance_encoding", "computed_per_point", "viewable"]:
            this, idx = _unpacker(f_read, idx, 1)
            stats_header[k].append(bool(this))

        for k in ["agg_name", "local_name"]:
            this, idx = _unpacker(f_read, idx, 255, 'char')
            stats_header[k].append(_word_maker(this))
        # Must have integer reads be word aligned (?):
        idx += 2
        this, idx = _unpacker(f_read, idx, 1)
        stats_header["uid"].append(this)

    # We skip the whole bit with the algorithms and go straight to the version
    # number, which is one int length before the fibers:
    idx = offset - 4
    version, idx = _unpacker(f_read, idx, 1)
    if int(version) < 2:
        raise ValueError("Can only read PDB version 2 or version 3 files")
    elif verbose:
        print("Loading a PDB version %s file from: %s" % (int(version),
                                                          file_name))

    if int(version) == 2:
        idx = offset

    # How many fibers?
    numpaths, idx = _unpacker(f_read, idx, 1)

    if int(version) == 2:
        pts = []
        if verbose:
            prog_bar = ProgressBar(numpaths[0])
            f_name = inspect.stack()[0][3]

        f_stats = []
        n_stats = []
        for p_idx in range(numpaths):
            f_stats_dict = {}
            n_stats_dict = {}

            # Keep track of where you are right now:
            ppos = idx
            path_offset, idx = _unpacker(f_read, idx, 1)
            n_nodes, idx = _unpacker(f_read, idx, 1)
            # As far as I can tell the following two don't matter much:
            algo_type, idx = _unpacker(f_read, idx, 1)
            seed_pt_idx, idx = _unpacker(f_read, idx, 1)
            # Read out the per-path stats:
            for stat_idx in range(numstats):
                per_fiber_stat, idx = _unpacker(f_read, idx, 1, 'double')
                f_stats_dict[stats_header["local_name"][stat_idx]] = \
                    per_fiber_stat
            f_stats.append(f_stats_dict)
            # Skip forward to where the paths themselves are:
            idx = ppos
            # Read the nodes:
            pathways, idx = _unpacker(f_read, idx, n_nodes * 3, 'double')

            pts.append(np.reshape(pathways, (n_nodes, 3)).T)
            for stat_idx in range(numstats):
                if stats_header["computed_per_point"][stat_idx]:
                    name = stats_header["local_name"][stat_idx]
                    n_stats_dict[name], idx = _unpacker(f_read, idx, n_nodes,
                                                        'double')

            n_stats.append(n_stats_dict)

        fibers = []
        # Initialize all the fibers:
        for p_idx in range(numpaths):
            this_fstats_dict = f_stats[p_idx]
            f_stat_k = this_fstats_dict.keys()
            f_stat_v = [this_fstats_dict[k] for k in f_stat_k]
            this_nstats_dict = n_stats[p_idx]
            n_stats_k = this_nstats_dict.keys()
            n_stats_v = [this_nstats_dict[k] for k in n_stats_k]
            fibers.append(ozf.Fiber(pts[p_idx],
                                    xform,
                                    fiber_stats=dict(zip(f_stat_k, f_stat_v)),
                                    node_stats=dict(zip(n_stats_k,
                                                        n_stats_v))))

    elif int(version) == 3:
        # The next few bytes encode the number of points in each fiber:
        pts_per_fiber, idx = _unpacker(f_read, idx, numpaths)
        total_pts = np.sum(pts_per_fiber)
        # Next we have the xyz coords of the nodes in all fibers:
        fiber_pts, idx = _unpacker(f_read, idx, total_pts * 3, 'double')

        # We extract the information on a fiber-by-fiber basis:
        pts_read = 0
        pts = []

        if verbose:
            prog_bar = ProgressBar(numpaths[0])
            f_name = inspect.stack()[0][3]

        for p_idx in range(numpaths):
            n_nodes = pts_per_fiber[p_idx]
            pts.append(np.reshape(
                fiber_pts[pts_read * 3:(pts_read + n_nodes) * 3],
                (n_nodes, 3)).T)
            pts_read += n_nodes
            if verbose:
                prog_bar.animate(p_idx, f_name=f_name)

        f_stats_dict = {}
        for stat_idx in range(numstats):
            per_fiber_stat, idx = _unpacker(f_read, idx, numpaths, 'double')
            # This is a fiber-stat only if it's not computed per point:
            if not stats_header["computed_per_point"][stat_idx]:
                f_stats_dict[stats_header["local_name"][stat_idx]] = \
                    per_fiber_stat

        per_point_stat = []
        n_stats_dict = {}
        for stat_idx in range(numstats):
            pts_read = 0
            # If it is computed per point, it's a node-stat:
            if stats_header["computed_per_point"][stat_idx]:
                name = stats_header["local_name"][stat_idx]
                n_stats_dict[name] = []
                per_point_stat, idx = _unpacker(f_read, idx, total_pts,
                                                'double')
                for p_idx in range(numpaths):
                    n_stats_dict[name].append(
                        per_point_stat[pts_read:pts_read +
                                       pts_per_fiber[p_idx]])

                    pts_read += pts_per_fiber[p_idx]
            else:
                per_point_stat.append([])

        fibers = []
        # Initialize all the fibers:
        for p_idx in range(numpaths):
            f_stat_k = f_stats_dict.keys()
            f_stat_v = [f_stats_dict[k][p_idx] for k in f_stat_k]
            n_stats_k = n_stats_dict.keys()
            n_stats_v = [n_stats_dict[k][p_idx] for k in n_stats_k]
            fibers.append(ozf.Fiber(pts[p_idx],
                                    xform,
                                    fiber_stats=dict(zip(f_stat_k, f_stat_v)),
                                    node_stats=dict(zip(n_stats_k,
                                                        n_stats_v))))

    if verbose:
        print("Done reading from file")

    name = os.path.split(file_name)[-1].split('.')[0]
    return ozf.FiberGroup(fibers, name=name, affine=xform)
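# A minimal usage sketch for fg_from_pdb (illustrative only; the file name is
# hypothetical):
def _example_fg_from_pdb():
    """
    Illustrative only: read a version 2/3 .pdb file and inspect the resulting
    FiberGroup and the affine read from its header.
    """
    fg = fg_from_pdb('fibers.pdb', verbose=False)
    return fg.n_fibers, fg.affine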