def test_sf_to_sh():
    # Subdividing a hemi_icosahedron twice produces 81 unique points, which
    # is more than enough to fit an order 8 (45 coefficients) spherical harmonic
    sphere = hemi_icosahedron.subdivide(2)
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))
    mevecs = [np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]),
              np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]])]
    odf = multi_tensor_odf(sphere.vertices, [0.5, 0.5], mevals, mevecs)

    # 1D case with the 3 basis functions
    odf_sh = sf_to_sh(odf, sphere, 8)
    odf2 = sh_to_sf(odf_sh, sphere, 8)
    assert_array_almost_equal(odf, odf2, 2)

    odf_sh = sf_to_sh(odf, sphere, 8, "mrtrix")
    odf2 = sh_to_sf(odf_sh, sphere, 8, "mrtrix")
    assert_array_almost_equal(odf, odf2, 2)

    odf_sh = sf_to_sh(odf, sphere, 8, "fibernav")
    odf2 = sh_to_sf(odf_sh, sphere, 8, "fibernav")
    assert_array_almost_equal(odf, odf2, 2)

    # 2D case
    odf2d = np.vstack((odf2, odf))
    odf2d_sh = sf_to_sh(odf2d, sphere, 8)
    odf2d_sf = sh_to_sf(odf2d_sh, sphere, 8)
    assert_array_almost_equal(odf2d, odf2d_sf, 2)
def create_anisopowermap(bvec_path, diffdata):
    import os
    import nibabel as nib
    import numpy as np
    from dipy.core.sphere import HemiSphere
    from dipy.reconst.shm import anisotropic_power, sf_to_sh

    bvecs_xyz = np.loadtxt(bvec_path)
    bvecs_xyz_array = np.array(bvecs_xyz[:, 1:]).transpose()
    gtab_hemisphere = HemiSphere(xyz=bvecs_xyz_array)

    img = nib.load(diffdata)
    diffdata = img.get_data()
    diffdatashell = diffdata[:, :, :, 1:]
    aff = img.get_affine()

    myshs = sf_to_sh(diffdatashell, gtab_hemisphere, sh_order=2)
    anisomap = anisotropic_power(myshs)
    # Add in a brain masking step here, if beneficial to end result

    anisopwr_savepath = os.path.abspath('anisotropic_power_map.nii.gz')
    img = nib.Nifti1Image(anisomap, aff)
    img.to_filename(anisopwr_savepath)
    return anisopwr_savepath
def test_r2_term_odf_sharp():
    SNR = None
    S0 = 1
    angle = 75

    _, fbvals, fbvecs = get_data('small_64D')
    bvals = np.load(fbvals)
    bvecs = np.load(fbvecs)
    sphere = get_sphere('symmetric724')
    gtab = gradient_table(bvals, bvecs)
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))

    S, sticks = multi_tensor(gtab, mevals, S0, angles=[(0, 0), (angle, 0)],
                             fractions=[50, 50], snr=SNR)

    mevecs = [all_tensor_evecs(sticks[0]).T,
              all_tensor_evecs(sticks[1]).T]

    odf_gt = multi_tensor_odf(sphere.vertices, [0.5, 0.5], mevals, mevecs)
    odfs_sh = sf_to_sh(odf_gt, sphere, sh_order=8, basis_type=None)

    fodf_sh = odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15.,
                              sh_order=8, lambda_=1., tau=0.1, r2_term=True)
    fodf = sh_to_sf(fodf_sh, sphere, sh_order=8, basis_type=None)

    directions_gt, _, _ = peak_directions(odf_gt, sphere)
    directions, _, _ = peak_directions(fodf, sphere)
    ang_sim = angular_similarity(directions_gt, directions)

    assert_equal(ang_sim > 1.9, True)
    assert_equal(directions.shape[0], 2)
def sh_estimate(inFile, dirsInFile, outFile, rank=4, smoothness=0.0):
    in_nifti = nib.load(inFile)
    refaff = in_nifti.get_affine()
    data = in_nifti.get_data()

    vertices = np.loadtxt(dirsInFile)
    sphere = Sphere(xyz=vertices)

    odf_sh = sf_to_sh(data, sphere, int(rank), "mrtrix", smoothness)

    sh_out = nib.Nifti1Image(odf_sh.astype('float32'), refaff)
    nib.save(sh_out, outFile)
def gqi(training, category, snr, denoised, odeconv, tv, method,
        weight=0.1, sl=3.):

    data, affine, gtab, mask, evals, S0, prefix = prepare(
        training, category, snr, denoised, odeconv, tv, method)

    model = GeneralizedQSamplingModel(gtab,
                                      method='gqi2',
                                      sampling_length=sl,
                                      normalize_peaks=False)
    fit = model.fit(data, mask)

    sphere = get_sphere('symmetric724')
    odf = fit.odf(sphere)

    if odeconv == True:
        odf_sh = sf_to_sh(odf, sphere, sh_order=8, basis_type='mrtrix')
        # nib.save(nib.Nifti1Image(odf_sh, affine), model_tag + 'odf_sh.nii.gz')

        reg_sphere = get_sphere('symmetric724')
        fodf_sh = odf_sh_to_sharp(odf_sh, reg_sphere, basis='mrtrix',
                                  ratio=3.8 / 16.6, sh_order=8,
                                  Lambda=1., tau=1.)
        # nib.save(nib.Nifti1Image(odf_sh, affine), model_tag + 'fodf_sh.nii.gz')

        fodf_sh[np.isnan(fodf_sh)] = 0

        r, theta, phi = cart2sphere(sphere.x, sphere.y, sphere.z)
        B_regul, m, n = real_sph_harm_mrtrix(8, theta[:, None], phi[:, None])

        fodf = np.dot(fodf_sh, B_regul.T)
        odf = fodf

    if tv == True:
        odf = tv_denoise_4d(odf, weight=weight)

    save_odfs_peaks(training, odf, affine, sphere, dres, prefix)
def save_odfs_peaks(training, odf, affine, sphere, dres, prefix):
    nib.save(nib.Nifti1Image(odf, affine), dres + prefix + 'odf.nii.gz')

    peaks_extract(dres + prefix + 'peaks.nii.gz',
                  odf, affine, sphere,
                  relative_peak_threshold=.3,
                  peak_normalize=1,
                  min_separation_angle=25,
                  max_peak_number=5)

    odf_sh = sf_to_sh(odf, sphere, sh_order=8, basis_type='mrtrix')
    nib.save(nib.Nifti1Image(odf_sh, affine), dres + prefix + 'odf_sh.nii.gz')

    if training == True:
        return training_check(dres, prefix)
def test_r2_term_odf_sharp():
    SNR = None
    S0 = 1
    angle = 45  # 45 degrees is a very tight angle to disentangle

    _, fbvals, fbvecs = get_data('small_64D')
    bvals = np.load(fbvals)
    bvecs = np.load(fbvecs)
    sphere = get_sphere('symmetric724')
    gtab = gradient_table(bvals, bvecs)
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))

    angles = [(0, 0), (angle, 0)]
    S, sticks = multi_tensor(gtab, mevals, S0, angles=angles,
                             fractions=[50, 50], snr=SNR)

    odf_gt = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50])
    odfs_sh = sf_to_sh(odf_gt, sphere, sh_order=8, basis_type=None)

    fodf_sh = odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15.,
                              sh_order=8, lambda_=1., tau=0.1, r2_term=True)
    fodf = sh_to_sf(fodf_sh, sphere, sh_order=8, basis_type=None)

    directions_gt, _, _ = peak_directions(odf_gt, sphere)
    directions, _, _ = peak_directions(fodf, sphere)
    ang_sim = angular_similarity(directions_gt, directions)

    assert_equal(ang_sim > 1.9, True)
    assert_equal(directions.shape[0], 2)

    # This should pass as well
    sdt_model = ConstrainedSDTModel(gtab, ratio=3 / 15., sh_order=8)
    sdt_fit = sdt_model.fit(S)
    fodf = sdt_fit.odf(sphere)

    directions_gt, _, _ = peak_directions(odf_gt, sphere)
    directions, _, _ = peak_directions(fodf, sphere)
    ang_sim = angular_similarity(directions_gt, directions)

    assert_equal(ang_sim > 1.9, True)
    assert_equal(directions.shape[0], 2)
def test_odf_sh_to_sharp():
    SNR = None
    S0 = 1

    _, fbvals, fbvecs = get_data('small_64D')
    bvals = np.load(fbvals)
    bvecs = np.load(fbvecs)
    gtab = gradient_table(bvals, bvecs)
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))

    S, sticks = multi_tensor(gtab, mevals, S0, angles=[(10, 0), (100, 0)],
                             fractions=[50, 50], snr=SNR)

    sphere = get_sphere('symmetric724')

    qb = QballModel(gtab, sh_order=8, assume_normed=True)
    qbfit = qb.fit(S)
    odf_gt = qbfit.odf(sphere)

    Z = np.linalg.norm(odf_gt)

    odfs_gt = np.zeros((3, 1, 1, odf_gt.shape[0]))
    odfs_gt[:, :, :] = odf_gt[:]

    odfs_sh = sf_to_sh(odfs_gt, sphere, sh_order=8, basis_type=None)
    odfs_sh /= Z

    fodf_sh = odf_sh_to_sharp(odfs_sh, sphere, basis=None, ratio=3 / 15.,
                              sh_order=8, lambda_=1., tau=0.1)
    fodf = sh_to_sf(fodf_sh, sphere, sh_order=8, basis_type=None)

    directions2, _, _ = peak_directions(fodf[0, 0, 0], sphere)
    assert_equal(directions2.shape[0], 2)
def test_sf_to_sh():
    # Subdividing a hemi_icosahedron twice produces 81 unique points, which
    # is more than enough to fit an order 8 (45 coefficients) spherical harmonic
    sphere = hemi_icosahedron.subdivide(2)
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))
    angles = [(0, 0), (90, 0)]
    odf = multi_tensor_odf(sphere.vertices, mevals, angles, [50, 50])

    # 1D case with the 3 basis functions
    odf_sh = sf_to_sh(odf, sphere, 8)
    odf2 = sh_to_sf(odf_sh, sphere, 8)
    assert_array_almost_equal(odf, odf2, 2)

    odf_sh = sf_to_sh(odf, sphere, 8, "tournier07")
    odf2 = sh_to_sf(odf_sh, sphere, 8, "tournier07")
    assert_array_almost_equal(odf, odf2, 2)

    # Test the basis naming deprecation
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        odf_sh_mrtrix = sf_to_sh(odf, sphere, 8, "mrtrix")
        odf2_mrtrix = sh_to_sf(odf_sh_mrtrix, sphere, 8, "mrtrix")
        assert_array_almost_equal(odf, odf2_mrtrix, 2)
        assert len(w) != 0
        assert issubclass(w[-1].category, DeprecationWarning)
        warnings.simplefilter("default", DeprecationWarning)

    odf_sh = sf_to_sh(odf, sphere, 8, "descoteaux07")
    odf2 = sh_to_sf(odf_sh, sphere, 8, "descoteaux07")
    assert_array_almost_equal(odf, odf2, 2)

    # Test the basis naming deprecation
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        odf_sh_fibernav = sf_to_sh(odf, sphere, 8, "fibernav")
        odf2_fibernav = sh_to_sf(odf_sh_fibernav, sphere, 8, "fibernav")
        assert_array_almost_equal(odf, odf2_fibernav, 2)
        assert len(w) != 0
        assert issubclass(w[-1].category, DeprecationWarning)
        warnings.simplefilter("default", DeprecationWarning)

    # 2D case
    odf2d = np.vstack((odf2, odf))
    odf2d_sh = sf_to_sh(odf2d, sphere, 8)
    odf2d_sf = sh_to_sf(odf2d_sh, sphere, 8)
    assert_array_almost_equal(odf2d, odf2d_sf, 2)
def test_normalization():
    """ Test the normalization routine applied after a convolution """

    # create kernel
    D33 = 1.0
    D44 = 0.04
    t = 1
    num_orientations = 5
    k = EnhancementKernel(D33, D44, t, orientations=num_orientations,
                          force_recompute=True)

    # create a constant dataset
    numorientations = k.get_orientations().shape[0]
    spike = np.ones((7, 7, 7, numorientations), dtype=np.float64)

    # convert dataset to SH
    spike_sh = sf_to_sh(spike, k.get_sphere(), sh_order=8)

    # convolve kernel with delta spike and apply normalization
    csd_enh = convolve(spike_sh, k, sh_order=8, test_mode=True,
                       normalize=True)

    # convert dataset to DSF
    csd_enh_dsf = sh_to_sf(csd_enh, k.get_sphere(), sh_order=8,
                           basis_type=None)

    # test if the normalization is performed correctly
    npt.assert_almost_equal(np.amax(csd_enh_dsf), np.amax(spike))
We can now express this signal as a series of SH coefficients using
``sf_to_sh``. This function converts a series of SF values into a series of
SH coefficients. For more information on SH bases, see :ref:`sh-basis`. For
this example, we will use the ``descoteaux07`` basis up to a maximum SH order
of 8.
"""

from dipy.reconst.shm import sf_to_sh

# Change this value to try out other bases
sh_basis = 'descoteaux07'
# Change this value to try other maximum orders
sh_order = 8

sh_coeffs = sf_to_sh(odf, sph, sh_order, sh_basis)

"""
``sh_coeffs`` is an array containing the SH coefficients multiplying the SH
functions of the chosen basis. We can use it as input of ``sh_to_sf`` to
reconstruct our original signal. We will now reproject our signal on a high
resolution sphere using ``sh_to_sf``.
"""

from dipy.data import get_sphere
from dipy.reconst.shm import sh_to_sf

high_res_sph = get_sphere('symmetric724').subdivide(2)
reconst = sh_to_sf(sh_coeffs, high_res_sph, sh_order, sh_basis)

scene.clear()
odf_actor = actor.odf_slicer(reconst[None, None, None, :],
                             sphere=high_res_sph)
sphere = get_sphere('symmetric724')

for NC in [1, 2, 3]:
    for iso in [0, 1]:
        for fr in [0, 1]:
            for snr in [30, 10]:
                for typ in [1]:
                    for ang_t in [23, 33, 25, 35]:  # [0, 10, 20, 30]
                        for category in ['dti', 'hardi']:

                            filename = '{}_pso_odf_sf_sel={}_NC={}_iso={}_fr={}_Np={}_Ni={}_snr={}_type={}'.format(
                                category, ang_t, NC, iso, fr, Np, Ni, snr, typ)
                            filepath = '/media/Data/work/isbi2013/pso_odf_sf/' + filename + '.nii.gz'

                            if os.path.exists(filepath):
                                odf = nib.load(filepath)
                                affine = odf.get_affine()
                                odf = odf.get_data()

                                print(filename)

                                odf_sh = sf_to_sh(odf, sphere, sh_order=8,
                                                  basis_type='mrtrix')

                                filename2 = '{}_pso_odf_sh_sel={}_NC={}_iso={}_fr={}_Np={}_Ni={}_snr={}_type={}'.format(
                                    category, ang_t, NC, iso, fr, Np, Ni, snr, typ)

                                nib.save(nib.Nifti1Image(odf_sh, affine),
                                         '/media/Data/work/isbi2013/pso_odf_sh/' + filename2 + '.nii.gz')
def compute_sh_coefficients(dwi, gradient_table, sh_order=4,
                            basis_type='descoteaux07', smooth=0.006,
                            use_attenuation=False, force_b0_threshold=False,
                            mask=None, sphere=None):
    """Fit a diffusion signal with spherical harmonics coefficients.

    Parameters
    ----------
    dwi : nib.Nifti1Image object
        Diffusion signal as weighted images (4D).
    gradient_table : GradientTable
        Dipy object that contains all bvals and bvecs.
    sh_order : int, optional
        SH order to fit, by default 4.
    smooth : float, optional
        Lambda-regularization coefficient in the SH fit, by default 0.006.
    basis_type : str
        Either 'tournier07' or 'descoteaux07'.
    use_attenuation : bool, optional
        If true, we will use DWI attenuation. [False]
    force_b0_threshold : bool, optional
        If set, will continue even if the minimum bvalue is suspiciously high.
    mask : nib.Nifti1Image object, optional
        Binary mask. Only data inside the mask will be used for computations
        and reconstruction.
    sphere : Sphere
        Dipy object. If not provided, will use Sphere(xyz=bvecs).

    Returns
    -------
    sh_coeffs : np.ndarray with shape (X, Y, Z, #coeffs)
        Spherical harmonics coefficients at every voxel. The actual number of
        coefficients depends on `sh_order`.
    """
    # Extracting infos
    b0_mask = gradient_table.b0s_mask
    bvecs = gradient_table.bvecs
    bvals = gradient_table.bvals

    # Checks
    if not is_normalized_bvecs(bvecs):
        logging.warning("Your b-vectors do not seem normalized...")
        bvecs = normalize_bvecs(bvecs)

    check_b0_threshold(force_b0_threshold, bvals.min())

    # Ensure that this is on a single shell.
    shell_values, _ = identify_shells(bvals)
    shell_values.sort()
    if force_b0_threshold:
        b0_threshold = bvals.min()
    else:
        b0_threshold = DEFAULT_B0_THRESHOLD
    if shell_values.shape[0] != 2 or shell_values[0] > b0_threshold:
        raise ValueError("Can only work on single shell signals.")

    # Keeping b0-based infos
    bvecs = bvecs[np.logical_not(b0_mask)]
    weights = dwi[..., np.logical_not(b0_mask)]

    # Compute attenuation using the b0.
    if use_attenuation:
        b0 = dwi[..., b0_mask].mean(axis=3)
        weights = compute_dwi_attenuation(weights, b0)

    # Get cartesian coords from bvecs
    if sphere is None:
        sphere = Sphere(xyz=bvecs)

    # Fit SH
    sh = sf_to_sh(weights, sphere, sh_order, basis_type, smooth=smooth)

    # Apply mask
    if mask is not None:
        sh *= mask[..., None]

    return sh
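A minimal usage sketch for the helper above, under stated assumptions: the file names (``dwi.nii.gz``, ``dwi.bval``, ``dwi.bvec``) are placeholders, the function's own dependencies are assumed importable, and the DWI is passed as a NumPy array because the body indexes ``dwi`` directly.

import nibabel as nib
import numpy as np
from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs

# Placeholder paths; replace with your own data.
img = nib.load('dwi.nii.gz')
data = np.asanyarray(img.dataobj)
bvals, bvecs = read_bvals_bvecs('dwi.bval', 'dwi.bvec')
gtab = gradient_table(bvals, bvecs)

# Fit order-6 SH coefficients in the descoteaux07 basis.
sh = compute_sh_coefficients(data, gtab, sh_order=6,
                             basis_type='descoteaux07')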
def test_sf_to_sh():
    # Subdividing a hemi_icosahedron twice produces 81 unique points, which
    # is more than enough to fit an order 8 (45 coefficients) spherical harmonic
    hemisphere = hemi_icosahedron.subdivide(2)
    mevals = np.array([[0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]])
    angles = [(0, 0), (60, 0)]
    odf = multi_tensor_odf(hemisphere.vertices, mevals, angles, [50, 50])

    # 1D case with the 2 symmetric basis functions

    # Tournier basis
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(odf, hemisphere, 8, "tournier07")
        odf_reconst = sh_to_sf(odf_sh, hemisphere, 8, "tournier07")
        assert_array_almost_equal(odf, odf_reconst, 2)

    # Legacy definition
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(odf, hemisphere, 8, "tournier07", legacy=True)
        odf_reconst = sh_to_sf(odf_sh, hemisphere, 8, "tournier07",
                               legacy=True)
        assert_array_almost_equal(odf, odf_reconst, 2)

    # Descoteaux basis
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(odf, hemisphere, 8, "descoteaux07")
        odf_reconst = sh_to_sf(odf_sh, hemisphere, 8, "descoteaux07")
        assert_array_almost_equal(odf, odf_reconst, 2)

    # Legacy definition
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(odf, hemisphere, 8, "descoteaux07", legacy=True)
        odf_reconst = sh_to_sf(odf_sh, hemisphere, 8, "descoteaux07",
                               legacy=True)
        assert_array_almost_equal(odf, odf_reconst, 2)

    # We now create an asymmetric signal
    # to try out our full SH basis
    mevals = np.array([[0.0015, 0.0003, 0.0003]])
    angles = [(0, 0)]
    odf2 = multi_tensor_odf(hemisphere.vertices, mevals, angles, [100])

    # We simulate our asymmetric signal by using a different ODF
    # per hemisphere. The sphere used is a concatenation of the
    # vertices of our hemisphere, for a total of 162 vertices.
    sphere = Sphere(xyz=np.vstack((hemisphere.vertices,
                                   -hemisphere.vertices)))
    asym_odf = np.append(odf, odf2)

    # Try out full bases with order 10 (121 coefficients)

    # Tournier basis
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(asym_odf, sphere, 10, 'tournier07',
                          full_basis=True)
        odf_reconst = sh_to_sf(odf_sh, sphere, 10, 'tournier07',
                               full_basis=True)
        assert_array_almost_equal(odf_reconst, asym_odf, 2)

    # Legacy definition
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(asym_odf, sphere, 10, 'tournier07',
                          full_basis=True, legacy=True)
        odf_reconst = sh_to_sf(odf_sh, sphere, 10, 'tournier07',
                               full_basis=True, legacy=True)
        assert_array_almost_equal(odf_reconst, asym_odf, 2)

    # Descoteaux basis
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(asym_odf, sphere, 10, 'descoteaux07',
                          full_basis=True)
        odf_reconst = sh_to_sf(odf_sh, sphere, 10, 'descoteaux07',
                               full_basis=True)
        assert_array_almost_equal(odf_reconst, asym_odf, 2)

    # Legacy definition
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf_sh = sf_to_sh(asym_odf, sphere, 10, 'descoteaux07',
                          full_basis=True, legacy=True)
        odf_reconst = sh_to_sf(odf_sh, sphere, 10, 'descoteaux07',
                               full_basis=True, legacy=True)
        assert_array_almost_equal(odf_reconst, asym_odf, 2)

    # An invalid basis name should raise an error
    assert_raises(ValueError, sh_to_sf, odf, hemisphere, basis_type="")
    assert_raises(ValueError, sf_to_sh, odf_sh, hemisphere, basis_type="")

    # 2D case
    odf2d = np.vstack((odf, odf))
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        odf2d_sh = sf_to_sh(odf2d, hemisphere, 8)
        odf2d_sf = sh_to_sf(odf2d_sh, hemisphere, 8)
        assert_array_almost_equal(odf2d, odf2d_sf, 2)
We can now express this signal as a series of SH coefficients using
``sf_to_sh``. This function converts a series of SF values into a series of
SH coefficients. For more information on SH bases, see :ref:`sh-basis`. For
this example, we will use the ``descoteaux07`` basis up to a maximum SH order
of 8.
"""

from dipy.reconst.shm import sf_to_sh

# Change this value to try out other bases
sh_basis = 'descoteaux07'
# Change this value to try other maximum orders
sh_order = 8

sh_coeffs = sf_to_sh(odf, sph, sh_order, sh_basis)

"""
``sh_coeffs`` is an array containing the SH coefficients multiplying the SH
functions of the chosen basis. We can use it as input of ``sh_to_sf`` to
reconstruct our original signal. We will now reproject our signal on a high
resolution sphere using ``sh_to_sf``.
"""

from dipy.data import get_sphere
from dipy.reconst.shm import sh_to_sf

high_res_sph = get_sphere('symmetric724').subdivide(2)
reconst = sh_to_sf(sh_coeffs, high_res_sph, sh_order, sh_basis)

window.rm_all(ren)
odf_actor = actor.odf_slicer(reconst[None, None, None, :],
                             sphere=high_res_sph)
def test_convert_sh_to_legacy():
    hemisphere = hemi_icosahedron.subdivide(2)
    mevals = np.array([[0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]])
    angles = [(0, 0), (60, 0)]
    odf = multi_tensor_odf(hemisphere.vertices, mevals, angles, [50, 50])

    sh_coeffs = sf_to_sh(odf, hemisphere, 8, legacy=False)
    converted_coeffs = convert_sh_to_legacy(sh_coeffs, 'descoteaux07')

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=descoteaux07_legacy_msg,
                                category=PendingDeprecationWarning)
        expected_coeffs = sf_to_sh(odf, hemisphere, 8, legacy=True)

    assert_array_almost_equal(converted_coeffs, expected_coeffs, 2)

    sh_coeffs = sf_to_sh(odf, hemisphere, 8, basis_type='tournier07',
                         legacy=False)
    converted_coeffs = convert_sh_to_legacy(sh_coeffs, 'tournier07')

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        expected_coeffs = sf_to_sh(odf, hemisphere, 8,
                                   basis_type='tournier07', legacy=True)

    assert_array_almost_equal(converted_coeffs, expected_coeffs, 2)

    # 2D case
    odfs = np.array([odf, odf])
    sh_coeffs = sf_to_sh(odfs, hemisphere, 8, basis_type='tournier07',
                         full_basis=True, legacy=False)
    converted_coeffs = convert_sh_to_legacy(sh_coeffs, 'tournier07',
                                            full_basis=True)

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message=tournier07_legacy_msg,
                                category=PendingDeprecationWarning)
        expected_coeffs = sf_to_sh(odfs, hemisphere, 8,
                                   basis_type='tournier07', legacy=True,
                                   full_basis=True)

    assert_array_almost_equal(converted_coeffs, expected_coeffs, 2)

    assert_raises(ValueError, convert_sh_to_legacy, sh_coeffs, '', True)
def main():
    logging.basicConfig(level=logging.INFO)
    parser = _build_arg_parser()
    args = parser.parse_args()

    required = [args.in_bundle, args.in_fodf, args.in_mask]
    assert_inputs_exist(parser, required)

    out_efod = os.path.join(args.out_dir,
                            '{0}efod.nii.gz'.format(args.out_prefix))
    out_priors = os.path.join(args.out_dir,
                              '{0}priors.nii.gz'.format(args.out_prefix))
    out_todi_mask = os.path.join(args.out_dir,
                                 '{0}todi_mask.nii.gz'.format(args.out_prefix))
    out_endpoints_mask = os.path.join(
        args.out_dir, '{0}endpoints_mask.nii.gz'.format(args.out_prefix))

    if args.out_dir and not os.path.isdir(args.out_dir):
        os.mkdir(args.out_dir)

    required = [out_efod, out_priors, out_todi_mask, out_endpoints_mask]
    assert_outputs_exist(parser, args, required)

    img_sh = nib.load(args.in_fodf)
    sh_shape = img_sh.shape
    sh_order = find_order_from_nb_coeff(sh_shape)
    img_mask = nib.load(args.in_mask)

    sft = load_tractogram(args.in_bundle, args.in_fodf,
                          trk_header_check=True)
    sft.to_vox()
    streamlines = sft.streamlines
    if len(streamlines) < 1:
        raise ValueError('The input bundle contains no streamline.')

    # Compute TODI from streamlines
    with TrackOrientationDensityImaging(img_mask.shape,
                                        'repulsion724') as todi_obj:
        todi_obj.compute_todi(streamlines, length_weights=True)
        todi_obj.smooth_todi_dir()
        todi_obj.smooth_todi_spatial(sigma=args.todi_sigma)

        # Fancy masking of 1d indices to limit spatial dilation to WM
        sub_mask_3d = np.logical_and(
            get_data_as_mask(img_mask),
            todi_obj.reshape_to_3d(todi_obj.get_mask()))
        sub_mask_1d = sub_mask_3d.flatten()[todi_obj.get_mask()]
        todi_sf = todi_obj.get_todi()[sub_mask_1d] ** 2

    # The priors should always be between 0 and 1
    # A minimum threshold is set to prevent misaligned FOD from disappearing
    todi_sf /= np.max(todi_sf, axis=-1, keepdims=True)
    todi_sf[todi_sf < args.sf_threshold] = args.sf_threshold

    # Memory friendly saving, as soon as possible saving then delete
    priors_3d = np.zeros(sh_shape)
    sphere = get_sphere('repulsion724')
    priors_3d[sub_mask_3d] = sf_to_sh(todi_sf, sphere, sh_order=sh_order,
                                      basis_type=args.sh_basis)
    nib.save(nib.Nifti1Image(priors_3d, img_mask.affine), out_priors)
    del priors_3d

    input_sh_3d = img_sh.get_fdata(dtype=np.float32)
    input_sf_1d = sh_to_sf(input_sh_3d[sub_mask_3d], sphere,
                           sh_order=sh_order, basis_type=args.sh_basis)

    # Creation of the enhanced-FOD (direction-wise multiplication)
    mult_sf_1d = input_sf_1d * todi_sf
    del todi_sf

    input_max_value = np.max(input_sf_1d, axis=-1, keepdims=True)
    mult_max_value = np.max(mult_sf_1d, axis=-1, keepdims=True)
    mult_positive_mask = np.squeeze(mult_max_value) > 0.0
    mult_sf_1d[mult_positive_mask] = mult_sf_1d[mult_positive_mask] * \
        input_max_value[mult_positive_mask] / \
        mult_max_value[mult_positive_mask]

    # Memory friendly saving
    input_sh_3d[sub_mask_3d] = sf_to_sh(mult_sf_1d, sphere,
                                        sh_order=sh_order,
                                        basis_type=args.sh_basis)
    nib.save(nib.Nifti1Image(input_sh_3d, img_mask.affine), out_efod)
    del input_sh_3d

    nib.save(nib.Nifti1Image(sub_mask_3d.astype(np.int16), img_mask.affine),
             out_todi_mask)

    endpoints_mask = np.zeros(img_mask.shape, dtype=np.int16)
    for streamline in streamlines:
        if get_data_as_mask(img_mask)[tuple(streamline[0].astype(np.int16))]:
            endpoints_mask[tuple(streamline[0].astype(np.int16))] = 1
            endpoints_mask[tuple(streamline[-1].astype(np.int16))] = 1
    nib.save(nib.Nifti1Image(endpoints_mask, img_mask.affine),
             out_endpoints_mask)
def create_anisopowermap(gtab_file, dwi_file, B0_mask):
    '''
    Estimate an anisotropic power map image to use for registrations.

    Parameters
    ----------
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted image.
    B0_mask : str
        File path to B0 brain mask.

    Returns
    -------
    anisopwr_path : str
        File path to the anisotropic power Nifti1Image.
    B0_mask : str
        File path to B0 brain mask Nifti1Image.
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted Nifti1Image.
    '''
    import os
    from dipy.io import load_pickle
    from dipy.reconst.shm import anisotropic_power, sf_to_sh
    from dipy.core.sphere import HemiSphere

    gtab = load_pickle(gtab_file)
    gtab_hemisphere = HemiSphere(
        xyz=gtab.bvecs[np.where(gtab.b0s_mask == False)])

    img = nib.load(dwi_file)
    aff = img.affine

    anisopwr_path = "%s%s" % (os.path.dirname(B0_mask),
                              '/aniso_power.nii.gz')

    if os.path.isfile(anisopwr_path):
        pass
    else:
        print('Generating anisotropic power map to use for registrations...')
        nodif_B0_img = nib.load(B0_mask)

        dwi_data = np.asarray(img.dataobj)
        for b0 in sorted(list(np.where(gtab.b0s_mask == True)[0]),
                         reverse=True):
            dwi_data = np.delete(dwi_data, b0, 3)

        anisomap = anisotropic_power(
            sf_to_sh(dwi_data, gtab_hemisphere, sh_order=2))
        anisomap[np.isnan(anisomap)] = 0

        masked_data = anisomap * np.asarray(
            nodif_B0_img.dataobj).astype('bool')
        img = nib.Nifti1Image(masked_data.astype(np.float32), aff)
        img.to_filename(anisopwr_path)
        nodif_B0_img.uncache()
        del anisomap

    return anisopwr_path, B0_mask, gtab_file, dwi_file
def main():
    parser = buildArgsParser()
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    if not args.not_all:
        if not args.odf:
            args.odf = 'shore_dodf.nii.gz'
        if not args.rtop:
            args.rtop = 'rtop.nii.gz'
        if not args.msd:
            args.msd = 'msd.nii.gz'
        if not args.pa:
            args.pa = 'pa.nii.gz'

    arglist = [args.odf, args.rtop, args.msd, args.pa]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least ' +
                     'one file to output.')

    for out in arglist:
        if os.path.isfile(out):
            if args.overwrite:
                logging.info('Overwriting "{0}".'.format(out))
            else:
                parser.error(
                    '"{0}" already exists! Use -f to overwrite it.'.format(
                        out))

    vol = nib.load(args.input)
    data = vol.get_data()
    affine = vol.get_affine()

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)
    if bvals.min() != 0:
        if bvals.min() > 20:
            raise ValueError('The minimal bvalue is greater than 20. ' +
                             'This is highly suspicious. Please check ' +
                             'your data to ensure everything is correct.\n' +
                             'Value found: {0}'.format(bvals.min()))
        else:
            logging.warning('Warning: no b=0 image. Setting b0_threshold to ' +
                            'bvals.min() = {0}'.format(bvals.min()))
            gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())
    else:
        gtab = gradient_table(bvals, bvecs)

    if args.mask is None:
        mask = None
    else:
        mask = nib.load(args.mask).get_data().astype(np.bool)
        voxels_with_values_mask = data[:, :, :, 0] > 0
        mask = voxels_with_values_mask * mask

    sphere = get_sphere('repulsion100')

    if args.regul_weighting <= 0:
        logging.info('Now computing SHORE ODF of radial order {0}'.format(
                     args.radial_order) +
                     ' and Laplacian generalized cross-validation')
        shore_model = ShoreOzarslanModel(gtab,
                                         radial_order=args.radial_order,
                                         laplacian_regularization=True,
                                         laplacian_weighting='GCV')
    else:
        logging.info('Now computing SHORE ODF of radial order {0}'.format(
                     args.radial_order) +
                     ' and Laplacian regularization weight of {0}'.format(
                     args.regul_weighting))
        shore_model = ShoreOzarslanModel(
            gtab, radial_order=args.radial_order,
            laplacian_regularization=True,
            laplacian_weighting=args.regul_weighting)

    smfit = shore_model.fit(data, mask)
    odf = smfit.odf(sphere, radial_moment=args.radial_moment)
    odf_sh = sf_to_sh(odf, sphere, sh_order=8, basis_type=args.basis,
                      smooth=0.0)
    rtop = smfit.rtop()
    msd = smfit.msd()
    pa = smfit.propagator_anisotropy()

    if args.odf:
        nib.save(nib.Nifti1Image(odf_sh.astype(np.float32), affine), args.odf)
    if args.rtop:
        nib.save(nib.Nifti1Image(rtop.astype(np.float32), affine), args.rtop)
    if args.msd:
        nib.save(nib.Nifti1Image(msd.astype(np.float32), affine), args.msd)
    if args.pa:
        nib.save(nib.Nifti1Image(pa.astype(np.float32), affine), args.pa)
t = 1
k = EnhancementKernel(D33, D44, t)

"""
Visualize the kernel
"""

from dipy.viz import fvtk
from dipy.data import get_sphere
from dipy.reconst.shm import sf_to_sh, sh_to_sf

ren = fvtk.ren()

# convolve kernel with delta spike
spike = np.zeros((7, 7, 7, k.get_orientations().shape[0]), dtype=np.float64)
spike[3, 3, 3, 0] = 1
spike_shm_conv = convolve(sf_to_sh(spike, k.get_sphere(), sh_order=8), k,
                          sh_order=8, test_mode=True)

sphere = get_sphere('symmetric724')
spike_sf_conv = sh_to_sf(spike_shm_conv, sphere, sh_order=8)
model_kernel = fvtk.sphere_funcs((spike_sf_conv * 6)[3, :, :, :],
                                 sphere,
                                 norm=False,
                                 radial_scale=True)
fvtk.add(ren, model_kernel)
fvtk.camera(ren, pos=(30, 0, 0), focal=(0, 0, 0), viewup=(0, 0, 1),
            verbose=False)
fvtk.record(ren, out_path='kernel.png', size=(900, 900))

"""
.. figure:: kernel.png
   :align: center
def create_anisopowermap(gtab_file, dwi_file, B0_mask):
    """
    Estimate an anisotropic power map image to use for registrations.

    Parameters
    ----------
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted image.
    B0_mask : str
        File path to B0 brain mask.

    Returns
    -------
    anisopwr_path : str
        File path to the anisotropic power Nifti1Image.
    B0_mask : str
        File path to B0 brain mask Nifti1Image.
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted Nifti1Image.

    References
    ----------
    .. [1] Chen, D. Q., Dell'Acqua, F., Rokem, A., Garyfallidis, E.,
       Hayes, D., Zhong, J., & Hodaie, M. (2018). Diffusion Weighted Image
       Co-registration: Investigation of Best Practices. PLoS ONE.
    """
    import os
    from dipy.io import load_pickle
    from dipy.reconst.shm import anisotropic_power, sf_to_sh
    from dipy.core.sphere import HemiSphere, Sphere

    gtab = load_pickle(gtab_file)
    dwi_vertices = gtab.bvecs[np.where(gtab.b0s_mask == False)]
    gtab_hemisphere = HemiSphere(
        xyz=gtab.bvecs[np.where(gtab.b0s_mask == False)])

    try:
        assert len(gtab_hemisphere.vertices) == len(dwi_vertices)
    except BaseException:
        gtab_hemisphere = Sphere(
            xyz=gtab.bvecs[np.where(gtab.b0s_mask == False)])

    img = nib.load(dwi_file)
    aff = img.affine

    anisopwr_path = f"{os.path.dirname(B0_mask)}{'/aniso_power.nii.gz'}"

    if os.path.isfile(anisopwr_path):
        pass
    else:
        print("Generating anisotropic power map to use for registrations...")
        nodif_B0_img = nib.load(B0_mask)

        dwi_data = img.get_fdata(dtype=np.float32)
        for b0 in sorted(list(np.where(gtab.b0s_mask)[0]), reverse=True):
            dwi_data = np.delete(dwi_data, b0, 3)

        anisomap = anisotropic_power(
            sf_to_sh(dwi_data, gtab_hemisphere, sh_order=2))
        anisomap[np.isnan(anisomap)] = 0

        masked_data = anisomap * \
            np.asarray(nodif_B0_img.dataobj).astype("bool")
        img = nib.Nifti1Image(masked_data.astype(np.float32), aff)
        img.to_filename(anisopwr_path)
        nodif_B0_img.uncache()
        del anisomap
        img.uncache()

    return anisopwr_path, B0_mask, gtab_file, dwi_file
t = 1
k = EnhancementKernel(D33, D44, t)

"""
Visualize the kernel
"""

from dipy.viz import window, actor
from dipy.data import get_sphere
from dipy.reconst.shm import sf_to_sh, sh_to_sf

ren = window.Renderer()

# convolve kernel with delta spike
spike = np.zeros((7, 7, 7, k.get_orientations().shape[0]), dtype=np.float64)
spike[3, 3, 3, 0] = 1
spike_shm_conv = convolve(sf_to_sh(spike, k.get_sphere(), sh_order=8), k,
                          sh_order=8, test_mode=True)

sphere = get_sphere('symmetric724')
spike_sf_conv = sh_to_sf(spike_shm_conv, sphere, sh_order=8)
model_kernel = actor.odf_slicer(spike_sf_conv * 6,
                                sphere=sphere,
                                norm=False,
                                scale=0.4)
model_kernel.display(x=3)
ren.add(model_kernel)
ren.set_camera(position=(30, 0, 0), focal_point=(0, 0, 0), view_up=(0, 0, 1))
window.record(ren, out_path='kernel.png', size=(900, 900))
if interactive: