def test_anisotropic_power():
    """Check anisotropic_power() against the analytic value for all-ones
    SH coefficients, across several coefficient counts and norm factors,
    and verify the all-zeros voxel raises no log-of-zero warning."""
    for n_coeffs in [6, 15, 28, 45, 66, 91]:
        for norm_factor in [0.0005, 0.00001]:
            # Create some really simple cases:
            coeffs = np.ones((3, n_coeffs))
            max_order = calculate_max_order(coeffs.shape[-1])
            # For the case where all coeffs == 1, the ap is simply log of the
            # number of even orders up to the maximal order:
            analytic = (np.log(len(range(2, max_order + 2, 2))) -
                        np.log(norm_factor))
            answers = [analytic] * 3
            apvals = anisotropic_power(coeffs, norm_factor=norm_factor)
            assert_array_almost_equal(apvals, answers)
            # Test that this works for single voxel arrays as well:
            assert_array_almost_equal(
                anisotropic_power(coeffs[1], norm_factor=norm_factor),
                answers[1])

    # Test that even when we look at an all-zeros voxel, this
    # avoids a log-of-zero warning:
    with warnings.catch_warnings(record=True) as w:
        # Without "always", a warning already raised earlier in the process
        # is swallowed by the per-module warning registry and the
        # len(w) == 0 check below passes vacuously.
        warnings.simplefilter("always")
        assert_equal(anisotropic_power(np.zeros(6)), 0)
        assert len(w) == 0
def test_anisotropic_power():
    """Validate anisotropic_power() for uniform SH coefficients and for the
    all-zeros voxel (which must not emit a log-of-zero warning)."""
    for n_coeffs in [6, 15, 28, 45, 66, 91]:
        for norm_factor in [0.0005, 0.00001]:
            # Create some really simple cases:
            coeffs = np.ones((3, n_coeffs))
            max_order = calculate_max_order(coeffs.shape[-1])
            # For the case where all coeffs == 1, the ap is simply log of the
            # number of even orders up to the maximal order:
            analytic = (np.log(len(range(2, max_order + 2, 2))) -
                        np.log(norm_factor))
            answers = [analytic] * 3
            apvals = anisotropic_power(coeffs, norm_factor=norm_factor)
            assert_array_almost_equal(apvals, answers)
            # Test that this works for single voxel arrays as well:
            assert_array_almost_equal(
                anisotropic_power(coeffs[1], norm_factor=norm_factor),
                answers[1])

    # Test that even when we look at an all-zeros voxel, this
    # avoids a log-of-zero warning:
    with warnings.catch_warnings(record=True) as w:
        # "always" defeats the warning registry; otherwise previously-seen
        # warnings are not recorded and len(w) == 0 would pass vacuously.
        warnings.simplefilter("always")
        assert_equal(anisotropic_power(np.zeros(6)), 0)
        assert len(w) == 0
def _run_interface(self, runtime):
    """Fit a Q-ball model to the input DWI and write the anisotropic
    power map (APM) as a float32 NIfTI image.

    Returns the (unmodified) nipype ``runtime`` object.
    """
    from dipy.reconst import shm
    from dipy.data import get_sphere
    from dipy.reconst.peaks import peaks_from_model

    gtab = self._get_gradient_table()
    img = nb.load(self.inputs.in_file)
    # get_data() was deprecated and removed in nibabel 5.x; get_fdata()
    # is the supported accessor.
    data = img.get_fdata()
    affine = img.affine

    mask = None
    if isdefined(self.inputs.mask_file):
        # Cast to bool: peaks_from_model only needs a truthy mask.
        mask = nb.load(self.inputs.mask_file).get_fdata().astype(bool)

    # Fit it
    model = shm.QballModel(gtab, 8)
    sphere = get_sphere('symmetric724')
    peaks = peaks_from_model(
        model=model, data=data, relative_peak_threshold=.5,
        min_separation_angle=25, sphere=sphere, mask=mask)
    apm = shm.anisotropic_power(peaks.shm_coeff)

    out_file = self._gen_filename('apm')
    nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file)
    IFLOGGER.info('APM qball image saved as %s', out_file)
    return runtime
def _run_interface(self, runtime):
    """Compute an anisotropic power map from a Q-ball fit of the input
    DWI and save it as a float32 NIfTI; return ``runtime`` unchanged."""
    from dipy.reconst import shm
    from dipy.data import get_sphere
    from dipy.reconst.peaks import peaks_from_model

    gtab = self._get_gradient_table()
    img = nb.load(self.inputs.in_file)
    # nibabel removed get_data() in 5.x; use get_fdata() instead.
    data = img.get_fdata()
    affine = img.affine

    mask = None
    if isdefined(self.inputs.mask_file):
        # Boolean cast is safe here: the mask is only used for inclusion.
        mask = nb.load(self.inputs.mask_file).get_fdata().astype(bool)

    # Fit it
    model = shm.QballModel(gtab, 8)
    sphere = get_sphere('symmetric724')
    peaks = peaks_from_model(
        model=model, data=data, relative_peak_threshold=.5,
        min_separation_angle=25, sphere=sphere, mask=mask)
    apm = shm.anisotropic_power(peaks.shm_coeff)

    out_file = self._gen_filename('apm')
    nb.Nifti1Image(apm.astype("float32"), affine).to_filename(out_file)
    IFLOGGER.info('APM qball image saved as %s', out_file)
    return runtime
def anisotropic_power_map(data, mask, gtab, power=2, nbr_processes=None,
                          verbose=False):
    """Compute an anisotropic power map from DWI data via CSD.

    Parameters
    ----------
    data : ndarray
        4D diffusion-weighted data.
    mask : ndarray
        Mask restricting where peaks are computed.
    gtab : GradientTable
        Gradient information matching ``data``.
    power : int, optional
        Exponent passed to ``anisotropic_power``. Default: 2.
    nbr_processes : int or None, optional
        Number of parallel processes for peak extraction.
    verbose : bool, optional
        Print progress messages when True.

    Returns
    -------
    ndarray
        The anisotropic power map.
    """
    def _status(message):
        # Progress reporting is opt-in.
        if verbose:
            print(message)

    # compute response
    _status(' - computing response')
    response, _ = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)

    # compute spherical harmonics from spherical deconvoluten
    _status(' - preparing spherical deconvolution model')
    csd_model = ConstrainedSphericalDeconvModel(gtab, response)
    sphere = get_sphere('symmetric724')

    _status(' - computing spherical harmonics from spherical deconvolution')
    csd_peaks = peaks_from_model(model=csd_model,
                                 data=data,
                                 mask=mask,
                                 sphere=sphere,
                                 relative_peak_threshold=.5,
                                 min_separation_angle=25,
                                 parallel=True,
                                 nbr_processes=nbr_processes)

    # compute anisotropic power map
    _status(' - computing anisotropic power map')
    return anisotropic_power(csd_peaks.shm_coeff, norm_factor=1e-05,
                             power=power, non_negative=True)
def create_anisopowermap(bvec_path, diffdata):
    """Build an anisotropic power map from a DWI file and its bvecs.

    Parameters
    ----------
    bvec_path : str
        Path to a text file of b-vectors (first column skipped below).
    diffdata : str
        Path to the 4D diffusion-weighted NIfTI image.

    Returns
    -------
    str
        Absolute path to the written 'anisotropic_power_map.nii.gz'.
    """
    import os
    import nibabel as nib
    from dipy.reconst.shm import anisotropic_power
    import numpy as np
    from dipy.core.sphere import HemiSphere
    from dipy.reconst.shm import sf_to_sh

    bvecs_xyz = np.loadtxt(bvec_path)
    # Drop the first column and transpose to (n_dirs, 3).
    bvecs_xyz_array = np.array(bvecs_xyz[:, 1:]).transpose()
    gtab_hemisphere = HemiSphere(xyz=bvecs_xyz_array)

    img = nib.load(diffdata)
    # get_data()/get_affine() were removed in nibabel 5.x; also avoid
    # rebinding the `diffdata` parameter (it held the input path).
    diff_array = img.get_fdata()
    # Skip the first volume, presumably the b0 — TODO confirm against bvals.
    diffdatashell = diff_array[:, :, :, 1:]
    aff = img.affine

    myshs = sf_to_sh(diffdatashell, gtab_hemisphere, sh_order=2)
    anisomap = anisotropic_power(myshs)
    # Add in a brain masking step here, if beneficial to end result
    anisopwr_savepath = os.path.abspath('anisotropic_power_map.nii.gz')
    img = nib.Nifti1Image(anisomap, aff)
    img.to_filename(anisopwr_savepath)
    return anisopwr_savepath
def fit_anisotropic_power_map(dwi, gtab, mask=None):
    """
    Fits an anisotropic power map.

    Parameters
    ----------
    dwi : str, ndarray, or nifti1image
        Data to create the map with.

    gtab : GradientTable
        A GradientTable with all the gradient information.

    mask : str or nifti1image, optional
        mask to mask the data with. Default: None.

    Returns
    -------
    ndarray containing an anisotropic power map.
    """
    # Accept a path, an image object, or a raw array for the DWI input.
    if isinstance(dwi, str):
        dwi = nib.load(dwi)
    dwi_data = dwi.get_fdata() if isinstance(dwi, nib.Nifti1Image) else dwi

    # The mask may likewise be given as a path.
    if isinstance(mask, str):
        mask = nib.load(mask).get_fdata()

    model = _model(gtab, dwi_data)
    sphere = dpd.get_sphere('symmetric724')
    peaks = csd.peaks_from_model(model=model,
                                 data=dwi_data,
                                 sphere=sphere,
                                 relative_peak_threshold=.5,
                                 min_separation_angle=25,
                                 mask=mask)
    return shm.anisotropic_power(peaks.shm_coeff)
def create_anisopowermap(gtab_file, dwi_file, B0_mask):
    """
    Estimate an anisotropic power map image to use for registrations.

    Parameters
    ----------
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted image.
    B0_mask : str
        File path to B0 brain mask.

    Returns
    -------
    anisopwr_path : str
        File path to the anisotropic power Nifti1Image.
    B0_mask : str
        File path to B0 brain mask Nifti1Image.
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted Nifti1Image.

    References
    ----------
    .. [1] Chen, D. Q., Dell’Acqua, F., Rokem, A., Garyfallidis, E., Hayes,
      D., Zhong, J., & Hodaie, M. (2018). Diffusion Weighted Image
      Co-registration: Investigation of Best Practices. PLoS ONE.

    """
    import os
    from dipy.io import load_pickle
    from dipy.reconst.shm import anisotropic_power
    from dipy.core.sphere import HemiSphere, Sphere
    from dipy.reconst.shm import sf_to_sh

    gtab = load_pickle(gtab_file)
    # Boolean-index with ~mask instead of np.where(mask == False).
    dwi_vertices = gtab.bvecs[~gtab.b0s_mask]
    gtab_hemisphere = HemiSphere(xyz=dwi_vertices)
    # HemiSphere folds antipodal directions together; if that dropped any
    # vertices, fall back to a full Sphere so directions match the data.
    # (Previously done with assert + `except BaseException`, which both
    # abuses assertions for control flow and vanishes under `python -O`.)
    if len(gtab_hemisphere.vertices) != len(dwi_vertices):
        gtab_hemisphere = Sphere(xyz=gtab.bvecs[~gtab.b0s_mask])

    img = nib.load(dwi_file)
    aff = img.affine

    anisopwr_path = f"{os.path.dirname(B0_mask)}{'/aniso_power.nii.gz'}"
    if not os.path.isfile(anisopwr_path):
        print("Generating anisotropic power map to use for registrations...")
        nodif_B0_img = nib.load(B0_mask)

        dwi_data = np.asarray(img.dataobj, dtype=np.float32)
        # Drop every b0 volume in one boolean-indexing pass; the previous
        # reverse-sorted np.delete loop copied the whole array per b0.
        dwi_data = dwi_data[..., ~gtab.b0s_mask]

        anisomap = anisotropic_power(
            sf_to_sh(dwi_data, gtab_hemisphere, sh_order=2))
        anisomap[np.isnan(anisomap)] = 0
        masked_data = anisomap * \
            np.asarray(nodif_B0_img.dataobj).astype("bool")
        img = nib.Nifti1Image(masked_data.astype(np.float32), aff)
        img.to_filename(anisopwr_path)
        nodif_B0_img.uncache()
        del anisomap

    return anisopwr_path, B0_mask, gtab_file, dwi_file
[DellAcqua2014]_. To do so, we make use of the Q-ball Model as follows: """ qball_model = shm.QballModel(gtab, 8) """ We generate the peaks from the ``qball_model`` as follows: """ peaks = dp.peaks_from_model(model=qball_model, data=denoised_arr, relative_peak_threshold=.5, min_separation_angle=25, sphere=sphere, mask=mask) ap = shm.anisotropic_power(peaks.shm_coeff) plt.matshow(np.rot90(ap[:, :, 10]), cmap=plt.cm.bone) #plt.savefig("anisotropic_power_map.png") plt.show() """ .. figure:: anisotropic_power_map.png :align: center Anisotropic Power Map (Axial Slice) """ print(ap.shape) """ The above figure is a visualization of the axial slice of the Anisotropic Power Map. It can be treated as a pseudo-T1 for classification purposes
def main():
    """Fit an SH basis to the DWI signal and write an anisotropic power
    map named '<output>_powMap_sh<order>.nii.gz'."""
    params = readArgs()  # read in from the command line
    read_args = params.collect_args()
    params.check_args(read_args)

    # get img obj
    dwi_img = nib.load(params.dwi_)
    mask_img = nib.load(params.mask_)

    from dipy.io import read_bvals_bvecs
    bvals, bvecs = read_bvals_bvecs(params.bval_, params.bvec_)

    # need to create the gradient table yo
    from dipy.core.gradients import gradient_table
    gtab = gradient_table(bvals, bvecs, b0_threshold=25)

    # get the data from image objects
    # (get_data() was deprecated and removed in nibabel 5.x)
    dwi_data = dwi_img.get_fdata()
    mask_data = mask_img.get_fdata()
    # and get affine
    img_affine = dwi_img.affine

    from dipy.data import get_sphere
    sphere = get_sphere('repulsion724')

    from dipy.segment.mask import applymask
    dwi_data = applymask(dwi_data, mask_data)

    printfl('dwi_data.shape (%d, %d, %d, %d)' % dwi_data.shape)
    printfl('\nYour bvecs look like this:{0}'.format(bvecs))
    printfl('\nYour bvals look like this:{0}\n'.format(bvals))

    from dipy.reconst.shm import anisotropic_power, sph_harm_lookup, \
        smooth_pinv, normalize_data
    from dipy.core.sphere import HemiSphere

    smooth = 0.0
    normed_data = normalize_data(dwi_data, gtab.b0s_mask)
    # Keep only the diffusion-weighted (non-b0) volumes; ~mask replaces
    # the old np.where(1 - gtab.b0s_mask)[0] index dance.
    dwi_only = ~gtab.b0s_mask
    normed_data = normed_data[..., dwi_only]

    from dipy.core.gradients import gradient_table_from_bvals_bvecs
    gtab2 = gradient_table_from_bvals_bvecs(gtab.bvals[dwi_only],
                                            gtab.bvecs[dwi_only])
    signal_native_pts = HemiSphere(xyz=gtab2.bvecs)
    sph_harm_basis = sph_harm_lookup.get(None)
    Ba, m, n = sph_harm_basis(params.sh_order_, signal_native_pts.theta,
                              signal_native_pts.phi)
    # Laplace-Beltrami regularization weights for the pseudo-inverse.
    L = -n * (n + 1)
    invB = smooth_pinv(Ba, np.sqrt(smooth) * L)

    # fit SH basis to DWI signal
    normed_data_sh = np.dot(normed_data, invB.T)

    # power map call
    printfl("fitting power map")
    pow_map = anisotropic_power(normed_data_sh, norm_factor=0.00001,
                                power=2, non_negative=True)

    pow_map_img = nib.Nifti1Image(pow_map.astype(np.float32), img_affine)
    # make output name
    out_name = ''.join(
        [params.output_, '_powMap_sh', str(params.sh_order_), '.nii.gz'])
    printfl("writing power map to: {}".format(out_name))
    nib.save(pow_map_img, out_name)
def main():
    """Compute ODF metrics (GFA, peaks, SH, NUFO, anisotropic power) from a
    DWI dataset and save the requested output volumes."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    if not args.not_all:
        args.gfa = args.gfa or 'gfa.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peaks_indices.nii.gz'
        args.sh = args.sh or 'sh.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.a_power = args.a_power or 'anisotropic_power.nii.gz'

    arglist = [
        args.gfa, args.peaks, args.peak_indices, args.sh, args.nufo,
        args.a_power
    ]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least ' +
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs])
    assert_outputs_exists(parser, args, arglist)

    nbr_processes = args.nbr_processes
    parallel = True
    if nbr_processes <= 0:
        # peaks_from_model interprets None as "use all available cores".
        nbr_processes = None
    elif nbr_processes == 1:
        parallel = False

    # Load data
    img = nib.load(args.input)
    # get_data()/get_affine() were deprecated and removed in nibabel 5.x.
    data = img.get_fdata()
    affine = img.affine

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)
    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    if bvals.min() != 0:
        if bvals.min() > 20:
            raise ValueError(
                'The minimal bvalue is greater than 20. This is highly '
                'suspicious. Please check your data to ensure everything is '
                'correct.\nValue found: {0}'.format(bvals.min()))
        else:
            logging.warning('Warning: no b=0 image. Setting b0_threshold to '
                            'bvals.min() = %s', bvals.min())
            gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())
    else:
        gtab = gradient_table(bvals, bvecs)

    sphere = get_sphere('symmetric724')

    if args.mask is None:
        mask = None
    else:
        # np.bool was removed in NumPy 1.24; the builtin bool is the
        # supported dtype alias.
        mask = nib.load(args.mask).get_fdata().astype(bool)

    if args.use_qball:
        model = QballModel(gtab, sh_order=int(args.sh_order), smooth=0.006)
    else:
        model = CsaOdfModel(gtab, sh_order=int(args.sh_order), smooth=0.006)

    odfpeaks = peaks_from_model(model=model,
                                data=data,
                                sphere=sphere,
                                relative_peak_threshold=.5,
                                min_separation_angle=25,
                                mask=mask,
                                return_odf=False,
                                normalize_peaks=True,
                                return_sh=True,
                                sh_order=int(args.sh_order),
                                sh_basis_type=args.basis,
                                npeaks=5,
                                parallel=parallel,
                                nbr_processes=nbr_processes)

    if args.gfa:
        nib.save(nib.Nifti1Image(odfpeaks.gfa.astype(np.float32), affine),
                 args.gfa)
    if args.peaks:
        nib.save(
            nib.Nifti1Image(reshape_peaks_for_visualization(odfpeaks),
                            affine), args.peaks)
    if args.peak_indices:
        nib.save(nib.Nifti1Image(odfpeaks.peak_indices, affine),
                 args.peak_indices)
    if args.sh:
        nib.save(
            nib.Nifti1Image(odfpeaks.shm_coeff.astype(np.float32), affine),
            args.sh)
    if args.nufo:
        peaks_count = (odfpeaks.peak_indices > -1).sum(3)
        nib.save(nib.Nifti1Image(peaks_count.astype(np.int32), affine),
                 args.nufo)
    if args.a_power:
        odf_a_power = anisotropic_power(odfpeaks.shm_coeff)
        nib.save(nib.Nifti1Image(odf_a_power.astype(np.float32), affine),
                 args.a_power)
def create_anisopowermap(gtab_file, dwi_file, B0_mask):
    '''
    Estimate an anisotropic power map image to use for registrations.

    Parameters
    ----------
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted image.
    B0_mask : str
        File path to B0 brain mask.

    Returns
    -------
    anisopwr_path : str
        File path to the anisotropic power Nifti1Image.
    B0_mask : str
        File path to B0 brain mask Nifti1Image.
    gtab_file : str
        File path to pickled DiPy gradient table object.
    dwi_file : str
        File path to diffusion weighted Nifti1Image.
    '''
    import os
    from dipy.io import load_pickle
    from dipy.reconst.shm import anisotropic_power
    from dipy.core.sphere import HemiSphere
    from dipy.reconst.shm import sf_to_sh

    gtab = load_pickle(gtab_file)
    # Boolean-index with ~mask instead of comparing `== False`.
    gtab_hemisphere = HemiSphere(xyz=gtab.bvecs[~gtab.b0s_mask])

    img = nib.load(dwi_file)
    aff = img.affine

    anisopwr_path = "%s%s" % (os.path.dirname(B0_mask),
                              '/aniso_power.nii.gz')
    if not os.path.isfile(anisopwr_path):
        print('Generating anisotropic power map to use for registrations...')
        nodif_B0_img = nib.load(B0_mask)

        dwi_data = np.asarray(img.dataobj)
        # Remove all b0 volumes in one boolean-indexing pass; the old
        # reverse-sorted np.delete loop copied the whole 4D array per b0.
        dwi_data = dwi_data[..., ~gtab.b0s_mask]

        anisomap = anisotropic_power(
            sf_to_sh(dwi_data, gtab_hemisphere, sh_order=2))
        anisomap[np.isnan(anisomap)] = 0
        masked_data = anisomap * np.asarray(
            nodif_B0_img.dataobj).astype('bool')
        img = nib.Nifti1Image(masked_data.astype(np.float32), aff)
        img.to_filename(anisopwr_path)
        nodif_B0_img.uncache()
        del anisomap

    return anisopwr_path, B0_mask, gtab_file, dwi_file
def main():
    """Fit an ODF model to a DWI dataset and write the requested metric
    volumes (GFA, peaks, peak indices, SH, NUFO, anisotropic power)."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    # (args attribute, default filename) pairs, in canonical output order.
    output_specs = [('gfa', 'gfa.nii.gz'),
                    ('peaks', 'peaks.nii.gz'),
                    ('peak_indices', 'peaks_indices.nii.gz'),
                    ('sh', 'sh.nii.gz'),
                    ('nufo', 'nufo.nii.gz'),
                    ('a_power', 'anisotropic_power.nii.gz')]
    if not args.not_all:
        # Fill in default filenames for outputs the user left unset.
        for attr, default_name in output_specs:
            if not getattr(args, attr):
                setattr(args, attr, default_name)

    arglist = [getattr(args, attr) for attr, _ in output_specs]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [args.in_dwi, args.in_bval, args.in_bvec])
    assert_outputs_exist(parser, args, arglist)
    validate_nbr_processes(parser, args)

    nbr_processes = args.nbr_processes
    parallel = nbr_processes > 1

    # Load data
    img = nib.load(args.in_dwi)
    data = img.get_fdata(dtype=np.float32)

    bvals, bvecs = read_bvals_bvecs(args.in_bval, args.in_bvec)
    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)
    check_b0_threshold(args, bvals.min())
    gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())

    sphere = get_sphere('symmetric724')

    mask = None
    if args.mask:
        mask = get_data_as_mask(nib.load(args.mask))
        # Sanity check on shape of mask
        if mask.shape != data.shape[:-1]:
            raise ValueError('Mask shape does not match data shape.')

    model_cls = QballModel if args.use_qball else CsaOdfModel
    model = model_cls(gtab, sh_order=args.sh_order, smooth=DEFAULT_SMOOTH)

    odfpeaks = peaks_from_model(model=model,
                                data=data,
                                sphere=sphere,
                                relative_peak_threshold=.5,
                                min_separation_angle=25,
                                mask=mask,
                                return_odf=False,
                                normalize_peaks=True,
                                return_sh=True,
                                sh_order=int(args.sh_order),
                                sh_basis_type=args.sh_basis,
                                npeaks=5,
                                parallel=parallel,
                                nbr_processes=nbr_processes)

    def _save(volume, fname):
        # All outputs share the input image's affine.
        nib.save(nib.Nifti1Image(volume, img.affine), fname)

    if args.gfa:
        _save(odfpeaks.gfa.astype(np.float32), args.gfa)
    if args.peaks:
        _save(reshape_peaks_for_visualization(odfpeaks), args.peaks)
    if args.peak_indices:
        _save(odfpeaks.peak_indices, args.peak_indices)
    if args.sh:
        _save(odfpeaks.shm_coeff.astype(np.float32), args.sh)
    if args.nufo:
        nufo_map = (odfpeaks.peak_indices > -1).sum(3)
        _save(nufo_map.astype(np.int32), args.nufo)
    if args.a_power:
        _save(anisotropic_power(odfpeaks.shm_coeff).astype(np.float32),
              args.a_power)
FA_data = FA_img.get_fdata() ########################################################################## # Calculate CSD: # ------------------------- print("Calculating CSD...") if not op.exists(op.join(working_dir, 'csd_sh_coeff.nii.gz')): sh_coeff = csd.fit_csd(hardi_fdata, hardi_fbval, hardi_fbvec, sh_order=4, out_dir=working_dir) else: sh_coeff = op.join(working_dir, "csd_sh_coeff.nii.gz") apm = shm.anisotropic_power(nib.load(sh_coeff).get_fdata()) ########################################################################## # Register the individual data to a template: # ------------------------------------------- # For the purpose of bundle segmentation, the individual brain is registered to # the MNI T1 template. The waypoint ROIs used in segmentation are then each # brought into each subject's native space to test streamlines for whether they # fulfill the segmentation criteria. # # .. note:: # # To find the right place for the waypoint ROIs, we calculate a non-linear # transformation between the individual's brain DWI measurement (the b0 # measurements) and the MNI T1 template. # Before calculating this non-linear warping, we perform a pre-alignment