def main():
    """Run a CSA peak-extraction pipeline end to end.

    Reads paths from ``config.json`` (keys: ``data_file``, ``data_bval``,
    ``data_bvec``), loads the diffusion volume and a segmentation volume,
    builds a white-matter mask from FreeSurfer label IDs, fits a CSA ODF
    model, and saves the resulting peaks to ``peaks.pam5``.
    """
    start = time.time()
    with open('config.json') as config_json:
        config = json.load(config_json)

    # Load the data
    dmri_image = nib.load(config['data_file'])
    dmri = dmri_image.get_data()
    print('Dmri' + str(dmri.shape))

    # aparc_im = nib.load(config['freesurfer'])
    # NOTE(review): the segmentation path is hard-coded instead of taken from
    # config['freesurfer'] (see the commented-out line above) — confirm this
    # override is intentional.
    aparc_im = nib.load("volume.nii.gz")
    aparc = aparc_im.get_data()
    print('Aparc' + str(aparc.shape))
    end = time.time()
    print('Loaded Files: ' + str((end - start)))

    # Create the white matter mask.
    # FreeSurfer label IDs treated as white matter; the original list
    # repeated 12 and 52, which had no effect — duplicates removed.
    start = time.time()
    wm_regions = [2, 41, 16, 17, 28, 60, 51, 53, 12, 52, 13, 18,
                  54, 50, 11, 251, 252, 253, 254, 255, 10, 49, 46, 7]
    wm_mask = np.zeros(aparc.shape)
    for label in wm_regions:
        wm_mask[aparc == label] = 1
    print('Created white matter mask: ' + str(time.time() - start))

    # Create the gradient table from the bvals and bvecs
    start = time.time()
    bvals, bvecs = read_bvals_bvecs(config['data_bval'], config['data_bvec'])
    gtab = gradient_table(bvals, bvecs, b0_threshold=100)
    end = time.time()
    print('Created Gradient Table: ' + str((end - start)))

    # Create the csa model and calculate peaks
    start = time.time()
    csa_model = CsaOdfModel(gtab, sh_order=6)
    print('Created CSA Model: ' + str(time.time() - start))
    csa_peaks = peaks_from_model(csa_model, dmri, default_sphere,
                                 relative_peak_threshold=.8,
                                 min_separation_angle=45,
                                 mask=wm_mask)
    print('Generated peaks: ' + str(time.time() - start))
    save_peaks('peaks.pam5', csa_peaks)
def test_io_peaks():
    """Round-trip a PeaksAndMetrics object through save_peaks/load_peaks.

    Covers affine handling (set, None, override), missing optional
    attributes (shm_coeff, odf, affine), rejection of bad file extensions,
    and export of the individual volumes via peaks_to_niftis.
    """
    with InTemporaryDirectory():
        fname = 'test.pam5'
        pam = PeaksAndMetrics()
        pam.affine = np.eye(4)
        pam.peak_dirs = np.random.rand(10, 10, 10, 5, 3)
        pam.peak_values = np.zeros((10, 10, 10, 5))
        pam.peak_indices = np.zeros((10, 10, 10, 5))
        pam.shm_coeff = np.zeros((10, 10, 10, 45))
        pam.sphere = default_sphere
        pam.B = np.zeros((45, default_sphere.vertices.shape[0]))
        pam.total_weight = 0.5
        pam.ang_thr = 60
        pam.gfa = np.zeros((10, 10, 10))
        pam.qa = np.zeros((10, 10, 10, 5))
        pam.odf = np.zeros((10, 10, 10, default_sphere.vertices.shape[0]))

        save_peaks(fname, pam)
        pam2 = load_peaks(fname, verbose=True)
        npt.assert_array_equal(pam.peak_dirs, pam2.peak_dirs)

        # Saving with an explicit affine must win over pam2.affine = None.
        pam2.affine = None
        fname2 = 'test2.pam5'
        save_peaks(fname2, pam2, np.eye(4))
        pam2_res = load_peaks(fname2, verbose=True)
        npt.assert_array_equal(pam.peak_dirs, pam2_res.peak_dirs)

        pam3 = load_peaks(fname2, verbose=False)
        for attr in ['peak_dirs', 'peak_values', 'peak_indices',
                     'gfa', 'qa', 'shm_coeff', 'B', 'odf']:
            npt.assert_array_equal(getattr(pam3, attr), getattr(pam, attr))
        npt.assert_equal(pam3.total_weight, pam.total_weight)
        npt.assert_equal(pam3.ang_thr, pam.ang_thr)
        npt.assert_array_almost_equal(pam3.sphere.vertices,
                                      pam.sphere.vertices)

        # An empty PeaksAndMetrics must be rejected.
        fname3 = 'test3.pam5'
        pam4 = PeaksAndMetrics()
        npt.assert_raises(ValueError, save_peaks, fname3, pam4)

        # Saving without any affine at all is allowed.
        fname4 = 'test4.pam5'
        del pam.affine
        save_peaks(fname4, pam, affine=None)

        # Wrong extension raises IOError.
        fname5 = 'test5.pkm'
        npt.assert_raises(IOError, save_peaks, fname5, pam)

        pam.affine = np.eye(4)
        fname6 = 'test6.pam5'
        save_peaks(fname6, pam, verbose=True)

        # Optional attributes may be missing when saving.
        del pam.shm_coeff
        save_peaks(fname6, pam, verbose=False)

        pam.shm_coeff = np.zeros((10, 10, 10, 45))
        del pam.odf
        save_peaks(fname6, pam)
        pam_tmp = load_peaks(fname6, True)
        npt.assert_equal(pam_tmp.odf, None)

        fname7 = 'test7.paw'
        npt.assert_raises(IOError, load_peaks, fname7)

        del pam.shm_coeff
        save_peaks(fname6, pam, verbose=True)

        fname_shm = 'shm.nii.gz'
        fname_dirs = 'dirs.nii.gz'
        fname_values = 'values.nii.gz'
        fname_indices = 'indices.nii.gz'
        fname_gfa = 'gfa.nii.gz'

        pam.shm_coeff = np.ones((10, 10, 10, 45))
        peaks_to_niftis(pam, fname_shm, fname_dirs, fname_values,
                        fname_indices, fname_gfa, reshape_dirs=False)

        # The original called os.path.isfile without asserting the result,
        # so the export was never actually verified — assert it now.
        npt.assert_(os.path.isfile(fname_shm))
        npt.assert_(os.path.isfile(fname_dirs))
        npt.assert_(os.path.isfile(fname_values))
        npt.assert_(os.path.isfile(fname_indices))
        npt.assert_(os.path.isfile(fname_gfa))
def run(self, input_files, bvalues_files, bvectors_files, mask_files,
        sh_order=6, odf_to_sh_order=8, b0_threshold=50.0, bvecs_tol=0.01,
        extract_pam_values=False, parallel=False, nbr_processes=None,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constant Solid Angle.

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues_files : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors_files : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    sh_order : int, optional
        Spherical harmonics order used in the CSA fit.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients.
    b0_threshold : float, optional
        Threshold used to find b0 volumes.
    bvecs_tol : float, optional
        Threshold used so that norm(bvec)=1.
    extract_pam_values : bool, optional
        Whether or not to save pam volumes as single nifti files.
    parallel : bool, optional
        Whether to use parallelization in peak-finding during the
        calibration procedure.
    nbr_processes : int, optional
        If `parallel` is True, the number of subprocesses to use
        (default multiprocessing.cpu_count()).
    out_dir : string, optional
        Output directory. (default current directory)
    out_pam : string, optional
        Name of the peaks volume to be saved.
    out_shm : string, optional
        Name of the spherical harmonics volume to be saved.
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved.
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved.
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved.
    out_gfa : string, optional
        Name of the generalized FA volume to be saved.

    References
    ----------
    .. [1] Aganj, I., et al. 2009. ODF Reconstruction in Q-Ball Imaging
       with Solid Angle Consideration.
    """
    # One tuple of paths per matched input set (wildcards expand here).
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        data, affine = load_nifti(dwi)

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        # If no b-value falls at or below the threshold, no volume would be
        # treated as b0 — warn instead of failing.
        if b0_threshold < bvals.min():
            warn("b0_threshold (value: {0}) is too low, increase your "
                 "b0_threshold. It should be higher than the first b0 value "
                 "({1}).".format(b0_threshold, bvals.min()))
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        mask_vol = load_nifti_data(maskfile).astype(bool)

        peaks_sphere = default_sphere

        logging.info('Starting CSA computations {0}'.format(dwi))
        csa_model = CsaOdfModel(gtab, sh_order)
        # return_sh=True compresses the ODF into SH coefficients of order
        # odf_to_sh_order for the saved pam5 file.
        peaks_csa = peaks_from_model(model=csa_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=odf_to_sh_order,
                                     normalize_peaks=True,
                                     parallel=parallel,
                                     nbr_processes=nbr_processes)
        # Carry the image affine so the peaks stay registered to the DWI.
        peaks_csa.affine = affine

        save_peaks(opam, peaks_csa)

        logging.info('Finished CSA {0}'.format(dwi))

        if extract_pam_values:
            peaks_to_niftis(peaks_csa, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def run(self, input_files, bvalues_files, bvectors_files, mask_files,
        b0_threshold=50.0, bvecs_tol=0.01, roi_center=None, roi_radii=10,
        fa_thr=0.7, frf=None, extract_pam_values=False, sh_order=8,
        odf_to_sh_order=8, parallel=False, nbr_processes=None,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constrained spherical deconvolution

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues_files : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors_files : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    b0_threshold : float, optional
        Threshold used to find b0 volumes.
    bvecs_tol : float, optional
        Bvecs should be unit vectors.
    roi_center : variable int, optional
        Center of ROI in data. If center is None, it is assumed that it is
        the center of the volume with shape `data.shape[:3]`.
    roi_radii : int or array-like, optional
        radii of cuboid ROI in voxels.
    fa_thr : float, optional
        FA threshold for calculating the response function.
    frf : variable float, optional
        Fiber response function can be for example inputed as 15 4 4
        (from the command line) or [15, 4, 4] from a Python script to be
        converted to float and multiplied by 10**-4 . If None
        the fiber response function will be computed automatically.
    extract_pam_values : bool, optional
        Save or not to save pam volumes as single nifti files.
    sh_order : int, optional
        Spherical harmonics order used in the CSA fit.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients.
    parallel : bool, optional
        Whether to use parallelization in peak-finding during the
        calibration procedure.
    nbr_processes : int, optional
        If `parallel` is True, the number of subprocesses to use
        (default multiprocessing.cpu_count()).
    out_dir : string, optional
        Output directory. (default current directory)
    out_pam : string, optional
        Name of the peaks volume to be saved.
    out_shm : string, optional
        Name of the spherical harmonics volume to be saved.
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved.
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved.
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved.
    out_gfa : string, optional
        Name of the generalized FA volume to be saved.

    References
    ----------
    .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination
       of the fibre orientation distribution in diffusion MRI:
       Non-negativity constrained super-resolved spherical deconvolution.
    """
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        data, affine = load_nifti(dwi)

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        # FIX: removed a leftover debug print of b0_threshold/bvals.min().
        if b0_threshold < bvals.min():
            warn(
                "b0_threshold (value: {0}) is too low, increase your "
                "b0_threshold. It should be higher than the first b0 value "
                "({1}).".format(b0_threshold, bvals.min()))
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        mask_vol = load_nifti_data(maskfile).astype(bool)

        # Number of SH coefficients for the requested order.
        # FIX: use integer division — the count is an integer and the
        # original float value leaked into the error message below.
        n_params = ((sh_order + 1) * (sh_order + 2)) // 2
        if data.shape[-1] < n_params:
            raise ValueError('You need at least {0} unique DWI volumes to '
                             'compute fiber odfs. You currently have: {1}'
                             ' DWI volumes.'.format(n_params,
                                                    data.shape[-1]))

        if frf is None:
            # Estimate the single-fiber response from a ROI automatically.
            logging.info('Computing response function')
            if roi_center is not None:
                logging.info(
                    'Response ROI center:\n{0}'.format(roi_center))
                logging.info('Response ROI radii:\n{0}'.format(roi_radii))
            response, ratio = auto_response_ssst(gtab, data,
                                                 roi_center=roi_center,
                                                 roi_radii=roi_radii,
                                                 fa_thr=fa_thr)
            response = list(response)
        else:
            # User-supplied eigenvalues, given in units of 10**-4 mm^2/s.
            logging.info('Using response function')
            if isinstance(frf, str):
                l01 = np.array(literal_eval(frf), dtype=np.float64)
            else:
                l01 = np.array(frf, dtype=np.float64)
            l01 *= 10 ** -4
            response = np.array([l01[0], l01[1], l01[1]])
            ratio = l01[1] / l01[0]
            response = (response, ratio)

        logging.info("Eigenvalues for the frf of the input"
                     " data are :{0}".format(response[0]))
        logging.info(
            'Ratio for smallest to largest eigen value is {0}'.format(
                ratio))

        peaks_sphere = default_sphere

        logging.info('CSD computation started.')
        csd_model = ConstrainedSphericalDeconvModel(gtab, response,
                                                    sh_order=sh_order)
        peaks_csd = peaks_from_model(model=csd_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=sh_order,
                                     normalize_peaks=True,
                                     parallel=parallel,
                                     nbr_processes=nbr_processes)
        peaks_csd.affine = affine

        save_peaks(opam, peaks_csd)
        logging.info('CSD computation completed.')

        if extract_pam_values:
            peaks_to_niftis(peaks_csd, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def test_io_peaks():
    """Round-trip a PeaksAndMetrics object through save_peaks/load_peaks.

    Same scenarios as the default-sphere variant, but exercised with the
    'repulsion724' sphere: affine handling, missing optional attributes,
    bad extensions, and nifti export.
    """
    with InTemporaryDirectory():
        fname = 'test.pam5'
        sphere = get_sphere('repulsion724')

        pam = PeaksAndMetrics()
        pam.affine = np.eye(4)
        pam.peak_dirs = np.random.rand(10, 10, 10, 5, 3)
        pam.peak_values = np.zeros((10, 10, 10, 5))
        pam.peak_indices = np.zeros((10, 10, 10, 5))
        pam.shm_coeff = np.zeros((10, 10, 10, 45))
        pam.sphere = sphere
        pam.B = np.zeros((45, sphere.vertices.shape[0]))
        pam.total_weight = 0.5
        pam.ang_thr = 60
        pam.gfa = np.zeros((10, 10, 10))
        pam.qa = np.zeros((10, 10, 10, 5))
        pam.odf = np.zeros((10, 10, 10, sphere.vertices.shape[0]))

        save_peaks(fname, pam)
        pam2 = load_peaks(fname, verbose=True)
        npt.assert_array_equal(pam.peak_dirs, pam2.peak_dirs)

        # Saving with an explicit affine must win over pam2.affine = None.
        pam2.affine = None
        fname2 = 'test2.pam5'
        save_peaks(fname2, pam2, np.eye(4))
        pam2_res = load_peaks(fname2, verbose=True)
        npt.assert_array_equal(pam.peak_dirs, pam2_res.peak_dirs)

        pam3 = load_peaks(fname2, verbose=False)
        for attr in ['peak_dirs', 'peak_values', 'peak_indices',
                     'gfa', 'qa', 'shm_coeff', 'B', 'odf']:
            npt.assert_array_equal(getattr(pam3, attr), getattr(pam, attr))
        npt.assert_equal(pam3.total_weight, pam.total_weight)
        npt.assert_equal(pam3.ang_thr, pam.ang_thr)
        npt.assert_array_almost_equal(pam3.sphere.vertices,
                                      pam.sphere.vertices)

        # An empty PeaksAndMetrics must be rejected.
        fname3 = 'test3.pam5'
        pam4 = PeaksAndMetrics()
        npt.assert_raises(ValueError, save_peaks, fname3, pam4)

        # Saving without any affine at all is allowed.
        fname4 = 'test4.pam5'
        del pam.affine
        save_peaks(fname4, pam, affine=None)

        # Wrong extension raises IOError.
        fname5 = 'test5.pkm'
        npt.assert_raises(IOError, save_peaks, fname5, pam)

        pam.affine = np.eye(4)
        fname6 = 'test6.pam5'
        save_peaks(fname6, pam, verbose=True)

        # Optional attributes may be missing when saving.
        del pam.shm_coeff
        save_peaks(fname6, pam, verbose=False)

        pam.shm_coeff = np.zeros((10, 10, 10, 45))
        del pam.odf
        save_peaks(fname6, pam)
        pam_tmp = load_peaks(fname6, True)
        npt.assert_equal(pam_tmp.odf, None)

        fname7 = 'test7.paw'
        npt.assert_raises(IOError, load_peaks, fname7)

        del pam.shm_coeff
        save_peaks(fname6, pam, verbose=True)

        fname_shm = 'shm.nii.gz'
        fname_dirs = 'dirs.nii.gz'
        fname_values = 'values.nii.gz'
        fname_indices = 'indices.nii.gz'
        fname_gfa = 'gfa.nii.gz'

        pam.shm_coeff = np.ones((10, 10, 10, 45))
        peaks_to_niftis(pam, fname_shm, fname_dirs, fname_values,
                        fname_indices, fname_gfa, reshape_dirs=False)

        # The original called os.path.isfile without asserting the result,
        # so the export was never actually verified — assert it now.
        npt.assert_(os.path.isfile(fname_shm))
        npt.assert_(os.path.isfile(fname_dirs))
        npt.assert_(os.path.isfile(fname_values))
        npt.assert_(os.path.isfile(fname_indices))
        npt.assert_(os.path.isfile(fname_gfa))
def run(self, input_files, bvalues_files, bvectors_files, mask_files,
        sh_order=6, odf_to_sh_order=8, b0_threshold=50.0, bvecs_tol=0.01,
        extract_pam_values=False, parallel=False, nbr_processes=None,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constant Solid Angle.

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues_files : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors_files : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    sh_order : int, optional
        Spherical harmonics order (default 6) used in the CSA fit.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients (default 8)
    b0_threshold : float, optional
        Threshold used to find b=0 directions
    bvecs_tol : float, optional
        Threshold used so that norm(bvec)=1 (default 0.01)
    extract_pam_values : bool, optional
        Whether or not to save pam volumes as single nifti files.
    parallel : bool, optional
        Whether to use parallelization in peak-finding during the
        calibration procedure. Default: False
    nbr_processes : int, optional
        If `parallel` is True, the number of subprocesses to use
        (default multiprocessing.cpu_count()).
    out_dir : string, optional
        Output directory (default input file directory)
    out_pam : string, optional
        Name of the peaks volume to be saved (default 'peaks.pam5')
    out_shm : string, optional
        Name of the shperical harmonics volume to be saved
        (default 'shm.nii.gz')
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved
        (default 'peaks_dirs.nii.gz')
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved
        (default 'peaks_values.nii.gz')
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved
        (default 'peaks_indices.nii.gz')
    out_gfa : string, optional
        Name of the generalise fa volume to be saved (default 'gfa.nii.gz')

    References
    ----------
    .. [1] Aganj, I., et al. 2009. ODF Reconstruction in Q-Ball Imaging
       with Solid Angle Consideration.
    """
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        data, affine = load_nifti(dwi)

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        # If no b-value falls at or below the threshold, no volume would
        # be treated as b0 — warn instead of failing.
        if b0_threshold < bvals.min():
            warn("b0_threshold (value: {0}) is too low, increase your "
                 "b0_threshold. It should higher than the first b0 value "
                 "({1}).".format(b0_threshold, bvals.min()))
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool.
        mask_vol = nib.load(maskfile).get_data().astype(bool)

        peaks_sphere = get_sphere('repulsion724')

        logging.info('Starting CSA computations {0}'.format(dwi))
        csa_model = CsaOdfModel(gtab, sh_order)
        peaks_csa = peaks_from_model(model=csa_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=odf_to_sh_order,
                                     normalize_peaks=True,
                                     parallel=parallel,
                                     nbr_processes=nbr_processes)
        # Carry the image affine so the peaks stay registered to the DWI.
        peaks_csa.affine = affine

        save_peaks(opam, peaks_csa)

        logging.info('Finished CSA {0}'.format(dwi))

        if extract_pam_values:
            peaks_to_niftis(peaks_csa, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def run(self, input_files, bvalues_files, bvectors_files, mask_files,
        b0_threshold=50.0, bvecs_tol=0.01, roi_center=None, roi_radius=10,
        fa_thr=0.7, frf=None, extract_pam_values=False, sh_order=8,
        odf_to_sh_order=8, parallel=False, nbr_processes=None,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constrained spherical deconvolution

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues_files : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors_files : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    b0_threshold : float, optional
        Threshold used to find b=0 directions
    bvecs_tol : float, optional
        Bvecs should be unit vectors. (default:0.01)
    roi_center : variable int, optional
        Center of ROI in data. If center is None, it is assumed that it is
        the center of the volume with shape `data.shape[:3]` (default None)
    roi_radius : int, optional
        radius of cubic ROI in voxels (default 10)
    fa_thr : float, optional
        FA threshold for calculating the response function (default 0.7)
    frf : variable float, optional
        Fiber response function can be for example inputed as 15 4 4
        (from the command line) or [15, 4, 4] from a Python script to be
        converted to float and mutiplied by 10**-4 . If None
        the fiber response function will be computed automatically
        (default: None).
    extract_pam_values : bool, optional
        Save or not to save pam volumes as single nifti files.
    sh_order : int, optional
        Spherical harmonics order (default 6) used in the CSA fit.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients (default 8)
    parallel : bool, optional
        Whether to use parallelization in peak-finding during the
        calibration procedure. Default: False
    nbr_processes : int, optional
        If `parallel` is True, the number of subprocesses to use
        (default multiprocessing.cpu_count()).
    out_dir : string, optional
        Output directory (default input file directory)
    out_pam : string, optional
        Name of the peaks volume to be saved (default 'peaks.pam5')
    out_shm : string, optional
        Name of the shperical harmonics volume to be saved
        (default 'shm.nii.gz')
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved
        (default 'peaks_dirs.nii.gz')
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved
        (default 'peaks_values.nii.gz')
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved
        (default 'peaks_indices.nii.gz')
    out_gfa : string, optional
        Name of the generalise fa volume to be saved (default 'gfa.nii.gz')

    References
    ----------
    .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination
       of the fibre orientation distribution in diffusion MRI:
       Non-negativity constrained super-resolved spherical deconvolution.
    """
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        data, affine = load_nifti(dwi)

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        # FIX: removed a leftover debug print of b0_threshold/bvals.min().
        if b0_threshold < bvals.min():
            warn("b0_threshold (value: {0}) is too low, increase your "
                 "b0_threshold. It should higher than the first b0 value "
                 "({1}).".format(b0_threshold, bvals.min()))
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool.
        mask_vol = nib.load(maskfile).get_data().astype(bool)

        # Number of SH coefficients for the requested order.
        # FIX: use integer division — the count is an integer and the
        # original float value leaked into the error message below.
        n_params = ((sh_order + 1) * (sh_order + 2)) // 2
        if data.shape[-1] < n_params:
            raise ValueError(
                'You need at least {0} unique DWI volumes to '
                'compute fiber odfs. You currently have: {1}'
                ' DWI volumes.'.format(n_params, data.shape[-1]))

        if frf is None:
            # Estimate the single-fiber response from a ROI automatically.
            logging.info('Computing response function')
            if roi_center is not None:
                logging.info('Response ROI center:\n{0}'
                             .format(roi_center))
                logging.info('Response ROI radius:\n{0}'
                             .format(roi_radius))
            response, ratio, nvox = auto_response(
                gtab, data,
                roi_center=roi_center,
                roi_radius=roi_radius,
                fa_thr=fa_thr,
                return_number_of_voxels=True)
            response = list(response)
        else:
            # User-supplied eigenvalues, given in units of 10**-4 mm^2/s.
            logging.info('Using response function')
            if isinstance(frf, str):
                l01 = np.array(literal_eval(frf), dtype=np.float64)
            else:
                l01 = np.array(frf, dtype=np.float64)
            l01 *= 10 ** -4
            response = np.array([l01[0], l01[1], l01[1]])
            ratio = l01[1] / l01[0]
            response = (response, ratio)

        logging.info("Eigenvalues for the frf of the input"
                     " data are :{0}".format(response[0]))
        logging.info('Ratio for smallest to largest eigen value is {0}'
                     .format(ratio))

        peaks_sphere = get_sphere('repulsion724')

        logging.info('CSD computation started.')
        csd_model = ConstrainedSphericalDeconvModel(gtab, response,
                                                    sh_order=sh_order)
        peaks_csd = peaks_from_model(model=csd_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=sh_order,
                                     normalize_peaks=True,
                                     parallel=parallel,
                                     nbr_processes=nbr_processes)
        peaks_csd.affine = affine

        save_peaks(opam, peaks_csd)
        logging.info('CSD computation completed.')

        if extract_pam_values:
            peaks_to_niftis(peaks_csd, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def run(self, input_files, bvalues, bvectors, mask_files,
        sh_order=6, odf_to_sh_order=8, b0_threshold=0.0, bvecs_tol=0.01,
        extract_pam_values=False,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constant Solid Angle.

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    sh_order : int, optional
        Spherical harmonics order (default 6) used in the CSA fit.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients (default 8)
    b0_threshold : float, optional
        Threshold used to find b=0 directions
    bvecs_tol : float, optional
        Threshold used so that norm(bvec)=1 (default 0.01)
    extract_pam_values : bool, optional
        Wheter or not to save pam volumes as single nifti files.
    out_dir : string, optional
        Output directory (default input file directory)
    out_pam : string, optional
        Name of the peaks volume to be saved (default 'peaks.pam5')
    out_shm : string, optional
        Name of the shperical harmonics volume to be saved
        (default 'shm.nii.gz')
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved
        (default 'peaks_dirs.nii.gz')
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved
        (default 'peaks_values.nii.gz')
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved
        (default 'peaks_indices.nii.gz')
    out_gfa : string, optional
        Name of the generalise fa volume to be saved (default 'gfa.nii.gz')

    References
    ----------
    .. [1] Aganj, I., et. al. 2009. ODF Reconstruction in Q-Ball Imaging
       with Solid Angle Consideration.
    """
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        vol = nib.load(dwi)
        data = vol.get_data()
        affine = vol.affine

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool.
        mask_vol = nib.load(maskfile).get_data().astype(bool)

        peaks_sphere = get_sphere('repulsion724')

        logging.info('Starting CSA computations {0}'.format(dwi))
        csa_model = CsaOdfModel(gtab, sh_order)
        peaks_csa = peaks_from_model(model=csa_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=odf_to_sh_order,
                                     normalize_peaks=True,
                                     parallel=False)
        # Carry the image affine so the peaks stay registered to the DWI.
        peaks_csa.affine = affine

        save_peaks(opam, peaks_csa)

        logging.info('Finished CSA {0}'.format(dwi))

        if extract_pam_values:
            peaks_to_niftis(peaks_csa, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def run(self, input_files, bvalues, bvectors, mask_files,
        b0_threshold=0.0, bvecs_tol=0.01, roi_center=None, roi_radius=10,
        fa_thr=0.7, frf=None, extract_pam_values=False, sh_order=8,
        odf_to_sh_order=8,
        out_dir='', out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz', out_gfa='gfa.nii.gz'):
    """ Constrained spherical deconvolution

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    b0_threshold : float, optional
        Threshold used to find b=0 directions
    bvecs_tol : float, optional
        Bvecs should be unit vectors. (default:0.01)
    roi_center : variable int, optional
        Center of ROI in data. If center is None, it is assumed that it is
        the center of the volume with shape `data.shape[:3]` (default None)
    roi_radius : int, optional
        radius of cubic ROI in voxels (default 10)
    fa_thr : float, optional
        FA threshold for calculating the response function (default 0.7)
    frf : variable float, optional
        Fiber response function can be for example inputed as 15 4 4
        (from the command line) or [15, 4, 4] from a Python script to be
        converted to float and mutiplied by 10**-4 . If None
        the fiber response function will be computed automatically
        (default: None).
    extract_pam_values : bool, optional
        Save or not to save pam volumes as single nifti files.
    sh_order : int, optional
        Spherical harmonics order (default 8) used in the fit; lowered to
        6 automatically when fewer than 30 DWI volumes are available.
    odf_to_sh_order : int, optional
        Spherical harmonics order used for peak_from_model to compress
        the ODF to spherical harmonics coefficients (default 8)
    out_dir : string, optional
        Output directory (default input file directory)
    out_pam : string, optional
        Name of the peaks volume to be saved (default 'peaks.pam5')
    out_shm : string, optional
        Name of the shperical harmonics volume to be saved
        (default 'shm.nii.gz')
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved
        (default 'peaks_dirs.nii.gz')
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved
        (default 'peaks_values.nii.gz')
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved
        (default 'peaks_indices.nii.gz')
    out_gfa : string, optional
        Name of the generalise fa volume to be saved (default 'gfa.nii.gz')

    References
    ----------
    .. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination
       of the fibre orientation distribution in diffusion MRI:
       Non-negativity constrained super-resolved spherical deconvolution.
    """
    io_it = self.get_io_iterator()

    for (dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, opeaks_values,
         opeaks_indices, ogfa) in io_it:
        logging.info('Loading {0}'.format(dwi))
        img = nib.load(dwi)
        data = img.get_data()
        affine = img.affine

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        gtab = gradient_table(bvals, bvecs,
                              b0_threshold=b0_threshold, atol=bvecs_tol)
        # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool.
        mask_vol = nib.load(maskfile).get_data().astype(bool)

        # FIX: the original unconditionally reassigned sh_order = 8 here,
        # silently discarding the caller's sh_order argument. The parameter
        # is now honored (its default is still 8, so default behavior is
        # unchanged); it is only lowered when too few volumes exist.
        if data.shape[-1] < 15:
            raise ValueError(
                'You need at least 15 unique DWI volumes to '
                'compute fiber odfs. You currently have: {0}'
                ' DWI volumes.'.format(data.shape[-1]))
        elif data.shape[-1] < 30:
            sh_order = 6

        if frf is None:
            # Estimate the single-fiber response from a ROI automatically.
            logging.info('Computing response function')
            if roi_center is not None:
                logging.info('Response ROI center:\n{0}'
                             .format(roi_center))
                logging.info('Response ROI radius:\n{0}'
                             .format(roi_radius))
            response, ratio, nvox = auto_response(
                gtab, data,
                roi_center=roi_center,
                roi_radius=roi_radius,
                fa_thr=fa_thr,
                return_number_of_voxels=True)
            response = list(response)
        else:
            # User-supplied eigenvalues, given in units of 10**-4 mm^2/s.
            logging.info('Using response function')
            if isinstance(frf, str):
                l01 = np.array(literal_eval(frf), dtype=np.float64)
            else:
                l01 = np.array(frf, dtype=np.float64)
            l01 *= 10 ** -4
            response = np.array([l01[0], l01[1], l01[1]])
            ratio = l01[1] / l01[0]
            response = (response, ratio)

        logging.info(
            'Eigenvalues for the frf of the input data are :{0}'
            .format(response[0]))
        logging.info('Ratio for smallest to largest eigen value is {0}'
                     .format(ratio))

        peaks_sphere = get_sphere('repulsion724')

        logging.info('CSD computation started.')
        csd_model = ConstrainedSphericalDeconvModel(gtab, response,
                                                    sh_order=sh_order)
        peaks_csd = peaks_from_model(model=csd_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=sh_order,
                                     normalize_peaks=True,
                                     parallel=False)
        peaks_csd.affine = affine

        save_peaks(opam, peaks_csd)
        logging.info('CSD computation completed.')

        if extract_pam_values:
            peaks_to_niftis(peaks_csd, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        dname_ = os.path.dirname(opam)
        if dname_ == '':
            logging.info('Pam5 file saved in current directory')
        else:
            logging.info('Pam5 file saved in {0}'.format(dname_))

    return io_it
def dmri_recon(sid, data_dir, out_dir, resolution, recon='csd',
               dirs='', num_threads=2):
    """Reconstruct one subject's diffusion data and run EuDX tracking.

    Fits a tensor model (saving FA/AD/RD/MD/eigenvector maps), fits the
    requested ODF model, extracts peaks, and runs deterministic EuDX
    tracking. All outputs are written to the current working directory
    with ``sid`` as the filename prefix.

    Parameters
    ----------
    sid : str
        Subject identifier; used as the output filename prefix.
    data_dir : str
        Directory holding the DWI volume, masks and bvals/bvecs.
    out_dir : str
        Unused in this implementation; kept for interface compatibility.
    resolution : str
        '0.2mm' selects the native-resolution file; any other value selects
        the resampled file matching that resolution string.
    recon : str, optional
        ODF model to use: 'csd' (default), 'csa' or 'gqi'.
    dirs : str, optional
        Unused (leftover from an angular-resampling variant of this script).
    num_threads : int, optional
        Exported to MKL_NUM_THREADS / OMP_NUM_THREADS and used to decide
        whether peak extraction runs in parallel.

    Returns
    -------
    tuple
        (tensor_fa_file, tensor_evec_file, model_gfa_file, sl_fname, affine,
         tensor_ad_file, tensor_rd_file, tensor_md_file, shm_coeff_file,
         peaks_file)
    """
    import tempfile
    #tempfile.tempdir = '/om/scratch/Fri/ksitek/'

    import os

    # Pin thread counts for MKL/OpenMP, remembering any previous values so
    # they can be restored before returning.
    oldval = None
    if 'MKL_NUM_THREADS' in os.environ:
        oldval = os.environ['MKL_NUM_THREADS']
    os.environ['MKL_NUM_THREADS'] = '%d' % num_threads
    ompoldval = None
    if 'OMP_NUM_THREADS' in os.environ:
        ompoldval = os.environ['OMP_NUM_THREADS']
    os.environ['OMP_NUM_THREADS'] = '%d' % num_threads

    import nibabel as nib
    import numpy as np
    from glob import glob

    # Locate the DWI volume for the requested resolution.
    if resolution == '0.2mm':
        filename = 'Reg_S64550_nii4d.nii'
        #filename = 'angular_resample/dwi_%s.nii.gz' % dirs
        fimg = os.path.abspath(glob(os.path.join(data_dir, filename))[0])
    else:
        filename = 'Reg_S64550_nii4d_resamp-%s.nii.gz' % (resolution)
        fimg = os.path.abspath(
            glob(os.path.join(data_dir, 'resample', filename))[0])
    print("dwi file = %s" % fimg)

    fbval = os.path.abspath(
        glob(os.path.join(data_dir, 'bvecs', 'camino_120_RAS.bvals'))[0])
    print("bval file = %s" % fbval)
    fbvec = os.path.abspath(
        glob(os.path.join(data_dir, 'bvecs',
                          'camino_120_RAS_flipped-xy.bvecs'))[0])
    print("bvec file = %s" % fbvec)

    img = nib.load(fimg)
    data = img.get_fdata()
    affine = img.get_affine()
    prefix = sid

    from dipy.io import read_bvals_bvecs
    bvals, bvecs = read_bvals_bvecs(fbval, fbvec)

    from dipy.core.gradients import gradient_table
    gtab = gradient_table(bvals, bvecs)

    # Brain mask matching the selected resolution.
    if resolution == '0.2mm':
        mask_name = 'Reg_S64550_nii_b0-slice_mask.nii.gz'
        fmask1 = os.path.join(data_dir, mask_name)
    else:
        mask_name = 'Reg_S64550_nii_b0-slice_mask_resamp-%s.nii.gz' % (
            resolution)
        fmask1 = os.path.join(data_dir, 'resample', mask_name)
    print("fmask file = %s" % fmask1)
    mask = nib.load(fmask1).get_fdata()

    # ---- DTI model & scalar metric maps -------------------------------
    from dipy.reconst.dti import TensorModel
    print("running tensor model")
    tenmodel = TensorModel(gtab)
    tenfit = tenmodel.fit(data, mask)

    from dipy.reconst.dti import fractional_anisotropy
    print("running FA")
    FA = fractional_anisotropy(tenfit.evals)
    FA[np.isnan(FA)] = 0
    fa_img = nib.Nifti1Image(FA, img.get_affine())
    tensor_fa_file = os.path.abspath('%s_tensor_fa.nii.gz' % (prefix))
    nib.save(fa_img, tensor_fa_file)

    from dipy.reconst.dti import axial_diffusivity
    print("running AD")
    AD = axial_diffusivity(tenfit.evals)
    AD[np.isnan(AD)] = 0
    ad_img = nib.Nifti1Image(AD, img.get_affine())
    tensor_ad_file = os.path.abspath('%s_tensor_ad.nii.gz' % (prefix))
    nib.save(ad_img, tensor_ad_file)

    from dipy.reconst.dti import radial_diffusivity
    print("running RD")
    RD = radial_diffusivity(tenfit.evals)
    RD[np.isnan(RD)] = 0
    rd_img = nib.Nifti1Image(RD, img.get_affine())
    tensor_rd_file = os.path.abspath('%s_tensor_rd.nii.gz' % (prefix))
    nib.save(rd_img, tensor_rd_file)

    from dipy.reconst.dti import mean_diffusivity
    print("running MD")
    MD = mean_diffusivity(tenfit.evals)
    MD[np.isnan(MD)] = 0
    md_img = nib.Nifti1Image(MD, img.get_affine())
    tensor_md_file = os.path.abspath('%s_tensor_md.nii.gz' % (prefix))
    nib.save(md_img, tensor_md_file)

    evecs = tenfit.evecs
    evec_img = nib.Nifti1Image(evecs, img.get_affine())
    tensor_evec_file = os.path.abspath('%s_tensor_evec.nii.gz' % (prefix))
    nib.save(evec_img, tensor_evec_file)

    # ---- ODF model ----------------------------------------------------
    useFA = True
    print("creating %s model" % recon)
    if recon == 'csd':
        from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel
        from dipy.reconst.csdeconv import auto_response
        response, ratio = auto_response(gtab, data,
                                        roi_radius=10, fa_thr=0.5)  # 0.7
        model = ConstrainedSphericalDeconvModel(gtab, response)
        useFA = True
        return_sh = True
    elif recon == 'csa':
        from dipy.reconst.shm import CsaOdfModel, normalize_data
        model = CsaOdfModel(gtab, sh_order=8)
        useFA = True
        return_sh = True
    elif recon == 'gqi':
        from dipy.reconst.gqi import GeneralizedQSamplingModel
        model = GeneralizedQSamplingModel(gtab)
        return_sh = False
    else:
        # FIX: the original raised here and then fell through to dead
        # DiffusionSpectrumDeconvModel code; the unreachable lines have
        # been removed.
        raise ValueError('only csd, csa supported currently')

    # ---- reconstruct ODFs / extract peaks -----------------------------
    from dipy.data import get_sphere
    sphere = get_sphere('symmetric724')

    # with CSD/GQI, uses > 50GB per core; don't get greedy with cores!
    from dipy.reconst.peaks import peaks_from_model
    print("running peaks_from_model")
    peaks = peaks_from_model(
        model=model,
        data=data,
        sphere=sphere,
        mask=mask,
        return_sh=return_sh,
        return_odf=False,
        normalize_peaks=True,
        npeaks=5,
        relative_peak_threshold=.5,
        min_separation_angle=10,  # 25,
        parallel=num_threads > 1,
        nbr_processes=num_threads)

    # save the peaks
    from dipy.io.peaks import save_peaks
    peaks_file = os.path.abspath('%s_peaks.pam5' % (prefix))
    save_peaks(peaks_file, peaks)

    # save the spherical harmonics
    shm_coeff_file = os.path.abspath('%s_shm_coeff.nii.gz' % (prefix))
    if return_sh:
        shm_coeff = peaks.shm_coeff
        nib.save(nib.Nifti1Image(shm_coeff, img.get_affine()),
                 shm_coeff_file)
    else:
        # if it's not a spherical model, output it as an essentially
        # null file
        np.savetxt(shm_coeff_file, [0])

    # save the generalized fractional anisotropy image
    gfa_img = nib.Nifti1Image(peaks.gfa, img.get_affine())
    model_gfa_file = os.path.abspath('%s_%s_gfa.nii.gz' % (prefix, recon))
    nib.save(gfa_img, model_gfa_file)

    # ---- deterministic tracking with the EuDX method ------------------
    from dipy.tracking.eudx import EuDX
    print("reconstructing with EuDX")
    if useFA:
        eu = EuDX(FA,
                  peaks.peak_indices[..., 0],
                  odf_vertices=sphere.vertices,
                  a_low=0.001,  # default is 0.0239
                  seeds=10**6,
                  ang_thr=75)
    else:
        eu = EuDX(peaks.gfa,
                  peaks.peak_indices[..., 0],
                  odf_vertices=sphere.vertices,
                  #a_low=0.1,
                  seeds=10**6,
                  ang_thr=45)

    sl_fname = os.path.abspath('%s_%s_det_streamline.trk' % (prefix, recon))

    # trying new dipy.io.streamline module, per email to neuroimaging list
    # 2018.04.05
    from nibabel.streamlines import Field
    from nibabel.orientations import aff2axcodes
    affine = img.get_affine()
    vox_size = fa_img.get_header().get_zooms()[:3]
    fov_shape = FA.shape[:3]

    if vox_size is not None and fov_shape is not None:
        hdr = {}
        hdr[Field.VOXEL_TO_RASMM] = affine.copy()
        hdr[Field.VOXEL_SIZES] = vox_size
        hdr[Field.DIMENSIONS] = fov_shape
        hdr[Field.VOXEL_ORDER] = "".join(aff2axcodes(affine))

    tractogram = nib.streamlines.Tractogram(eu)
    tractogram.affine_to_rasmm = affine
    trk_file = nib.streamlines.TrkFile(tractogram, header=hdr)
    nib.streamlines.save(trk_file, sl_fname)

    # Restore the thread-count environment variables.
    # FIX: compare against None (not truthiness) so an originally-empty
    # value is restored rather than deleted.
    if oldval is not None:
        os.environ['MKL_NUM_THREADS'] = oldval
    else:
        del os.environ['MKL_NUM_THREADS']
    if ompoldval is not None:
        os.environ['OMP_NUM_THREADS'] = ompoldval
    else:
        del os.environ['OMP_NUM_THREADS']

    print('all output files created')

    return (tensor_fa_file, tensor_evec_file, model_gfa_file,
            sl_fname, affine, tensor_ad_file, tensor_rd_file,
            tensor_md_file, shm_coeff_file, peaks_file)
def tracking(folder):
    """Run CSD peak extraction and deterministic local tracking.

    Reads ``data.nii.gz``, ``nodif_brain_mask.nii.gz``, ``bvals`` and
    ``bvecs`` from ``folder`` and writes peaks plus a whole-brain
    tractogram into ``folder + 'dipy_out/'``.

    Parameters
    ----------
    folder : str
        Subject directory path, expected to end with a path separator
        (paths are built by plain string concatenation).
    """
    print('Tracking in ' + folder)
    output_folder = folder + 'dipy_out/'

    # make a folder to save new data into
    try:
        Path(output_folder).mkdir(parents=True, exist_ok=True)
    except OSError:
        print('Could not create output dir. Aborting...')
        return

    # load data
    print('Loading data...')
    img = nib.load(folder + 'data.nii.gz')
    dmri = np.asarray(img.dataobj)
    affine = img.affine
    mask, _ = load_nifti(folder + 'nodif_brain_mask.nii.gz')
    bvals, bvecs = read_bvals_bvecs(folder + 'bvals', folder + 'bvecs')
    gtab = gradient_table(bvals, bvecs)

    # extract peaks, reusing a cached .pam5 from a previous run if present
    if Path(output_folder + 'peaks.pam5').exists():
        peaks = load_peaks(output_folder + 'peaks.pam5')
    else:
        print('Extracting peaks...')
        # FIX: local was misspelled 'ration'; renamed to 'ratio'
        response, ratio = auto_response(gtab, dmri, roi_radius=10, fa_thr=.7)
        csd_model = ConstrainedSphericalDeconvModel(gtab, response)
        peaks = peaks_from_model(model=csd_model,
                                 data=dmri,
                                 sphere=default_sphere,
                                 relative_peak_threshold=.5,
                                 min_separation_angle=25,
                                 parallel=True)
        save_peaks(output_folder + 'peaks.pam5', peaks, affine)
        # Scale each peak direction by its peak value, then keep only the
        # first three peaks so the result fits a 9-channel nifti volume.
        scaled = peaks.peak_dirs * np.repeat(
            np.expand_dims(peaks.peak_values, -1), 3, -1)
        cropped = scaled[:, :, :, :3, :].reshape(dmri.shape[:3] + (9, ))
        save_nifti(output_folder + 'peaks.nii.gz', cropped, affine)
        #save_nifti(output_folder + 'peak_dirs.nii.gz', peaks.peak_dirs, affine)
        #save_nifti(output_folder + 'peak_vals.nii.gz', peaks.peak_values, affine)

    # tracking
    print('Tracking...')
    # Recompute a brain mask from the data itself; this replaces the
    # 'nodif_brain_mask' loaded above.
    maskdata, mask = median_otsu(dmri,
                                 vol_idx=range(0, dmri.shape[3]),
                                 median_radius=3,
                                 numpass=1,
                                 autocrop=True,
                                 dilate=2)
    tensor_model = TensorModel(gtab, fit_method='WLS')
    tensor_fit = tensor_model.fit(maskdata)
    fa = fractional_anisotropy(tensor_fit.evals)
    fa[np.isnan(fa)] = 0
    # FIX: removed unused local 'bla = np.average(fa)'
    tissue_classifier = ThresholdStoppingCriterion(fa, .1)
    seeds = random_seeds_from_mask(fa > 1e-5, affine, seeds_count=1)
    streamline_generator = LocalTracking(direction_getter=peaks,
                                         stopping_criterion=tissue_classifier,
                                         seeds=seeds,
                                         affine=affine,
                                         step_size=.5)
    streamlines = Streamlines(streamline_generator)
    save_trk(StatefulTractogram(streamlines, img, Space.RASMM),
             output_folder + 'whole_brain.trk')
# Elef add CSD version and add MTMSCSD when is ready. pass pam = peaks_from_model(model, data, sphere, relative_peak_threshold=.8, min_separation_angle=45, mask=mask, parallel=parallel) ten_model = TensorModel(gtab) fa = ten_model.fit(data, mask).fa save_nifti(ffa, fa, affine) save_peaks(fpam5, pam, affine) show_odfs_and_fa(fa, pam, mask, None, sphere, ftmp='odf.mmap', basis_type=None) pve_csf, pve_gm, pve_wm = pve[..., 0], pve[..., 1], pve[..., 2] cmc_classifier = CmcTissueClassifier.from_pve( pve_wm, pve_gm, pve_csf, step_size=step_size, average_voxel_size=np.average(vox_size)) seed_mask = np.zeros(mask.shape) seed_mask[mask > 0] = 1 seed_mask[pve_wm < 0.5] = 0
pass pam = peaks_from_model(model, data, sphere, relative_peak_threshold=.8, min_separation_angle=45, mask=mask, parallel=parallel) ten_model = TensorModel(gtab) fa = ten_model.fit(data, mask).fa save_nifti(ffa, fa, affine) save_peaks(fpam5, pam, affine) show_odfs_and_fa(fa, pam, mask, None, sphere, ftmp='odf.mmap', basis_type=None) pve_csf, pve_gm, pve_wm = pve[..., 0], pve[..., 1], pve[..., 2] cmc_classifier = CmcTissueClassifier.from_pve( pve_wm, pve_gm, pve_csf, step_size=step_size, average_voxel_size=np.average(vox_size)) seed_mask = np.zeros(mask.shape) seed_mask[mask > 0] = 1
def run(self, input_files, bvalues, bvectors, mask_files,
        b0_threshold=0.0, extract_pam_values=False, out_dir='',
        out_pam='peaks.pam5', out_shm='shm.nii.gz',
        out_peaks_dir='peaks_dirs.nii.gz',
        out_peaks_values='peaks_values.nii.gz',
        out_peaks_indices='peaks_indices.nii.gz',
        out_gfa='gfa.nii.gz'):
    """ Workflow for peaks computation. Peaks computation is done by
    'globing' ``input_files`` and saves the peaks in a directory specified
    by ``out_dir``.

    Parameters
    ----------
    input_files : string
        Path to the input volumes. This path may contain wildcards to
        process multiple inputs at once.
    bvalues : string
        Path to the bvalues files. This path may contain wildcards to use
        multiple bvalues files at once.
    bvectors : string
        Path to the bvectors files. This path may contain wildcards to use
        multiple bvectors files at once.
    mask_files : string
        Path to the input masks. This path may contain wildcards to use
        multiple masks at once. (default: No mask used)
    b0_threshold : float, optional
        Threshold used to find b=0 directions
    extract_pam_values : bool, optional
        Whether or not to save pam volumes as single nifti files.
    out_dir : string, optional
        Output directory (default input file directory)
    out_pam : string, optional
        Name of the peaks volume to be saved (default 'peaks.pam5')
    out_shm : string, optional
        Name of the spherical harmonics volume to be saved
        (default 'shm.nii.gz')
    out_peaks_dir : string, optional
        Name of the peaks directions volume to be saved
        (default 'peaks_dirs.nii.gz')
    out_peaks_values : string, optional
        Name of the peaks values volume to be saved
        (default 'peaks_values.nii.gz')
    out_peaks_indices : string, optional
        Name of the peaks indices volume to be saved
        (default 'peaks_indices.nii.gz')
    out_gfa : string, optional
        Name of the generalised fa volume to be saved
        (default 'gfa.nii.gz')
    """
    io_it = self.get_io_iterator()

    for dwi, bval, bvec, maskfile, opam, oshm, opeaks_dir, \
            opeaks_values, opeaks_indices, ogfa in io_it:

        logging.info('Computing fiber odfs for {0}'.format(dwi))
        vol = nib.load(dwi)
        # NOTE(review): get_data() is deprecated in recent nibabel;
        # np.asanyarray(vol.dataobj) is the documented replacement.
        data = vol.get_data()
        # FIX: get_affine() was removed from nibabel; the long-standing
        # equivalent is the ``affine`` attribute.
        affine = vol.affine

        bvals, bvecs = read_bvals_bvecs(bval, bvec)
        gtab = gradient_table(bvals, bvecs, b0_threshold=b0_threshold)
        # FIX: np.bool was removed in NumPy 1.24; builtin bool is the
        # exact replacement for boolean-casting an array here.
        mask_vol = nib.load(maskfile).get_data().astype(bool)

        # SH order 8 needs at least 45 unique directions; drop to 6 when
        # fewer than 30 volumes are available, and refuse below 15.
        sh_order = 8
        if data.shape[-1] < 15:
            raise ValueError('You need at least 15 unique DWI volumes to '
                             'compute fiber odfs. You currently have: {0}'
                             ' DWI volumes.'.format(data.shape[-1]))
        elif data.shape[-1] < 30:
            sh_order = 6

        response, ratio = auto_response(gtab, data)
        response = list(response)

        logging.info(
            'Eigenvalues for the frf of the input data are :{0}'.format(
                response[0]))
        logging.info(
            'Ratio for smallest to largest eigen value is {0}'.format(
                ratio))

        peaks_sphere = get_sphere('symmetric362')

        csa_model = CsaOdfModel(gtab, sh_order)

        peaks_csa = peaks_from_model(model=csa_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask_vol,
                                     return_sh=True,
                                     sh_order=sh_order,
                                     normalize_peaks=True,
                                     parallel=False)
        peaks_csa.affine = affine

        save_peaks(opam, peaks_csa)

        if extract_pam_values:
            peaks_to_niftis(peaks_csa, oshm, opeaks_dir, opeaks_values,
                            opeaks_indices, ogfa, reshape_dirs=True)

        logging.info('Peaks saved in {0}'.format(os.path.dirname(opam)))

    return io_it