def test_reshape_peaks_for_visualization():
    """Check that peak reshaping merges the last two axes and round-trips.

    For an input of shape (..., npeaks, 3) the visualization form must have
    shape (..., npeaks * 3), and reshaping it back must recover the input
    exactly.
    """
    shapes = [(10, 5, 3), (10, 2, 5, 3), (10, 2, 12, 5, 3)]
    for shape in shapes:
        peaks = np.random.randn(*shape).astype('float32')
        flattened = reshape_peaks_for_visualization(peaks)
        # The trailing (npeaks, 3) axes collapse into one axis of npeaks * 3.
        expected_shape = shape[:-2] + (shape[-2] * shape[-1],)
        assert_array_equal(flattened.shape, expected_shape)
        # Round-trip: restoring the original shape must give back the data.
        assert_array_equal(flattened.reshape(shape), peaks)
def main():
    """Parse CLI arguments, compute AFD/NUFO maps from the input volume,
    and save the requested output files.

    NOTE(review): relies on module-level helpers defined elsewhere in this
    project (_build_arg_parser, assert_inputs_exist, assert_outputs_exists,
    load, save, get_maps, reshape_peaks_for_visualization).
    """
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Unless --not_all was given, fill in a default filename for every output.
    if not args.not_all:
        args.afd = args.afd or 'afd_max.nii.gz'
        args.afd_total = args.afd_total or 'afd_total_sh0.nii.gz'
        args.afd_sum = args.afd_sum or 'afd_sum.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'

    arglist = [args.afd, args.afd_total, args.afd_sum, args.nufo, args.peaks]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [])
    assert_outputs_exists(parser, args, arglist)

    data, affine = load(args.input)
    if args.mask is None:
        # No mask supplied: include every voxel (ones over the spatial dims).
        mask = np.ones(data.shape[:-1])
    else:
        mask, affine2 = load(args.mask)

    nufo_map, afd_map, afd_sum, peaks_dirs = get_maps(data, mask, args)

    # Save result
    if args.nufo:
        save(nufo_map, affine, args.nufo)

    if args.afd:
        save(afd_map, affine, args.afd)

    if args.afd_total:
        # this is the analytical afd total
        afd_tot = data[:, :, :, 0]
        save(afd_tot, affine, args.afd_total)

    if args.afd_sum:
        save(afd_sum, affine, args.afd_sum)

    if args.peaks:
        nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peaks_dirs),
                                 affine),
                 args.peaks)

    if args.visu:
        # Rescale the maps to [0, 255] for visualization, unless constant
        # (min == max would divide by zero).
        if nufo_map.max() > nufo_map.min():
            nufo_map = (255 * (nufo_map - nufo_map.min()) /
                        (nufo_map.max() - nufo_map.min()))

        if afd_map.max() > afd_map.min():
            afd_map = (255 * (afd_map - afd_map.min()) /
                       (afd_map.max() - afd_map.min()))

        # NOTE(review): these saves use args.nufo and args.afd without
        # checking them; combining --not_all and --visu without --nufo/--afd
        # would pass an empty filename here — confirm intended usage.
        save(nufo_map, affine, args.nufo, True)
        save(afd_map, affine, args.afd, True)
def main():
    """Compute Q-ball/CSA ODF peaks and derived maps (GFA, SH coefficients,
    peak indices, NuFO, anisotropic power) from a DWI volume and save the
    requested outputs.

    NOTE(review): relies on module-level helpers and dipy/nibabel imports
    declared elsewhere in this file (_build_arg_parser, assert_inputs_exist,
    assert_outputs_exists, is_normalized_bvecs, normalize_bvecs, ...).
    """
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Unless --not_all was given, fill in a default filename for every output.
    if not args.not_all:
        args.gfa = args.gfa or 'gfa.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peaks_indices.nii.gz'
        args.sh = args.sh or 'sh.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.a_power = args.a_power or 'anisotropic_power.nii.gz'

    arglist = [
        args.gfa, args.peaks, args.peak_indices, args.sh, args.nufo,
        args.a_power
    ]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least ' +
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs])
    assert_outputs_exists(parser, args, arglist)

    # nbr_processes <= 0 means "let peaks_from_model decide" (None);
    # exactly 1 disables parallel processing altogether.
    nbr_processes = args.nbr_processes
    parallel = True
    if nbr_processes <= 0:
        nbr_processes = None
    elif nbr_processes == 1:
        parallel = False

    # Load data
    # NOTE(review): img.get_data()/get_affine() are deprecated in newer
    # nibabel (use img.get_fdata()/img.affine) — confirm pinned version.
    img = nib.load(args.input)
    data = img.get_data()
    affine = img.get_affine()

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)

    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    if bvals.min() != 0:
        if bvals.min() > 20:
            raise ValueError(
                'The minimal bvalue is greater than 20. This is highly '
                'suspicious. Please check your data to ensure everything is '
                'correct.\nValue found: {0}'.format(bvals.min()))
        else:
            # No b=0 volume: accept the smallest bvalue as the b0 threshold.
            logging.warning(
                'Warning: no b=0 image. Setting b0_threshold to '
                'bvals.min() = %s', bvals.min())
            gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())
    else:
        gtab = gradient_table(bvals, bvecs)

    sphere = get_sphere('symmetric724')

    if args.mask is None:
        mask = None
    else:
        # NOTE(review): np.bool is deprecated/removed in recent numpy
        # (use plain bool) — kept as-is here.
        mask = nib.load(args.mask).get_data().astype(np.bool)

    # Choose the reconstruction model (Q-ball vs constant solid angle ODF).
    if args.use_qball:
        model = QballModel(gtab, sh_order=int(args.sh_order), smooth=0.006)
    else:
        model = CsaOdfModel(gtab, sh_order=int(args.sh_order), smooth=0.006)

    odfpeaks = peaks_from_model(model=model,
                                data=data,
                                sphere=sphere,
                                relative_peak_threshold=.5,
                                min_separation_angle=25,
                                mask=mask,
                                return_odf=False,
                                normalize_peaks=True,
                                return_sh=True,
                                sh_order=int(args.sh_order),
                                sh_basis_type=args.basis,
                                npeaks=5,
                                parallel=parallel,
                                nbr_processes=nbr_processes)

    if args.gfa:
        nib.save(nib.Nifti1Image(odfpeaks.gfa.astype(np.float32), affine),
                 args.gfa)

    if args.peaks:
        nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(odfpeaks),
                                 affine),
                 args.peaks)

    if args.peak_indices:
        nib.save(nib.Nifti1Image(odfpeaks.peak_indices, affine),
                 args.peak_indices)

    if args.sh:
        nib.save(nib.Nifti1Image(odfpeaks.shm_coeff.astype(np.float32),
                                 affine),
                 args.sh)

    if args.nufo:
        # NuFO = number of detected peaks per voxel; unused peak slots are
        # marked with -1 in peak_indices.
        peaks_count = (odfpeaks.peak_indices > -1).sum(3)
        nib.save(nib.Nifti1Image(peaks_count.astype(np.int32), affine),
                 args.nufo)

    if args.a_power:
        odf_a_power = anisotropic_power(odfpeaks.shm_coeff)
        nib.save(nib.Nifti1Image(odf_a_power.astype(np.float32), affine),
                 args.a_power)
def main():
    """Estimate the fiber response function (FRF), fit a CSD model, and
    save the fODF SH coefficients, peaks and peak indices as requested.

    NOTE(review): relies on module-level helpers and dipy/nibabel imports
    declared elsewhere in this file (_build_arg_parser, assert_inputs_exist,
    assert_outputs_exists, split_name_with_nii, applymask, auto_response,
    literal_eval, ...).
    """
    parser = _build_arg_parser()
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    # Unless --not_all was given, fill in a default filename for every output.
    if not args.not_all:
        args.fodf = args.fodf or 'fodf.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

    arglist = [args.fodf, args.peaks, args.peak_indices]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs])
    assert_outputs_exists(parser, args, arglist)

    # nbr_processes <= 0 means "let peaks_from_model decide" (None);
    # exactly 1 disables parallel processing altogether.
    nbr_processes = args.nbr_processes
    parallel = True
    if nbr_processes <= 0:
        nbr_processes = None
    elif nbr_processes == 1:
        parallel = False

    # Check for FRF filename
    base_odf_name, _ = split_name_with_nii(args.fodf)
    frf_filename = base_odf_name + '_frf.txt'
    if os.path.isfile(frf_filename) and not args.overwrite:
        parser.error('Cannot save frf file, "{0}" already exists. '
                     'Use -f to overwrite.'.format(frf_filename))

    vol = nib.load(args.input)
    data = vol.get_data()

    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)

    if args.mask_wm is not None:
        wm_mask = nib.load(args.mask_wm).get_data().astype('bool')
    else:
        # NOTE(review): np.bool is deprecated/removed in recent numpy; kept
        # for consistency with the rest of this file.
        wm_mask = np.ones_like(data[..., 0], dtype=np.bool)
        logging.info(
            'No white matter mask specified! mask_data will be used instead, '
            'if it has been supplied. \nBe *VERY* careful about the '
            'estimation of the fiber response function for the CSD.')

    data_in_wm = applymask(data, wm_mask)

    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    if bvals.min() != 0:
        if bvals.min() > 20:
            raise ValueError(
                'The minimal bvalue is greater than 20. This is highly '
                'suspicious. Please check your data to ensure everything is '
                'correct.\nValue found: {}'.format(bvals.min()))
        else:
            # No b=0 volume: accept the smallest bvalue as the b0 threshold.
            logging.warning(
                'Warning: no b=0 image. Setting b0_threshold to '
                'bvals.min() = %s', bvals.min())
            gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())
    else:
        gtab = gradient_table(bvals, bvecs)

    if args.mask is None:
        mask = None
    else:
        mask = nib.load(args.mask).get_data().astype(np.bool)

    # Raise warning for sh order if there is not enough DWIs
    required_nb_dwi = (args.sh_order + 1) * (args.sh_order + 2) / 2
    if data.shape[-1] < required_nb_dwi:
        # BUGFIX: the original passed the message with un-interpolated %s
        # placeholders and a misplaced closing paren, so the computed count
        # landed in warnings.warn's `category` parameter (a TypeError at
        # runtime) and data.shape[-1] was a dead expression. Interpolate the
        # message explicitly instead.
        warnings.warn(
            'We recommend having at least %s unique DWIs volumes, but you '
            'currently have %s volumes. Try lowering the parameter --sh_order '
            'in case of non convergence.' % (required_nb_dwi, data.shape[-1]))

    fa_thresh = args.fa_thresh
    # If threshold is too high, try lower until enough indices are found
    # estimating a response function with fa < 0.5 does not make sense
    nvox = 0
    while nvox < 300 and fa_thresh > 0.5:
        response, ratio, nvox = auto_response(
            gtab, data_in_wm,
            roi_center=args.roi_center,
            roi_radius=args.roi_radius,
            fa_thr=fa_thresh,
            return_number_of_voxels=True)
        logging.info('Number of indices is %s with threshold of %s',
                     nvox, fa_thresh)
        fa_thresh -= 0.05

    # BUGFIX: the loop stops once fa_thresh drops to 0.5, so the original
    # guard `fa_thresh <= 0` could never fire and a failed search was
    # silently accepted. Test the actual failure condition instead.
    if nvox < 300:
        raise ValueError(
            'Could not find at least 300 voxels for estimating the frf!')

    logging.info('Found %s valid voxels for frf estimation.', nvox)

    response = list(response)
    logging.info('Response function is %s', response)

    if args.frf is not None:
        # User-supplied eigenvalues override the estimated FRF; unless
        # --no_factor, they are interpreted in units of 10^-4 mm^2/s.
        l01 = np.array(literal_eval(args.frf), dtype=np.float64)
        if not args.no_factor:
            l01 *= 10 ** -4

        response[0] = np.array([l01[0], l01[1], l01[1]])
        ratio = l01[1] / l01[0]

    logging.info("Eigenvalues for the frf of the input data are: %s",
                 response[0])
    logging.info("Ratio for smallest to largest eigen value is %s", ratio)
    np.savetxt(frf_filename, response[0])

    if not args.frf_only:
        reg_sphere = get_sphere('symmetric362')
        peaks_sphere = get_sphere('symmetric724')

        csd_model = ConstrainedSphericalDeconvModel(
            gtab, response,
            reg_sphere=reg_sphere,
            sh_order=args.sh_order)

        peaks_csd = peaks_from_model(model=csd_model,
                                     data=data,
                                     sphere=peaks_sphere,
                                     relative_peak_threshold=.5,
                                     min_separation_angle=25,
                                     mask=mask,
                                     return_sh=True,
                                     sh_basis_type=args.basis,
                                     sh_order=args.sh_order,
                                     normalize_peaks=True,
                                     parallel=parallel,
                                     nbr_processes=nbr_processes)

        if args.fodf:
            nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32),
                                     vol.affine),
                     args.fodf)

        if args.peaks:
            nib.save(
                nib.Nifti1Image(reshape_peaks_for_visualization(peaks_csd),
                                vol.affine),
                args.peaks)

        if args.peak_indices:
            nib.save(nib.Nifti1Image(peaks_csd.peak_indices, vol.affine),
                     args.peak_indices)
# Standalone script: fit a CSD model on the tp3 sample data and save the
# fODF SH coefficients and peaks.
# NOTE(review): relies on imports defined elsewhere in this file (nib,
# np, get_sphere, read_bvals_bvecs, gradient_table, applymask, ...).
sphere = get_sphere('symmetric362')

# BUGFIX: the original used Python-2-only `print "..."` statements, which
# are syntax errors under Python 3. The single-argument call form below
# prints the same text under both interpreters.
print("loading bval/bvec files")
bvals, bvecs = read_bvals_bvecs("tp3_data//bvals2000",
                                "tp3_data//bvecs2000")
gtab = gradient_table(bvals, bvecs)

print("loading nifti files")
img = nib.load("tp3_data//dwi2000.nii.gz")
# NOTE(review): get_affine()/get_data() are deprecated in newer nibabel
# (img.affine / img.get_fdata()) — confirm the pinned nibabel version.
affine = img.get_affine()
data = img.get_data()
mask = nib.load("tp3_data//_binary_mask.nii.gz").get_data()

## Apply mask
data_in_wm = applymask(data, mask)

response, ratio = auto_response(gtab, data_in_wm)

# Computing ODF
print("computing fODF... please wait an hour")
csd_model = ConstrainedSphericalDeconvModel(gtab, response,
                                            reg_sphere=sphere)
peaks_csd = peaks_from_model(model=csd_model,
                             data=data,
                             sphere=sphere,
                             relative_peak_threshold=.25,
                             min_separation_angle=25,
                             mask=mask,
                             normalize_peaks=True,
                             parallel=True)

# Saving files
print("saving files")
nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32), affine),
         "tp3_data//_fodf.nii.gz")
nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peaks_csd), affine),
         "tp3_data//_fodfpeaks.nii.gz")
# Fit a CSD model and save the fODF SH coefficients and peaks.
# NOTE(review): this is a fragment of a larger script — bvals, bvecs,
# dwi_filename, mask_filename, sphere, fODF_filename and fODFpeaks_filename
# must be defined earlier, outside this chunk.
gtab = gradient_table(bvals, bvecs)

# NOTE(review): get_affine()/get_data() are deprecated in newer nibabel
# (img.affine / img.get_fdata()) — confirm the pinned nibabel version.
img = nib.load(dwi_filename)
affine = img.get_affine()
data = img.get_data()
mask = nib.load(mask_filename).get_data()

# Restrict the response-function estimation to the masked voxels.
data_in_wm = applymask(data, mask)

response, ratio = auto_response(gtab, data_in_wm)

csd_model = ConstrainedSphericalDeconvModel(gtab, response,
                                            reg_sphere=sphere)
peaks_csd = peaks_from_model(model=csd_model,
                             data=data,
                             sphere=sphere,
                             relative_peak_threshold=.25,
                             min_separation_angle=25,
                             mask=mask,
                             normalize_peaks=True,
                             parallel=True)

nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32), affine),
         fODF_filename)
nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peaks_csd), affine),
         fODFpeaks_filename)