def peaks_to_niftis(pam, fname_shm, fname_dirs, fname_values, fname_indices, fname_gfa, reshape_dirs=False):
    """Write a peaks result object to a set of Nifti files.

    Saves, in this order: the SH coefficients, the peak directions
    (optionally reshaped for visualization), the peak values, the peak
    indices and the GFA map, all with the object's affine.
    """
    affine = pam.affine
    save_nifti(fname_shm, pam.shm_coeff.astype(np.float32), affine)

    # Either flatten the direction axes for visualization tools, or keep
    # the native (..., npeaks, 3) layout as float32.
    dirs = (reshape_peaks_for_visualization(pam) if reshape_dirs
            else pam.peak_dirs.astype(np.float32))
    save_nifti(fname_dirs, dirs, affine)

    save_nifti(fname_values, pam.peak_values.astype(np.float32), affine)
    save_nifti(fname_indices, pam.peak_indices, affine)
    save_nifti(fname_gfa, pam.gfa, affine)
def test_reshape_peaks_for_visualization():
    """reshape_peaks_for_visualization must merge the trailing
    (npeaks, 3) axes into one flat axis, losslessly, for 3-D, 4-D and
    5-D peak arrays."""
    shapes = [(10, 5, 3), (10, 2, 5, 3), (10, 2, 12, 5, 3)]
    for shape in shapes:
        peaks = np.random.randn(*shape).astype('float32')
        flat = reshape_peaks_for_visualization(peaks)

        # Last two axes collapse into a single axis of size npeaks * 3.
        expected = shape[:-2] + (shape[-2] * shape[-1],)
        assert_array_equal(flat.shape, expected)

        # Reshaping back must recover the original data exactly.
        assert_array_equal(flat.reshape(shape), peaks)
def test_reshape_peaks_for_visualization():
    """Check that reshape_peaks_for_visualization flattens the last two
    axes (npeaks, 3) into one and that the transform is invertible."""
    peaks_3d = np.random.randn(10, 5, 3).astype('float32')
    peaks_4d = np.random.randn(10, 2, 5, 3).astype('float32')
    peaks_5d = np.random.randn(10, 2, 12, 5, 3).astype('float32')

    reshaped_3d = reshape_peaks_for_visualization(peaks_3d)
    reshaped_4d = reshape_peaks_for_visualization(peaks_4d)
    reshaped_5d = reshape_peaks_for_visualization(peaks_5d)

    # 5 peaks x 3 components collapse into a flat axis of 15.
    assert_array_equal(reshaped_3d.shape, (10, 15))
    assert_array_equal(reshaped_4d.shape, (10, 2, 15))
    assert_array_equal(reshaped_5d.shape, (10, 2, 12, 15))

    # Round-trip back to the original layout must be exact.
    assert_array_equal(reshaped_3d.reshape(10, 5, 3), peaks_3d)
    assert_array_equal(reshaped_4d.reshape(10, 2, 5, 3), peaks_4d)
    assert_array_equal(reshaped_5d.reshape(10, 2, 12, 5, 3), peaks_5d)
def main():
    """Script entry point: compute NuFO/AFD maps and peak directions from
    the input volume and save the requested outputs."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Without --not_all, every unspecified output falls back to a default
    # file name so all maps are produced.
    if not args.not_all:
        args.afd = args.afd or 'afd_max.nii.gz'
        args.afd_total = args.afd_total or 'afd_total_sh0.nii.gz'
        args.afd_sum = args.afd_sum or 'afd_sum.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'

    arglist = [args.afd, args.afd_total, args.afd_sum, args.nufo, args.peaks]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    # NOTE(review): called with an empty list, so the existence of
    # args.input / args.mask is never checked here — confirm intended.
    assert_inputs_exist(parser, [])
    # NOTE(review): other scripts in this project call
    # `assert_outputs_exist` (no trailing s) — confirm this name exists.
    assert_outputs_exists(parser, args, arglist)

    data, affine = load(args.input)

    if args.mask is None:
        # No mask supplied: process every voxel.
        mask = np.ones(data.shape[:-1])
    else:
        # affine2 is unused; the mask is assumed to be on the same grid
        # as the data — TODO confirm.
        mask, affine2 = load(args.mask)

    nufo_map, afd_map, afd_sum, peaks_dirs = get_maps(data, mask, args)

    # Save result
    if args.nufo:
        save(nufo_map, affine, args.nufo)
    if args.afd:
        save(afd_map, affine, args.afd)
    if args.afd_total:
        # this is the analytical afd total (first volume of the input,
        # cf. the default file name 'afd_total_sh0.nii.gz')
        afd_tot = data[:, :, :, 0]
        save(afd_tot, affine, args.afd_total)
    if args.afd_sum:
        save(afd_sum, affine, args.afd_sum)
    if args.peaks:
        nib.save(
            nib.Nifti1Image(reshape_peaks_for_visualization(peaks_dirs),
                            affine), args.peaks)

    if args.visu:
        # Rescale maps to [0, 255] for visualization; the guards avoid a
        # division by zero on constant maps.
        if nufo_map.max() > nufo_map.min():
            nufo_map = (255 * (nufo_map - nufo_map.min()) /
                        (nufo_map.max() - nufo_map.min()))
        if afd_map.max() > afd_map.min():
            afd_map = (255 * (afd_map - afd_map.min()) /
                       (afd_map.max() - afd_map.min()))
        save(nufo_map, affine, args.nufo, True)
        save(afd_map, affine, args.afd, True)
def main():
    """Script entry point: compute the fODF (CSD) from a DWI volume and
    save the requested outputs (SH coefficients, peaks, peak indices)."""
    parser = _build_arg_parser()
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)

    # Checking args: without --not_all, give every output a default name.
    if not args.not_all:
        args.fodf = args.fodf or 'fodf.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

    arglist = [args.fodf, args.peaks, args.peak_indices]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs,
                                 args.frf_file])
    assert_outputs_exist(parser, args, arglist)

    full_frf = np.loadtxt(args.frf_file)
    vol = nib.load(args.input)
    # Keep the array proxy rather than loading the whole volume here;
    # compute_fodf is assumed to handle lazy access — TODO confirm.
    data = vol.dataobj
    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)

    if args.mask is None:
        mask = None
    else:
        # FIX: `np.bool` was a deprecated alias of the builtin `bool`
        # and was removed in NumPy 1.24; use `bool` directly.
        mask = np.asanyarray(nib.load(args.mask).dataobj).astype(bool)

    # Computing fODF
    peaks_csd = compute_fodf(data, bvals, bvecs, full_frf,
                             sh_order=args.sh_order,
                             nbr_processes=args.nbr_processes,
                             mask=mask, sh_basis=args.sh_basis,
                             return_sh=True,
                             force_b0_threshold=args.force_b0_threshold)

    # Saving results
    if args.fodf:
        nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32),
                                 vol.affine), args.fodf)

    if args.peaks:
        nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peaks_csd),
                                 vol.affine), args.peaks)

    if args.peak_indices:
        nib.save(nib.Nifti1Image(peaks_csd.peak_indices, vol.affine),
                 args.peak_indices)
def main():
    """Script entry point: fit a CSD model to DWI data and save the fODF
    SH coefficients, peaks and peak indices."""
    parser = _build_arg_parser()
    args = parser.parse_args()
    logging.basicConfig(level=logging.INFO)

    # Without --not_all, give every output a default file name.
    if not args.not_all:
        args.fodf = args.fodf or 'fodf.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

    arglist = [args.fodf, args.peaks, args.peak_indices]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, [args.input, args.bvals, args.bvecs,
                                 args.frf_file])
    assert_outputs_exist(parser, args, arglist)

    # nbr_processes <= 0 means "let peaks_from_model decide"; exactly 1
    # disables parallel processing altogether.
    nbr_processes = args.nbr_processes
    parallel = True
    if nbr_processes is not None:
        if nbr_processes <= 0:
            nbr_processes = None
        elif nbr_processes == 1:
            parallel = False

    # FRF file layout: 3 response-function eigenvalues + mean b0 value.
    full_frf = np.loadtxt(args.frf_file)
    if not full_frf.shape[0] == 4:
        raise ValueError('FRF file did not contain 4 elements. '
                         'Invalid or deprecated FRF format')
    frf = full_frf[0:3]
    mean_b0_val = full_frf[3]

    vol = nib.load(args.input)
    # FIX: img.get_data() was deprecated and removed in nibabel 5.0;
    # get_fdata(dtype=np.float32) is the supported replacement (and
    # matches the other scripts in this project).
    data = vol.get_fdata(dtype=np.float32)
    bvals, bvecs = read_bvals_bvecs(args.bvals, args.bvecs)
    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    check_b0_threshold(args, bvals.min())
    gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())

    if args.mask is None:
        mask = None
    else:
        # FIX: `np.bool` was removed in NumPy 1.24 — use the builtin
        # `bool`; np.asanyarray(...dataobj) replaces the removed
        # get_data().
        mask = np.asanyarray(nib.load(args.mask).dataobj).astype(bool)

    # Raise warning for sh order if there is not enough DWIs: order L
    # needs (L+1)(L+2)/2 coefficients for a symmetric SH basis.
    if data.shape[-1] < (args.sh_order + 1) * (args.sh_order + 2) / 2:
        warnings.warn(
            'We recommend having at least {} unique DWIs volumes, but you '
            'currently have {} volumes. Try lowering the parameter '
            '--sh_order ' 'in case of non convergence.'.format(
                (args.sh_order + 1) * (args.sh_order + 2) / 2,
                data.shape[-1]))

    # Regularization sphere (coarse) and peak-extraction sphere (fine).
    reg_sphere = get_sphere('symmetric362')
    peaks_sphere = get_sphere('symmetric724')

    csd_model = ConstrainedSphericalDeconvModel(gtab, (frf, mean_b0_val),
                                                reg_sphere=reg_sphere,
                                                sh_order=args.sh_order)

    peaks_csd = peaks_from_model(model=csd_model,
                                 data=data,
                                 sphere=peaks_sphere,
                                 relative_peak_threshold=.5,
                                 min_separation_angle=25,
                                 mask=mask,
                                 return_sh=True,
                                 sh_basis_type=args.sh_basis,
                                 sh_order=args.sh_order,
                                 normalize_peaks=True,
                                 parallel=parallel,
                                 nbr_processes=nbr_processes)

    if args.fodf:
        nib.save(nib.Nifti1Image(peaks_csd.shm_coeff.astype(np.float32),
                                 vol.affine), args.fodf)

    if args.peaks:
        nib.save(nib.Nifti1Image(reshape_peaks_for_visualization(peaks_csd),
                                 vol.affine), args.peaks)

    if args.peak_indices:
        nib.save(nib.Nifti1Image(peaks_csd.peak_indices, vol.affine),
                 args.peak_indices)
def main():
    """Script entry point: extract peaks from an fODF (SH) image and
    derive NuFO/AFD/RGB maps, saving the requested outputs."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Without --not_all, give every output a default file name.
    if not args.not_all:
        args.afd_max = args.afd_max or 'afd_max.nii.gz'
        args.afd_total = args.afd_total or 'afd_total_sh0.nii.gz'
        args.afd_sum = args.afd_sum or 'afd_sum.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.rgb = args.rgb or 'rgb.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_values = args.peak_values or 'peak_values.nii.gz'
        args.peak_indices = args.peak_indices or 'peak_indices.nii.gz'

    arglist = [args.afd_max, args.afd_total, args.afd_sum, args.nufo,
               args.rgb, args.peaks, args.peak_values, args.peak_indices]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    assert_inputs_exist(parser, args.in_fODF)
    assert_outputs_exist(parser, args, arglist)

    vol = nib.load(args.in_fODF)
    data = vol.get_fdata(dtype=np.float32)
    affine = vol.affine

    if args.mask is None:
        mask = None
    else:
        mask = get_data_as_mask(nib.load(args.mask), dtype=bool)
        if mask.shape != data.shape[:-1]:
            raise ValueError("Mask is not the same shape as data.")

    sphere = get_sphere(args.sphere)

    # Computing peaks
    peak_dirs, peak_values, \
        peak_indices = peaks_from_sh(data, sphere, mask=mask,
                                     relative_peak_threshold=args.r_threshold,
                                     absolute_threshold=args.a_threshold,
                                     min_separation_angle=25,
                                     normalize_peaks=False,
                                     sh_basis_type=args.sh_basis,
                                     nbr_processes=args.nbr_processes)

    # Computing maps
    nufo_map, afd_max, afd_sum, rgb_map, \
        _, _ = maps_from_sh(data, peak_dirs, peak_values, peak_indices,
                            sphere, nbr_processes=args.nbr_processes)

    # Save result
    if args.nufo:
        nib.save(nib.Nifti1Image(nufo_map.astype(np.float32), affine),
                 args.nufo)

    if args.afd_max:
        nib.save(nib.Nifti1Image(afd_max.astype(np.float32), affine),
                 args.afd_max)

    if args.afd_total:
        # this is the analytical afd total
        afd_tot = data[:, :, :, 0]
        nib.save(nib.Nifti1Image(afd_tot.astype(np.float32), affine),
                 args.afd_total)

    if args.afd_sum:
        nib.save(nib.Nifti1Image(afd_sum.astype(np.float32), affine),
                 args.afd_sum)

    if args.rgb:
        nib.save(nib.Nifti1Image(rgb_map.astype('uint8'), affine), args.rgb)

    if args.peaks or args.peak_values:
        # Normalize each voxel's peak values by its first peak
        # (presumably the largest — confirm peaks_from_sh ordering);
        # voxels whose first peak is 0 stay at 0 via the where= guard.
        peak_values = np.divide(peak_values, peak_values[..., 0, None],
                                out=np.zeros_like(peak_values),
                                where=peak_values[..., 0, None] != 0)
        # Scale the peak directions by the normalized values, in place.
        peak_dirs[...] *= peak_values[..., :, None]
        if args.peaks:
            nib.save(nib.Nifti1Image(
                reshape_peaks_for_visualization(peak_dirs), affine),
                args.peaks)
        if args.peak_values:
            nib.save(nib.Nifti1Image(peak_values, vol.affine),
                     args.peak_values)

    if args.peak_indices:
        nib.save(nib.Nifti1Image(peak_indices, vol.affine),
                 args.peak_indices)
def main():
    """Script entry point: fit a Qball or CSA ODF model to DWI data and
    save GFA, peaks, peak indices, SH coefficients, NuFO and anisotropic
    power maps."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Without --not_all, give every output a default file name.
    if not args.not_all:
        args.gfa = args.gfa or 'gfa.nii.gz'
        args.peaks = args.peaks or 'peaks.nii.gz'
        args.peak_indices = args.peak_indices or 'peaks_indices.nii.gz'
        args.sh = args.sh or 'sh.nii.gz'
        args.nufo = args.nufo or 'nufo.nii.gz'
        args.a_power = args.a_power or 'anisotropic_power.nii.gz'

    arglist = [
        args.gfa, args.peaks, args.peak_indices, args.sh, args.nufo,
        args.a_power
    ]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least ' +
                     'one file to output.')

    assert_inputs_exist(parser, [args.in_dwi, args.in_bval, args.in_bvec])
    assert_outputs_exist(parser, args, arglist)
    validate_nbr_processes(parser, args)

    nbr_processes = args.nbr_processes
    parallel = nbr_processes > 1

    # Load data
    img = nib.load(args.in_dwi)
    data = img.get_fdata(dtype=np.float32)

    bvals, bvecs = read_bvals_bvecs(args.in_bval, args.in_bvec)
    if not is_normalized_bvecs(bvecs):
        logging.warning('Your b-vectors do not seem normalized...')
        bvecs = normalize_bvecs(bvecs)

    # Use the smallest b-value as the b0 threshold after validating it.
    check_b0_threshold(args, bvals.min())
    gtab = gradient_table(bvals, bvecs, b0_threshold=bvals.min())

    sphere = get_sphere('symmetric724')

    mask = None
    if args.mask:
        mask = get_data_as_mask(nib.load(args.mask))

        # Sanity check on shape of mask
        if mask.shape != data.shape[:-1]:
            raise ValueError('Mask shape does not match data shape.')

    if args.use_qball:
        model = QballModel(gtab, sh_order=args.sh_order,
                           smooth=DEFAULT_SMOOTH)
    else:
        model = CsaOdfModel(gtab, sh_order=args.sh_order,
                            smooth=DEFAULT_SMOOTH)

    odfpeaks = peaks_from_model(model=model,
                                data=data,
                                sphere=sphere,
                                relative_peak_threshold=.5,
                                min_separation_angle=25,
                                mask=mask,
                                return_odf=False,
                                normalize_peaks=True,
                                return_sh=True,
                                sh_order=int(args.sh_order),
                                sh_basis_type=args.sh_basis,
                                npeaks=5,
                                parallel=parallel,
                                nbr_processes=nbr_processes)

    if args.gfa:
        nib.save(nib.Nifti1Image(odfpeaks.gfa.astype(np.float32),
                                 img.affine), args.gfa)

    if args.peaks:
        nib.save(
            nib.Nifti1Image(reshape_peaks_for_visualization(odfpeaks),
                            img.affine), args.peaks)

    if args.peak_indices:
        nib.save(nib.Nifti1Image(odfpeaks.peak_indices, img.affine),
                 args.peak_indices)

    if args.sh:
        nib.save(
            nib.Nifti1Image(odfpeaks.shm_coeff.astype(np.float32),
                            img.affine), args.sh)

    if args.nufo:
        # NuFO = number of valid peaks per voxel; an index of -1 marks an
        # unused peak slot.
        peaks_count = (odfpeaks.peak_indices > -1).sum(3)
        nib.save(nib.Nifti1Image(peaks_count.astype(np.int32), img.affine),
                 args.nufo)

    if args.a_power:
        odf_a_power = anisotropic_power(odfpeaks.shm_coeff)
        nib.save(nib.Nifti1Image(odf_a_power.astype(np.float32),
                                 img.affine), args.a_power)
def main():
    """Script entry point: compute asymmetry maps and asymmetric peaks
    from a full-basis SH image."""
    parser = _build_arg_parser()
    args = parser.parse_args()

    # Without --not_all, give every output a default file name.
    if not args.not_all:
        args.cos_asym_map = args.cos_asym_map or 'cos_asym_map.nii.gz'
        args.odd_power_map = args.odd_power_map or 'odd_power_map.nii.gz'
        args.peaks = args.peaks or 'asym_peaks.nii.gz'
        args.peak_values = args.peak_values or 'asym_peak_values.nii.gz'
        args.peak_indices = args.peak_indices or 'asym_peak_indices.nii.gz'
        args.nupeaks = args.nupeaks or 'nupeaks.nii.gz'

    arglist = [
        args.cos_asym_map, args.odd_power_map, args.peaks, args.peak_values,
        args.peak_indices, args.nupeaks
    ]
    if args.not_all and not any(arglist):
        parser.error('When using --not_all, you need to specify at least '
                     'one file to output.')

    inputs = [args.in_sh]
    if args.mask:
        inputs.append(args.mask)
    assert_inputs_exist(parser, inputs)
    assert_outputs_exist(parser, args, arglist)

    sh_img = nib.load(args.in_sh)
    sh = sh_img.get_fdata()
    sphere = get_sphere(args.sphere)

    # Infer SH order and basis completeness from the coefficient count;
    # the asymmetric measures below require a full (even+odd) basis.
    sh_order, full_basis = get_sh_order_and_fullness(sh.shape[-1])
    if not full_basis:
        parser.error('Invalid SH image. A full SH basis is expected.')

    if args.mask:
        mask = get_data_as_mask(nib.load(args.mask), dtype=bool)
    else:
        # Default mask: every voxel with at least one non-zero
        # coefficient.
        mask = np.sum(np.abs(sh), axis=-1) > 0

    if args.cos_asym_map:
        cos_asym_map = compute_cos_asym_map(sh, sh_order, mask)
        nib.save(nib.Nifti1Image(cos_asym_map, sh_img.affine),
                 args.cos_asym_map)

    if args.odd_power_map:
        odd_power_map = compute_odd_power_map(sh, sh_order, mask)
        nib.save(nib.Nifti1Image(odd_power_map, sh_img.affine),
                 args.odd_power_map)

    if args.peaks or args.peak_values or args.peak_indices or args.nupeaks:
        peaks, values, indices =\
            peaks_from_sh(sh, sphere, mask=mask,
                          relative_peak_threshold=args.r_threshold,
                          absolute_threshold=args.a_threshold,
                          min_separation_angle=25,
                          normalize_peaks=False,
                          # because v and -v are unique, we want twice
                          # the usual default value (5) of npeaks
                          npeaks=10,
                          sh_basis_type=args.sh_basis,
                          nbr_processes=args.nbr_processes,
                          full_basis=True,
                          is_symmetric=False)

        if args.peaks:
            nib.save(
                nib.Nifti1Image(reshape_peaks_for_visualization(peaks),
                                sh_img.affine), args.peaks)

        if args.peak_values:
            nib.save(nib.Nifti1Image(values, sh_img.affine),
                     args.peak_values)

        if args.peak_indices:
            nib.save(nib.Nifti1Image(indices.astype(np.uint8),
                                     sh_img.affine), args.peak_indices)

        if args.nupeaks:
            # Count the non-zero peak values per voxel.
            nupeaks = np.count_nonzero(values, axis=-1).astype(np.uint8)
            nib.save(nib.Nifti1Image(nupeaks, sh_img.affine), args.nupeaks)