def _build_arg_parser():
    """Build the argparse parser for the label-dilation script."""
    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_file',
                   help='Path of the volume (nii or nii.gz).')
    p.add_argument('out_file',
                   help='Output filename of the dilated labels.')
    # Fixed help typo: "to dilated" -> "to dilate".
    p.add_argument('--distance', type=float, default=2.0,
                   help='Maximal distance to dilate (in mm) [%(default)s].')
    # Fixed help typo: "it dilate" -> "it dilates".
    p.add_argument('--label_to_dilate', type=int, nargs='+', default=None,
                   help='Label list to dilate, by default it dilates all that\n'
                        ' are not in label_to_fill nor label_not_to_dilate.')
    p.add_argument('--label_to_fill', type=int, nargs='+', default=[0],
                   help='Background id / labels to be filled [%(default)s],\n'
                        ' the first one is given as output background value.')
    p.add_argument('--label_not_to_dilate', type=int, nargs='+', default=[],
                   help='Label list not to dilate.')
    p.add_argument('--mask',
                   help='Only dilate values inside the mask.')
    add_processes_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for Bingham-based metric computation."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter,
                                epilog=EPILOG)
    p.add_argument('in_bingham', help='Input Bingham image.')
    # Output file flags; empty string means "use the bracketed default name".
    p.add_argument('--out_fd', default='',
                   help='Path to output fiber density. [fd.nii.gz]')
    p.add_argument('--out_fs', default='',
                   help='Path to output fiber spread. [fs.nii.gz]')
    p.add_argument('--out_ff', default='',
                   help='Path to fiber fraction file. [ff.nii.gz]')
    p.add_argument('--not_all', action='store_true',
                   help='Do not compute all metrics.')
    p.add_argument('--mask',
                   help='Optional mask image. Only voxels inside '
                        'the mask are computed.')
    p.add_argument('--nbr_integration_steps', type=int, default=50,
                   help='Number of integration steps along the theta axis for'
                        ' fiber density estimation. [%(default)s]')
    add_overwrite_arg(p)
    add_verbose_arg(p)
    add_processes_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for the image-denoising script."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_image',
                   help='Path of the image file to denoise.')
    p.add_argument('out_image',
                   help='Path to save the denoised image file.')
    p.add_argument('number_coils', type=int,
                   help='Number of receiver coils of the scanner.\n'
                        'Use number_coils=1 in the case of a SENSE '
                        '(GE, Philips) reconstruction and \n'
                        'number_coils >= 1 for GRAPPA reconstruction '
                        '(Siemens). number_coils=4 works well for the 1.5T\n'
                        ' in Sherbrooke. Use number_coils=0 if the noise is '
                        'considered Gaussian distributed.')
    p.add_argument('--mask', metavar='',
                   help='Path to a binary mask. Only the data inside the mask'
                        ' will be used for computations')
    p.add_argument('--sigma', metavar='float', type=float,
                   help='The standard deviation of the noise to use instead '
                        'of computing it automatically.')
    p.add_argument('--log', dest="logfile",
                   help='If supplied, name of the text file to store '
                        'the logs.')
    add_processes_arg(p)
    add_verbose_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for hdf5-based connectivity matrices."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_hdf5',
                   help='Input filename for the hdf5 container (.h5).\n'
                        'Obtained from scil_decompose_connectivity.py.')
    p.add_argument('in_labels',
                   help='Labels file name (nifti).\n'
                        'This generates a NxN connectivity matrix.')
    # Each option below selects one weighted-matrix output (.npy).
    p.add_argument('--volume', metavar='OUT_FILE',
                   help='Output file for the volume weighted matrix (.npy).')
    p.add_argument('--streamline_count', metavar='OUT_FILE',
                   help='Output file for the streamline count weighted matrix '
                        '(.npy).')
    p.add_argument('--length', metavar='OUT_FILE',
                   help='Output file for the length weighted matrix (.npy).')
    p.add_argument('--similarity', nargs=2,
                   metavar=('IN_FOLDER', 'OUT_FILE'),
                   help='Input folder containing the averaged bundle density\n'
                        'maps (.nii.gz) and output file for the similarity '
                        'weighted matrix (.npy).')
    p.add_argument('--maps', nargs=2, action='append',
                   metavar=('IN_FOLDER', 'OUT_FILE'),
                   help='Input folder containing pre-computed maps (.nii.gz)\n'
                        'and output file for the weighted matrix (.npy).')
    p.add_argument('--metrics', nargs=2, action='append',
                   metavar=('IN_FILE', 'OUT_FILE'),
                   help='Input (.nii.gz). and output file (.npy) for a metric '
                        'weighted matrix.')
    p.add_argument('--density_weighting', action="store_true",
                   help='Use density-weighting for the metric weighted matrix.')
    p.add_argument('--no_self_connection', action="store_true",
                   help='Eliminate the diagonal from the matrices.')
    p.add_argument('--include_dps', metavar='OUT_DIR',
                   help='Save matrices from data_per_streamline in the output '
                        'directory.\nWill always overwrite files.')
    p.add_argument('--force_labels_list',
                   help='Path to a labels list (.txt) in case of missing '
                        'labels in the atlas.')
    add_processes_arg(p)
    add_verbose_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for pairwise bundle comparison."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='Path of the input bundles.')
    p.add_argument('out_json',
                   help='Path of the output json file.')
    p.add_argument('--streamline_dice', action='store_true',
                   help='Streamlines-wise Dice coefficient will be computed \n'
                        'Tractograms must be identical [%(default)s].')
    p.add_argument('--disable_streamline_distance', action='store_true',
                   help='Will not compute the streamlines distance \n'
                        '[%(default)s].')
    p.add_argument('--single_compare',
                   help='Compare inputs to this single file.')
    p.add_argument('--keep_tmp', action='store_true',
                   help='Will not delete the tmp folder at the end.')
    add_processes_arg(p)
    add_reference_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for streamline deduplication."""
    p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                description=__doc__)
    p.add_argument('in_bundle',
                   help='Path of the input bundle.')
    p.add_argument('min_distance', type=float,
                   help='Distance threshold for 2 streamlines to be '
                        'considered similar (mm).')
    p.add_argument('out_bundle',
                   help='Path of the output tractography file')
    p.add_argument('--clustering_thr', type=float, default=6,
                   help='Clustering threshold for QB/QBx (mm), during '
                        'the first approximation [%(default)s].')
    p.add_argument('--min_cluster_size', type=int, default=5,
                   help='Minimum cluster size for the first iteration '
                        '[%(default)s].')
    p.add_argument('--convergence', type=int, default=100,
                   help='Streamlines count difference threshold to stop '
                        're-running the algorithm [%(default)s].')
    add_reference_arg(p)
    add_processes_arg(p)
    add_overwrite_arg(p)
    add_verbose_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for single-shell CSD fODF fitting."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_dwi',
                   help='Path of the input diffusion volume.')
    p.add_argument('in_bval',
                   help='Path of the bvals file, in FSL format.')
    p.add_argument('in_bvec',
                   help='Path of the bvecs file, in FSL format.')
    p.add_argument('frf_file',
                   help='Path of the FRF file')
    p.add_argument('out_fODF',
                   help='Output path for the fiber ODF coefficients.')
    p.add_argument('--sh_order', metavar='int', default=8, type=int,
                   help='SH order used for the CSD. (Default: 8)')
    p.add_argument('--mask', metavar='',
                   help='Path to a binary mask. Only the data inside the mask '
                        'will be used for computations and reconstruction.')
    add_force_b0_arg(p)
    add_sh_basis_args(p)
    add_processes_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for Bingham function fitting from SH."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter,
                                epilog=EPILOG)
    p.add_argument('in_sh', help='Input SH image.')
    p.add_argument('out_bingham', help='Output Bingham functions image.')
    p.add_argument('--max_lobes', type=int, default=5,
                   help='Maximum number of lobes per voxel'
                        ' to extract. [%(default)s]')
    p.add_argument('--at', type=float, default=0.0,
                   help='Absolute threshold for peaks'
                        ' extraction. [%(default)s]')
    p.add_argument('--rt', type=float, default=0.1,
                   help='Relative threshold for peaks'
                        ' extraction. [%(default)s]')
    p.add_argument('--min_sep_angle', type=float, default=25.,
                   help='Minimum separation angle between'
                        ' two peaks. [%(default)s]')
    p.add_argument('--max_fit_angle', type=float, default=15.,
                   help='Maximum distance in degrees around a peak direction'
                        ' for fitting the Bingham function. [%(default)s]')
    p.add_argument('--mask',
                   help='Optional mask file. Only SH inside'
                        ' the mask are fitted.')
    add_overwrite_arg(p)
    add_verbose_arg(p)
    add_processes_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for bundle comparison with ratio support."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='Path of the input bundles.')
    p.add_argument('out_json',
                   help='Path of the output json file.')
    p.add_argument('--streamline_dice', action='store_true',
                   help='Compute streamline-wise dice coefficient.\n'
                        'Tractograms must be identical [%(default)s].')
    # NOTE(review): option name spells "adjency" (sic) — kept as-is since
    # renaming would break existing callers/scripts.
    p.add_argument('--bundle_adjency_no_overlap', action='store_true',
                   help='If set, do not count zeros in the average BA.')
    p.add_argument('--disable_streamline_distance', action='store_true',
                   help='Will not compute the streamlines distance \n'
                        '[%(default)s].')
    p.add_argument('--single_compare',
                   help='Compare inputs to this single file.')
    p.add_argument('--keep_tmp', action='store_true',
                   help='Will not delete the tmp folder at the end.')
    p.add_argument('--ratio', action='store_true',
                   help='Compute overlap and overreach as a ratio over the\n'
                        'reference tractogram in a Tractometer-style way.\n'
                        'Can only be used if also using the `single_compare` '
                        'option.')
    add_processes_arg(p)
    add_reference_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for asymmetric ODF peak extraction."""
    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_sh', help='Input SH image.')
    p.add_argument('--mask', default='', help='Optional mask.')
    # outputs
    p.add_argument('--cos_asym_map', default='',
                   help='Output asymmetry map using cos similarity.')
    p.add_argument('--odd_power_map', default='',
                   help='Output odd power map.')
    p.add_argument('--peaks', default='',
                   help='Output filename for the extracted peaks.')
    p.add_argument('--peak_values', default='',
                   help='Output filename for the extracted peaks values.')
    p.add_argument('--peak_indices', default='',
                   help='Output filename for the generated peaks indices on '
                        'the sphere.')
    p.add_argument('--nupeaks', default='',
                   help='Output filename for the nupeaks file.')
    p.add_argument('--not_all', action='store_true',
                   help='If set, only saves the files specified using the '
                        'file flags [%(default)s].')
    # Fixed: defaults were the strings '0.0'/'0.1'; use float literals to
    # match type=float (argparse behavior and %(default)s display unchanged).
    p.add_argument('--at', dest='a_threshold', type=float, default=0.0,
                   help='Absolute threshold on fODF amplitude. This '
                        'value should be set to\napproximately 1.5 to 2 times '
                        'the maximum fODF amplitude in isotropic voxels\n'
                        '(ie. ventricles).\n'
                        'Use compute_fodf_max_in_ventricles.py to find the '
                        'maximal value.\n'
                        'See [Dell\'Acqua et al HBM 2013] [%(default)s].')
    p.add_argument('--rt', dest='r_threshold', type=float, default=0.1,
                   help='Relative threshold on fODF amplitude in percentage '
                        '[%(default)s].')
    p.add_argument('--sphere', default='symmetric724',
                   choices=sorted(SPHERE_FILES.keys()),
                   help='Sphere to use for peak directions estimation '
                        '[%(default)s].')
    add_processes_arg(p)
    add_sh_basis_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for multi-shell multi-tissue CSD."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    p.add_argument('in_dwi',
                   help='Path of the input diffusion volume.')
    p.add_argument('in_bval',
                   help='Path of the bval file, in FSL format.')
    p.add_argument('in_bvec',
                   help='Path of the bvec file, in FSL format.')
    p.add_argument('in_wm_frf',
                   help='Text file of WM response function.')
    p.add_argument('in_gm_frf',
                   help='Text file of GM response function.')
    p.add_argument('in_csf_frf',
                   help='Text file of CSF response function.')
    p.add_argument('--sh_order', metavar='int', default=8, type=int,
                   help='SH order used for the CSD. (Default: 8)')
    p.add_argument('--mask', metavar='',
                   help='Path to a binary mask. Only the data inside the '
                        'mask will be used for computations and reconstruction.')
    add_force_b0_arg(p)
    add_sh_basis_args(p)
    add_processes_arg(p)
    add_overwrite_arg(p)
    p.add_argument('--not_all', action='store_true',
                   help='If set, only saves the files specified using the '
                        'file flags. (Default: False)')
    # Per-tissue output file flags.
    g = p.add_argument_group(title='File flags')
    g.add_argument('--wm_out_fODF', metavar='file', default='',
                   help='Output filename for the WM fODF coefficients.')
    g.add_argument('--gm_out_fODF', metavar='file', default='',
                   help='Output filename for the GM fODF coefficients.')
    g.add_argument('--csf_out_fODF', metavar='file', default='',
                   help='Output filename for the CSF fODF coefficients.')
    g.add_argument('--vf', metavar='file', default='',
                   help='Output filename for the volume fractions map.')
    g.add_argument('--vf_rgb', metavar='file', default='',
                   help='Output filename for the volume fractions map in rgb.')
    return p
def _build_arg_parser():
    """Build the argparse parser for NODDI fitting (AMICO/COMMIT kernels)."""
    p = argparse.ArgumentParser(
        description=__doc__, epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    p.add_argument('in_dwi',
                   help='DWI file acquired with a NODDI compatible protocol '
                        '(single-shell data not suited).')
    p.add_argument('in_bval',
                   help='b-values filename, in FSL format (.bval).')
    p.add_argument('in_bvec',
                   help='b-vectors filename, in FSL format (.bvec).')
    p.add_argument('--mask',
                   help='Brain mask filename.')
    p.add_argument('--out_dir', default="results",
                   help='Output directory for the NODDI results. '
                        '[%(default)s]')
    p.add_argument('--b_thr', type=int, default=40,
                   help='Limit value to consider that a b-value is on an '
                        'existing shell. Above this limit, the b-value is '
                        'placed on a new shell. This includes b0s values.')
    g1 = p.add_argument_group(title='Model options')
    g1.add_argument('--para_diff', type=float, default=1.7e-3,
                    help='Axial diffusivity (AD) in the CC. [%(default)s]')
    g1.add_argument('--iso_diff', type=float, default=3e-3,
                    help='Mean diffusivity (MD) in ventricles. [%(default)s]')
    g1.add_argument('--lambda1', type=float, default=5e-1,
                    help='First regularization parameter. [%(default)s]')
    g1.add_argument('--lambda2', type=float, default=1e-3,
                    help='Second regularization parameter. [%(default)s]')
    # Kernels can either be saved for reuse or loaded, not both.
    g2 = p.add_argument_group(title='Kernels options')
    kern = g2.add_mutually_exclusive_group()
    kern.add_argument('--save_kernels', metavar='DIRECTORY',
                      help='Output directory for the COMMIT kernels.')
    kern.add_argument('--load_kernels', metavar='DIRECTORY',
                      help='Input directory where the COMMIT kernels are '
                           'located.')
    add_processes_arg(p)
    add_overwrite_arg(p)
    add_verbose_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for Recobundles multi-model voting."""
    p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                description=__doc__, epilog=EPILOG)
    p.add_argument('in_tractogram',
                   help='Input tractogram filename (.trk or .tck).')
    p.add_argument('in_config_file',
                   help='Path of the config file (.json)')
    p.add_argument('in_models_directories', nargs='+',
                   help='Path for the directories containing model.')
    p.add_argument('in_transfo',
                   help='Path for the transformation to model space '
                        '(.txt, .npy or .mat).')
    p.add_argument('--out_dir', default='voting_results',
                   help='Path for the output directory [%(default)s].')
    p.add_argument('--log_level', default='INFO',
                   choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
                   help='Log level of the logging class.')
    p.add_argument('--multi_parameters', type=int, default=1,
                   help='Pick parameters from the potential combinations\n'
                        'Will multiply the number of times Recobundles is ran.\n'
                        'See the documentation [%(default)s].')
    p.add_argument('--minimal_vote_ratio', type=float, default=0.5,
                   help='Streamlines will only be considered for saving if\n'
                        'recognized often enough [%(default)s].')
    p.add_argument('--tractogram_clustering_thr', type=int,
                   default=[12], nargs='+',
                   help='Input tractogram clustering thresholds %(default)smm.')
    p.add_argument('--seeds', type=int, default=[0], nargs='+',
                   help='Random number generator seed %(default)s\n'
                        'Will multiply the number of times Recobundles is ran.')
    p.add_argument('--inverse', action='store_true',
                   help='Use the inverse transformation.')
    add_processes_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for CSA/qball ODF metric extraction."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_dwi',
                   help='Path of the input diffusion volume.')
    p.add_argument('in_bval',
                   help='Path of the bvals file, in FSL format.')
    p.add_argument('in_bvec',
                   help='Path of the bvecs file, in FSL format.')
    add_overwrite_arg(p)
    p.add_argument('--sh_order', default=4, type=int,
                   help='Spherical harmonics order. Must be a positive even '
                        'number [%(default)s].')
    p.add_argument('--mask',
                   help='Path to a binary mask. Only data inside the mask will'
                        ' be used for computations and reconstruction '
                        '[%(default)s].')
    p.add_argument('--use_qball', action='store_true',
                   help='If set, qball will be used as the odf reconstruction'
                        ' model instead of CSA.')
    p.add_argument('--not_all', action='store_true',
                   help='If set, will only save the files specified using the '
                        'following flags.')
    g = p.add_argument_group(title='File flags')
    g.add_argument('--gfa', default='',
                   help='Output filename for the generalized fractional '
                        'anisotropy [gfa.nii.gz].')
    g.add_argument('--peaks', default='',
                   help='Output filename for the extracted peaks '
                        '[peaks.nii.gz].')
    g.add_argument('--peak_indices', default='',
                   help='Output filename for the generated peaks '
                        'indices on the sphere [peaks_indices.nii.gz].')
    g.add_argument('--sh', default='',
                   help='Output filename for the spherical harmonics '
                        'coefficients [sh.nii.gz].')
    g.add_argument('--nufo', default='',
                   help='Output filename for the NUFO map [nufo.nii.gz].')
    # Fixed: missing space in help rendered as "map[anisotropic_power.nii.gz]".
    g.add_argument('--a_power', default='',
                   help='Output filename for the anisotropic power map '
                        '[anisotropic_power.nii.gz].')
    add_force_b0_arg(p)
    add_sh_basis_args(p)
    add_processes_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for Free Water model fitting."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=EPILOG)
    p.add_argument('in_dwi',
                   help='DWI file.')
    p.add_argument('in_bval',
                   help='b-values filename, in FSL format (.bval).')
    p.add_argument('in_bvec',
                   help='b-vectors filename, in FSL format (.bvec).')
    p.add_argument('--mask',
                   help='Brain mask filename.')
    p.add_argument('--out_dir', default="results",
                   help='Output directory for the Free Water results. '
                        '[%(default)s]')
    p.add_argument('--b_thr', type=int, default=40,
                   help='Limit value to consider that a b-value is on an '
                        'existing shell. Above this limit, the b-value is '
                        'placed on a new shell. This includes b0s values.')
    g1 = p.add_argument_group(title='Model options')
    g1.add_argument('--para_diff', type=float, default=1.5e-3,
                    help='Axial diffusivity (AD) in the CC. [%(default)s]')
    g1.add_argument('--iso_diff', type=float, default=3e-3,
                    help='Mean diffusivity (MD) in ventricles. [%(default)s]')
    g1.add_argument('--perp_diff_min', type=float, default=0.1e-3,
                    help='Radial diffusivity (RD) minimum. [%(default)s]')
    g1.add_argument('--perp_diff_max', type=float, default=0.7e-3,
                    help='Radial diffusivity (RD) maximum. [%(default)s]')
    g1.add_argument('--lambda1', type=float, default=0.0,
                    help='First regularization parameter. [%(default)s]')
    # Reconstructed: this help string was broken mid-literal in the source.
    g1.add_argument('--lambda2', type=float, default=1e-3,
                    help='Second regularization parameter. [%(default)s]')
    # Kernels can either be saved for reuse or loaded, not both.
    g2 = p.add_argument_group(title='Kernels options')
    kern = g2.add_mutually_exclusive_group()
    kern.add_argument('--save_kernels', metavar='DIRECTORY',
                      help='Output directory for the COMMIT kernels.')
    kern.add_argument('--load_kernels', metavar='DIRECTORY',
                      help='Input directory where the COMMIT kernels are '
                           'located.')
    g2.add_argument('--compute_only', action='store_true',
                    help='Compute kernels only, --save_kernels must be used.')
    p.add_argument('--mouse', action='store_true',
                   help='If set, use mouse fitting profile.')
    add_processes_arg(p)
    add_overwrite_arg(p)
    add_verbose_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for combined Bingham fit and metrics."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter,
                                epilog=EPILOG)
    p.add_argument('in_sh', help='Input SH image.')
    p.add_argument('--out_bingham', default='bingham.nii.gz',
                   help='Output Bingham functions image. [%(default)s]')
    p.add_argument('--out_fd', default='fd.nii.gz',
                   help='Path to output fiber density. [%(default)s]')
    p.add_argument('--out_fs', default='fs.nii.gz',
                   help='Path to output fiber spread. [%(default)s]')
    p.add_argument('--out_ff', default='ff.nii.gz',
                   help='Path to fiber fraction file. [%(default)s]')
    p.add_argument('--max_lobes', type=int, default=5,
                   help='Maximum number of lobes per voxel'
                        ' to extract. [%(default)s]')
    p.add_argument('--at', type=float, default=0.0,
                   help='Absolute threshold for peaks'
                        ' extraction. [%(default)s]')
    p.add_argument('--rt', type=float, default=0.1,
                   help='Relative threshold for peaks'
                        ' extraction. [%(default)s]')
    p.add_argument('--min_sep_angle', type=float, default=25.,
                   help='Minimum separation angle between'
                        ' two peaks. [%(default)s]')
    p.add_argument('--max_fit_angle', type=float, default=15.,
                   help='Maximum distance in degrees around a peak direction'
                        ' for fitting the Bingham function. [%(default)s]')
    p.add_argument('--nbr_integration_steps', type=int, default=50,
                   help='Number of integration steps along the theta axis for'
                        ' fiber density estimation. [%(default)s]')
    add_overwrite_arg(p)
    add_verbose_arg(p)
    add_processes_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for bundle measure extraction."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='Path of the input bundles.')
    p.add_argument('out_json',
                   help='Path of the output file.')
    add_reference_arg(p)
    add_processes_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for connectivity from a bundles folder."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles_dir',
                   help='Folder containing all the bundle files (.trk).')
    p.add_argument('labels_list',
                   help='Text file containing the list of labels from the '
                        'atlas.')
    # Each option below selects one weighted-matrix output (.npy).
    p.add_argument('--volume', metavar='OUT_FILE',
                   help='Output file for the volume weighted matrix (.npy).')
    p.add_argument('--streamline_count', metavar='OUT_FILE',
                   help='Output file for the streamline count weighted matrix '
                        '(.npy).')
    p.add_argument('--length', metavar='OUT_FILE',
                   help='Output file for the length weighted matrix (.npy).')
    p.add_argument('--similarity', nargs=2,
                   metavar=('IN_FOLDER', 'OUT_FILE'),
                   help='Input folder containing the averaged bundle density\n'
                        'maps (.nii.gz) and output file for the similarity '
                        'weighted matrix (.npy).')
    p.add_argument('--maps', nargs=2, action='append',
                   metavar=('IN_FOLDER', 'OUT_FILE'),
                   help='Input folder containing pre-computed maps (.nii.gz)\n'
                        'and output file for the weighted matrix (.npy).')
    p.add_argument('--metrics', nargs=2, action='append',
                   metavar=('IN_FILE', 'OUT_FILE'),
                   help='Input (.nii.gz). and output file (.npy) for a metric '
                        'weighted matrix.')
    p.add_argument('--density_weighting', action="store_true",
                   help='Use density-weighting for the metric weighted matrix.')
    p.add_argument('--no_self_connection', action="store_true",
                   help='Eliminate the diagonal from the matrices.')
    add_processes_arg(p)
    add_reference_arg(p)
    add_verbose_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for SH basis conversion."""
    p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                description=__doc__)
    p.add_argument('in_sh',
                   help='Input SH filename. (nii or nii.gz)')
    p.add_argument('out_sh',
                   help='Output SH filename. (nii or nii.gz)')
    # Basis choice is required for a conversion script.
    add_sh_basis_args(p, mandatory=True)
    add_processes_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for fODF-derived metric extraction."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_fODF',
                   help='Path of the fODF volume in spherical harmonics (SH).')
    p.add_argument('--sphere', metavar='string', default='repulsion724',
                   choices=['repulsion100', 'repulsion724'],
                   help='Discrete sphere to use in the processing '
                        '[%(default)s].')
    # Fixed help typo: "will beused" -> "will be used".
    p.add_argument('--mask', metavar='',
                   help='Path to a binary mask. Only the data inside the mask\n'
                        'will be used for computations and reconstruction '
                        '[%(default)s].')
    # Fixed: defaults were the strings '0.0'/'0.1'; use float literals to
    # match type=float (argparse behavior and %(default)s display unchanged).
    p.add_argument('--at', dest='a_threshold', type=float, default=0.0,
                   help='Absolute threshold on fODF amplitude. This '
                        'value should be set to\napproximately 1.5 to 2 times '
                        'the maximum fODF amplitude in isotropic voxels\n'
                        '(ie. ventricles).\nUse compute_fodf_max_in_ventricles.py '
                        'to find the maximal value.\n'
                        'See [Dell\'Acqua et al HBM 2013] [%(default)s].')
    p.add_argument('--rt', dest='r_threshold', type=float, default=0.1,
                   help='Relative threshold on fODF amplitude in percentage '
                        '[%(default)s].')
    add_sh_basis_args(p)
    add_overwrite_arg(p)
    add_processes_arg(p)
    p.add_argument('--not_all', action='store_true',
                   help='If set, only saves the files specified using the '
                        'file flags [%(default)s].')
    g = p.add_argument_group(title='File flags')
    g.add_argument('--afd_max', metavar='file', default='',
                   help='Output filename for the AFD_max map.')
    g.add_argument('--afd_total', metavar='file', default='',
                   help='Output filename for the AFD_total map (SH coeff = 0).')
    g.add_argument('--afd_sum', metavar='file', default='',
                   help='Output filename for the sum of all peak contributions\n'
                        '(sum of fODF lobes on the sphere).')
    g.add_argument('--nufo', metavar='file', default='',
                   help='Output filename for the NuFO map.')
    g.add_argument('--rgb', metavar='file', default='',
                   help='Output filename for the RGB map.')
    g.add_argument('--peaks', metavar='file', default='',
                   help='Output filename for the extracted peaks.')
    g.add_argument('--peak_values', metavar='file', default='',
                   help='Output filename for the extracted peaks values.')
    g.add_argument('--peak_indices', metavar='file', default='',
                   help='Output filename for the generated peaks indices on '
                        'the sphere.')
    return p
def _build_arg_parser():
    """Build the argparse parser for SH to SF projection."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_sh',
                   help='Path of the SH volume.')
    p.add_argument('out_sf',
                   help='Name of the output SF file to save (bvals/bvecs will '
                        'be automatically named when necessary).')
    # Sphere vs bvecs choice for SF
    directions = p.add_mutually_exclusive_group(required=True)
    directions.add_argument('--sphere',
                            choices=sorted(SPHERE_FILES.keys()),
                            help='Sphere used for the SH to SF projection. ')
    directions.add_argument('--in_bvec',
                            help="Directions used for the SH to SF projection.")
    p.add_argument('--dtype', default="float32",
                   choices=["float32", "float64"],
                   help="Datatype to use for SF computation and output array."
                        "'[%(default)s]'")
    # Optional args for a DWI-like volume
    p.add_argument('--in_bval',
                   help='b-value file, in FSL format, '
                        'used to assign a b-value to the '
                        'output SF and generate a `.bval` file.')
    p.add_argument('--in_b0',
                   help='b0 volume to concatenate to the '
                        'final SF volume.')
    p.add_argument('--out_bval',
                   help="Optional output bval file.")
    p.add_argument('--out_bvec',
                   help="Optional output bvec file.")
    p.add_argument('--b0_scaling', action="store_true",
                   help="Scale resulting SF by the b0 image.")
    add_sh_basis_args(p)
    p.add_argument('--full_basis', action="store_true",
                   help="If true, use a full basis for the input SH "
                        "coefficients.")
    add_processes_arg(p)
    add_overwrite_arg(p)
    add_force_b0_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for hdf5 population averaging."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_hdf5', nargs='+',
                   help='List of HDF5 filenames (.h5) from '
                        'scil_decompose_connectivity.py.')
    p.add_argument('out_dir',
                   help='Path of the output directory.')
    p.add_argument('--binary', action='store_true',
                   help='Binarize density maps before the population average.')
    add_processes_arg(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for grouped bundle statistics."""
    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='Path of the input bundles.')
    p.add_argument('out_json',
                   help='Path of the output file.')
    p.add_argument('--group_statistics', action='store_true',
                   help='Show average measures \n'
                        '[%(default)s].')
    add_reference_arg(p)
    add_processes_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for AFD weighting of hdf5 connections."""
    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_hdf5',
                   help='HDF5 filename (.h5) containing decomposed '
                        'connections.')
    p.add_argument('in_fodf',
                   help='Path of the fODF volume in spherical harmonics (SH).')
    p.add_argument('out_hdf5',
                   help='Path of the output HDF5 filenames (.h5).')
    p.add_argument('--length_weighting', action='store_true',
                   help='If set, will weigh the AFD values according to '
                        'segment lengths. [%(default)s]')
    add_processes_arg(p)
    add_sh_basis_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for bilateral SH filtering."""
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_sh',
                   help='Path to the input file.')
    p.add_argument('out_sh',
                   help='File name for averaged signal.')
    add_sh_basis_args(p)
    p.add_argument('--out_sym', default=None,
                   help='Name of optional symmetric output. [%(default)s]')
    p.add_argument('--sphere', default='repulsion724',
                   choices=sorted(SPHERE_FILES.keys()),
                   help='Sphere used for the SH to SF projection. '
                        '[%(default)s]')
    # Filter bandwidths (angular / spatial / range), in that order.
    p.add_argument('--sigma_angular', default=1.0, type=float,
                   help='Standard deviation for angular distance.'
                        ' [%(default)s]')
    p.add_argument('--sigma_spatial', default=1.0, type=float,
                   help='Standard deviation for spatial distance.'
                        ' [%(default)s]')
    p.add_argument('--sigma_range', default=1.0, type=float,
                   help='Standard deviation for range filter.'
                        ' [%(default)s]')
    p.add_argument('--use_gpu', action='store_true',
                   help='Use GPU for computation.')
    add_verbose_arg(p)
    add_overwrite_arg(p)
    add_processes_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for gold-standard tractogram scoring."""
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='Path of the input bundles.')
    p.add_argument('out_json',
                   help='Path of the output json.')
    p.add_argument('--streamlines_measures', nargs=2,
                   metavar=('GOLD_STANDARD_STREAMLINES', 'TRACTOGRAM'),
                   help='The gold standard bundle and the original tractogram.')
    p.add_argument('--voxels_measures', nargs=2,
                   metavar=('GOLD_STANDARD_MASK', 'TRACKING MASK'),
                   help='The gold standard mask and the original tracking mask.')
    add_processes_arg(p)
    add_reference_arg(p)
    add_verbose_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argparse parser for the local tracking script."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter, description=__doc__)

    add_mandatory_options_tracking(parser)

    track = add_tracking_options(parser)
    track.add_argument('--algo', default='prob', choices=['det', 'prob'],
                       help='Algorithm to use [%(default)s]')
    add_sphere_arg(track, symmetric_only=False)
    track.add_argument('--sfthres_init', metavar='sf_th', type=float,
                       default=0.5, dest='sf_threshold_init',
                       help='Spherical function relative threshold value for '
                            'the \ninitial direction. [%(default)s]')
    track.add_argument('--rk_order', metavar='K', type=int, default=2,
                       choices=[1, 2, 4],
                       help='The order of the Runge-Kutta integration used '
                            'for the \nstep function [%(default)s]. As a rule '
                            'of thumb, doubling the rk_order \nwill double '
                            'the computation time in the worst case.')
    track.add_argument('--max_invalid_length', metavar='MAX', type=float,
                       default=1,
                       help='Maximum length without valid direction, in mm. '
                            '[%(default)s]')
    track.add_argument('--forward_only', action='store_true',
                       help='If set, tracks in one direction only (forward) '
                            'given the \ninitial seed. The direction is '
                            'randomly drawn from the ODF.')
    track.add_argument('--sh_interp', default='trilinear',
                       choices=['nearest', 'trilinear'],
                       help='Spherical harmonic interpolation: '
                            'nearest-neighbor \nor trilinear. [%(default)s]')
    track.add_argument('--mask_interp', default='trilinear',
                       choices=['nearest', 'trilinear'],
                       help='Mask interpolation: nearest-neighbor or '
                            'trilinear. [%(default)s]')

    add_seeding_options(parser)

    seeding = parser.add_argument_group('Random seeding options')
    seeding.add_argument('--rng_seed', type=int, default=0,
                         help='Initial value for the random number '
                              'generator. [%(default)s]')
    seeding.add_argument('--skip', type=int, default=0,
                         help='Skip the first N random number. \nUseful if '
                              'you want to create new streamlines to add to '
                              '\na previously created tractogram with a '
                              'fixed --rng_seed.\nEx: If tractogram_1 was '
                              'created with -nt 1,000,000, \nyou can create '
                              'tractogram_2 with \n--skip 1,000,000.')

    memory = parser.add_argument_group('Memory options')
    add_processes_arg(memory)

    add_out_options(parser)
    add_verbose_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argparse parser for the COMMIT/COMMIT2 evaluation script."""
    p = argparse.ArgumentParser(description=__doc__, epilog=EPILOG,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_tractogram',
                   help='Input tractogram (.trk or .tck or .h5).')
    p.add_argument('in_dwi',
                   help='Diffusion-weighted images used by COMMIT (.nii.gz).')
    p.add_argument('in_bval',
                   help='b-values in the FSL format (.bval).')
    p.add_argument('in_bvec',
                   help='b-vectors in the FSL format (.bvec).')
    p.add_argument('out_dir',
                   help='Output directory for the COMMIT maps.')

    p.add_argument('--b_thr', type=int, default=40,
                   help='Limit value to consider that a b-value is on an '
                        'existing shell.\nAbove this limit, the b-value is '
                        'placed on a new shell. This includes b0s values.')
    p.add_argument('--nbr_dir', type=int, default=500,
                   help='Number of directions, on the half of the sphere,\n'
                        'representing the possible orientations of the '
                        'response functions [%(default)s].')
    p.add_argument('--nbr_iter', type=int, default=1000,
                   help='Maximum number of iterations [%(default)s].')
    p.add_argument('--in_peaks',
                   help='Peaks file representing principal direction(s) '
                        'locally,\n typically coming from fODFs. This file is '
                        'mandatory for the default\n stick-zeppelin-ball '
                        'model.')
    p.add_argument('--in_tracking_mask',
                   # Typo fixed in help text: 'tratography' -> 'tractography'.
                   help='Binary mask where tractography was allowed.\n'
                        'If not set, uses a binary mask computed from '
                        'the streamlines.')

    g0 = p.add_argument_group(title='COMMIT2 options')
    g0.add_argument('--commit2', action='store_true',
                    help='Run commit2, requires .h5 as input and will force\n'
                         'ball&stick model.')
    g0.add_argument(
        '--lambda_commit_2', type=float, default=1e-3,
        help='Specify the clustering prior strength [%(default)s].')

    g1 = p.add_argument_group(title='Model options')
    g1.add_argument('--ball_stick', action='store_true',
                    help='Use the ball&Stick model, disable the zeppelin '
                         'compartment.\nOnly model suitable for single-shell '
                         'data.')
    g1.add_argument('--para_diff', type=float,
                    help='Parallel diffusivity in mm^2/s.\n'
                         'Default for ball_stick: 1.7E-3\n'
                         'Default for stick_zeppelin_ball: 1.7E-3')
    g1.add_argument('--perp_diff', nargs='+', type=float,
                    help='Perpendicular diffusivity in mm^2/s.\n'
                         'Default for ball_stick: None\n'
                         'Default for stick_zeppelin_ball: [0.51E-3]')
    g1.add_argument('--iso_diff', nargs='+', type=float,
                    # Typo fixed in help text: 'Istropic' -> 'Isotropic'.
                    help='Isotropic diffusivity in mm^2/s.\n'
                         'Default for ball_stick: [2.0E-3]\n'
                         'Default for stick_zeppelin_ball: [1.7E-3, 3.0E-3]')

    g2 = p.add_argument_group(title='Tractogram options')
    g2.add_argument('--keep_whole_tractogram', action='store_true',
                    help='Save a tractogram copy with streamlines weights in '
                         'the data_per_streamline\n[%(default)s].')

    g3 = p.add_argument_group(title='Kernels options')
    kern = g3.add_mutually_exclusive_group()
    kern.add_argument('--save_kernels', metavar='DIRECTORY',
                      help='Output directory for the COMMIT kernels.')
    kern.add_argument('--load_kernels', metavar='DIRECTORY',
                      help='Input directory where the COMMIT kernels are '
                           'located.')
    # NOTE(review): --compute_only concerns kernels but has historically been
    # listed under 'Tractogram options'; kept as-is to preserve --help layout.
    g2.add_argument('--compute_only', action='store_true',
                    help='Compute kernels only, --save_kernels must be used.')

    add_processes_arg(p)
    add_overwrite_arg(p)
    add_verbose_arg(p)
    return p
def add_tracking_options(p):
    """
    Add the tracking-related argument groups to parser ``p``.

    Similar to scilpy.tracking.utils.add_generic_options_tracking but:
      - no algo (det/prob) anymore; propagation depends on the model.
      - no sf_threshold or sh_basis args.

    Returns the ' Tracking options' group so callers can extend it.
    """
    g_track = p.add_argument_group(' Tracking options')
    g_track.add_argument('--algo', choices=['det', 'prob'], default='det',
                         help='Tracking algorithm (det or prob). Must be '
                              'implemented in the chosen model. [det]')
    g_track.add_argument('--step', dest='step_size', type=float, default=0.5,
                         help='Step size in mm. [%(default)s]')
    g_track.add_argument('--min_length', type=float, default=10.,
                         metavar='m',
                         help='Minimum length of a streamline in mm. '
                              '[%(default)s]')
    g_track.add_argument('--max_length', type=float, default=300.,
                         metavar='M',
                         help='Maximum length of a streamline in mm. '
                              '[%(default)s]')
    g_track.add_argument('--rk_order', metavar='K', type=int, default=2,
                         choices=[1, 2, 4],
                         help='The order of the Runge-Kutta integration used '
                              'for the \nstep function [%(default)s]. As a '
                              'rule of thumb, doubling the rk_order \nwill '
                              'double the computation time in the worst '
                              'case.')

    # Extra options compared to scil_compute_local_tracking: here theta acts
    # as a stopping criterion during propagation, not as a restriction on the
    # allowed directions of the tracking field.
    g_track.add_argument('--theta', metavar='t', type=float, default=90,
                         help="The tracking direction at each step being "
                              "defined by the model, \ntheta arg can't "
                              "define allowed directions in the tracking "
                              "field.\nRather, this new equivalent angle, is "
                              "used as \na stopping criterion during "
                              "propagation: tracking \nis stopped when a "
                              "direction is more than an angle t from "
                              "preceding direction")
    g_track.add_argument('--max_invalid_len', metavar='M', type=float,
                         default=1,
                         help='Maximum length without valid direction, in '
                              'mm. [%(default)s]')
    g_track.add_argument('--track_forward_only', action='store_true',
                         help='If set, tracks in one direction only '
                              '(forward) given the initial \nseed. The '
                              'direction is randomly drawn from the ODF.')
    g_track.add_argument('--mask_interp', default='trilinear',
                         choices=['nearest', 'trilinear'],
                         help='Mask interpolation: nearest-neighbor or '
                              'trilinear. [%(default)s]')
    g_track.add_argument('--data_interp', default='trilinear',
                         choices=['nearest', 'trilinear'],
                         help='Input data interpolation: nearest-neighbor '
                              'or trilinear. [%(default)s]')

    # Same seeding options as in scilpy:
    g_seed = p.add_argument_group(' Random seeding options')
    g_seed.add_argument('--rng_seed', type=int,
                        help='Initial value for the random number '
                             'generator. [%(default)s]')
    g_seed.add_argument('--skip', type=int, default=0,
                        help='Skip the first N random number. \nUseful if '
                             'you want to create new streamlines to add to '
                             '\na previously created tractogram with a '
                             'fixed --rng_seed.\nEx: If tractogram_1 was '
                             'created with -nt 1,000,000, \nyou can create '
                             'tractogram_2 with \n--skip 1,000,000.')
    # TODO: should be clarified in scilpy eventually; verify its evolution.
    g_seed.add_argument('--set_mmap_to_none', action='store_true',
                        help="If true, use mmap_mode=None. Else "
                             "mmap_mode='r+'.\nUsed in "
                             "np.load(data_file_info). Available only with "
                             "--processes.\nTO BE CLEANED")

    # Preparing the upcoming GPU option: parallel CPU processing and GPU
    # processing are mutually exclusive.
    g_mem = p.add_argument_group(' Memory options')
    ram = g_mem.add_mutually_exclusive_group()
    add_processes_arg(ram)
    ram.add_argument('--use_gpu', action='store_true',
                     help='If set, use GPU for processing. Cannot be used '
                          '\ntogether with --processes.')

    return g_track