def _build_args_parser():
    """Build the argument parser for length-based streamline filtering."""
    # NOTE(review): named `_build_args_parser` (siblings use `_build_arg_parser`);
    # kept as-is so existing callers still work.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description='Filter streamlines by length.')

    parser.add_argument('in_tractogram',
                        help='Streamlines input file name.')
    parser.add_argument('out_tractogram',
                        help='Streamlines output file name.')
    parser.add_argument('--minL', default=0., type=float,
                        help='Minimum length of streamlines. [%(default)s]')
    parser.add_argument('--maxL', default=np.inf, type=float,
                        help='Maximum length of streamlines. [%(default)s]')
    parser.add_argument('--no_empty', action='store_true',
                        help='Do not write file if there is no streamline.')
    parser.add_argument('--display_counts', action='store_true',
                        help='Print streamline count before and after filtering')

    add_reference_arg(parser)
    add_overwrite_arg(parser)
    add_verbose_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for pairwise bundle comparison."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument('in_bundles', nargs='+',
                        help='Path of the input bundles.')
    parser.add_argument('out_json',
                        help='Path of the output json file.')
    parser.add_argument(
        '--streamline_dice', action='store_true',
        help='Streamlines-wise Dice coefficient will be computed \n'
             'Tractograms must be identical [%(default)s].')
    parser.add_argument(
        '--disable_streamline_distance', action='store_true',
        help='Will not compute the streamlines distance \n'
             '[%(default)s].')
    parser.add_argument('--single_compare',
                        help='Compare inputs to this single file.')
    parser.add_argument('--keep_tmp', action='store_true',
                        help='Will not delete the tmp folder at the end.')

    add_processes_arg(parser)
    add_reference_arg(parser)
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for loop/sharp-turn streamline removal."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('in_tractogram',
                        help='Tractogram input file name.')
    parser.add_argument('out_tractogram',
                        help='Output tractogram without loops.')
    parser.add_argument('--looping_tractogram',
                        help='If set, saves detected looping streamlines.')
    # Implicit literal concatenation replaces the original explicit `+`.
    parser.add_argument('--qb', action='store_true',
                        help='If set, uses QuickBundles to detect\n'
                             'outliers (loops, sharp angle turns).\n'
                             'Should mainly be used with bundles. '
                             '[%(default)s]')
    parser.add_argument('--threshold', default=8., type=float,
                        help='Maximal streamline to bundle distance\n'
                             'for a streamline to be considered as\n'
                             'a tracking error. [%(default)s]')
    parser.add_argument('-a', dest='angle', default=360, type=float,
                        help='Maximum looping (or turning) angle of\n'
                             'a streamline in degrees. [%(default)s]')
    parser.add_argument('--display_counts', action='store_true',
                        help='Print streamline count before and after filtering')

    add_overwrite_arg(parser)
    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for lesion analysis."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument('in_lesion',
                        help='Binary mask of the lesion(s) (.nii.gz).')
    parser.add_argument('out_json',
                        help='Output file for lesion information (.json).')

    # Only one way of describing the bundle may be given at a time.
    bundle_group = parser.add_mutually_exclusive_group()
    bundle_group.add_argument('--bundle',
                              help='Path of the bundle file (.trk).')
    bundle_group.add_argument('--bundle_mask',
                              help='Path of the bundle binary mask (.nii.gz).')
    bundle_group.add_argument('--bundle_labels_map',
                              help='Path of the bundle labels map (.nii.gz).')

    parser.add_argument('--min_lesion_vol', type=float, default=7,
                        help='Minimum lesion volume in mm3 [%(default)s].')
    parser.add_argument('--out_lesion_atlas', metavar='FILE',
                        help='Save the labelized lesion(s) map (.nii.gz).')
    parser.add_argument('--out_lesion_stats', metavar='FILE',
                        help='Save the lesion-wise volume measure (.json).')
    parser.add_argument('--out_streamlines_stats', metavar='FILE',
                        help='Save the lesion-wise streamline count (.json).')

    add_json_args(parser)
    add_overwrite_arg(parser)
    add_reference_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for bundle comparison with ratio support."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument('in_bundles', nargs='+',
                        help='Path of the input bundles.')
    parser.add_argument('out_json',
                        help='Path of the output json file.')
    parser.add_argument('--streamline_dice', action='store_true',
                        help='Compute streamline-wise dice coefficient.\n'
                             'Tractograms must be identical [%(default)s].')
    # NOTE(review): "adjency" is misspelled but the flag name is public API;
    # kept unchanged for backward compatibility.
    parser.add_argument('--bundle_adjency_no_overlap', action='store_true',
                        help='If set, do not count zeros in the average BA.')
    parser.add_argument('--disable_streamline_distance', action='store_true',
                        help='Will not compute the streamlines distance \n'
                             '[%(default)s].')
    parser.add_argument('--single_compare',
                        help='Compare inputs to this single file.')
    parser.add_argument('--keep_tmp', action='store_true',
                        help='Will not delete the tmp folder at the end.')
    parser.add_argument(
        '--ratio', action='store_true',
        help='Compute overlap and overreach as a ratio over the\n'
             'reference tractogram in a Tractometer-style way.\n'
             'Can only be used if also using the `single_compare` '
             'option.')

    add_processes_arg(parser)
    add_reference_arg(parser)
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for per-label bundle metric statistics."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument('in_bundle',
                        help='Fiber bundle file to compute statistics on.')
    parser.add_argument('in_label_map',
                        help='Label map (.npz) of the corresponding fiber bundle.')
    parser.add_argument('in_distance_map',
                        help='Distance map (.npz) of the corresponding bundle/'
                             'centroid streamline.')
    parser.add_argument('in_metrics', nargs='+',
                        help='Nifti file to compute statistics on. Probably some '
                             'tractometry measure(s) such as FA, MD, RD, ...')
    parser.add_argument('--density_weighting', action='store_true',
                        help='If set, weight statistics by the number of '
                             'streamlines passing through each voxel.')
    parser.add_argument('--distance_weighting', action='store_true',
                        help='If set, weight statistics by the inverse of the '
                             'distance between a streamline and the centroid.')
    parser.add_argument('--out_json',
                        help='Path of the output json file. If not given, json '
                             'formatted stats are simply printed.')

    add_overwrite_arg(parser)
    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for bundle metric profiles."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('in_bundle',
                        help='Fiber bundle file to compute the bundle profiles on.')
    parser.add_argument('in_metrics', nargs='+',
                        help='Metric(s) on which to compute the bundle profiles.')

    # Streamline orientation/resampling: centroid and fixed point count are
    # mutually exclusive.
    resample_group = parser.add_mutually_exclusive_group()
    resample_group.add_argument(
        '--in_centroid',
        help='If provided it will be used to make sure all '
             'streamlines go in the same direction. \n'
             'Also, number of points per streamline will be '
             'set according to centroid.')
    resample_group.add_argument(
        '--nb_pts_per_streamline', type=int, default=20,
        help='If centroid not provided, resample each streamline to'
             ' this number of points [%(default)s].')

    add_json_args(parser)
    add_reference_arg(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for weighted bundle metric statistics."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('in_bundle',
                        help='Fiber bundle file to compute statistics on.')
    parser.add_argument('label_map',
                        help='Label map (.npz) of the corresponding '
                             'fiber bundle.')
    parser.add_argument('distance_map',
                        help='Distance map (.npz) of the corresponding '
                             'bundle/centroid streamline.')
    parser.add_argument('metrics', nargs='+',
                        help='Nifti metric(s) to compute statistics on.')
    parser.add_argument('--density_weighting', action='store_true',
                        help='If set, weight statistics by the number of '
                             'streamlines passing through each voxel.')
    parser.add_argument('--distance_weighting', action='store_true',
                        help='If set, weight statistics by the inverse of the '
                             'distance between a streamline and the centroid.')

    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for merging json files."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('in_json', nargs='+',
                        help='List of json files to merge (.json).')
    parser.add_argument('out_json',
                        help='Output json file (.json).')
    parser.add_argument('--keep_separate', action='store_true',
                        help='Merge entries as separate keys based on filename.')
    parser.add_argument('--no_list', action='store_true',
                        help='Merge entries knowing there is no conflict.')
    parser.add_argument('--add_parent_key',
                        help='Merge all entries under a single parent.')
    parser.add_argument('--remove_parent_key', action='store_true',
                        help='Merge ignoring parent key (e.g for population).')
    parser.add_argument('--recursive', action='store_true',
                        help='Merge all entries at the lowest layers.')
    parser.add_argument('--average_last_layer', action='store_true',
                        help='Average all entries at the lowest layers.')

    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for mask-weighted metric statistics."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument('in_mask',
                        help='Mask volume filename.\nCan be a binary mask or a '
                             'weighted mask.')

    # Metrics may come either from a directory or an explicit file list.
    metrics_group = parser.add_argument_group('Metrics input options')
    metrics_excl = metrics_group.add_mutually_exclusive_group(required=True)
    metrics_excl.add_argument('--metrics_dir',
                              help='Metrics files directory. Name of the '
                                   'directory containing the metrics files.')
    metrics_excl.add_argument('--metrics', dest='metrics_file_list', nargs='+',
                              help='Metrics nifti filename. List of the names of '
                                   'the metrics file, in nifti format.')

    parser.add_argument('--bin', action='store_true',
                        help='If set, will consider every value of the mask '
                             'higher than 0 to be part of the mask, and set to 1 '
                             '(equivalent weighting for every voxel).')
    parser.add_argument('--normalize_weights', action='store_true',
                        help='If set, the weights will be normalized to the [0,1] '
                             'range.')

    add_overwrite_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for multi-ROI metric averaging.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument(
        'in_masks', nargs='+',
        help='Masks volume filename (ROI).\nCan be a binary mask or a '
             'weighted mask.')
    p.add_argument('--metrics', nargs='+', required=True,
                   help='Metrics nifti filename. List of the names of '
                        'the metrics file, in nifti format.')
    p.add_argument('--masks_sum', action='store_true',
                   help='Compute the sum of all values in masks '
                        '(similar to vox count)')
    # Fixed: original help text was truncated mid-sentence
    # ("Otherwise it print the average in ").
    p.add_argument('--save_avg',
                   help='Save all averages to a file (txt, npy, json).\n'
                        'Otherwise the averages are printed to stdout.')
    add_overwrite_arg(p)
    add_json_args(p)
    return p
def _build_arg_parser():
    """Build the argument parser for u-factor streamline filtering."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('in_tractogram',
                        help='Tractogram input file name.')
    parser.add_argument('out_tractogram',
                        help='Output tractogram file name.')
    parser.add_argument('--minU', default=0.5, type=float,
                        help='Min ufactor value. [%(default)s]')
    parser.add_argument('--maxU', default=1.0, type=float,
                        help='Max ufactor value. [%(default)s]')
    parser.add_argument('--remaining_tractogram',
                        help='If set, saves remaining streamlines.')
    parser.add_argument('--no_empty', action='store_true',
                        help='Do not write file if there is no streamline.')
    parser.add_argument('--display_counts', action='store_true',
                        help='Print streamline count before and after filtering.')

    add_overwrite_arg(parser)
    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for connectivity graph measures."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=EPILOG)

    parser.add_argument('in_conn_matrix',
                        help='Input connectivity matrix (.npy).\n'
                             'Typically a streamline count weighted matrix.')
    parser.add_argument('in_length_matrix',
                        help='Input length weighted matrix (.npy).')
    parser.add_argument('out_json',
                        help='Path of the output json.')
    parser.add_argument('--filtering_mask',
                        help='Binary filtering mask to apply before computing the '
                             'measures.')
    parser.add_argument('--avg_node_wise', action='store_true',
                        help='Return a single value for node-wise measures.')
    parser.add_argument('--append_json', action='store_true',
                        help='If the file already exists, will append to the '
                             'dictionary.')
    parser.add_argument('--small_world', action='store_true',
                        help='Compute measure related to small worldness (omega '
                             'and sigma).\n This option is much slower.')

    add_json_args(parser)
    add_verbose_arg(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for per-axis distance filtering.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                description=__doc__)
    p.add_argument('in_tractogram',
                   help='Streamlines input file name.')
    p.add_argument('out_tractogram',
                   help='Streamlines output file name.')
    # Fixed: each help text below was missing a space before [%(default)s],
    # rendering e.g. "in mm.[0.0]".
    p.add_argument('--min_x', default=0., type=float,
                   help='Minimum distance in the first dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--max_x', default=np.inf, type=float,
                   help='Maximum distance in the first dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--min_y', default=0., type=float,
                   help='Minimum distance in the second dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--max_y', default=np.inf, type=float,
                   help='Maximum distance in the second dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--min_z', default=0., type=float,
                   help='Minimum distance in the third dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--max_z', default=np.inf, type=float,
                   help='Maximum distance in the third dimension, in mm. '
                        '[%(default)s]')
    p.add_argument('--use_abs', action='store_true',
                   help="If set, will use the total of distances in absolute "
                        "value (ex, coming back on yourself will contribute "
                        "to the total distance instead of cancelling it).")
    p.add_argument('--no_empty', action='store_true',
                   help='Do not write file if there is no streamline.')
    p.add_argument('--display_counts', action='store_true',
                   help='Print streamline count before and after filtering.')
    p.add_argument('--save_rejected', metavar='filename',
                   help="Save the SFT of rejected streamlines.")
    add_reference_arg(p)
    add_overwrite_arg(p)
    add_verbose_arg(p)
    add_json_args(p)
    return p
def _build_arg_parser():
    """Build the minimal argument parser: a single input tractogram."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('in_tractogram',
                        help='Path of the input tractogram file.')
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for tractogram scoring against ground truth.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument("in_tractogram",
                   help="Input tractogram to score")
    p.add_argument("gt_config",
                   help=".json dict configured as specified above.")
    p.add_argument("out_dir",
                   help="Output directory.")

    g = p.add_argument_group("Additions to gt_config")
    # Fixed: --gt_dir was added to the main parser right after creating this
    # group; it belongs in the "Additions to gt_config" group like its sibling.
    g.add_argument("--gt_dir", metavar='DIR',
                   help="Root path of the ground truth files listed in the "
                        "gt_config.\n If not set, filenames in the config "
                        "file are considered\n as complete paths.")
    g.add_argument("--use_gt_masks_as_limits_masks", action='store_true',
                   help="If set, the gt_config's 'gt_mask' will also be used "
                        "as\n'limits_mask' for each bundle. Note that this "
                        "means the\nOR will necessarily be 0.")

    g = p.add_argument_group("Preprocessing")
    g.add_argument("--dilate_endpoints", metavar="NB_PASS", default=0, type=int,
                   help="Dilate inclusion masks n-times. Default: 0.")
    g.add_argument("--remove_invalid", action="store_true",
                   help="Remove invalid streamlines before scoring.")

    g = p.add_argument_group("Tractometry choices")
    g.add_argument("--save_wpc_separately", action='store_true',
                   help="If set, streamlines rejected from VC based on the "
                        "config\nfile criteria will be saved separately from "
                        "IS (and IC)\nin one file *_WPC.tck per bundle.")
    g.add_argument("--compute_ic", action='store_true',
                   help="If set, IS are split into NC + IC, where IC are "
                        "computed as one bundle per\npair of ROI not "
                        "belonging to a true connection, named\n*_*_IC.tck.")
    g.add_argument("--remove_wpc_belonging_to_another_bundle",
                   action='store_true',
                   help="If set, WPC actually belonging to VC (from another "
                        "bundle,\nof course; in the case of overlapping ROIs) "
                        "will be removed\nfrom the WPC classification.")

    p.add_argument("--no_empty", action='store_true',
                   help='Do not write file if there is no streamline.')

    add_json_args(p)
    add_overwrite_arg(p)
    add_reference_arg(p)
    add_verbose_arg(p)
    return p
def _build_arg_parser():
    """Build the argument parser for set operations on tractograms."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('operation', choices=OPERATIONS.keys(),
                        metavar='OPERATION',
                        help='The type of operation to be performed on the '
                             'streamlines. Must\nbe one of the following: '
                             '%(choices)s.')
    parser.add_argument('in_tractograms', metavar='INPUT_FILES', nargs='+',
                        help='The list of files that contain the '
                             'streamlines to operate on.')
    parser.add_argument('out_tractogram', metavar='OUTPUT_FILE',
                        help='The file where the remaining streamlines '
                             'are saved.')
    parser.add_argument('--precision', '-p', metavar='NBR_OF_DECIMALS',
                        type=int, default=4,
                        help='Precision used to compare streamlines [%(default)s].')
    parser.add_argument('--robust', '-r', action='store_true',
                        help='Use version robust to small translation/rotation.')
    parser.add_argument('--no_metadata', '-n', action='store_true',
                        help='Strip the streamline metadata from the output.')
    parser.add_argument('--fake_metadata', action='store_true',
                        help='Skip the metadata verification, create fake metadata '
                             'if missing, can lead to unexpected behavior.')
    parser.add_argument('--save_indices', '-s', metavar='OUT_INDEX_FILE',
                        help='Save the streamline indices to the supplied '
                             'json file.')
    parser.add_argument('--ignore_invalid', action='store_true',
                        help='If set, does not crash because of invalid '
                             'streamlines.')

    add_json_args(parser)
    add_reference_arg(parser)
    add_verbose_arg(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the minimal argument parser: a single input bundle."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('in_bundle',
                        help='Fiber bundle file.')
    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for ROI-based streamline filtering."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)

    parser.add_argument('in_tractogram',
                        help='Path of the input tractogram file.')
    parser.add_argument('out_tractogram',
                        help='Path of the output tractogram file.')

    # Each option below may be repeated (action='append') to stack rules.
    parser.add_argument('--drawn_roi', nargs=3, action='append',
                        metavar=('ROI_NAME', 'MODE', 'CRITERIA'),
                        help='Filename of a hand drawn ROI (.nii or .nii.gz).')
    parser.add_argument('--atlas_roi', nargs=4, action='append',
                        metavar=('ROI_NAME', 'ID', 'MODE', 'CRITERIA'),
                        help='Filename of an atlas (.nii or .nii.gz).')
    parser.add_argument('--bdo', nargs=3, action='append',
                        metavar=('BDO_NAME', 'MODE', 'CRITERIA'),
                        help='Filename of a bounding box (bdo) file from MI-Brain.')
    parser.add_argument('--x_plane', nargs=3, action='append',
                        metavar=('PLANE', 'MODE', 'CRITERIA'),
                        help='Slice number in X, in voxel space.')
    parser.add_argument('--y_plane', nargs=3, action='append',
                        metavar=('PLANE', 'MODE', 'CRITERIA'),
                        help='Slice number in Y, in voxel space.')
    parser.add_argument('--z_plane', nargs=3, action='append',
                        metavar=('PLANE', 'MODE', 'CRITERIA'),
                        help='Slice number in Z, in voxel space.')
    parser.add_argument('--filtering_list',
                        help='Text file containing one rule per line\n'
                             '(i.e. drawn_roi mask.nii.gz both_ends include).')
    parser.add_argument('--no_empty', action='store_true',
                        help='Do not write file if there is no streamline.')
    parser.add_argument('--display_counts', action='store_true',
                        help='Print streamline count before and after filtering')

    add_reference_arg(parser)
    add_verbose_arg(parser)
    add_overwrite_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for anatomy-based tractogram filtering."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=EPILOG,
        description=__doc__)

    parser.add_argument('in_tractogram',
                        help='Path of the input tractogram file.')
    # Implicit literal concatenation replaces the original explicit `+`.
    parser.add_argument('in_wmparc',
                        help='Path of the white matter parcellation atlas\n'
                             '(.nii or .nii.gz)')
    parser.add_argument('out_path',
                        help='Path to the output files.')
    parser.add_argument('--minL', default=0., type=float,
                        help='Minimum length of streamlines, in mm. [%(default)s]')
    parser.add_argument('--maxL', default=np.inf, type=float,
                        help='Maximum length of streamlines, in mm. [%(default)s]')
    parser.add_argument('-a', dest='angle', default=np.inf, type=float,
                        help='Maximum looping (or turning) angle of\n'
                             'a streamline, in degrees. [%(default)s]')
    parser.add_argument('--csf_bin',
                        help='Allow CSF endings filtering with this binary\n'
                             'mask instead of using the atlas (.nii or .nii.gz)')
    parser.add_argument('--ctx_dilation_radius', type=float, default=0.,
                        help='Cortical labels dilation radius, in mm.\n'
                             ' [%(default)s]')
    parser.add_argument('--save_intermediate_tractograms', action='store_true',
                        help='Save accepted and discarded streamlines\n'
                             ' after each step.')
    parser.add_argument('--save_volumes', action='store_true',
                        help='Save volumetric images (e.g. binarised label\n'
                             ' images, etc) in the filtering process.')
    parser.add_argument('--save_counts', action='store_true',
                        help='Save the streamline counts to a file (.json)')
    parser.add_argument('--no_empty', action='store_true',
                        help='Do not write file if there is no streamlines.')

    add_reference_arg(parser)
    add_verbose_arg(parser)
    add_overwrite_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser: a voxel label map plus a bundle name."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('voxel_label_map',
                        help='Fiber bundle file.')
    parser.add_argument('bundle_name',
                        help='Bundle name.')
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for bundle fitting visualization.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(description=__doc__,
                                formatter_class=argparse.RawTextHelpFormatter)
    p.add_argument('in_bundles', nargs='+',
                   help='List of tractography files supported by nibabel.')
    p.add_argument('in_labels', nargs='+',
                   help='List of labels maps that matches the bundles.')
    p.add_argument('--fitting_func',
                   choices=['lin_up', 'lin_down', 'exp', 'inv', 'log'],
                   default=None,
                   help='Function to weigh points using their distance.'
                        '\n[Default: %(default)s]')

    p2 = p.add_argument_group(title='Visualization options')
    # Showing and saving the rendering are mutually exclusive.
    p3 = p2.add_mutually_exclusive_group()
    p3.add_argument('--show_rendering', action='store_true',
                    help='Display VTK window (optional).')
    p3.add_argument('--save_rendering', metavar='OUT_FOLDER',
                    help='Save VTK render in the specified folder (optional)')
    p2.add_argument('--wireframe', action='store_true',
                    help='Use wireframe for the tube rendering.')
    p2.add_argument('--error_coloring', action='store_true',
                    help='Use the fitting error to color the tube.')
    p2.add_argument('--width', type=float, default=0.2,
                    help='Width of tubes or lines representing streamlines'
                         '\n[Default: %(default)s]')
    p2.add_argument('--opacity', type=float, default=0.2,
                    help='Opacity for the streamlines rendered with the tube.'
                         '\n[Default: %(default)s]')
    # Fixed: "RBG" typo in the help text (values are R, G, B).
    p2.add_argument('--background', metavar=('R', 'G', 'B'), nargs=3,
                    default=[1, 1, 1], type=parser_color_type,
                    help='RGB values [0, 255] of the color of the background.'
                         '\n[Default: %(default)s]')

    add_reference_arg(p)
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argument parser: input bundles and an output json path."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('in_bundles', nargs='+',
                        help='Path of the input bundles.')
    parser.add_argument('out_json',
                        help='Path of the output file.')
    add_reference_arg(parser)
    add_processes_arg(parser)
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for streamline length statistics."""
    parser = argparse.ArgumentParser(
        description='Compute streamlines min, mean and max length, as well as '
                    'standard deviation of length in mm.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('in_bundle',
                        help='Fiber bundle file.')
    add_reference_arg(parser)
    add_json_args(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for matrix comparison.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    # Fixed: "matricies" typo in the help text.
    p.add_argument('in_matrices', nargs='+',
                   help='Path of the input matrices.')
    p.add_argument('out_json',
                   help='Path of the output json file.')
    p.add_argument('--single_compare',
                   help='Compare inputs to this single file.')
    add_json_args(p)
    add_overwrite_arg(p)
    return p
def _build_arg_parser():
    """Build the argument parser for bundle group statistics."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        epilog=EPILOG,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('in_bundles', nargs='+',
                        help='Path of the input bundles.')
    parser.add_argument('out_json',
                        help='Path of the output file.')
    parser.add_argument('--group_statistics', action='store_true',
                        help='Show average measures \n'
                             '[%(default)s].')
    add_reference_arg(parser)
    add_processes_arg(parser)
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for simple json merging."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=__doc__)
    parser.add_argument('in_json', nargs='+',
                        help='List of json files to merge (.json).')
    parser.add_argument('out_json',
                        help='Output json file (.json).')
    parser.add_argument('--keep_separate', action='store_true',
                        help='Merge entries as separate keys.')
    add_json_args(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for scoring a tractogram against GT bundles.

    Returns
    -------
    argparse.ArgumentParser
        Configured parser.
    """
    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument("in_tractogram",
                   help="Input tractogram to score")
    p.add_argument("gt_bundles", nargs="+",
                   help="Bundles ground truth (.[trk|.tck|.nii|.nii.gz]).")
    g = p.add_argument_group("ROIs")
    # Fixed: the original help strings used backslash line-continuations
    # *inside* the string literals, which left runs of indentation spaces
    # (or an invalid '\ ' escape) in the rendered help. Rewritten with
    # implicit string concatenation.
    g.add_argument("--gt_endpoints", nargs="+",
                   help="Bundles endpoints, both bundle's ROIs "
                        "(.nii or .nii.gz).")
    g.add_argument("--gt_tails", nargs="+",
                   help="Bundles tails, bundle's first ROI(.nii or .nii.gz).")
    g.add_argument("--gt_heads", nargs="+",
                   help="Bundles heads, bundle's second ROI(.nii or .nii.gz).")
    p.add_argument("--dilate_endpoints",
                   metavar="NB_PASS", default=1, type=int,
                   help="Dilate masks n-times.")
    p.add_argument("--gt_config", metavar="FILE",
                   help=".json dict to specify bundles streamlines min, "
                        "max length and max angles.")
    p.add_argument("--out_dir", default="gt_out/",
                   help="Output directory")
    p.add_argument("--wrong_path_as_separate", action="store_true",
                   help="Separates streamlines that go outside of the ground "
                        "truth mask from true connections, outputs as "
                        "*_wpc.[tck|trk].")
    p.add_argument("--remove_invalid", action="store_true",
                   help="Remove invalid streamlines before scoring.")
    add_json_args(p)
    add_overwrite_arg(p)
    add_reference_arg(p)
    add_verbose_arg(p)
    return p
def _build_arg_parser():
    """Build the argument parser for bundle endpoint map creation."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('in_bundle',
                        help='Fiber bundle filename.')
    parser.add_argument('endpoints_map_head',
                        help='Output endpoints map head filename.')
    parser.add_argument('endpoints_map_tail',
                        help='Output endpoints map tail filename.')
    parser.add_argument('--swap', action='store_true',
                        help='Swap head<->tail convention. '
                             'Can be useful when the reference is not in RAS.')
    add_json_args(parser)
    add_reference_arg(parser)
    add_overwrite_arg(parser)
    return parser
def _build_arg_parser():
    """Build the argument parser for whole-bundle metric statistics."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('in_bundle',
                        help='Fiber bundle file to compute statistics on')
    parser.add_argument('metrics', nargs='+',
                        help='Nifti file to compute statistics on. Probably some '
                             'tractometry measure(s) such as FA, MD, RD, ...')
    parser.add_argument('--density_weighting', action='store_true',
                        help='If set, weight statistics by the number of '
                             'fibers passing through each voxel.')
    add_reference_arg(parser)
    add_json_args(parser)
    return parser