def test_instantiate_StatisticsSurface():
    from clinica.pipelines.statistics_surface.statistics_surface_pipeline import StatisticsSurface
    from os.path import dirname, join, abspath

    root = dirname(abspath(__file__))
    pipeline = StatisticsSurface(caps_directory=join(root, 'data',
                                                     'StatisticsSurface', 'in',
                                                     'caps'),
                                 tsv_file=join(root, 'data',
                                               'StatisticsSurface', 'in',
                                               'subjects.tsv'))
    pipeline.parameters = {
        'design_matrix': '1 + group + age + sex',
        'contrast': 'group',
        'str_format': '%s %s %s %f %s',
        'group_label': 'UnitTest',
        'glm_type': 'group_comparison',
        'custom_file':
        '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh',
        'feature_label': 'cortical_thickness',
        'full_width_at_half_maximum': 20,
        'threshold_uncorrected_pvalue': 0.001,
        'threshold_corrected_pvalue': 0.05,
        'cluster_threshold': 0.001
    }
    pipeline.build()
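The custom_file value above is a path template relative to the CAPS directory: the @subject, @session, @hemi and @fwhm placeholders are substituted before the surface files are read. Below is a minimal sketch of that substitution; the helper name and the example subject/session values are illustrative assumptions, not Clinica's actual API.

# Illustrative sketch only: how the @-placeholders in custom_file could be
# expanded into a concrete CAPS path. Clinica performs this substitution
# internally; this helper and the example values are assumptions made for
# demonstration purposes.
def expand_custom_file(template, subject, session, hemi, fwhm):
    return (template
            .replace('@subject', subject)
            .replace('@session', session)
            .replace('@hemi', hemi)
            .replace('@fwhm', str(fwhm)))

expanded = expand_custom_file(
    '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/'
    '@hemi.thickness.fwhm@fwhm.fsaverage.mgh',
    subject='sub-UnitTest01', session='ses-M00', hemi='lh', fwhm=20)
# expanded == 'sub-UnitTest01/ses-M00/t1/freesurfer_cross_sectional/'
#             'sub-UnitTest01_ses-M00/surf/lh.thickness.fwhm20.fsaverage.mgh'
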
Example #2
def test_run_StatisticsSurface(cmdopt):
    from clinica.pipelines.statistics_surface.statistics_surface_pipeline import StatisticsSurface
    from os.path import dirname, join, abspath, exists, pardir
    import shutil
    import numpy as np
    from scipy.io import loadmat

    working_dir = cmdopt
    root = dirname(abspath(join(abspath(__file__), pardir)))
    root = join(root, 'data', 'StatisticsSurface')

    # clean_folder is a test helper defined elsewhere in Clinica's test suite
    # (not a standard-library import).
    clean_folder(join(root, 'out', 'caps'), recreate=False)
    clean_folder(join(working_dir, 'StatisticsSurface'))
    shutil.copytree(join(root, 'in', 'caps'), join(root, 'out', 'caps'))

    pipeline = StatisticsSurface(caps_directory=join(root, 'out', 'caps'),
                                 tsv_file=join(root, 'in', 'subjects.tsv'))
    pipeline.parameters = {
        'design_matrix': '1 + group + age + sex',
        'contrast': 'group',
        'str_format': '%s %s %s %f %s',
        'group_label': 'UnitTest',
        'glm_type': 'group_comparison',
        'custom_file':
        '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh',
        'feature_label': 'cortical_thickness',
        'full_width_at_half_maximum': 20,
        'threshold_uncorrected_pvalue': 0.001,
        'threshold_corrected_pvalue': 0.05,
        'cluster_threshold': 0.001
    }
    pipeline.base_dir = join(working_dir, 'StatisticsSurface')
    pipeline.build()
    pipeline.run(plugin='MultiProc',
                 plugin_args={'n_procs': 8},
                 bypass_check=True)

    # Check files
    out_file = join(
        root,
        'out/caps/groups/group-UnitTest/statistics/surfstat_group_comparison/group-UnitTest_AD-lt-CN_measure-cortical_thickness_fwhm-20_correctedPValue.mat'
    )
    ref_file = join(
        root,
        'ref/group-UnitTest_AD-lt-CN_measure-cortical_thickness_fwhm-20_correctedPValue.mat'
    )

    out_file_mat = loadmat(out_file)['correctedpvaluesstruct']
    ref_file_mat = loadmat(ref_file)['correctedpvaluesstruct']
    for i in range(4):
        assert np.allclose(out_file_mat[0][0][i],
                           ref_file_mat[0][0][i],
                           rtol=1e-8,
                           equal_nan=True)
    clean_folder(join(root, 'out', 'caps'), recreate=False)
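In the comparison above, loadmat returns the MATLAB struct correctedpvaluesstruct as a 1x1 structured array, so out_file_mat[0][0][i] first steps into the single struct element and then into its i-th field; the loop checks the first four fields. As a hedged sketch, the same comparison can be done by field name rather than by position, assuming only the arrays loaded above and reading the field names from their dtype.

# Sketch (not part of the test): compare the struct field by field, by name,
# reporting the first mismatching field. No field names are hard-coded; they
# are read from the dtype of the array returned by scipy.io.loadmat.
def assert_mat_structs_close(out_struct, ref_struct, rtol=1e-8):
    import numpy as np
    for name in out_struct.dtype.names:
        np.testing.assert_allclose(out_struct[name][0, 0],
                                    ref_struct[name][0, 0],
                                    rtol=rtol, equal_nan=True,
                                    err_msg='Field %s differs' % name)

# Usage with the arrays loaded above:
# assert_mat_structs_close(out_file_mat, ref_file_mat)
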
Example #3
    def run_command(self, args):
        """
        Run the pipelines with defined args
        """
        from clinica.pipelines.statistics_surface.statistics_surface_pipeline import StatisticsSurface
        from clinica.pipelines.statistics_surface.statistics_surface_utils import check_inputs
        from clinica.utils.stream import cprint
        import os

        if args.feature_type is not None:
            if args.custom_file is not None:
                raise Exception('--feature_type and --custom_file are mutually exclusive: you must choose one or the other. See the documentation for more information.')
            if args.feature_label is not None:
                raise Exception('--feature_label should not be used with --feature_type')
            # FreeSurfer cortical thickness
            if args.feature_type == 'cortical_thickness':
                args.custom_file = '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh'
                args.feature_label = 'ct'
            # PET cortical projection
            elif args.feature_type == 'pet_fdg_projection':
                args.custom_file = '@subject/@session/pet/surface/@subject_@session_task-rest_acq-fdg_pet_space-fsaverage_suvr-pons_pvc-iy_hemi-@hemi_fwhm-@fwhm_projection.mgh'
                args.feature_label = 'fdg'
            # NODDI, NDI, ODI and FISO
            elif args.feature_type == 'noddi_projection_ndi':
                args.custom_file = '@subject/@session/noddi/postprocessing/noddi-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'NDI'
            elif args.feature_type == 'noddi_projection_fiso':
                args.custom_file = '@subject/@session/noddi/postprocessing/noddi-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'FISO'
            elif args.feature_type == 'noddi_projection_odi':
                args.custom_file = '@subject/@session/noddi/postprocessing/noddi-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'ODI'
            # DTI fa, md, rd and ad
            elif args.feature_type == 'dti_projection_fa':
                args.custom_file = '@subject/@session/noddi/postprocessing/dti-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'FA'
            elif args.feature_type == 'dti_projection_md':
                args.custom_file = '@subject/@session/noddi/postprocessing/dti-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'MD'
            elif args.feature_type == 'dti_projection_rd':
                args.custom_file = '@subject/@session/noddi/postprocessing/dti-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'RD'
            elif args.feature_type == 'dti_projection_ad':
                args.custom_file = '@subject/@session/noddi/postprocessing/dti-register-vertex-fsaverage/cortex-projection/@subject_@session_OnFsaverage_fwhm-@[email protected]'
                args.feature_label = 'AD'
            else:
                raise Exception('Feature type ' + args.feature_type + ' not recognized. Use --custom_file to specify your own files (without --feature_type).')
        elif args.feature_type is None:
            if args.custom_file is None:
                cprint('No feature type selected: using cortical thickness as the default feature.')
                args.custom_file = '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh'
                args.feature_label = 'ct'
            else:
                cprint('Using custom features.')
                if args.feature_label is None:
                    raise Exception('You must specify --feature_label when using the --custom_file flag.')

        # Check whether the group label already exists; if so, raise an error.
        if os.path.exists(os.path.join(os.path.abspath(self.absolute_path(args.caps_directory)), 'groups', 'group-' + args.group_id)):
            error_message = ('group_id ' + args.group_id + ' already exists. Choose another label, or delete the existing '
                             'folder (and the working directory) and rerun the pipeline.')
            raise Exception(error_message)

        pipeline = StatisticsSurface(
            caps_directory=self.absolute_path(args.caps_directory),
            tsv_file=self.absolute_path(args.subject_visits_with_covariates_tsv))
        pipeline.parameters = {
            # Pass the command-line arguments to the pipeline as a parameters dictionary
            'design_matrix': args.design_matrix,
            'contrast': args.contrast,
            'str_format': args.string_format,
            'group_label': args.group_id,
            'glm_type': args.glm_type,
            'custom_file': args.custom_file,
            'feature_label': args.feature_label,
            'full_width_at_half_maximum': args.full_width_at_half_maximum,
            'threshold_uncorrected_pvalue': args.threshold_uncorrected_pvalue,
            'threshold_corrected_pvalue': args.threshold_corrected_pvalue,
            'cluster_threshold': args.cluster_threshold
        }
        pipeline.base_dir = self.absolute_path(args.working_directory)

        check_inputs(pipeline.caps_directory,
                     pipeline.parameters['custom_file'],
                     pipeline.parameters['full_width_at_half_maximum'],
                     pipeline.tsv_file)

        if args.n_procs:
            pipeline.run(plugin='MultiProc',
                         plugin_args={'n_procs': args.n_procs})
        else:
            print(pipeline.parameters)
            pipeline.run()

        cprint("The " + self._name + " pipeline has completed. You can now delete the working directory (" + args.working_directory + ").")