Example #1
def main():
    """Entry point"""
    from argparse import ArgumentParser, RawTextHelpFormatter
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow
    from mriqc import DEFAULTS
    from mriqc.utils.bids import collect_bids_data
    from mriqc.workflows.core import build_workflow
    # from mriqc.reports.utils import check_reports

    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir',
                        action='store',
                        help='The directory with the input dataset '
                        'formatted according to the BIDS standard.')
    parser.add_argument(
        'output_dir',
        action='store',
        help='The directory where the output files '
        'should be stored. If you are running group level analysis '
        'this folder should be prepopulated with the results of the '
        'participant level analysis.')
    parser.add_argument(
        'analysis_level',
        action='store',
        nargs='+',
        help='Level of the analysis that will be performed. '
        'Multiple participant level analyses can be run independently '
        '(in parallel) using the same output_dir.',
        choices=['participant', 'group'])
    parser.add_argument(
        '--participant_label',
        '--subject_list',
        '-S',
        action='store',
        help='The label(s) of the participant(s) that should be analyzed. '
        'The label corresponds to sub-<participant_label> from the '
        'BIDS spec (so it does not include "sub-"). If this parameter '
        'is not provided all subjects should be analyzed. Multiple '
        'participants can be specified with a space separated list.',
        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-m',
                         '--modalities',
                         action='store',
                         nargs='*',
                         choices=['T1w', 'bold', 'T2w'],
                         default=['T1w', 'bold', 'T2w'],
                         help='select one of the supported MRI types')
    g_input.add_argument('-s', '--session-id', action='store',
                         help='filter input dataset by session id')
    g_input.add_argument('-r', '--run-id', action='store',
                         help='filter input dataset by run id')
    g_input.add_argument('--nthreads',
                         action='store',
                         type=int,
                         help='number of threads (deprecated, use --n_procs)')
    g_input.add_argument('--n_procs',
                         action='store',
                         default=0,
                         type=int,
                         help='number of parallel processes')
    g_input.add_argument('--mem_gb',
                         action='store',
                         default=0,
                         type=int,
                         help='available total memory, in GB')
    g_input.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run',
                         action='store_true',
                         default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing',
                         action='store_true',
                         default=False,
                         help='use testing settings for a minimal footprint')
    g_input.add_argument(
        '--hmc-afni',
        action='store_true',
        default=True,
        help='Use AFNI 3dvolreg for head motion correction (HMC)')
    g_input.add_argument(
        '--hmc-fsl',
        action='store_true',
        default=False,
        help='Use FSL MCFLIRT for head motion correction (HMC)')
    g_input.add_argument(
        '-f',
        '--float32',
        action='store_true',
        default=DEFAULTS['float32'],
        help="Cast the input data to float32 if it's represented in higher "
             "precision (saves space and improves performance)")
    g_input.add_argument('--fft-spikes-detector',
                         action='store_true',
                         default=False,
                         help='Turn on FFT based spike detector (slow).')

    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w',
                           '--work-dir',
                           action='store',
                           default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')
    g_outputs.add_argument('--verbose-reports',
                           default=False,
                           action='store_true')

    # ANTs options
    g_ants = parser.add_argument_group(
        'specific settings for ANTs registrations')
    g_ants.add_argument(
        '--ants-nthreads',
        action='store',
        type=int,
        default=DEFAULTS['ants_nthreads'],
        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings',
                        action='store',
                        help='path to JSON file with settings for ANTS')

    # AFNI head motion correction settings
    g_afni = parser.add_argument_group(
        'specific settings for AFNI head motion correction')
    g_afni.add_argument(
        '--deoblique',
        action='store_true',
        default=False,
        help='Deoblique the functional scans during head motion '
        'correction preprocessing')
    g_afni.add_argument(
        '--despike',
        action='store_true',
        default=False,
        help='Despike the functional scans during head motion correction '
        'preprocessing')
    g_afni.add_argument(
        '--start-idx',
        action='store',
        type=int,
        help='Initial volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument(
        '--stop-idx',
        action='store',
        type=int,
        help='Final volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument('--correct-slice-timing',
                        action='store_true',
                        default=False,
                        help='Perform slice timing correction')

    opts = parser.parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = 0
    if opts.nthreads is not None:
        MRIQC_LOG.warn('Option --nthreads has been deprecated in mriqc 0.8.8. '
                       'Please use --n_procs instead.')
        n_procs = opts.nthreads
    if opts.n_procs:  # --n_procs defaults to 0; do not clobber --nthreads with it
        n_procs = opts.n_procs

    # Check physical memory
    total_memory = opts.mem_gb
    if total_memory <= 0:  # 0 (the default) means: estimate it automatically
        try:
            from psutil import virtual_memory
            total_memory = virtual_memory().total // (1024**3) + 1
        except ImportError:
            MRIQC_LOG.warn(
                'Total physical memory could not be estimated, using %d'
                'GB as default', DEFAULT_MEM_GB)
            total_memory = DEFAULT_MEM_GB

    if total_memory > 0:
        av_procs = total_memory // 4
        if av_procs < 1:
            MRIQC_LOG.warn(
                'Total physical memory is less than 4GB, memory allocation'
                ' problems are likely to occur.')
            n_procs = 1
        elif n_procs > av_procs:
            n_procs = av_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = 1
            max_parallel_ants = cpu_count() // settings['ants_nthreads']
            if max_parallel_ants > 1:
                settings['n_procs'] = max_parallel_ants

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
        __version__, ', '.join(analysis_levels), opts.participant_label,
        settings)

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(settings['bids_dir'],
                                participant_label=opts.participant_label)

    # Set up participant level
    if 'participant' in analysis_levels:
        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                MRIQC_LOG.warn('No %s scans were found in %s', mod,
                               settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod,
                                          settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                workflow.run(**plugin_settings)
        else:
            raise RuntimeError(
                'Error reading BIDS directory (%s), or the dataset is not '
                'BIDS-compliant.' % settings['bids_dir'])

    # Set up group level
    if 'group' in analysis_levels:
        from mriqc.reports import group_html
        from mriqc.utils.misc import generate_csv, generate_pred

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                MRIQC_LOG.warn(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            MRIQC_LOG.info('Summary CSV table for the %s data generated (%s)',
                           mod, out_csv)

            out_pred = generate_pred(derivatives_dir, settings['output_dir'],
                                     mod)
            if out_pred is not None:
                MRIQC_LOG.info(
                    'Predicted QA CSV table for the %s data generated (%s)',
                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv,
                       mod,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + mod + '.csv'),
                       out_file=out_html)
            MRIQC_LOG.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception(
                "No data found. No group level reports were generated.")
Example #2
def compute_iqms(settings, name='ComputeIQMs'):
    """Workflow that actually computes the IQMs"""
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'run_id', 'orig', 'brainmask', 'airmask',
        'artmask', 'headmask', 'segmentation', 'inu_corrected', 'in_inu',
        'pvms', 'metadata', 'reverse_transforms', 'reverse_invert_flags'
    ]), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_file', 'out_noisefit']),
        name='outputnode')

    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # AFNI check smoothing
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Mortamet's QI2
    getqi2 = pe.Node(ComputeQI2(erodemsk=settings.get('testing', False)),
                     name='ComputeQI2')

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(), name='measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(dimension=3,
                                           default_value=0,
                                           interpolation='NearestNeighbor'),
                      iterfield=['input_image'],
                      name='MNItpms2t1')
    invt.inputs.input_image = [
        op.join(get_mni_icbm152_nlin_asym_09c(), fname + '.nii.gz')
        for fname in ['1mm_tpm_csf', '1mm_tpm_gm', '1mm_tpm_wm']
    ]

    datasink = pe.Node(IQMFileSink(modality='T1w', out_dir=deriv_dir),
                       name='datasink')

    workflow.connect([(inputnode, datasink, [('subject_id', 'subject_id'),
                                             ('session_id', 'session_id'),
                                             ('run_id', 'run_id'),
                                             ('metadata', 'metadata')]),
                      (inputnode, getqi2, [('orig', 'in_file'),
                                           ('airmask', 'air_msk')]),
                      (inputnode, measures, [('inu_corrected', 'in_noinu'),
                                             ('in_inu', 'in_bias'),
                                             ('orig', 'in_file'),
                                             ('airmask', 'air_msk'),
                                             ('headmask', 'head_msk'),
                                             ('artmask', 'artifact_msk'),
                                             ('segmentation', 'in_segm'),
                                             ('pvms', 'in_pvms')]),
                      (inputnode, fwhm, [('orig', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, invt, [('orig', 'reference_image'),
                                         ('reverse_transforms', 'transforms'),
                                         ('reverse_invert_flags',
                                          'invert_transform_flags')]),
                      (invt, measures, [('output_image', 'mni_tpms')]),
                      (measures, datasink, [('out_qc', 'root')]),
                      (getqi2, datasink, [('qi2', 'qi_2')]),
                      (fwhm, datasink, [(('fwhm', fwhm_dict), 'root0')]),
                      (getqi2, outputnode, [('out_file', 'out_noisefit')]),
                      (datasink, outputnode, [('out_file', 'out_file')])])
    return workflow
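# A minimal usage sketch, assuming only the settings keys this function reads
# ('output_dir', 'testing'); paths are illustrative:
if __name__ == '__main__':
    wf = compute_iqms(settings={'output_dir': 'out', 'testing': True})
    wf.base_dir = 'work'
    # wf.run()  # executes with nipype's default (Linear) plugin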
Example #3
def compute_iqms(settings, name='ComputeIQMs'):
    """
    Workflow that actually computes the IQMs

    .. workflow::

      from mriqc.workflows.functional import compute_iqms
      wf = compute_iqms(settings={'output_dir': 'out'})


    """
    from mriqc.workflows.utils import _tofloat

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'task_id', 'acq_id', 'rec_id', 'run_id',
        'orig', 'epi_mean', 'brainmask', 'hmc_epi', 'hmc_fd', 'fd_thres',
        'in_tsnr', 'metadata'
    ]), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
        name='outputnode')

    # Set FD threshold
    inputnode.inputs.fd_thres = settings.get('fd_thres', 0.2)
    deriv_dir = check_folder(
        op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True),
                     name='ComputeDVARS')
    dvnode.interface.estimated_memory_gb = biggest_file_gb * 3

    # AFNI quality measures
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True,
                                         out_file='outliers.out'),
                       name='outliers')
    outliers.interface.estimated_memory_gb = biggest_file_gb * 2.5
    quality = pe.Node(afni.QualityIndex(automask=True,
                                        out_file='quality.out'),
                      name='quality')
    quality.interface.estimated_memory_gb = biggest_file_gb * 3

    measures = pe.Node(FunctionalQC(), name='measures')
    measures.interface.estimated_memory_gb = biggest_file_gb * 3

    workflow.connect([(inputnode, dvnode, [('hmc_epi', 'in_file'),
                                           ('brainmask', 'in_mask')]),
                      (inputnode, measures, [('epi_mean', 'in_epi'),
                                             ('brainmask', 'in_mask'),
                                             ('hmc_epi', 'in_hmc'),
                                             ('hmc_fd', 'in_fd'),
                                             ('fd_thres', 'fd_thres'),
                                             ('in_tsnr', 'in_tsnr')]),
                      (inputnode, fwhm, [('epi_mean', 'in_file'),
                                         ('brainmask', 'mask')]),
                      (inputnode, quality, [('hmc_epi', 'in_file')]),
                      (inputnode, outliers, [('hmc_epi', 'in_file'),
                                             ('brainmask', 'mask')]),
                      (dvnode, measures, [('out_all', 'in_dvars')]),
                      (fwhm, measures, [(('fwhm', _tofloat), 'in_fwhm')]),
                      (dvnode, outputnode, [('out_all', 'out_dvars')]),
                      (outliers, outputnode, [('out_file', 'outliers')])])

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(modality='bold', out_dir=deriv_dir),
                       name='datasink')

    workflow.connect([
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('task_id', 'task_id'), ('acq_id', 'acq_id'),
                               ('rec_id', 'rec_id'), ('run_id', 'run_id'),
                               ('metadata', 'metadata')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])

    if settings.get('fft_spikes_detector', False):
        # FFT spikes finder
        spikes_fft = pe.Node(niu.Function(
            input_names=['in_file'],
            output_names=['n_spikes', 'out_spikes', 'out_fft'],
            function=slice_wise_fft), name='SpikesFinderFFT')

        workflow.connect([
            (inputnode, spikes_fft, [('orig', 'in_file')]),
            (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                      ('out_fft', 'out_fft')]),
            (spikes_fft, datasink, [('n_spikes', 'spikes_num')])
        ])

    return workflow
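# Usage sketch: the optional branches above are driven entirely by the
# settings dict (values below are illustrative, not defaults from the source):
if __name__ == '__main__':
    wf = compute_iqms(settings={
        'output_dir': 'out',
        'fd_thres': 0.2,              # forwarded to inputnode.inputs.fd_thres
        'fft_spikes_detector': True,  # wires in the SpikesFinderFFT node
        'biggest_file_size_gb': 2,    # scales the estimated_memory_gb hints
    })
    # wf.run()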
Example #4
def compute_iqms(settings, name='ComputeIQMs'):
    """Workflow that actually computes the IQMs"""
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'task_id', 'run_id', 'orig', 'epi_mean',
        'brainmask', 'hmc_epi', 'hmc_fd', 'in_tsnr', 'metadata']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_dvars', 'outliers', 'out_spikes', 'out_fft']),
                         name='outputnode')

    deriv_dir = check_folder(op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(save_plot=False, save_all=True), name='ComputeDVARS')

    # AFNI quality measures
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True, out_file='outliers.out'),
                       name='outliers')
    quality = pe.Node(afni.QualityIndex(automask=True, out_file='quality.out'),
                      name='quality')

    # FFT spikes finder
    spikes_fft = pe.Node(niu.Function(
        input_names=['in_file'], output_names=['n_spikes', 'out_spikes', 'out_fft'],
        function=slice_wise_fft), name='SpikesFinderFFT')

    measures = pe.Node(FunctionalQC(), name='measures')

    workflow.connect([
        (inputnode, dvnode, [('orig', 'in_file'),
                             ('brainmask', 'in_mask')]),
        (inputnode, measures, [('epi_mean', 'in_epi'),
                               ('brainmask', 'in_mask'),
                               ('hmc_epi', 'in_hmc'),
                               ('hmc_fd', 'in_fd'),
                               ('in_tsnr', 'in_tsnr')]),
        (inputnode, fwhm, [('epi_mean', 'in_file'),
                           ('brainmask', 'mask')]),
        (inputnode, spikes_fft, [('orig', 'in_file')]),
        (inputnode, quality, [('hmc_epi', 'in_file')]),
        (inputnode, outliers, [('hmc_epi', 'in_file'),
                               ('brainmask', 'mask')]),
        (dvnode, measures, [('out_all', 'in_dvars')]),
        (dvnode, outputnode, [('out_all', 'out_dvars')]),
        (outliers, outputnode, [('out_file', 'outliers')]),
        (spikes_fft, outputnode, [('out_spikes', 'out_spikes'),
                                  ('out_fft', 'out_fft')])
    ])

    # Save to JSON file
    datasink = pe.Node(IQMFileSink(
        modality='bold', out_dir=deriv_dir), name='datasink')

    workflow.connect([
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('task_id', 'task_id'),
                               ('run_id', 'run_id'),
                               ('metadata', 'metadata')]),
        (outliers, datasink, [(('out_file', _parse_tout), 'aor')]),
        (quality, datasink, [(('out_file', _parse_tqual), 'aqi')]),
        (measures, datasink, [('out_qc', 'root')]),
        (spikes_fft, datasink, [('n_spikes', 'spikes_num')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'root0')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])
    return workflow
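# The tuple (('fwhm', fwhm_dict), 'root0') above uses nipype's connection
# modifier syntax: fwhm_dict is applied to the 'fwhm' output before it reaches
# the datasink. A hypothetical stand-in illustrating the shape of such a helper:
def _fwhm_dict_sketch(fwhm):
    """Convert AFNI FWHMx output (x, y, z, combined) into a flat dict."""
    fwhm = [float(f) for f in fwhm]
    return {'fwhm_x': fwhm[0], 'fwhm_y': fwhm[1],
            'fwhm_z': fwhm[2], 'fwhm_avg': fwhm[3]}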
Example #5
def main():
    """Entry point"""
    from argparse import ArgumentParser, RawTextHelpFormatter
    from nipype import config as ncfg

    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v', '--version', action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir', action='store',
                        help='The directory with the input dataset '
                             'formatted according to the BIDS standard.')
    parser.add_argument('output_dir', action='store',
                        help='The directory where the output files '
                             'should be stored. If you are running group level analysis '
                             'this folder should be prepopulated with the results of the '
                             'participant level analysis.')
    parser.add_argument('analysis_level', action='store',
                        help='Level of the analysis that will be performed. '
                             'Multiple participant level analyses can be run independently '
                             '(in parallel) using the same output_dir.',
                        choices=['participant', 'group'])
    parser.add_argument('--participant_label', '--subject_list', '-S', action='store',
                        help='The label(s) of the participant(s) that should be analyzed. '
                             'The label corresponds to sub-<participant_label> from the '
                             'BIDS spec (so it does not include "sub-"). If this parameter '
                             'is not provided all subjects should be analyzed. Multiple '
                             'participants can be specified with a space separated list.',
                        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-d', '--data-type', action='store', nargs='*',
                         choices=['anat', 'func'], default=['anat', 'func'])
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run', action='store_true', default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing', action='store_true', default=False,
                         help='use testing settings for a minimal footprint')

    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w', '--work-dir', action='store', default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')
    g_outputs.add_argument('--verbose-reports', default=False, action='store_true')

    # ANTs options
    g_ants = parser.add_argument_group('specific settings for ANTs registrations')
    g_ants.add_argument('--ants-nthreads', action='store', type=int,
                        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings', action='store',
                        help='path to JSON file with settings for ANTS')

    # AFNI head motion correction settings
    g_afni = parser.add_argument_group('specific settings for AFNI head motion correction')
    g_afni.add_argument('--hmc-afni', action='store_true', default=False,
                        help='Use AFNI 3dvolreg for head motion correction (HMC) and '
                             'frame displacement (FD) estimation')
    g_afni.add_argument('--deoblique', action='store_true', default=False,
                        help='Deoblique the functional scans during head motion '
                             'correction preprocessing')
    g_afni.add_argument('--despike', action='store_true', default=False,
                        help='Despike the functional scans during head motion correction '
                             'preprocessing')
    g_afni.add_argument('--start-idx', action='store', type=int,
                        help='Initial volume in functional timeseries that should be '
                             'considered for preprocessing')
    g_afni.add_argument('--stop-idx', action='store', type=int,
                        help='Final volume in functional timeseries that should be '
                             'considered for preprocessing')
    g_afni.add_argument('--correct-slice-timing', action='store_true', default=False,
                        help='Perform slice timing correction')

    opts = parser.parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)
    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'nthreads': opts.nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.ants_nthreads:
        settings['ants_nthreads'] = opts.ants_nthreads

    log_dir = op.join(settings['output_dir'], 'logs')

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['work_dir'], 'reports')

    with LockFile(op.join(os.getenv('HOME'), '.mriqc-lock')):
        check_folder(settings['output_dir'])
        check_folder(settings['work_dir'])
        check_folder(log_dir)
        check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir}
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_level=%s, participant_label=%s)\n\tSettings=%s',
        __version__, opts.analysis_level, opts.participant_label, settings)

    # Set up participant level
    if opts.analysis_level == 'participant':
        for dtype in opts.data_type:
            ms_func = getattr(mwc, 'ms_' + dtype)
            workflow = ms_func(subject_id=opts.participant_label, session_id=opts.session_id,
                               run_id=opts.run_id, settings=settings)
            if workflow is None:
                MRIQC_LOG.warn('No scans were found for the given inputs')
                continue

            workflow.base_dir = settings['work_dir']
            if settings.get('write_graph', False):
                workflow.write_graph()

            if not opts.dry_run:
                workflow.run(**plugin_settings)

    # Set up group level
    elif opts.analysis_level == 'group':
        from mriqc.reports import MRIQCReportPDF

        for dtype in opts.data_type:
            reporter = MRIQCReportPDF(dtype, settings)
            reporter.group_report()
            reporter.individual_report()
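# The --use-plugin option expects a YAML file that deserializes into the same
# shape as plugin_settings, e.g. (hypothetical contents):
#
#   plugin: MultiProc
#   plugin_args:
#     n_procs: 4
#
# which is equivalent to the dict built in the multiprocessing branch above:
#   plugin_settings = {'plugin': 'MultiProc', 'plugin_args': {'n_procs': 4}}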
Example #6
def gen_html(csv_file, qctype, csv_failed=None, out_file=None):
    import os.path as op
    import datetime
    from os import remove
    from shutil import copy
    from sys import version_info
    from pkg_resources import resource_filename as pkgrf
    from mriqc import __version__ as ver
    from mriqc.data import GroupTemplate
    from mriqc.utils.misc import check_folder

    if version_info[0] > 2:
        from io import StringIO as TextIO
    else:
        from io import BytesIO as TextIO

    QCGROUPS = {
        'anat': [
            (['cjv'], None),
            (['cnr'], None),
            (['efc'], None),
            (['fber'], None),
            (['wm2max'], None),
            (['snr_csf', 'snr_gm', 'snr_wm'], None),
            (['snrd_csf', 'snrd_gm', 'snrd_wm'], None),
            (['fwhm_avg', 'fwhm_x', 'fwhm_y', 'fwhm_z'], 'mm'),
            (['qi_1', 'qi_2'], None),
            (['inu_range', 'inu_med'], None),
            (['icvs_csf', 'icvs_gm', 'icvs_wm'], None),
            (['rpve_csf', 'rpve_gm', 'rpve_wm'], None),
            (['tpm_overlap_csf', 'tpm_overlap_gm', 'tpm_overlap_wm'], None),
            (['summary_bg_mean', 'summary_bg_stdv', 'summary_bg_k',
              'summary_bg_p05', 'summary_bg_p95'], None),
            (['summary_csf_mean', 'summary_csf_stdv', 'summary_csf_k',
              'summary_csf_p05', 'summary_csf_p95'], None),
            (['summary_gm_mean', 'summary_gm_stdv', 'summary_gm_k',
              'summary_gm_p05', 'summary_gm_p95'], None),
            (['summary_wm_mean', 'summary_wm_stdv', 'summary_wm_k',
              'summary_wm_p05', 'summary_wm_p95'], None)
        ],
        'func': [
            (['efc'], None),
            (['fber'], None),
            (['fwhm', 'fwhm_x', 'fwhm_y', 'fwhm_z'], 'mm'),
            (['gsr_%s' % a for a in ['x', 'y']], None),
            (['snr'], None),
            (['dvars_std', 'dvars_vstd'], None),
            (['dvars_nstd'], None),
            (['fd_mean'], 'mm'),
            (['fd_num'], '# timepoints'),
            (['fd_perc'], '% timepoints'),
            (['spikes_num'], '# slices'),
            (['gcor'], None),
            (['tsnr'], None),
            (['aor'], None),
            (['aqi'], None),
            (['summary_bg_mean', 'summary_bg_stdv', 'summary_bg_k',
              'summary_bg_p05', 'summary_bg_p95'], None),
            (['summary_fg_mean', 'summary_fg_stdv', 'summary_fg_k',
              'summary_fg_p05', 'summary_fg_p95'], None),
        ]
    }

    def_comps = [key for key, _ in BIDS_COMPONENTS]
    dataframe = pd.read_csv(csv_file, index_col=False,
                            dtype={comp: object for comp in def_comps})

    id_labels = list(set(def_comps) & set(dataframe.columns.ravel().tolist()))
    dataframe['label'] = dataframe[id_labels].apply(_format_labels, axis=1)
    nPart = len(dataframe)

    failed = None
    if csv_failed is not None and op.isfile(csv_failed):
        MRIQC_REPORT_LOG.warn('Found failed-workflows table "%s"', csv_failed)
        failed_df = pd.read_csv(csv_failed, index_col=False)
        cols = list(set(id_labels) & set(failed_df.columns.ravel().tolist()))

        try:
            failed_df = failed_df.sort_values(by=cols)
        except AttributeError:
            #pylint: disable=E1101
            failed_df = failed_df.sort(columns=cols)

        failed = failed_df[cols].apply(myfmt, args=(cols,), axis=1).ravel().tolist()

    csv_groups = []
    for group, units in QCGROUPS[qctype[:4]]:
        dfdict = {'iqm': [], 'value': [], 'label': [], 'units': []}

        for iqm in group:
            if iqm in dataframe.columns.ravel().tolist():
                values = dataframe[[iqm]].values.ravel().tolist()
                dfdict['iqm'] += [iqm] * nPart
                dfdict['units'] += [units] * nPart
                dfdict['value'] += values
                dfdict['label'] += dataframe[['label']].values.ravel().tolist()

        csv_df = pd.DataFrame(dfdict)
        csv_str = TextIO()
        csv_df[['iqm', 'value', 'label', 'units']].to_csv(csv_str, index=False)
        csv_groups.append(csv_str.getvalue())

    if out_file is None:
        out_file = op.abspath('group.html')
    tpl = GroupTemplate()
    tpl.generate_conf({
        'qctype': qctype,
        'timestamp': datetime.datetime.now().strftime("%Y-%m-%d, %H:%M"),
        'version': ver,
        'csv_groups': csv_groups,
        'failed': failed
    }, out_file)

    res_folder = op.join(op.dirname(out_file), 'resources')
    check_folder(res_folder)
    for fname in ['boxplots.css', 'boxplots.js', 'd3.min.js']:
        dstpath = op.join(res_folder, fname)
        if op.isfile(dstpath):
            remove(dstpath)

        copy(pkgrf('mriqc', op.join('data', 'reports', 'resources', fname)), dstpath)
    return out_file
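# Usage sketch (illustrative filenames; the CSV is expected to carry the IQM
# columns listed in QCGROUPS plus the BIDS identifier columns):
if __name__ == '__main__':
    out = gen_html('anat_T1w.csv', 'anat',
                   csv_failed='failed_T1w.csv',
                   out_file='reports/anat_group.html')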
Example #7
def main():
    """Entry point"""
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow
    from mriqc.utils.bids import collect_bids_data
    from mriqc.workflows.core import build_workflow

    # Run parser
    opts = get_parser().parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub or opts.testing,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
    }

    if not settings['no_sub']:
        MRIQC_LOG.warn('Anonymized quality metrics will be submitted'
                       ' to MRIQC\'s metrics repository.'
                       ' Use --no-sub to disable submission.')

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
        __version__, ', '.join(analysis_levels), opts.participant_label, settings)

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                MRIQC_LOG.warn('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    import logging
                    from nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args']['status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = logging.getLogger('callback')
                    logger.setLevel(logging.DEBUG)
                    handler = logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                workflow.run(**plugin_settings)
                if not settings['no_sub']:
                    MRIQC_LOG.warn(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')
                if callback_log_path is not None:
                    from nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path, cores=settings['n_procs'])
        else:
            raise RuntimeError('Error reading BIDS directory (%s), or the dataset is not '
                               'BIDS-compliant.' % settings['bids_dir'])

    # Set up group level
    if 'group' in analysis_levels:
        from mriqc.reports import group_html
        from mriqc.utils.misc import generate_csv, generate_pred

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                MRIQC_LOG.warn(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            MRIQC_LOG.info('Summary CSV table for the %s data generated (%s)', mod, out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     MRIQC_LOG.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv, mod,
                       csv_failed=op.join(settings['output_dir'], 'failed_' + mod + '.csv'),
                       out_file=out_html)
            MRIQC_LOG.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception("No data found. No group level reports were generated.")
Example #8
def fmri_qc_workflow(name="fMRIQC", settings=None):
    """ The fMRI qc workflow """

    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = check_folder(op.abspath(op.join(settings["output_dir"], "derivatives")))

    # Read FD radius, or default it
    fd_radius = settings.get("fd_radius", 50.0)

    # Define workflow, inputs and outputs
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["bids_dir", "subject_id", "session_id", "run_id", "site_name", "start_idx", "stop_idx"]
        ),
        name="inputnode",
    )
    get_idx = pe.Node(
        niu.Function(
            input_names=["in_file", "start_idx", "stop_idx"],
            function=fmri_getidx,
            output_names=["start_idx", "stop_idx"],
        ),
        name="get_idx",
    )

    outputnode = pe.Node(
        niu.IdentityInterface(fields=["qc", "mosaic", "out_group", "out_dvars", "out_fd"]), name="outputnode"
    )

    # 0. Get data, put it in RAS orientation
    datasource = pe.Node(
        niu.Function(
            input_names=["bids_dir", "data_type", "subject_id", "session_id", "run_id"],
            output_names=["out_file"],
            function=bids_getfile,
        ),
        name="datasource",
    )
    datasource.inputs.data_type = "func"

    to_ras = pe.Node(
        niu.Function(input_names=["in_file"], output_names=["out_file"], function=reorient), name="EPIReorient"
    )

    # Workflow --------------------------------------------------------

    # 1. HMC: head motion correct
    hmcwf = hmc_mcflirt()

    if settings.get("hmc_afni", False):
        hmcwf = hmc_afni(
            st_correct=settings.get("correct_slice_timing", False),
            despike=settings.get("despike", False),
            deoblique=settings.get("deoblique", False),
        )

    hmcwf.inputs.inputnode.fd_radius = fd_radius

    mean = pe.Node(afni.TStat(options="-mean", outputtype="NIFTI_GZ"), name="mean")  # 2. Compute mean fmri
    bmw = fmri_bmsk_workflow(use_bet=settings.get("use_bet", False))  # 3. Compute brain mask

    # EPI to MNI registration
    ema = epi_mni_align()

    # Compute TSNR using nipype implementation
    tsnr = pe.Node(nac.TSNR(), name="compute_tsnr")

    # Compute DVARS
    dvnode = pe.Node(nac.ComputeDVARS(remove_zerovariance=True, save_plot=False, save_all=True), name="ComputeDVARS")

    # AFNI quality measures
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name="smoothness")
    # fwhm.inputs.acf = True  # add when AFNI >= 16
    outliers = pe.Node(afni.OutlierCount(fraction=True, out_file="ouliers.out"), name="outliers")
    quality = pe.Node(afni.QualityIndex(automask=True), out_file="quality.out", name="quality")

    spmask = pe.Node(
        niu.Function(input_names=["in_file", "in_mask"], output_names=["out_file", "out_plot"], function=spikes_mask),
        name="SpikesMask",
    )
    spikes = pe.Node(Spikes(), name="SpikesFinder")
    spikes_bg = pe.Node(Spikes(no_zscore=True, detrend=False), name="SpikesFinderBgMask")

    bigplot = pe.Node(
        niu.Function(
            input_names=["in_func", "in_mask", "in_segm", "in_spikes", "in_spikes_bg", "fd", "dvars"],
            output_names=["out_file"],
            function=_big_plot,
        ),
        name="BigPlot",
    )

    measures = pe.Node(FunctionalQC(), name="measures")

    # Link images that should be reported
    dsreport = pe.Node(nio.DataSink(base_directory=settings["report_dir"], parameterization=True), name="dsreport")
    dsreport.inputs.container = "func"
    dsreport.inputs.substitutions = [
        ("_data", ""),
        ("tsnr.nii.gz", "mosaic_TSNR.nii.gz"),
        ("mean.nii.gz", "mosaic_TSNR_mean.nii.gz"),
        ("stdev.nii.gz", "mosaic_stdev.nii.gz"),
    ]
    dsreport.inputs.regexp_substitutions = [
        ("_u?(sub-[\\w\\d]*)\\.([\\w\\d_]*)(?:\\.([\\w\\d_-]*))+", "\\1_ses-\\2_\\3"),
        ("sub-[^/.]*_fmriplot", "plot_fmri"),
        ("sub-[^/.]*_mask", "mask"),
        ("sub-[^/.]*_mcf_tstat", "mosaic_epi_mean"),
        ("sub-[^/.]*_spmask", "plot_spikes_mask"),
        ("sub-[^/.]*_volreg_tstat", "mosaic_epi_mean"),
    ]

    workflow.connect(
        [
            (
                inputnode,
                datasource,
                [
                    ("bids_dir", "bids_dir"),
                    ("subject_id", "subject_id"),
                    ("session_id", "session_id"),
                    ("run_id", "run_id"),
                ],
            ),
            (inputnode, get_idx, [("start_idx", "start_idx"), ("stop_idx", "stop_idx")]),
            (datasource, get_idx, [("out_file", "in_file")]),
            (datasource, to_ras, [("out_file", "in_file")]),
            (to_ras, hmcwf, [("out_file", "inputnode.in_file")]),
            (datasource, spikes, [("out_file", "in_file")]),
            (datasource, spikes_bg, [("out_file", "in_file")]),
            (to_ras, dvnode, [("out_file", "in_file")]),
            (get_idx, hmcwf, [("start_idx", "inputnode.start_idx"), ("stop_idx", "inputnode.stop_idx")]),
            (hmcwf, bmw, [("outputnode.out_file", "inputnode.in_file")]),
            (hmcwf, mean, [("outputnode.out_file", "in_file")]),
            (hmcwf, tsnr, [("outputnode.out_file", "in_file")]),
            (hmcwf, spmask, [("outputnode.out_file", "in_file")]),
            (mean, fwhm, [("out_file", "in_file")]),
            (bmw, fwhm, [("outputnode.out_file", "mask")]),
            (bmw, spikes, [("outputnode.out_file", "in_mask")]),
            (spmask, spikes_bg, [("out_file", "in_mask")]),
            (mean, ema, [("out_file", "inputnode.epi_mean")]),
            (bmw, ema, [("outputnode.out_file", "inputnode.epi_mask")]),
            (hmcwf, outliers, [("outputnode.out_file", "in_file")]),
            (bmw, outliers, [("outputnode.out_file", "mask")]),
            (hmcwf, quality, [("outputnode.out_file", "in_file")]),
            (bmw, dvnode, [("outputnode.out_file", "in_mask")]),
            (mean, measures, [("out_file", "in_epi")]),
            (hmcwf, measures, [("outputnode.out_file", "in_hmc")]),
            (bmw, measures, [("outputnode.out_file", "in_mask")]),
            (tsnr, measures, [("tsnr_file", "in_tsnr")]),
            (dvnode, measures, [("out_all", "in_dvars")]),
            (hmcwf, measures, [("outputnode.out_fd", "in_fd")]),
            (to_ras, bigplot, [("out_file", "in_func")]),
            (bmw, bigplot, [("outputnode.out_file", "in_mask")]),
            (hmcwf, bigplot, [("outputnode.out_fd", "fd")]),
            (dvnode, bigplot, [("out_std", "dvars")]),
            (ema, bigplot, [("outputnode.epi_parc", "in_segm")]),
            (spikes, bigplot, [("out_tsz", "in_spikes")]),
            (spikes_bg, bigplot, [("out_tsz", "in_spikes_bg")]),
            (mean, dsreport, [("out_file", "@meanepi")]),
            (tsnr, dsreport, [("tsnr_file", "@tsnr"), ("stddev_file", "@tsnr_std")]),
            (bmw, dsreport, [("outputnode.out_file", "@mask")]),
            (bigplot, dsreport, [("out_file", "@fmriplot")]),
            (hmcwf, outputnode, [("outputnode.out_fd", "out_fd")]),
            (dvnode, outputnode, [("out_all", "out_dvars")]),
        ]
    )

    # Format name
    out_name = pe.Node(
        niu.Function(
            input_names=["subid", "sesid", "runid", "prefix", "out_path"], output_names=["out_file"], function=bids_path
        ),
        name="FormatName",
    )
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = "func"

    # Save to JSON file
    datasink = pe.Node(nio.JSONFileSink(), name="datasink")
    datasink.inputs.qc_type = "func"

    workflow.connect(
        [
            (inputnode, out_name, [("subject_id", "subid"), ("session_id", "sesid"), ("run_id", "runid")]),
            (inputnode, datasink, [("subject_id", "subject_id"), ("session_id", "session_id"), ("run_id", "run_id")]),
            (fwhm, datasink, [(("fwhm", fwhm_dict), "fwhm")]),
            (outliers, datasink, [(("out_file", _parse_tout), "outlier")]),
            (quality, datasink, [(("out_file", _parse_tqual), "quality")]),
            (
                measures,
                datasink,
                [
                    ("summary", "summary"),
                    ("spacing", "spacing"),
                    ("size", "size"),
                    ("fber", "fber"),
                    ("efc", "efc"),
                    ("snr", "snr"),
                    ("gsr", "gsr"),
                    ("m_tsnr", "m_tsnr"),
                    ("fd", "fd"),
                    ("dvars", "dvars"),
                    ("gcor", "gcor"),
                ],
            ),
            (out_name, datasink, [("out_file", "out_file")]),
            (datasink, outputnode, [("out_file", "out_file")]),
        ]
    )

    return workflow
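# Usage sketch, assuming only the settings keys read in the function body
# (paths are illustrative):
if __name__ == '__main__':
    wf = fmri_qc_workflow(settings={
        'output_dir': 'out',
        'report_dir': 'reports',
        'hmc_afni': True,   # selects hmc_afni() instead of hmc_mcflirt()
        'fd_radius': 50.0,  # assumed head radius (mm) for FD computation
    })
    wf.inputs.inputnode.bids_dir = 'bids'  # plus subject/session/run ids
    # wf.run()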
Example #9
def compute_iqms(settings, name='ComputeIQMs'):
    """Workflow that actually computes the IQMs"""
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id', 'session_id', 'run_id', 'orig', 'brainmask', 'airmask', 'artmask',
        'headmask', 'segmentation', 'inu_corrected', 'in_inu', 'pvms', 'metadata',
        'reverse_transforms', 'reverse_invert_flags']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file', 'out_noisefit']),
                         name='outputnode')

    deriv_dir = check_folder(op.abspath(op.join(settings['output_dir'], 'derivatives')))

    # AFNI check smoothing
    fwhm = pe.Node(afni.FWHMx(combine=True, detrend=True), name='smoothness')
    # fwhm.inputs.acf = True  # add when AFNI >= 16

    # Compute python-coded measures
    measures = pe.Node(StructuralQC(testing=settings.get('testing', False)),
                       name='measures')

    # Project MNI segmentation to T1 space
    invt = pe.MapNode(ants.ApplyTransforms(
        dimension=3, default_value=0, interpolation='NearestNeighbor'),
        iterfield=['input_image'], name='MNItpms2t1')
    invt.inputs.input_image = [op.join(get_mni_icbm152_nlin_asym_09c(), fname + '.nii.gz')
                               for fname in ['1mm_tpm_csf', '1mm_tpm_gm', '1mm_tpm_wm']]

    # Link images that should be reported
    dsreport = pe.Node(nio.DataSink(
        base_directory=settings['report_dir'], parameterization=True), name='dsreport')
    dsreport.inputs.container = 'anat'
    dsreport.inputs.substitutions = [
        ('_data', ''),
        ('background_fit', 'plot_bgfit')
    ]
    dsreport.inputs.regexp_substitutions = [
        ('_u?(sub-[\\w\\d]*)\\.([\\w\\d_]*)(?:\\.([\\w\\d_-]*))+', '\\1_ses-\\2_\\3'),
        ('anatomical_bgplotsub-[^/.]*_dvars_std', 'plot_dvars'),
        ('sub-[^/.]*_T1w_out_calc_thresh', 'mask'),
    ]

    # Format name
    out_name = pe.Node(niu.Function(
        input_names=['subid', 'sesid', 'runid', 'prefix', 'out_path'], output_names=['out_file'],
        function=bids_path), name='FormatName')
    out_name.inputs.out_path = deriv_dir
    out_name.inputs.prefix = 'anat'

    # Save to JSON file
    jfs_if = nio.JSONFileSink()
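    # '_always_run' forces the sink to execute even when its inputs are
    # unchanged, so 'force_run' refreshes the JSON outputs on re-runs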
    setattr(jfs_if, '_always_run', settings.get('force_run', False))
    datasink = pe.Node(jfs_if, name='datasink')
    datasink.inputs.qc_type = 'anat'

    workflow.connect([
        (inputnode, out_name, [('subject_id', 'subid'),
                               ('session_id', 'sesid'),
                               ('run_id', 'runid')]),
        (inputnode, datasink, [('subject_id', 'subject_id'),
                               ('session_id', 'session_id'),
                               ('run_id', 'run_id'),
                               ('metadata', 'metadata')]),
        (inputnode, measures, [('inu_corrected', 'in_noinu'),
                               ('in_inu', 'in_bias'),
                               ('orig', 'in_file'),
                               ('airmask', 'air_msk'),
                               ('headmask', 'head_msk'),
                               ('artmask', 'artifact_msk'),
                               ('segmentation', 'in_segm'),
                               ('pvms', 'in_pvms')]),
        (inputnode, fwhm, [('orig', 'in_file'),
                           ('brainmask', 'mask')]),
        (inputnode, invt, [('orig', 'reference_image'),
                           ('reverse_transforms', 'transforms'),
                           ('reverse_invert_flags', 'invert_transform_flags')]),
        (invt, measures, [('output_image', 'mni_tpms')]),
        (fwhm, datasink, [(('fwhm', fwhm_dict), 'fwhm')]),
        (measures, datasink, [('summary', 'summary'),
                              ('spacing', 'spacing'),
                              ('size', 'size'),
                              ('icvs', 'icvs'),
                              ('rpve', 'rpve'),
                              ('inu', 'inu'),
                              ('snr', 'snr'),
                              ('cnr', 'cnr'),
                              ('fber', 'fber'),
                              ('efc', 'efc'),
                              ('qi1', 'qi1'),
                              ('qi2', 'qi2'),
                              ('cjv', 'cjv'),
                              ('wm2max', 'wm2max'),
                              ('tpm_overlap', 'tpm_overlap')]),
        (out_name, datasink, [('out_file', 'out_file')]),
        (measures, outputnode, [('out_noisefit', 'out_noisefit')]),
        (datasink, outputnode, [('out_file', 'out_file')])
    ])
    return workflow