Example 1
def get_final_cost(in_file):
    from niworkflows.nipype import logging
    with open(in_file, 'r') as fobj:
        for line in fobj:
            if line.startswith(' >> print U:1'):
                costs = next(fobj).split()
                return float(costs[0])
    logger = logging.getLogger('interface')
    logger.error('No cost report found in log file. Please report this '
                 'issue, with contents of {}'.format(in_file))
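A minimal usage sketch for the helper above, assuming a FLIRT-style log where the final cost sits on the line after the ' >> print U:1' marker (the file name and contents here are illustrative):

# Write a hypothetical two-line log and extract the cost from it.
with open('flirt_example.log', 'w') as fobj:
    fobj.write(' >> print U:1\n0.4523 0.0000\n')

print(get_final_cost('flirt_example.log'))  # -> 0.4523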
Example 2
def _new_version():
    from niworkflows.nipype import logging
    iflogger = logging.getLogger('interface')
    level = iflogger.getEffectiveLevel()
    iflogger.setLevel('ERROR')
    v = _old_version()
    iflogger.setLevel(level)
    if v is None:
        iflogger.warning('afni_vcheck executable not found')
    return v
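The save/restore dance around _old_version() silences nipype's interface logger for the duration of the call, but it would leak the 'ERROR' level if the call raised. A hedged sketch of the same idea as a reusable context manager (the name 'silenced' is illustrative, not part of the module):

from contextlib import contextmanager

@contextmanager
def silenced(logger, level='ERROR'):
    # Temporarily raise the logger threshold; restore it even if the body raises.
    previous = logger.getEffectiveLevel()
    logger.setLevel(level)
    try:
        yield logger
    finally:
        logger.setLevel(previous)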
Example 3
def get_final_cost(in_file):
    from niworkflows.nipype import logging
    with open(in_file, 'r') as fobj:
        for line in fobj:
            if line.startswith('>> print U:1'):
                costs = next(fobj).split()
                return float(costs[0])
    logger = logging.getLogger('interface')
    logger.error('No cost report found in log file. Please report this '
                 'issue, with contents of {}'.format(in_file))
Example 4
from niworkflows.nipype import logging

# BOLD workflows
from .confounds import init_bold_confs_wf, init_carpetplot_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
    init_bold_surf_wf,
    init_bold_mni_trans_wf,
    init_bold_preproc_trans_wf,
)
from .util import init_bold_reference_wf

DEFAULT_MEMORY_MIN_GB = 0.01
LOGGER = logging.getLogger('workflow')


def init_func_preproc_wf(bold_file, ignore, freesurfer,
                         use_bbr, t2s_coreg, bold2t1w_dof, reportlets_dir,
                         output_spaces, template, output_dir, omp_nthreads,
                         fmap_bspline, fmap_demean, use_syn, force_syn,
                         use_aroma, ignore_aroma_err, aroma_melodic_dim,
                         medial_surface_nan, cifti_output,
                         debug, low_mem, template_out_grid, layout=None):
    """
    This workflow controls the functional preprocessing stages of FMRIPREP.

    .. workflow::
        :graph2use: orig
        :simple_form: yes
Example 5
from os import path as op
import numpy as np
import nibabel as nb
from nilearn.signal import clean
from builtins import zip

from niworkflows.nipype.interfaces.base import (
    traits, TraitedSpec, File, isdefined, BaseInterfaceInputSpec,
    SimpleInterface
)
from niworkflows.nipype import logging

from ..utils.misc import _flatten_dict
from ..qc.anatomical import snr, fber, efc, summary_stats
from ..qc.functional import gsr
IFLOGGER = logging.getLogger('interface')


class FunctionalQCInputSpec(BaseInterfaceInputSpec):
    in_epi = File(exists=True, mandatory=True, desc='input EPI file')
    in_hmc = File(exists=True, mandatory=True, desc='input motion corrected file')
    in_tsnr = File(exists=True, mandatory=True, desc='input tSNR volume')
    in_mask = File(exists=True, mandatory=True, desc='input mask')
    direction = traits.Enum('all', 'x', 'y', '-x', '-y', usedefault=True,
                            desc='direction for GSR computation')
    in_fd = File(exists=True, mandatory=True, desc='motion parameters for FD computation')
    fd_thres = traits.Float(0.2, usedefault=True, desc='motion threshold for FD computation')
    in_dvars = File(exists=True, mandatory=True, desc='input file containing DVARS')
    in_fwhm = traits.List(traits.Float, mandatory=True,
                          desc='smoothness estimated with AFNI')
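A hedged sketch of how this traits-based input specification behaves when filled in (the values below are illustrative):

# Traits validate on assignment: Enum restricts 'direction' to the listed
# values, and File(exists=True) checks that the path exists on disk.
spec = FunctionalQCInputSpec()
spec.direction = 'x'    # accepted: one of the Enum choices
spec.fd_thres = 0.3     # accepted: any float
# spec.in_epi = '/missing/file.nii.gz'  # would raise TraitError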
Example 6
from os import path as op
import numpy as np
import nibabel as nb
from nilearn.signal import clean
from builtins import zip

from niworkflows.nipype.interfaces.base import (
    traits, TraitedSpec, File, isdefined, BaseInterfaceInputSpec)
from niworkflows.nipype import logging

from niworkflows.interfaces.base import SimpleInterface

from ..utils.misc import _flatten_dict
from ..qc.anatomical import snr, fber, efc, summary_stats
from ..qc.functional import gsr
IFLOGGER = logging.getLogger('interface')


class FunctionalQCInputSpec(BaseInterfaceInputSpec):
    in_epi = File(exists=True, mandatory=True, desc='input EPI file')
    in_hmc = File(exists=True, mandatory=True, desc='input motion corrected file')
    in_tsnr = File(exists=True, mandatory=True, desc='input tSNR volume')
    in_mask = File(exists=True, mandatory=True, desc='input mask')
    direction = traits.Enum('all', 'x', 'y', '-x', '-y', usedefault=True,
                            desc='direction for GSR computation')
    in_fd = File(exists=True, mandatory=True, desc='motion parameters for FD computation')
    fd_thres = traits.Float(0.2, usedefault=True, desc='motion threshold for FD computation')
    in_dvars = File(exists=True, mandatory=True, desc='input file containing DVARS')
    in_fwhm = traits.List(traits.Float, mandatory=True,
                          desc='smoothness estimated with AFNI')
Example 7
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
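    # verbose_count == 0 maps to WARNING (30) and 1 to INFO (20); two or more
    # -v flags step below INFO in decrements of 5 (15, 10, ...).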
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt'
        },
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non-ANTs work;
                # don't use more than 8 threads - the speed-up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod,
                            settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod,
                                          settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    import logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args'][
                        'status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = logging.getLogger('callback')
                    logger.setLevel(logging.DEBUG)
                    handler = logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning('Anonymized quality metrics will be submitted'
                                ' to MRIQC\'s metrics repository.'
                                ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path,
                                         cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod,
                     out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv,
                       mod,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception(
                "No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
Example 8
def main():
    """Entry point"""
    from niworkflows.nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..viz.reports import generate_reports
    set_start_method('forkserver')

    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    # FreeSurfer license
    default_license = op.join(os.getenv('FREESURFER_HOME', ''), 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or os.getenv('FS_LICENSE',
                                                     default_license)
    if not os.path.exists(license_file):
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'FMRIPREP looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')
    os.environ['FS_LICENSE'] = license_file

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    if opts.debug:
        log_level = logging.DEBUG

    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('workflow').setLevel(log_level)
    nlogging.getLogger('interface').setLevel(log_level)
    nlogging.getLogger('utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval)
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        fmriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        output_dir = retval['output_dir']
        work_dir = retval['work_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']
        retcode = retval['return_code']

    if fmriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    # Generate reports phase
    errno += generate_reports(subject_list, output_dir, work_dir, run_uuid)
    sys.exit(int(errno > 0))
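The Manager/Process pattern above builds the workflow in a child process and hands the results back through a shared dict, keeping the parent lean before the workflow itself forks workers. A minimal sketch of the same handshake with a stand-in worker (the worker body here is illustrative, not fMRIPrep's build_workflow):

from multiprocessing import Manager, Process

def _stub_build_workflow(opts, retval):
    # Stand-in for build_workflow: populate the shared dict for the parent.
    retval['return_code'] = 0
    retval['workflow'] = object()

if __name__ == '__main__':
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=_stub_build_workflow, args=(None, retval))
        p.start()
        p.join()
        assert retval['return_code'] == 0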
Example 9
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    # check_folder(log_dir)
    # check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        # 'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {
            # 'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'resource_monitor': opts.profile,
        },
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('utils').setLevel(log_level)

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non-ANTs work;
                # don't use more than 8 threads - the speed-up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')
        else:
            msg = 'Error reading BIDS directory ({}), or the dataset is not ' \
                  'BIDS-compliant.'

            if opts.participant_label or opts.session_id or opts.run_id or opts.task_id:
                msg = 'The combination of supplied labels'
                if opts.participant_label is not None:
                    msg += ' (--participant_label {})'.format(" ".join(opts.participant_label))
                if opts.session_id is not None:
                    msg += ' (--session-id {})'.format(" ".join(opts.session_id))
                if opts.run_id is not None:
                    msg += ' (--run-id {})'.format(" ".join(opts.run_id))
                if opts.task_id is not None:
                    msg += ' (--task-id {})'.format(" ".join(opts.task_id))
                msg += ' did not result in matches within the BIDS directory ({}).'

            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

        # Imports needed by the classifier step below (module paths are
        # assumed from the surrounding package layout)
        import json
        from collections import OrderedDict
        from pkg_resources import resource_filename as pkgrf
        from ..utils.misc import generate_csv
        from ..classifier.helper import CVHelper

        for mod in modalities:
            dataframe, order, jsonfiles = generate_csv(settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, settings['output_dir'])
                continue

            if 'jsonfile' in order:
                order.remove('jsonfile')

            base_name = 'mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-loso'
            load_classifier = pkgrf(
                'mriqc',
                'data/mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-'
                'loso_data-all_estimator.pklz')

            cvhelper = CVHelper(load_clf=load_classifier, n_jobs=-1,
                                rate_label=['rater_1'], basename=base_name)

            prediction = cvhelper.predict(dataframe[order])
            dataframe['y_prob'] = prediction[0][:, 1]
            dataframe['y_pred'] = prediction[1]

            dataframe.index = jsonfiles

            for jsonfile in dataframe.index.values:
                with open(jsonfile, 'r+') as json_file:
                    json_dict = json.load(json_file, object_pairs_hook=OrderedDict)
                    json_dict['y_prob'] = float(dataframe.loc[jsonfile, 'y_prob'])
                    json_dict['y_pred'] = float(dataframe.loc[jsonfile, 'y_pred'])
                    json_file.seek(0)
                    json.dump(json_dict, json_file, separators=(',', ':'), sort_keys=True, indent=4)
                    json_file.truncate()
Example 10
def get_ica_confounds(ica_out_dir, ignore_aroma_err):
    import os
    import shutil
    import numpy as np
    from niworkflows.nipype import logging

    # To catch edge cases where no noise or no signal components are found
    LOGGER = logging.getLogger('workflow')

    # Pass in numpy array and column base name to generate headers
    # modified from add_header_func
    def aroma_add_header_func(np_arr, col_base, comp_nums):
        import pandas as pd
        from sys import version_info
        PY3 = version_info[0] > 2

        df = pd.DataFrame(
            np_arr,
            columns=[str(col_base) + str(index) for index in comp_nums])
        df.to_csv(str(col_base) + "AROMAConfounds.tsv",
                  sep="\t" if PY3 else '\t'.encode(),
                  index=None)

        return os.path.abspath(str(col_base) + "AROMAConfounds.tsv")

    # load the txt files from ICA_AROMA
    melodic_mix = os.path.join(ica_out_dir, 'melodic.ica/melodic_mix')
    motion_ics = os.path.join(ica_out_dir, 'classified_motion_ICs.txt')

    # Change names of motion_ics and melodic_mix for output
    melodic_mix_out = os.path.join(ica_out_dir, 'MELODICmix.tsv')
    motion_ics_out = os.path.join(ica_out_dir, 'AROMAnoiseICs.csv')

    # In melodic_mix, replace double spaces with tabs
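    # NOTE: this assumes MELODIC separates columns with exactly two spaces;
    # any other spacing would survive the replacement unchanged.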
    with open(melodic_mix, 'r') as melodic_file:
        melodic_mix_out_char = melodic_file.read().replace('  ', '\t')
    # write to output file
    with open(melodic_mix_out, 'w+') as melodic_file_out:
        melodic_file_out.write(melodic_mix_out_char)

    # copy motion_ics file to derivatives name
    shutil.copyfile(motion_ics, motion_ics_out)

    # -1 since python lists start at index 0
    motion_ic_indices = np.loadtxt(motion_ics, dtype=int, delimiter=',') - 1
    melodic_mix_arr = np.loadtxt(melodic_mix, ndmin=2)

    # Return None if no noise components were found
    if motion_ic_indices.size == 0:
        if ignore_aroma_err:
            LOGGER.warning('No noise components were classified')
            aroma_confounds = None
            return aroma_confounds, motion_ics_out, melodic_mix_out
        else:
            raise RuntimeError('ERROR: ICA-AROMA found no noise components!')

    # transpose melodic_mix_arr so x refers to the correct dimension
    aggr_confounds = np.asarray(
        [melodic_mix_arr.T[x] for x in motion_ic_indices])

    # the "good" ics, (e.g. not motion related)
    good_ic_arr = np.delete(melodic_mix_arr, motion_ic_indices, 1).T

    # Return None if no signal components were found
    if good_ic_arr.size == 0:
        if ignore_aroma_err:
            LOGGER.warning('No signal components were classified')
            aroma_confounds = None
            return aroma_confounds, motion_ics_out, melodic_mix_out
        else:
            raise RuntimeError('ERROR: ICA-AROMA found no signal components!')

    # add one to motion_ic_indices to match melodic report.
    aggr_tsv = aroma_add_header_func(
        aggr_confounds.T, 'AROMAAggrComp',
        [str(x).zfill(2) for x in motion_ic_indices + 1])
    aroma_confounds = aggr_tsv

    return aroma_confounds, motion_ics_out, melodic_mix_out
Example 11
def get_ica_confounds(ica_out_dir, ignore_aroma_err):
    import os
    import shutil
    import numpy as np
    from niworkflows.nipype import logging

    # To catch edge cases where no noise or no signal components are found
    LOGGER = logging.getLogger('workflow')

    # Pass in numpy array and column base name to generate headers
    # modified from add_header_func
    def aroma_add_header_func(np_arr, col_base, comp_nums):
        import pandas as pd
        from sys import version_info
        PY3 = version_info[0] > 2

        df = pd.DataFrame(np_arr, columns=[str(col_base) + str(index) for index in comp_nums])
        df.to_csv(str(col_base) + "AROMAConfounds.tsv",
                  sep="\t" if PY3 else '\t'.encode(), index=None)

        return os.path.abspath(str(col_base) + "AROMAConfounds.tsv")

    # load the txt files from ICA_AROMA
    melodic_mix = os.path.join(ica_out_dir, 'melodic.ica/melodic_mix')
    motion_ics = os.path.join(ica_out_dir, 'classified_motion_ICs.txt')

    # Change names of motion_ics and melodic_mix for output
    melodic_mix_out = os.path.join(ica_out_dir, 'MELODICmix.tsv')
    motion_ics_out = os.path.join(ica_out_dir, 'AROMAnoiseICs.csv')

    # In melodic_mix, replace double spaces with tabs
    with open(melodic_mix, 'r') as melodic_file:
        melodic_mix_out_char = melodic_file.read().replace('  ', '\t')
    # write to output file
    with open(melodic_mix_out, 'w+') as melodic_file_out:
        melodic_file_out.write(melodic_mix_out_char)

    # copy motion_ics file to derivatives name
    shutil.copyfile(motion_ics, motion_ics_out)

    # -1 since python lists start at index 0
    motion_ic_indices = np.loadtxt(motion_ics, dtype=int, delimiter=',') - 1
    melodic_mix_arr = np.loadtxt(melodic_mix, ndmin=2)

    # Return None if no noise components were found
    if motion_ic_indices.size == 0:
        if ignore_aroma_err:
            LOGGER.warning('No noise components were classified')
            aroma_confounds = None
            return aroma_confounds, motion_ics_out, melodic_mix_out
        else:
            raise RuntimeError('ERROR: ICA-AROMA found no noise components!')

    # transpose melodic_mix_arr so x refers to the correct dimension
    aggr_confounds = np.asarray([melodic_mix_arr.T[x] for x in motion_ic_indices])

    # the "good" ics, (e.g. not motion related)
    good_ic_arr = np.delete(melodic_mix_arr, motion_ic_indices, 1).T

    # Return None if no signal components were found
    if good_ic_arr.size == 0:
        if ignore_aroma_err:
            LOGGER.warning('No signal components were classified')
            aroma_confounds = None
            return aroma_confounds, motion_ics_out, melodic_mix_out
        else:
            raise RuntimeError('ERROR: ICA-AROMA found no signal components!')

    # add one to motion_ic_indices to match melodic report.
    aggr_tsv = aroma_add_header_func(aggr_confounds.T, 'AROMAAggrComp',
                                     [str(x).zfill(2) for x in motion_ic_indices + 1])
    aroma_confounds = aggr_tsv

    return aroma_confounds, motion_ics_out, melodic_mix_out
Example 12
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non-ANTs work;
                # don't use more than 8 threads - the speed-up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    import logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args']['status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = logging.getLogger('callback')
                    logger.setLevel(logging.DEBUG)
                    handler = logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path, cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod, out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv, mod,
                       csv_failed=op.join(settings['output_dir'], 'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception("No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
Example 13
def main():
    """Entry point"""
    from niworkflows.nipype import logging as nlogging
    from multiprocessing import set_start_method, Process, Manager
    from ..viz.reports import generate_reports
    from ..info import __version__
    set_start_method('forkserver')

    warnings.showwarning = _warn_redirect
    opts = get_parser().parse_args()

    # FreeSurfer license
    default_license = op.join(os.getenv('FREESURFER_HOME', ''), 'license.txt')
    # Precedence: --fs-license-file, $FS_LICENSE, default_license
    license_file = opts.fs_license_file or os.getenv('FS_LICENSE',
                                                     default_license)
    if not os.path.exists(license_file):
        raise RuntimeError(
            'ERROR: a valid license file is required for FreeSurfer to run. '
            'FMRIPREP looked for an existing license file at several paths, in this '
            'order: 1) command line argument ``--fs-license-file``; 2) ``$FS_LICENSE`` '
            'environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. '
            'Get it (for free) by registering at https://'
            'surfer.nmr.mgh.harvard.edu/registration.html')
    os.environ['FS_LICENSE'] = license_file

    # Retrieve logging level
    log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
    # Set logging
    logger.setLevel(log_level)
    nlogging.getLogger('workflow').setLevel(log_level)
    nlogging.getLogger('interface').setLevel(log_level)
    nlogging.getLogger('utils').setLevel(log_level)

    errno = 0

    # Call build_workflow(opts, retval)
    with Manager() as mgr:
        retval = mgr.dict()
        p = Process(target=build_workflow, args=(opts, retval))
        p.start()
        p.join()

        if p.exitcode != 0:
            sys.exit(p.exitcode)

        fmriprep_wf = retval['workflow']
        plugin_settings = retval['plugin_settings']
        output_dir = retval['output_dir']
        work_dir = retval['work_dir']
        subject_list = retval['subject_list']
        run_uuid = retval['run_uuid']
        retcode = retval['return_code']

    if fmriprep_wf is None:
        sys.exit(1)

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    if opts.reports_only:
        sys.exit(int(retcode > 0))

    # Sentry tracking
    if not opts.notrack:
        try:
            from raven import Client
            dev_user = bool(int(os.getenv('FMRIPREP_DEV', 0)))
            msg = 'fMRIPrep running%s' % (int(dev_user) * ' [dev]')
            client = Client(
                'https://*****:*****@sentry.io/1137693',
                release=__version__)
            client.captureMessage(message=msg,
                                  level='debug' if dev_user else 'info',
                                  tags={
                                      'run_id': run_uuid,
                                      'npart': len(subject_list),
                                      'type': 'ping',
                                      'dev': dev_user
                                  })
        except Exception:
            pass

    # Clean up master process before running workflow, which may create forks
    gc.collect()
    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    # Generate reports phase
    errno += generate_reports(subject_list, output_dir, work_dir, run_uuid)
    sys.exit(int(errno > 0))