Example #1
def parse_args(args=None, namespace=None):
    """Parse args and run further checks on the command line."""
    import logging
    from niworkflows.utils.spaces import Reference, SpatialReferences
    parser = _build_parser()
    opts = parser.parse_args(args, namespace)
    config.execution.log_level = int(
        max(25 - 5 * opts.verbose_count, logging.DEBUG))
    config.from_dict(vars(opts))
    config.loggers.init()

    # Initialize --output-spaces if not defined
    if config.execution.output_spaces is None:
        config.execution.output_spaces = SpatialReferences(
            [Reference("MNI152NLin2009cAsym", {"res": "native"})])

    # Retrieve logging level
    build_log = config.loggers.cli

    if config.execution.fs_license_file is None:
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html"""
                           )
    os.environ['FS_LICENSE'] = str(config.execution.fs_license_file)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        _plugin = plugin_settings.get('plugin')
        if _plugin:
            config.nipype.plugin = _plugin
            config.nipype.plugin_args = plugin_settings.get('plugin_args', {})
            config.nipype.nprocs = config.nipype.plugin_args.get(
                'nprocs', config.nipype.nprocs)

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    if 1 < config.nipype.nprocs < config.nipype.omp_nthreads:
        build_log.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', config.nipype.omp_nthreads,
            config.nipype.nprocs)

    bids_dir = config.execution.bids_dir
    output_dir = config.execution.output_dir
    work_dir = config.execution.work_dir
    version = config.environment.version

    if config.execution.fs_subjects_dir is None:
        config.execution.fs_subjects_dir = output_dir / 'freesurfer'

    # Wipe out existing work_dir
    if opts.clean_workdir and work_dir.exists():
        from niworkflows.utils.misc import clean_directory
        build_log.log("Clearing previous fMRIPrep working directory: %s",
                      work_dir)
        if not clean_directory(work_dir):
            build_log.warning(
                "Could not clear all contents of working directory: %s",
                work_dir)

    # Ensure input and output folders are not the same
    if output_dir == bids_dir:
        parser.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).' %
            (bids_dir / 'derivatives' /
             ('fmriprep-%s' % version.split('+')[0])))

    if bids_dir in work_dir.parents:
        parser.error(
            'The selected working directory is a subdirectory of the input BIDS folder. '
            'Please modify the output path.')

    # Validate inputs
    if not opts.skip_bids_validation:
        from ..utils.bids import validate_input_dir
        build_log.info(
            "Making sure the input data is BIDS compliant (warnings can be ignored in most "
            "cases).")
        validate_input_dir(config.environment.exec_env, opts.bids_dir,
                           opts.participant_label)

    # Setup directories
    config.execution.log_dir = output_dir / 'fmriprep' / 'logs'
    # Check and create output and working directories
    config.execution.log_dir.mkdir(exist_ok=True, parents=True)
    output_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Force initialization of the BIDSLayout
    config.execution.init()
    all_subjects = config.execution.layout.get_subjects()
    if config.execution.participant_label is None:
        config.execution.participant_label = all_subjects

    participant_label = set(config.execution.participant_label)
    missing_subjects = participant_label - set(all_subjects)
    if missing_subjects:
        parser.error(
            "One or more participant labels were not found in the BIDS directory: "
            "%s." % ", ".join(missing_subjects))

    config.execution.participant_label = sorted(participant_label)
    config.workflow.skull_strip_template = config.workflow.skull_strip_template[0]
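
The --use-plugin branch above reads a small YAML file. A minimal sketch of such a file and of the lookup the parser performs, using yaml.safe_load (the file contents here are hypothetical; which plugin_args are valid depends on the chosen Nipype plugin):

import yaml

# Hypothetical contents of a file passed via --use-plugin; 'nprocs' mirrors
# the key the parser above copies into config.nipype.nprocs.
PLUGIN_YAML = """\
plugin: MultiProc
plugin_args:
  nprocs: 8
"""

settings = yaml.safe_load(PLUGIN_YAML)
assert settings['plugin'] == 'MultiProc'
assert settings['plugin_args'].get('nprocs') == 8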
Example #2
def main():
    """Entry point"""
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow
    from mriqc import DEFAULTS
    from mriqc.utils.bids import collect_bids_data
    from mriqc.workflows.core import build_workflow
    # from mriqc.reports.utils import check_reports

    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir',
                        action='store',
                        help='The directory with the input dataset '
                        'formatted according to the BIDS standard.')
    parser.add_argument(
        'output_dir',
        action='store',
        help='The directory where the output files '
        'should be stored. If you are running group level analysis '
        'this folder should be prepopulated with the results of the '
        'participant level analysis.')
    parser.add_argument(
        'analysis_level',
        action='store',
        nargs='+',
        help='Level of the analysis that will be performed. '
        'Multiple participant level analyses can be run independently '
        '(in parallel) using the same output_dir.',
        choices=['participant', 'group'])
    parser.add_argument(
        '--participant_label',
        '--subject_list',
        '-S',
        action='store',
        help='The label(s) of the participant(s) that should be analyzed. '
        'The label corresponds to sub-<participant_label> from the '
        'BIDS spec (so it does not include "sub-"). If this parameter '
        'is not provided, all subjects will be analyzed. Multiple '
        'participants can be specified with a space-separated list.',
        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-m',
                         '--modalities',
                         action='store',
                         nargs='*',
                         choices=['T1w', 'bold', 'T2w'],
                         default=['T1w', 'bold', 'T2w'])
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('--nthreads',
                         action='store',
                         type=int,
                         help='number of threads')
    g_input.add_argument('--n_procs',
                         action='store',
                         default=0,
                         type=int,
                         help='number of parallel processes')
    g_input.add_argument('--mem_gb',
                         action='store',
                         default=0,
                         type=int,
                         help='available total memory, in GB')
    g_input.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run',
                         action='store_true',
                         default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing',
                         action='store_true',
                         default=False,
                         help='use testing settings for a minimal footprint')
    g_input.add_argument(
        '--hmc-afni',
        action='store_true',
        default=True,
        help='Use AFNI 3dvolreg for head motion correction (HMC)')
    g_input.add_argument(
        '--hmc-fsl',
        action='store_true',
        default=False,
        help='Use FSL MCFLIRT for head motion correction (HMC)')
    g_input.add_argument(
        '-f',
        '--float32',
        action='store_true',
        default=DEFAULTS['float32'],
        help="Cast the input data to float32 if it's represented in higher "
        "precision (saves space and improves performance)")
    g_input.add_argument('--fft-spikes-detector',
                         action='store_true',
                         default=False,
                         help='Turn on FFT based spike detector (slow).')

    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w',
                           '--work-dir',
                           action='store',
                           default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')
    g_outputs.add_argument('--verbose-reports',
                           default=False,
                           action='store_true')

    # ANTs options
    g_ants = parser.add_argument_group(
        'specific settings for ANTs registrations')
    g_ants.add_argument(
        '--ants-nthreads',
        action='store',
        type=int,
        default=DEFAULTS['ants_nthreads'],
        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings',
                        action='store',
                        help='path to JSON file with settings for ANTS')

    # AFNI head motion correction settings
    g_afni = parser.add_argument_group(
        'specific settings for AFNI head motion correction')
    g_afni.add_argument(
        '--deoblique',
        action='store_true',
        default=False,
        help='Deoblique the functional scans during head motion '
        'correction preprocessing')
    g_afni.add_argument(
        '--despike',
        action='store_true',
        default=False,
        help='Despike the functional scans during head motion correction '
        'preprocessing')
    g_afni.add_argument(
        '--start-idx',
        action='store',
        type=int,
        help='Initial volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument(
        '--stop-idx',
        action='store',
        type=int,
        help='Final volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument('--correct-slice-timing',
                        action='store_true',
                        default=False,
                        help='Perform slice timing correction')

    opts = parser.parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = 0
    if opts.nthreads is not None:
        MRIQC_LOG.warning(
            'Option --nthreads has been deprecated in mriqc 0.8.8. '
            'Please use --n_procs instead.')
        n_procs = opts.nthreads
    if opts.n_procs:  # default 0 must not clobber a deprecated --nthreads value
        n_procs = opts.n_procs

    # Check physical memory
    total_memory = opts.mem_gb
    if total_memory <= 0:  # default 0 means "not specified": try to detect
        try:
            from psutil import virtual_memory
            total_memory = virtual_memory().total // (1024**3) + 1
        except ImportError:
            MRIQC_LOG.warning(
                'Total physical memory could not be estimated, using %d '
                'GB as default', DEFAULT_MEM_GB)
            total_memory = DEFAULT_MEM_GB

    if total_memory > 0:
        av_procs = total_memory // 4
        if av_procs < 1:
            MRIQC_LOG.warning(
                'Total physical memory is less than 4GB, memory allocation'
                ' problems are likely to occur.')
            n_procs = 1
        elif n_procs > av_procs:
            n_procs = av_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as pfile:
            plugin_settings = safe_load(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = 1
            max_parallel_ants = cpu_count() // settings['ants_nthreads']
            if max_parallel_ants > 1:
                settings['n_procs'] = max_parallel_ants

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
        __version__, ', '.join(analysis_levels), opts.participant_label,
        settings)

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(settings['bids_dir'],
                                participant_label=opts.participant_label)

    # Set up participant level
    if 'participant' in analysis_levels:
        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                MRIQC_LOG.warning('No %s scans were found in %s', mod,
                                  settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod,
                                          settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                workflow.run(**plugin_settings)
        else:
            raise RuntimeError(
                'Error reading BIDS directory (%s), or the dataset is not '
                'BIDS-compliant.' % settings['bids_dir'])

    # Set up group level
    if 'group' in analysis_levels:
        from mriqc.reports import group_html
        from mriqc.utils.misc import generate_csv, generate_pred

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                MRIQC_LOG.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            MRIQC_LOG.info('Summary CSV table for the %s data generated (%s)',
                           mod, out_csv)

            out_pred = generate_pred(derivatives_dir, settings['output_dir'],
                                     mod)
            if out_pred is not None:
                MRIQC_LOG.info(
                    'Predicted QA CSV table for the %s data generated (%s)',
                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv,
                       mod,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + mod + '.csv'),
                       out_file=out_html)
            MRIQC_LOG.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise RuntimeError(
                "No data found. No group level reports were generated.")
Example #3
def init_mriqc(opts, retval):
    """Build the workflow enumerator"""

    from bids.grabbids import BIDSLayout
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow

    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow

    retval['workflow'] = None
    retval['plugin_settings'] = None

    # Build settings dict
    bids_dir = Path(opts.bids_dir).expanduser()
    output_dir = Path(opts.output_dir).expanduser()

    # Number of processes
    n_procs = opts.n_procs or cpu_count()

    settings = {
        'bids_dir': bids_dir.resolve(),
        'output_dir': output_dir.resolve(),
        'work_dir': opts.work_dir.expanduser().resolve(),
        'write_graph': opts.write_graph,
        'n_procs': n_procs,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.dsname:
        settings['dataset_name'] = opts.dsname

    log_dir = settings['output_dir'] / 'logs'

    # Create directories
    log_dir.mkdir(parents=True, exist_ok=True)
    settings['work_dir'].mkdir(parents=True, exist_ok=True)

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': str(log_dir), 'log_to_file': True},
        'execution': {
            'crashdump_dir': str(log_dir), 'crashfile_format': 'txt',
            'resource_monitor': opts.profile},
    })

    # Plugin configuration
    plugin_settings = {}
    if n_procs == 1:
        plugin_settings['plugin'] = 'Linear'

        if settings['ants_nthreads'] == 0:
            settings['ants_nthreads'] = 1
    else:
        plugin_settings['plugin'] = 'MultiProc'
        plugin_settings['plugin_args'] = {'n_procs': n_procs}
        if opts.mem_gb:
            plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

        if settings['ants_nthreads'] == 0:
            # always leave one extra thread for non ANTs work,
            # don't use more than 8 threads - the speed up is minimal
            settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)

    # Overwrite options if --use-plugin provided
    if opts.use_plugin and opts.use_plugin.exists():
        from yaml import safe_load
        with opts.use_plugin.open() as pfile:
            plugin_settings.update(safe_load(pfile))

    # Process data types
    modalities = opts.modalities

    layout = BIDSLayout(str(settings['bids_dir']),
                        exclude=['derivatives', 'sourcedata'])
    dataset = collect_bids_data(
        layout,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
        bids_type=modalities,
    )

    workflow = Workflow(name='workflow_enumerator')
    workflow.base_dir = settings['work_dir']

    wf_list = []
    subject_list = []
    for mod in modalities:
        if dataset[mod]:
            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))
            subject_list += dataset[mod]

    retval['subject_list'] = subject_list
    if not wf_list:
        retval['return_code'] = 1
        return retval

    workflow.add_nodes(wf_list)
    retval['plugin_settings'] = plugin_settings
    retval['workflow'] = workflow
    retval['return_code'] = 0
    return retval
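
The multiprocessing branch above sizes ANTs threads with min(n_procs - 1, 8): one core is left free for non-ANTs work and the count is capped at 8, where the speed-up flattens out. The same rule in isolation (hypothetical helper, not part of mriqc):

def ants_threads(n_procs):
    """ANTs thread heuristic mirroring the multiprocessing branch above."""
    if n_procs == 1:
        return 1  # Linear plugin: a single thread for everything
    return min(n_procs - 1, 8)

assert ants_threads(1) == 1
assert ants_threads(4) == 3    # leave one core for non-ANTs work
assert ants_threads(32) == 8   # past 8 threads the speed-up is minimal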
Example #4
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows fmriprep to enforce
    a hard-limited memory-scope.

    """
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf
    from shutil import copyfile

    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    output_spaces = opts.output_space or []

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym'
                           or 'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--use-aroma" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").')

    if opts.cifti_output and (opts.template != 'MNI152NLin2009cAsym'
                              or 'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--cifti-output" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").')

    # Check output_space
    if 'template' not in output_spaces and (opts.use_syn_sdc
                                            or opts.force_syn):
        msg = [
            'SyN SDC correction requires T1 to MNI registration, but '
            '"template" is not specified in "--output-space" arguments.',
            'Option --use-syn will be cowardly dismissed.'
        ]
        if opts.force_syn:
            output_spaces.append('template')
            msg[1] = (
                ' Since --force-syn has been requested, "template" has been added to'
                ' the "--output-space" list.')
        logger.warning(' '.join(msg))

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = os.path.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = bids_dir
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir,
                                                 work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    template_out_grid = opts.template_resampling_grid
    if opts.output_grid_reference is not None:
        logger.warning(
            'Option --output-grid-reference is deprecated, please use '
            '--template-resampling-grid')
        template_out_grid = template_out_grid or opts.output_grid_reference
    if opts.debug:
        logger.warning('Option --debug is deprecated and has no effect')

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        echo_idx=opts.echo_idx,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.sloppy,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        output_spaces=output_spaces,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        cifti_output=opts.cifti_output,
        template_out_grid=template_out_grid,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        (logs_path / 'CITATION.md').write_text(boilerplate)
        logger.log(
            25, 'Works derived from this fMRIPrep execution should '
            'include the following boilerplate:\n\n%s', boilerplate)

        # Generate HTML file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('fmriprep', 'data/boilerplate.bib'),
            '--filter', 'pandoc-citeproc',
            '--metadata', 'pagetitle="fMRIPrep citation boilerplate"',
            str(logs_path / 'CITATION.md'),
            '-o', str(logs_path / 'CITATION.html')
        ]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.html file:\n%s',
                           ' '.join(cmd))

        # Generate LaTex file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('fmriprep', 'data/boilerplate.bib'), '--natbib',
            str(logs_path / 'CITATION.md'),
            '-o', str(logs_path / 'CITATION.tex')
        ]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.tex file:\n%s',
                           ' '.join(cmd))
        else:
            copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                     (logs_path / 'CITATION.bib'))

    return retval
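
The docstring notes that build_workflow is designed to run inside a multiprocessing.Process so fMRIPrep can enforce a hard memory scope. A minimal sketch of that calling pattern, assuming a Manager dict as the pickleable retval (the driver shown here is illustrative, not the actual fMRIPrep entry point):

from multiprocessing import Manager, Process

def run_isolated(opts):
    """Run build_workflow in a subprocess and collect its retval dict."""
    with Manager() as mgr:
        retval = mgr.dict()
        proc = Process(target=build_workflow, args=(opts, retval))
        proc.start()
        proc.join()
        return dict(retval)  # copy out before the manager shuts down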
Example #5
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows fmriprep to enforce
    a hard-limited memory-scope.

    """
    from nipype import logging, config as ncfg
    from ..info import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..utils.bids import collect_participants
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    output_spaces = opts.output_space or []

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym'
                           or 'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--use-aroma" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").')

    # Check output_space
    if 'template' not in output_spaces and (opts.use_syn_sdc
                                            or opts.force_syn):
        msg = [
            'SyN SDC correction requires T1 to MNI registration, but '
            '"template" is not specified in "--output-space" arguments.',
            'Option --use-syn will be cowardly dismissed.'
        ]
        if opts.force_syn:
            output_spaces.append('template')
            msg[1] = (
                ' Since --force-syn has been requested, "template" has been added to'
                ' the "--output-space" list.')
        logger.warning(' '.join(msg))

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = op.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Setting up MultiProc
    nthreads = opts.nthreads
    if nthreads < 1:
        nthreads = cpu_count()

    plugin_settings = {
        'plugin': 'MultiProc',
        'plugin_args': {
            'n_procs': nthreads,
            'raise_insufficient': False,
            'maxtasksperchild': 1,
        }
    }

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    # Overload plugin_settings if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir,
                                                 work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    template_out_grid = opts.template_resampling_grid
    if opts.output_grid_reference is not None:
        logger.warning(
            'Option --output-grid-reference is deprecated, please use '
            '--template-resampling-grid')
        template_out_grid = template_out_grid or opts.output_grid_reference

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.debug,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        skull_kernel=opts.skull_kernel,
        output_spaces=output_spaces,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        cifti_output=opts.cifti_output,
        template_out_grid=template_out_grid,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0
    return retval
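
With --omp-nthreads=0, the code above derives a per-process default: min(nthreads - 1, 8) when more than one process is available, otherwise min(cpu_count(), 8). The same rule worked through in isolation (hypothetical helper):

from multiprocessing import cpu_count

def default_omp(omp_nthreads, nthreads):
    """Default per-process threads, mirroring the rule above."""
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)
    return omp_nthreads

assert default_omp(0, 4) == 3   # 4 workers: 3 threads each
assert default_omp(2, 4) == 2   # an explicit value is left untouched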
Example #6
def main():
    from ..workflows.base import init_nibetaseries_participant_wf

    # get commandline options
    opts = get_parser().parse_args()

    # check inputs
    if (opts.hrf_model == 'fir') and (opts.fir_delays is None):
        raise ValueError('If the FIR HRF model is selected, '
                         'FIR delays must be provided.')

    # Set up directories
    # TODO: set up some sort of versioning system
    bids_dir = os.path.abspath(opts.bids_dir)
    if os.path.isdir(opts.derivatives_pipeline):
        derivatives_pipeline_dir = os.path.abspath(opts.derivatives_pipeline)
    else:
        derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives',
                                                opts.derivatives_pipeline)

    if not os.path.isdir(derivatives_pipeline_dir):
        msg = "{dir} is not an available directory".format(
            dir=derivatives_pipeline_dir)
        raise NotADirectoryError(msg)

    output_dir = os.path.abspath(opts.output_dir)
    os.makedirs(output_dir, exist_ok=True)

    log_dir = os.path.join(output_dir, 'nibetaseries/logs')
    os.makedirs(log_dir, exist_ok=True)

    if opts.work_dir:
        work_dir = os.path.abspath(opts.work_dir)
    else:
        work_dir = os.path.join(os.getcwd(), 'nibetaseries_work')

    os.makedirs(work_dir, exist_ok=True)

    # only for a subset of subjects
    if opts.participant_label:
        subject_list = [
            s[4:] if s.startswith('sub-') else s
            for s in opts.participant_label
        ]
    # for all subjects
    else:
        subject_dirs = glob(os.path.join(bids_dir, "sub-*"))
        subject_list = [
            subject_dir.split("-")[-1] for subject_dir in subject_dirs
        ]

    # Nipype plugin configuration
    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'parameterize_dirs': False
        },
    })

    # check if atlas img or atlas lut exist
    if opts.atlas_img and opts.atlas_lut:
        atlas_img = os.path.abspath(opts.atlas_img)
        atlas_lut = os.path.abspath(opts.atlas_lut)
    else:
        atlas_img = atlas_lut = None

    # check if --no-signal-scaling is set
    if opts.no_signal_scaling:
        signal_scaling = False
    else:
        signal_scaling = 0  # 0 (rather than False) selects nistats' mean scaling

    # running participant level
    if opts.analysis_level == "participant":
        nibetaseries_participant_wf = init_nibetaseries_participant_wf(
            estimator=opts.estimator,
            atlas_img=atlas_img,
            atlas_lut=atlas_lut,
            bids_dir=bids_dir,
            database_path=opts.database_path,
            derivatives_pipeline_dir=derivatives_pipeline_dir,
            exclude_description_label=opts.exclude_description_label,
            fir_delays=opts.fir_delays,
            hrf_model=opts.hrf_model,
            high_pass=opts.high_pass,
            norm_betas=opts.normalize_betas,
            output_dir=output_dir,
            return_residuals=opts.return_residuals,
            run_label=opts.run_label,
            signal_scaling=signal_scaling,
            selected_confounds=opts.confounds,
            session_label=opts.session_label,
            smoothing_kernel=opts.smoothing_kernel,
            space_label=opts.space_label,
            subject_list=subject_list,
            task_label=opts.task_label,
            description_label=opts.description_label,
            work_dir=work_dir,
        )

        if opts.graph:
            nibetaseries_participant_wf.write_graph(graph2use='colored',
                                                    format='svg',
                                                    simple_form=True)

        if not opts.boilerplate:
            try:
                nibetaseries_participant_wf.run(**plugin_settings)
            except RuntimeError as e:
                if "Workflow did not execute cleanly" in str(e):
                    print("Workflow did not execute cleanly")
                else:
                    raise e

        boilerplate = nibetaseries_participant_wf.visit_desc()
        # Define the citation paths unconditionally so the existence check
        # below the analysis-level branches cannot hit an undefined name.
        citation_files = {
            ext: Path(log_dir) / 'CITATION.{}'.format(ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }
        if boilerplate:
            # To please git-annex users and also to guarantee consistency
            # among different renderings of the same file, first remove any
            # existing one
            for citation_file in citation_files.values():
                try:
                    citation_file.unlink()
                except FileNotFoundError:
                    pass

            citation_files['md'].write_text(boilerplate)

    elif opts.analysis_level == "group":
        raise NotImplementedError('group analysis not currently implemented')

    if citation_files['md'].exists():
        # Generate HTML file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('nibetaseries', 'data/references.bib'),
            '--filter', 'pandoc-citeproc',
            '--metadata', 'pagetitle="NiBetaSeries citation boilerplate"',
            str(citation_files['md']),
            '-o', str(citation_files['html'])
        ]

        logger.info(
            'Generating an HTML version of the citation boilerplate...')
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.html file:\n%s',
                           ' '.join(cmd))

        # Generate LaTex file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('nibetaseries', 'data/references.bib'), '--natbib',
            str(citation_files['md']),
            '-o', str(citation_files['tex'])
        ]
        logger.info(
            'Generating a LaTeX version of the citation boilerplate...')
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.tex file:\n%s',
                           ' '.join(cmd))
        else:
            copyfile(pkgrf('nibetaseries', 'data/references.bib'),
                     citation_files['bib'])
    else:
        logger.warning(
            'NiBetaSeries could not find the markdown version of '
            'the citation boilerplate (%s). HTML and LaTeX versions'
            ' of it will not be available', citation_files['md'])
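
The participant-label handling above strips an optional 'sub-' prefix so labels match BIDS sub-<label> directories whether or not users type the prefix. The normalization on its own (hypothetical helper):

def normalize_labels(labels):
    """Drop a leading 'sub-' so labels match BIDS participant directories."""
    return [s[4:] if s.startswith('sub-') else s for s in labels]

assert normalize_labels(['sub-01', '02']) == ['01', '02']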
Example #7
def parse_args(args=None, namespace=None):
    """Parse args and run further checks on the command line."""
    import logging
    from niworkflows.utils.spaces import Reference, SpatialReferences
    from niworkflows.utils.misc import check_valid_fs_license

    parser = _build_parser()
    opts = parser.parse_args(args, namespace)
    config.execution.log_level = int(
        max(25 - 5 * opts.verbose_count, logging.DEBUG))
    config.from_dict(vars(opts))

    # Initialize --output-spaces if not defined
    if config.execution.output_spaces is None:
        config.execution.output_spaces = SpatialReferences(
            [Reference("MNI152NLin2009cAsym", {"res": "native"})])

    # Retrieve logging level
    build_log = config.loggers.cli

    if not check_valid_fs_license(lic=config.execution.fs_license_file):
        raise RuntimeError("""\
ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html"""
                           )

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load

        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        _plugin = plugin_settings.get("plugin")
        if _plugin:
            config.nipype.plugin = _plugin
            config.nipype.plugin_args = plugin_settings.get("plugin_args", {})
            config.nipype.nprocs = config.nipype.plugin_args.get(
                "nprocs", config.nipype.nprocs)

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    if 1 < config.nipype.nprocs < config.nipype.omp_nthreads:
        build_log.warning(
            f"Per-process threads (--omp-nthreads={config.nipype.omp_nthreads}) exceed "
            f"total threads (--nthreads/--n_cpus={config.nipype.nprocs})")

    # Inform the user about the risk of using brain-extracted images
    if config.workflow.skull_strip_t1w == "auto":
        build_log.warning("""\
Option ``--skull-strip-t1w`` was set to 'auto'. A heuristic will be \
applied to determine whether the input T1w image(s) have already been skull-stripped.
If that were the case, brain extraction and INU correction will be skipped for those T1w \
inputs. Please, BEWARE OF THE RISKS TO THE CONSISTENCY of results when using varying \
processing workflows across participants. To determine whether a participant has been run \
through the shortcut pipeline (meaning, brain extraction was skipped), please check the \
citation boilerplate. When reporting results with varying pipelines, please make sure you \
mention this particular variant of fMRIPrep listing the participants for which it was \
applied.""")

    bids_dir = config.execution.bids_dir
    output_dir = config.execution.output_dir
    work_dir = config.execution.work_dir
    version = config.environment.version

    if config.execution.fs_subjects_dir is None:
        config.execution.fs_subjects_dir = output_dir / "freesurfer"

    # Wipe out existing work_dir
    if opts.clean_workdir and work_dir.exists():
        from niworkflows.utils.misc import clean_directory

        build_log.info(
            f"Clearing previous fMRIPrep working directory: {work_dir}")
        if not clean_directory(work_dir):
            build_log.warning(
                f"Could not clear all contents of working directory: {work_dir}"
            )

    # Ensure input and output folders are not the same
    if output_dir == bids_dir:
        parser.error(
            "The selected output folder is the same as the input BIDS folder. "
            "Please modify the output path (suggestion: %s)." % bids_dir /
            "derivatives" / ("fmriprep-%s" % version.split("+")[0]))

    if bids_dir in work_dir.parents:
        parser.error(
            "The selected working directory is a subdirectory of the input BIDS folder. "
            "Please modify the output path.")

    # Validate inputs
    if not opts.skip_bids_validation:
        from ..utils.bids import validate_input_dir

        build_log.info(
            "Making sure the input data is BIDS compliant (warnings can be ignored in most "
            "cases).")
        validate_input_dir(config.environment.exec_env, opts.bids_dir,
                           opts.participant_label)

    # Setup directories
    config.execution.log_dir = output_dir / "fmriprep" / "logs"
    # Check and create output and working directories
    config.execution.log_dir.mkdir(exist_ok=True, parents=True)
    output_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Force initialization of the BIDSLayout
    config.execution.init()
    all_subjects = config.execution.layout.get_subjects()
    if config.execution.participant_label is None:
        config.execution.participant_label = all_subjects

    participant_label = set(config.execution.participant_label)
    missing_subjects = participant_label - set(all_subjects)
    if missing_subjects:
        parser.error(
            "One or more participant labels were not found in the BIDS directory: "
            "%s." % ", ".join(missing_subjects))

    config.execution.participant_label = sorted(participant_label)
    config.workflow.skull_strip_template = config.workflow.skull_strip_template[0]
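
The verbosity mapping at the top of parse_args subtracts 5 per -v from level 25 (the IMPORTANT level fMRIPrep logs at), bottoming out at logging.DEBUG. Worked through with a hypothetical helper:

import logging

def log_level(verbose_count):
    """Map -v counts to a logging level, as in parse_args above."""
    return int(max(25 - 5 * verbose_count, logging.DEBUG))

assert log_level(0) == 25   # default: fMRIPrep's IMPORTANT level
assert log_level(1) == 20   # -v   -> INFO
assert log_level(3) == 10   # -vvv -> DEBUG (the floor)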
Example #8
def main():
    from ..workflows.base import init_nibetaseries_participant_wf

    # get commandline options
    opts = get_parser().parse_args()

    # Set up directories
    # TODO: set up some sort of versioning system
    bids_dir = os.path.abspath(opts.bids_dir)

    derivatives_pipeline_dir = os.path.join(bids_dir, 'derivatives',
                                            opts.derivatives_pipeline)

    output_dir = os.path.abspath(os.path.join(opts.output_dir, 'NiBetaSeries'))
    os.makedirs(output_dir, exist_ok=True)

    log_dir = os.path.join(output_dir, 'logs')
    os.makedirs(log_dir, exist_ok=True)

    if opts.work_dir:
        work_dir = os.path.abspath(opts.work_dir)
    else:
        work_dir = os.path.join(os.getcwd(), 'nibetaseries_work')

    os.makedirs(work_dir, exist_ok=True)

    # only for a subset of subjects
    if opts.participant_label:
        subject_list = opts.participant_label
    # for all subjects
    else:
        subject_dirs = glob(os.path.join(bids_dir, "sub-*"))
        subject_list = [
            subject_dir.split("-")[-1] for subject_dir in subject_dirs
        ]

    # Nipype plugin configuration
    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'parameterize_dirs': False
        },
    })

    # running participant level
    if opts.analysis_level == "participant":
        nibetaseries_participant_wf = init_nibetaseries_participant_wf(
            atlas_img=os.path.abspath(opts.atlas_img),
            atlas_lut=os.path.abspath(opts.atlas_lut),
            bids_dir=bids_dir,
            derivatives_pipeline_dir=derivatives_pipeline_dir,
            exclude_variant_label=opts.exclude_variant_label,
            hrf_model=opts.hrf_model,
            low_pass=opts.low_pass,
            output_dir=output_dir,
            run_label=opts.run_label,
            selected_confounds=opts.confounds,
            session_label=opts.session_label,
            smoothing_kernel=opts.smoothing_kernel,
            space_label=opts.space_label,
            subject_list=subject_list,
            task_label=opts.task_label,
            variant_label=opts.variant_label,
            work_dir=work_dir,
        )

        if opts.graph:
            nibetaseries_participant_wf.write_graph(graph2use='colored',
                                                    format='svg',
                                                    simple_form=True)
        try:
            nibetaseries_participant_wf.run(**plugin_settings)
        except RuntimeError as e:
            if "Workflow did not execute cleanly" in str(e):
                print("Workflow did not execute cleanly")
            else:
                raise e

    elif opts.analysis_level == "group":
        raise NotImplementedError('group analysis not currently implemented')
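
When no --participant_label is given, the fallback above globs sub-* directories and takes the text after the last hyphen. A slightly more defensive variant of the same discovery (hypothetical helper; splitting the basename avoids surprises when the BIDS path itself contains hyphens):

import os
from glob import glob

def discover_subjects(bids_dir):
    """List participant labels from sub-* directories in a BIDS root."""
    return [os.path.basename(d).split('-', 1)[-1]
            for d in glob(os.path.join(bids_dir, 'sub-*'))]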
Example #9
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as pfile:
            plugin_settings = safe_load(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    import logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args']['status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = logging.getLogger('callback')
                    logger.setLevel(logging.DEBUG)
                    handler = logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path, cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod, out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv, mod,
                       csv_failed=op.join(settings['output_dir'], 'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception("No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
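
Several of these entry points share the thread-allocation heuristic seen above: leave one thread free for non-ANTs work, and never give ANTs more than 8 threads because the speed-up beyond that is minimal. A small stand-alone sketch of that rule (the helper name is hypothetical):

from os import cpu_count

def pick_ants_nthreads(n_procs, cap=8):
    """Leave one thread for non-ANTs work; cap where the speed-up plateaus."""
    if n_procs <= 0:
        n_procs = cpu_count()
    return min(n_procs - 1, cap) if n_procs > 1 else 1

assert pick_ants_nthreads(1) == 1
assert pick_ants_nthreads(4) == 3
assert pick_ants_nthreads(32) == 8
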
Example No. 10
def create_workflow(opts):
    """Build workflow"""
    import logging
    from fmriprep.utils import make_folder
    from fmriprep.viz.reports import run_reports
    from fmriprep.workflows.base import init_fmriprep_wf

    errno = 0

    # set up logger
    logger = logging.getLogger('cli')

    if opts.debug:
        logger.setLevel(logging.DEBUG)

    run_uuid = strftime('%Y%m%d-%H%M%S_') + str(uuid.uuid4())

    # Check and create output and working directories
    # Using make_folder to prevent https://github.com/poldracklab/mriqc/issues/111
    make_folder(opts.output_dir)
    make_folder(opts.work_dir)

    # nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    nthreads = opts.nthreads
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
    else:
        # Setup multiprocessing
        nthreads = opts.nthreads
        if nthreads == 0:
            nthreads = cpu_count()

        if nthreads > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': nthreads}
            if opts.mem_mb:
                plugin_settings['plugin_args'][
                    'memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        print('Per-process threads (--omp-nthreads={:d}) cannot exceed total '
              'threads (--nthreads/--n_cpus={:d})'.format(
                  omp_nthreads, nthreads))
        sys.exit(1)

    # Determine subjects to be processed
    subject_list = opts.participant_label

    if subject_list is None or not subject_list:
        subject_list = [
            op.basename(subdir)[4:] for subdir in glob.glob(
                op.join(op.abspath(opts.bids_dir), 'sub-*'))
        ]
    else:
        subject_list = [
            sub[4:] if sub.startswith('sub-') else sub for sub in subject_list
        ]

    logger.info('Subject list: %s', ', '.join(subject_list))

    # Build main workflow and run
    reportlets_dir = op.join(op.abspath(opts.work_dir), 'reportlets')
    output_dir = op.abspath(opts.output_dir)
    bids_dir = op.abspath(opts.bids_dir)
    fmriprep_wf = init_fmriprep_wf(subject_list=subject_list,
                                   task_id=opts.task_id,
                                   run_uuid=run_uuid,
                                   ignore=opts.ignore,
                                   debug=opts.debug,
                                   omp_nthreads=omp_nthreads,
                                   skull_strip_ants=opts.skull_strip_ants,
                                   reportlets_dir=reportlets_dir,
                                   output_dir=output_dir,
                                   bids_dir=bids_dir,
                                   freesurfer=opts.freesurfer,
                                   output_spaces=opts.output_space,
                                   template=opts.template,
                                   output_grid_ref=opts.output_grid_reference,
                                   hires=opts.hires,
                                   bold2t1w_dof=opts.bold2t1w_dof,
                                   fmap_bspline=opts.fmap_bspline,
                                   fmap_demean=opts.fmap_no_demean)
    fmriprep_wf.base_dir = op.abspath(opts.work_dir)

    if opts.reports_only:
        if opts.write_graph:
            fmriprep_wf.write_graph(graph2use="colored",
                                    format='svg',
                                    simple_form=True)

        for subject_label in subject_list:
            run_reports(reportlets_dir,
                        output_dir,
                        subject_label,
                        run_uuid=run_uuid)
        sys.exit()

    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored",
                                format='svg',
                                simple_form=True)

    report_errors = 0
    for subject_label in subject_list:
        report_errors += run_reports(reportlets_dir,
                                     output_dir,
                                     subject_label,
                                     run_uuid=run_uuid)
    if errno == 1:
        assert report_errors > 0

    sys.exit(errno)
Example No. 11
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows fmriprep to enforce
    a hard-limited memory-scope.

    """
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf
    from shutil import copyfile

    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    output_spaces = opts.output_space or []

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym' or
                           'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--use-aroma" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").'
        )

    if opts.cifti_output and (opts.template != 'MNI152NLin2009cAsym' or
                              'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--cifti-output" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").'
        )

    # Check output_space
    if 'template' not in output_spaces and (opts.use_syn_sdc or opts.force_syn):
        msg = ['SyN SDC correction requires T1 to MNI registration, but '
               '"template" is not specified in "--output-space" arguments.',
               'Option --use-syn will be cowardly dismissed.']
        if opts.force_syn:
            output_spaces.append('template')
            msg[1] = (' Since --force-syn has been requested, "template" has been added to'
                      ' the "--output-space" list.')
        logger.warning(' '.join(msg))

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = os.path.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = bids_dir
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir, work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(25, INIT_MSG(
        version=__version__,
        bids_dir=bids_dir,
        subject_list=subject_list,
        uuid=run_uuid)
    )

    template_out_grid = opts.template_resampling_grid
    if opts.output_grid_reference is not None:
        logger.warning(
            'Option --output-grid-reference is deprecated, please use '
            '--template-resampling-grid')
        template_out_grid = template_out_grid or opts.output_grid_reference
    if opts.debug:
        logger.warning('Option --debug is deprecated and has no effect')

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        echo_idx=opts.echo_idx,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.sloppy,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        output_spaces=output_spaces,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        cifti_output=opts.cifti_output,
        template_out_grid=template_out_grid,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        (logs_path / 'CITATION.md').write_text(boilerplate)
        logger.log(25, 'Works derived from this fMRIPrep execution should '
                   'include the following boilerplate:\n\n%s', boilerplate)

        # Generate HTML file resolving citations
        cmd = ['pandoc', '-s', '--bibliography',
               pkgrf('fmriprep', 'data/boilerplate.bib'),
               '--filter', 'pandoc-citeproc',
               str(logs_path / 'CITATION.md'),
               '-o', str(logs_path / 'CITATION.html')]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.html file:\n%s',
                           ' '.join(cmd))

        # Generate LaTex file resolving citations
        cmd = ['pandoc', '-s', '--bibliography',
               pkgrf('fmriprep', 'data/boilerplate.bib'),
               '--natbib', str(logs_path / 'CITATION.md'),
               '-o', str(logs_path / 'CITATION.tex')]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.tex file:\n%s',
                           ' '.join(cmd))
        else:
            copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                     (logs_path / 'CITATION.bib'))

    return retval
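
The docstring of build_workflow explains the design: all checks and workflow construction happen in a function with pickleable inputs and a retval dictionary, so the caller can isolate it in a multiprocessing.Process and keep construction-time memory out of the parent. A minimal sketch of that calling pattern, with opts as a stand-in for the parsed arguments (fMRIPrep's actual main() adds more error handling around this):

from multiprocessing import Manager, Process

def run_isolated(opts):
    """Build the workflow in a child process; return a plain dict of results."""
    with Manager() as mgr:
        retval = mgr.dict()                  # proxy dict shared with the child
        proc = Process(target=build_workflow, args=(opts, retval))
        proc.start()
        proc.join()
        return dict(retval)                  # copy before the manager exits

# result = run_isolated(opts)
# if result['return_code'] == 0 and result['workflow'] is not None:
#     result['workflow'].run(**result['plugin_settings'])
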
Example No. 12
def main():
    """Entry point"""
    from nipype import config as ncfg
    from nipype.pipeline import engine as pe
    from fmriprep import __version__
    from fmriprep.workflows import fmriprep_single

    parser = ArgumentParser(description='fMRI Preprocessing workflow',
                            formatter_class=RawTextHelpFormatter)

    # Arguments as specified by BIDS-Apps
    # required, positional arguments
    # IMPORTANT: they must go directly with the parser object
    parser.add_argument('bids_dir', action='store', default=os.getcwd())
    parser.add_argument('output_dir',
                        action='store',
                        default=op.join(os.getcwd(), 'out'))
    parser.add_argument('analysis_level', choices=['participant'])

    # optional arguments
    parser.add_argument('-S',
                        '--subject-id',
                        '--participant_label',
                        action='store',
                        nargs='+')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='fmriprep v{}'.format(__version__))

    g_input = parser.add_argument_group('fMRIprep specific arguments')
    g_input.add_argument('-s',
                         '--session-id',
                         action='store',
                         default='single_session')
    g_input.add_argument('-r',
                         '--run-id',
                         action='store',
                         default='single_run')
    g_input.add_argument('-d',
                         '--data-type',
                         action='store',
                         choices=['anat', 'func'])
    g_input.add_argument('--debug',
                         action='store_true',
                         default=False,
                         help='run debug version of workflow')
    g_input.add_argument('--skull-strip-ants',
                         action='store_true',
                         default=False,
                         help='use ANTs-based skull-stripping')

    g_input.add_argument('--nthreads',
                         action='store',
                         default=0,
                         type=int,
                         help='number of threads')
    g_input.add_argument("--write-graph",
                         action='store_true',
                         default=False,
                         help="Write workflow graph.")
    g_input.add_argument("--use-plugin",
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('-w',
                         '--work-dir',
                         action='store',
                         default=op.join(os.getcwd(), 'work'))

    opts = parser.parse_args()

    settings = {
        'bids_root': op.abspath(opts.bids_dir),
        'write_graph': opts.write_graph,
        'nthreads': opts.nthreads,
        'debug': opts.debug,
        'skull_strip_ants': opts.skull_strip_ants,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir)
    }

    # set up logger
    logger = logging.getLogger('cli')

    if opts.debug:
        settings['ants_t1-mni_settings'] = 't1-mni_registration_test'
        logger.setLevel(logging.DEBUG)

    log_dir = op.join(settings['work_dir'], 'log')

    # Check and create output and working directories
    # Using locks to prevent https://github.com/poldracklab/mriqc/issues/111
    with LockFile('.fmriprep-folders-lock'):
        if not op.exists(settings['output_dir']):
            os.makedirs(settings['output_dir'])

        derivatives = op.join(settings['output_dir'], 'derivatives')
        if not op.exists(derivatives):
            os.makedirs(derivatives)

        if not op.exists(settings['work_dir']):
            os.makedirs(settings['work_dir'])

        if not op.exists(log_dir):
            os.makedirs(log_dir)

    logger.addHandler(logging.FileHandler(op.join(log_dir, 'run_workflow')))

    # Warn for default work/output directories
    if (opts.work_dir == parser.get_default('work_dir')
            or opts.output_dir == parser.get_default('output_dir')):
        logger.warning("work-dir and/or output-dir not specified. Using " +
                       opts.work_dir + " and " + opts.output_dir)

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })

    # nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    # Determine subjects to be processed
    subject_list = opts.subject_id

    if subject_list is None or not subject_list:
        subject_list = [
            op.basename(subdir)[4:]
            for subdir in glob.glob(op.join(settings['bids_root'], 'sub-*'))
        ]

    logger.info("subject list: {}", ', '.join(subject_list))

    # Build main workflow and run
    preproc_wf = fmriprep_single(subject_list, settings=settings)
    preproc_wf.base_dir = settings['work_dir']
    preproc_wf.run(**plugin_settings)

    if opts.write_graph:
        preproc_wf.write_graph()
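
The LockFile block above guards directory creation against a race between concurrent runs (the linked mriqc issue). On Python 3, os.makedirs with exist_ok=True tolerates another process creating the directory first, so the same protection can be had without a lock file; a sketch under that assumption, with stand-in paths:

import os
import os.path as op

output_dir = op.abspath('out')    # stand-ins for the settings built above
work_dir = op.abspath('work')
log_dir = op.join(work_dir, 'log')

for folder in (output_dir, op.join(output_dir, 'derivatives'), work_dir, log_dir):
    os.makedirs(folder, exist_ok=True)  # no failure if a concurrent run wins
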
Example No. 13
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows smriprep to enforce
    a hard-limited memory-scope.

    """
    from shutil import copyfile
    from os import cpu_count
    import uuid
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf

    import json
    from bids import BIDSLayout
    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_smriprep_wf

    logger = logging.getLogger("nipype.workflow")

    INIT_MSG = """
    Running sMRIPrep version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.

    {spaces}
    """.format

    # Set up some instrumental utilities
    run_uuid = "%s_%s" % (strftime("%Y%m%d-%H%M%S"), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = opts.bids_dir.resolve()
    layout = BIDSLayout(str(bids_dir), validate=False)
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)

    bids_filters = (json.loads(opts.bids_filter_file.read_text())
                    if opts.bids_filter_file else None)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)

        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault("plugin_args", {})
    else:
        # Defaults
        plugin_settings = {
            "plugin": "MultiProc",
            "plugin_args": {
                "raise_insufficient": False,
                "maxtasksperchild": 1,
            },
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nprocs = plugin_settings["plugin_args"].get("n_procs")
    # Permit overriding plugin config with specific CLI options
    if nprocs is None or opts.nprocs is not None:
        nprocs = opts.nprocs
        if nprocs is None or nprocs < 1:
            nprocs = cpu_count()
        plugin_settings["plugin_args"]["n_procs"] = nprocs

    if opts.mem_gb:
        plugin_settings["plugin_args"]["memory_gb"] = opts.mem_gb

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nprocs - 1 if nprocs > 1 else cpu_count(), 8)

    if 1 < nprocs < omp_nthreads:
        logger.warning(
            "Per-process threads (--omp-nthreads=%d) exceed total "
            "available CPUs (--nprocs/--ncpus=%d)",
            omp_nthreads,
            nprocs,
        )

    # Set up directories
    output_dir = opts.output_dir.resolve()
    log_dir = output_dir / "smriprep" / "logs"
    work_dir = opts.work_dir.resolve()

    # Check and create output and working directories
    log_dir.mkdir(parents=True, exist_ok=True)
    work_dir.mkdir(parents=True, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        "logging": {
            "log_directory": str(log_dir),
            "log_to_file": True
        },
        "execution": {
            "crashdump_dir": str(log_dir),
            "crashfile_format": "txt",
            "get_linked_libs": False,
            "stop_on_first_crash": opts.stop_on_first_crash,
        },
        "monitoring": {
            "enabled": opts.resource_monitor,
            "sample_frequency": "0.5",
            "summary_append": True,
        },
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval["return_code"] = 0
    retval["plugin_settings"] = plugin_settings
    retval["bids_dir"] = str(bids_dir)
    retval["output_dir"] = str(output_dir)
    retval["work_dir"] = str(work_dir)
    retval["subject_list"] = subject_list
    retval["run_uuid"] = run_uuid
    retval["workflow"] = None

    # Called with reports only
    if opts.reports_only:
        from niworkflows.reports import generate_reports

        logger.log(25, "Running --reports-only on participants %s",
                   ", ".join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval["return_code"] = generate_reports(subject_list,
                                                 str(output_dir),
                                                 run_uuid,
                                                 packagename="smriprep")
        return retval

    output_spaces = opts.output_spaces
    if not output_spaces.is_cached():
        output_spaces.checkpoint()

    logger.log(
        25,
        INIT_MSG(
            version=__version__,
            bids_dir=bids_dir,
            subject_list=subject_list,
            uuid=run_uuid,
            spaces=output_spaces,
        ),
    )

    # Build main workflow
    retval["workflow"] = init_smriprep_wf(
        debug=opts.sloppy,
        fast_track=opts.fast_track,
        freesurfer=opts.run_reconall,
        fs_subjects_dir=opts.fs_subjects_dir,
        hires=opts.hires,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        run_uuid=run_uuid,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_mode=opts.skull_strip_mode,
        skull_strip_template=opts.skull_strip_template[0],
        spaces=output_spaces,
        subject_list=subject_list,
        work_dir=str(work_dir),
        bids_filters=bids_filters,
    )
    retval["return_code"] = 0

    boilerplate = retval["workflow"].visit_desc()
    (log_dir / "CITATION.md").write_text(boilerplate)
    logger.log(
        25,
        "Works derived from this sMRIPrep execution should "
        "include the following boilerplate:\n\n%s",
        boilerplate,
    )

    # Generate HTML file resolving citations
    cmd = [
        "pandoc",
        "-s",
        "--bibliography",
        pkgrf("smriprep", "data/boilerplate.bib"),
        "--citeproc",
        "--metadata",
        'pagetitle="sMRIPrep citation boilerplate"',
        str(log_dir / "CITATION.md"),
        "-o",
        str(log_dir / "CITATION.html"),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning("Could not generate CITATION.html file:\n%s",
                       " ".join(cmd))

    # Generate LaTex file resolving citations
    cmd = [
        "pandoc",
        "-s",
        "--bibliography",
        pkgrf("smriprep", "data/boilerplate.bib"),
        "--natbib",
        str(log_dir / "CITATION.md"),
        "-o",
        str(log_dir / "CITATION.tex"),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning("Could not generate CITATION.tex file:\n%s",
                       " ".join(cmd))
    else:
        copyfile(pkgrf("smriprep", "data/boilerplate.bib"),
                 str(log_dir / "CITATION.bib"))
    return retval
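
The --bids-filter-file read above is a plain JSON document: top-level keys name the pipeline's BIDS queries and their values are PyBIDS entity filters. The query names and entities below are illustrative assumptions, not sMRIPrep's documented schema:

import json
from pathlib import Path

sample_filters = {
    "t1w": {"session": "01", "acquisition": "mprage"},
    "t2w": {"session": "01"},
}
filter_file = Path("bids_filters.json")
filter_file.write_text(json.dumps(sample_filters, indent=2))

# equivalent to: json.loads(opts.bids_filter_file.read_text())
bids_filters = json.loads(filter_file.read_text())
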
Example No. 14
def main(argv=None):
    # get commandline options
    opts = get_parser().parse_args(argv)

    # Set up directories
    # TODO: set up some sort of versioning system
    bids_dir = os.path.abspath(opts.bids_dir)

    output_dir = os.path.abspath(opts.output_dir)
    os.makedirs(output_dir, exist_ok=True)

    log_dir = os.path.join(output_dir, 'logs')
    os.makedirs(log_dir, exist_ok=True)

    if opts.work_dir:
        work_dir = os.path.abspath(opts.work_dir)
    else:
        work_dir = os.path.join(os.getcwd(), 'complexpreproc_work')

    os.makedirs(work_dir, exist_ok=True)

    if opts.participant_label:  # only for a subset of subjects
        subject_list = opts.participant_label
    else:  # for all subjects
        subject_dirs = glob(os.path.join(bids_dir, 'sub-*'))
        subject_list = [
            subject_dir.split('-')[-1] for subject_dir in subject_dirs
        ]

    # Nipype plugin configuration
    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)

        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1
            },
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'parameterize_dirs': False,
        },
    })

    # running participant level
    participant_wf = init_preproc_workflow(
        bids_dir=bids_dir,
        output_dir=output_dir,
        work_dir=work_dir,
        subject_list=subject_list,
        session_label=opts.session_label,
        task_label=opts.task_label,
        run_label=opts.run_label,
    )

    if opts.graph:
        participant_wf.write_graph(graph2use='flat',
                                   format='svg',
                                   simple_form=False)

    try:
        participant_wf.run(**plugin_settings)
    except RuntimeError as e:
        if 'Workflow did not execute cleanly' in str(e):
            print('Workflow did not execute cleanly')
        else:
            raise e
Example No. 15
def main():
    """Entry point"""
    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    g_input = parser.add_argument_group('Inputs')
    g_input.add_argument('-B',
                         '--bids-root',
                         action='store',
                         default=os.getcwd())
    g_input.add_argument('-i', '--input-folder', action='store')
    g_input.add_argument('-S', '--subject-id', nargs='*', action='store')
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('-d',
                         '--data-type',
                         action='store',
                         nargs='*',
                         choices=['anat', 'func'],
                         default=['anat', 'func'])
    g_input.add_argument('-v',
                         '--version',
                         action='store_true',
                         default=False,
                         help='Show current mriqc version')

    g_input.add_argument('--nthreads',
                         action='store',
                         default=0,
                         type=int,
                         help='number of threads')
    g_input.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--test-run',
                         action='store_true',
                         default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--save-memory',
                         action='store_true',
                         default=False,
                         help='Save as much memory as possible')
    g_input.add_argument(
        '--hmc-afni',
        action='store_true',
        default=False,
        help='Use AFNI 3dvolreg for head motion correction (HMC) and '
        'frame displacement (FD) estimation')
    g_input.add_argument('--ants-settings',
                         action='store',
                         help='path to JSON file with settings for ANTS')

    g_outputs = parser.add_argument_group('Outputs')
    g_outputs.add_argument('-o', '--output-dir', action='store')
    g_outputs.add_argument('-w',
                           '--work-dir',
                           action='store',
                           default=op.join(os.getcwd(), 'work'))

    opts = parser.parse_args()

    bids_root = op.abspath(opts.bids_root)
    if opts.input_folder is not None:
        warn('The --input-folder flag is deprecated, please use -B instead',
             DeprecationWarning)

        if bids_root == os.getcwd():
            bids_root = op.abspath(opts.input_folder)

    if opts.version:
        print('mriqc version ' + __version__)
        exit(0)

    settings = {
        'bids_root': bids_root,
        'output_dir': os.getcwd(),
        'write_graph': opts.write_graph,
        'save_memory': opts.save_memory,
        'hmc_afni': opts.hmc_afni,
        'nthreads': opts.nthreads
    }

    if opts.output_dir:
        settings['output_dir'] = op.abspath(opts.output_dir)

    if not op.exists(settings['output_dir']):
        os.makedirs(settings['output_dir'])

    settings['work_dir'] = op.abspath(opts.work_dir)

    with LockFile(settings['work_dir']):
        if not op.exists(settings['work_dir']):
            os.makedirs(settings['work_dir'])

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['work_dir'] + '_log')
    if not op.exists(log_dir):
        os.makedirs(log_dir)

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as pfile:
            plugin_settings = safe_load(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    for dtype in opts.data_type:
        ms_func = getattr(mwc, 'ms_' + dtype)
        workflow = ms_func(subject_id=opts.subject_id,
                           session_id=opts.session_id,
                           run_id=opts.run_id,
                           settings=settings)
        if workflow is None:
            LOGGER.warning('No %s scans were found in %s', dtype,
                           settings['bids_root'])
            continue

        workflow.base_dir = settings['work_dir']
        if settings.get('write_graph', False):
            workflow.write_graph()

        if not opts.test_run:
            workflow.run(**plugin_settings)

        if opts.subject_id is None and not opts.test_run:
            workflow_report(dtype, settings)
Example No. 16
def build_recon_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows qsiprep to enforce
    a hard-limited memory-scope.

    """
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf

    from nipype import logging, config as ncfg
    from ..__about__ import __version__
    from ..workflows.recon import init_qsirecon_wf
    from ..utils.bids import collect_participants

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running qsirecon version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = os.path.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label, bids_validate=False)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'qsirecon', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = bids_dir
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(
            version=__version__,
            bids_dir=bids_dir,
            subject_list=subject_list,
            uuid=run_uuid))

    retval['workflow'] = init_qsirecon_wf(
        subject_list=subject_list,
        run_uuid=run_uuid,
        work_dir=work_dir,
        output_dir=output_dir,
        recon_input=opts.recon_input,
        recon_spec=opts.recon_spec,
        low_mem=opts.low_mem,
        omp_nthreads=omp_nthreads,
        bids_dir=bids_dir,
        sloppy=opts.sloppy
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'qsirecon' / 'logs'
    boilerplate = retval['workflow'].visit_desc()
    (logs_path / 'CITATION.md').write_text(boilerplate)
    logger.log(
        25, 'Works derived from this qsiprep execution should '
        'include the following boilerplate:\n\n%s', boilerplate)

    # Generate HTML file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('qsiprep', 'data/boilerplate.bib'), '--filter',
        'pandoc-citeproc',
        str(logs_path / 'CITATION.md'), '-o',
        str(logs_path / 'CITATION.html')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.html file:\n%s',
                       ' '.join(cmd))

    # Generate LaTex file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('qsiprep', 'data/boilerplate.bib'), '--natbib',
        str(logs_path / 'CITATION.md'), '-o',
        str(logs_path / 'CITATION.tex')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.tex file:\n%s',
                       ' '.join(cmd))
    return retval
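
This example invokes pandoc with the external '--filter pandoc-citeproc', while Example No. 13 uses the built-in '--citeproc' that replaced the external filter in pandoc 2.11. A hedged sketch of a converter that tries the modern flag first and falls back to the legacy filter; the helper name and layout are assumptions, not part of any of these pipelines:

from subprocess import check_call, CalledProcessError, TimeoutExpired

def render_citations(bib, md_path, out_path):
    """Resolve citations with pandoc, preferring built-in citeproc (>= 2.11)."""
    for citeproc in (['--citeproc'], ['--filter', 'pandoc-citeproc']):
        cmd = (['pandoc', '-s', '--bibliography', bib] + citeproc
               + [md_path, '-o', out_path])
        try:
            check_call(cmd, timeout=10)
            return True
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            continue  # try the next citeproc invocation
    return False
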
Example No. 17
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    # check_folder(log_dir)
    # check_folder(settings['report_dir'])

    # Set nipype config (file logging and crashdump_dir disabled in this example)
    ncfg.update_config({
        # 'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {
            # 'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'resource_monitor': opts.profile,
        },
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('utils').setLevel(log_level)

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load  # plain-data config; avoids yaml.load's Loader requirement (PyYAML >= 6)
        with open(opts.use_plugin) as pfile:
            plugin_settings = safe_load(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')
        else:
            msg = 'Error reading BIDS directory ({}), or the dataset is not ' \
                  'BIDS-compliant.'

            if opts.participant_label or opts.session_id or opts.run_id or opts.task_id:
                msg = 'The combination of supplied labels'

                if opts.participant_label is not None:
                    msg += ' (--participant_label {})'.format(" ".join(opts.participant_label))
                if opts.session_id is not None:
                    msg += ' (--session-id {})'.format(" ".join(opts.session_id))
                if opts.run_id is not None:
                    msg += ' (--run-id {})'.format(" ".join(opts.run_id))
                if opts.task_id is not None:
                    msg += ' (--task-id {})'.format(" ".join(opts.task_id))

                msg += ' did not result in matches within the BIDS directory ({}).'

            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

        for mod in modalities:
            dataframe, order, jsonfiles = generate_csv(settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, settings['output_dir'])
                continue

            if 'jsonfile' in order:
                order.remove('jsonfile')

            base_name = 'mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-loso'
            load_classifier = pkgrf(
                'mriqc',
                'data/mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-'
                'loso_data-all_estimator.pklz')

            cvhelper = CVHelper(load_clf=load_classifier, n_jobs=-1,
                                rate_label=['rater_1'], basename=base_name)

            prediction = cvhelper.predict(dataframe[order])
            dataframe['y_prob'] = prediction[0][:, 1]
            dataframe['y_pred'] = prediction[1]

            dataframe.index = jsonfiles

            for jsonfile in dataframe.index.values:
                with open(jsonfile, 'r+') as json_file:
                    json_dict = json.load(json_file, object_pairs_hook=OrderedDict)
                    # DataFrame.get_value was removed in pandas 1.0; .at is the
                    # equivalent scalar accessor.
                    json_dict['y_prob'] = float(dataframe.at[jsonfile, 'y_prob'])
                    json_dict['y_pred'] = float(dataframe.at[jsonfile, 'y_pred'])
                    json_file.seek(0)
                    json.dump(json_dict, json_file, separators=(',', ':'), sort_keys=True, indent=4)
                    json_file.truncate()
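A note on the recurring --use-plugin handling: every entry point in this collection loads the plugin file with yaml.load, which is deprecated without an explicit Loader in modern PyYAML. Below is a minimal sketch of the expected file layout together with the safer safe_load call; the concrete values are illustrative assumptions, not defaults of any of these tools.

from yaml import safe_load

# Illustrative plugin settings, mirroring the keys the parsers above
# expect ('plugin', 'plugin_args'); the values are made up.
EXAMPLE_PLUGIN_YAML = """\
plugin: MultiProc
plugin_args:
  n_procs: 8
  memory_gb: 16
"""

plugin_settings = safe_load(EXAMPLE_PLUGIN_YAML)
assert plugin_settings['plugin'] == 'MultiProc'
assert plugin_settings['plugin_args'] == {'n_procs': 8, 'memory_gb': 16}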
Example No. 18
def main():
    """Entry point"""
    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v', '--version', action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir', action='store',
                        help='The directory with the input dataset '
                             'formatted according to the BIDS standard.')
    parser.add_argument('output_dir', action='store',
                        help='The directory where the output files '
                             'should be stored. If you are running group level analysis '
                             'this folder should be prepopulated with the results of the '
                             'participant level analysis.')
    parser.add_argument('analysis_level', action='store',
                        help='Level of the analysis that will be performed. '
                             'Multiple participant level analyses can be run independently '
                             '(in parallel) using the same output_dir.',
                        choices=['participant', 'group'])
    parser.add_argument('--participant_label', '--subject_list', '-S', action='store',
                        help='The label(s) of the participant(s) that should be analyzed. '
                             'The label corresponds to sub-<participant_label> from the '
                             'BIDS spec (so it does not include "sub-"). If this parameter '
                             'is not provided all subjects should be analyzed. Multiple '
                             'participants can be specified with a space separated list.',
                        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-d', '--data-type', action='store', nargs='*',
                         choices=['anat', 'func'], default=['anat', 'func'])
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run', action='store_true', default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing', action='store_true', default=False,
                         help='use testing settings for a minimal footprint')
    g_input.add_argument('--hmc-afni', action='store_true', default=False,
                         help='Use AFNI 3dvolreg for head motion correction (HMC) and '
                              'frame displacement (FD) estimation')


    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w', '--work-dir', action='store', default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')

    # ANTs options
    g_ants = parser.add_argument_group('specific settings for ANTs registrations')
    g_ants.add_argument('--ants-nthreads', action='store', type=int,
                        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings', action='store',
                         help='path to JSON file with settings for ANTS')

    opts = parser.parse_args()


    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)
    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'nthreads': opts.nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir)
    }

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.ants_nthreads:
        settings['ants_nthreads'] = opts.ants_nthreads

    log_dir = op.join(settings['output_dir'], 'logs')

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['work_dir'], 'reports')

    with LockFile(op.join(os.getenv('HOME'), '.mriqc-lock')):
        if not op.exists(settings['output_dir']):
            os.makedirs(settings['output_dir'])

        if not op.exists(settings['work_dir']):
            os.makedirs(settings['work_dir'])

        if not op.exists(log_dir):
            os.makedirs(log_dir)

        if not op.exists(settings['report_dir']):
            os.makedirs(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir}
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    LOGGER.info(
        'Running MRIQC-%s (analysis_level=%s, participant_label=%s)\n\tSettings=%s',
        __version__, opts.analysis_level, opts.participant_label, settings)

    # Set up participant level
    if opts.analysis_level == 'participant':
        for dtype in opts.data_type:
            ms_func = getattr(mwc, 'ms_' + dtype)
            workflow = ms_func(subject_id=opts.participant_label, session_id=opts.session_id,
                               run_id=opts.run_id, settings=settings)
            if workflow is None:
                LOGGER.warning('No scans were found for the given inputs')
                continue

            workflow.base_dir = settings['work_dir']
            if settings.get('write_graph', False):
                workflow.write_graph()

            if not opts.dry_run:
                workflow.run(**plugin_settings)

    # Set up group level
    elif opts.analysis_level == 'group':
        from mriqc.reports import MRIQCReportPDF

        for dtype in opts.data_type:
            reporter = MRIQCReportPDF(dtype, settings)
            reporter.group_report()
            reporter.individual_report()
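For orientation, here is a self-contained sketch of how an argparse CLI with this shape resolves a typical command line. The stand-in parser only mirrors the arguments relevant to the example, and the paths and labels are made up:

from argparse import ArgumentParser

# Minimal stand-in mirroring the positional/optional layout above.
p = ArgumentParser()
p.add_argument('bids_dir')
p.add_argument('output_dir')
p.add_argument('analysis_level', choices=['participant', 'group'])
p.add_argument('--participant_label', nargs='*')
p.add_argument('--nthreads', type=int, default=0)

opts = p.parse_args(['/data/bids', '/data/out', 'participant',
                     '--participant_label', '01', '02',
                     '--nthreads', '8'])
assert opts.analysis_level == 'participant'
assert opts.participant_label == ['01', '02']
assert opts.nthreads == 8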
Example No. 19
def create_workflow(opts):
    """Build workflow"""
    from niworkflows.nipype import config as ncfg
    from ..viz.reports import run_reports
    from ..workflows.base import init_fmriprep_wf
    from ..utils.bids import collect_participants

    # Set up some instrumental utilities
    errno = 0
    run_uuid = strftime('%Y%m%d-%H%M%S_') + str(uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = op.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    nthreads = opts.nthreads
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
    else:
        # Setup multiprocessing
        nthreads = opts.nthreads
        if nthreads == 0:
            nthreads = cpu_count()

        if nthreads > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': nthreads}
            if opts.mem_mb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        raise RuntimeError(
            'Per-process threads (--omp-nthreads={:d}) cannot exceed total '
            'threads (--nthreads/--n_cpus={:d})'.format(omp_nthreads, nthreads))

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir)

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list))
        report_errors = [
            run_reports(op.join(work_dir, 'reportlets'), output_dir, subject_label,
                        run_uuid=run_uuid)
            for subject_label in subject_list]
        sys.exit(int(sum(report_errors) > 0))

    # Build main workflow
    logger.log(25, INIT_MSG(
        version=__version__,
        subject_list=subject_list,
        uuid=run_uuid)
    )

    fmriprep_wf = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.debug,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        omp_nthreads=omp_nthreads,
        skull_strip_ants=True,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.freesurfer,
        output_spaces=opts.output_space,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        output_grid_ref=opts.output_grid_reference,
        hires=opts.hires,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg', simple_form=True)

    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise(e)

    # Generate reports phase
    report_errors = [run_reports(
        op.join(work_dir, 'reportlets'), output_dir, subject_label, run_uuid=run_uuid)
        for subject_label in subject_list]

    if sum(report_errors):
        logger.warning('Errors occurred while generating reports for participants: %s.',
                       ', '.join(['%s (%d)' % (subid, err)
                                  for subid, err in zip(subject_list, report_errors)]))

    errno += sum(report_errors)
    sys.exit(int(errno > 0))
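The omp_nthreads default above encodes a heuristic that recurs throughout these examples: when running multi-process, leave one CPU free for orchestration, and cap per-process threads at 8, beyond which the comments elsewhere note the speed-up is minimal. A standalone restatement follows; the function wrapper is ours for illustration, not part of fmriprep's API:

from multiprocessing import cpu_count

def default_omp_nthreads(nthreads):
    """Mirror the heuristic above: spare one thread, cap at 8."""
    return min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

# e.g. --nthreads=4 yields 3 threads per process; --nthreads=32 yields 8.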
Example No. 20
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    from .. import logging
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt'
        },
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod,
                            settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod,
                                          settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    import logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args'][
                        'status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = logging.getLogger('callback')
                    logger.setLevel(logging.DEBUG)
                    handler = logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning('Anonymized quality metrics will be submitted'
                                ' to MRIQC\'s metrics repository.'
                                ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path,
                                         cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod,
                     out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv,
                       mod,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise Exception(
                "No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
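The verbosity handling at the top of this entry point maps repeated -v flags onto numeric logging levels. A self-contained restatement with the resulting values spelled out (the helper name is ours, for illustration):

def log_level_from_verbosity(verbose_count):
    """Map -v counts to logging levels: 0 -> 30 (WARNING), 1 -> 20 (INFO),
    2 -> 15, 3 -> 10 (DEBUG), with a floor of 1 beyond that."""
    if verbose_count > 1:
        return int(max(25 - 5 * verbose_count, 1))
    return int(max(3 - verbose_count, 0) * 10)

assert log_level_from_verbosity(0) == 30
assert log_level_from_verbosity(1) == 20
assert log_level_from_verbosity(2) == 15
assert log_level_from_verbosity(3) == 10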
Example No. 21
def main():
    """Entry point"""
    from nipype import config as ncfg

    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v', '--version', action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir', action='store',
                        help='The directory with the input dataset '
                             'formatted according to the BIDS standard.')
    parser.add_argument('output_dir', action='store',
                        help='The directory where the output files '
                             'should be stored. If you are running group level analysis '
                             'this folder should be prepopulated with the results of the '
                             'participant level analysis.')
    parser.add_argument('analysis_level', action='store',
                        help='Level of the analysis that will be performed. '
                             'Multiple participant level analyses can be run independently '
                             '(in parallel) using the same output_dir.',
                        choices=['participant', 'group'])
    parser.add_argument('--participant_label', '--subject_list', '-S', action='store',
                        help='The label(s) of the participant(s) that should be analyzed. '
                             'The label corresponds to sub-<participant_label> from the '
                             'BIDS spec (so it does not include "sub-"). If this parameter '
                             'is not provided all subjects should be analyzed. Multiple '
                             'participants can be specified with a space separated list.',
                        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-d', '--data-type', action='store', nargs='*',
                         choices=['anat', 'func'], default=['anat', 'func'])
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run', action='store_true', default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing', action='store_true', default=False,
                         help='use testing settings for a minimal footprint')

    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w', '--work-dir', action='store', default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')
    g_outputs.add_argument('--verbose-reports', default=False, action='store_true')

    # ANTs options
    g_ants = parser.add_argument_group('specific settings for ANTs registrations')
    g_ants.add_argument('--ants-nthreads', action='store', type=int,
                        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings', action='store',
                        help='path to JSON file with settings for ANTS')

    # AFNI head motion correction settings
    g_afni = parser.add_argument_group('specific settings for AFNI head motion correction')
    g_afni.add_argument('--hmc-afni', action='store_true', default=False,
                        help='Use AFNI 3dvolreg for head motion correction (HMC) and '
                             'frame displacement (FD) estimation')
    g_afni.add_argument('--deoblique', action='store_true', default=False,
                        help='Deoblique the functional scans during head motion '
                             'correction preprocessing')
    g_afni.add_argument('--despike', action='store_true', default=False,
                        help='Despike the functional scans during head motion correction '
                             'preprocessing')
    g_afni.add_argument('--start-idx', action='store', type=int,
                        help='Initial volume in functional timeseries that should be '
                             'considered for preprocessing')
    g_afni.add_argument('--stop-idx', action='store', type=int,
                        help='Final volume in functional timeseries that should be '
                             'considered for preprocessing')
    g_afni.add_argument('--correct-slice-timing', action='store_true', default=False,
                        help='Perform slice timing correction')

    opts = parser.parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)
    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'nthreads': opts.nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.ants_nthreads:
        settings['ants_nthreads'] = opts.ants_nthreads

    log_dir = op.join(settings['output_dir'], 'logs')

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['work_dir'], 'reports')

    with LockFile(op.join(os.getenv('HOME'), '.mriqc-lock')):
        check_folder(settings['output_dir'])
        check_folder(settings['work_dir'])
        check_folder(log_dir)
        check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir}
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_level=%s, participant_label=%s)\n\tSettings=%s',
        __version__, opts.analysis_level, opts.participant_label, settings)

    # Set up participant level
    if opts.analysis_level == 'participant':
        for dtype in opts.data_type:
            ms_func = getattr(mwc, 'ms_' + dtype)
            workflow = ms_func(subject_id=opts.participant_label, session_id=opts.session_id,
                               run_id=opts.run_id, settings=settings)
            if workflow is None:
                MRIQC_LOG.warning('No scans were found for the given inputs')
                continue

            workflow.base_dir = settings['work_dir']
            if settings.get('write_graph', False):
                workflow.write_graph()

            if not opts.dry_run:
                workflow.run(**plugin_settings)

    # Set up group level
    elif opts.analysis_level == 'group':
        from mriqc.reports import MRIQCReportPDF

        for dtype in opts.data_type:
            reporter = MRIQCReportPDF(dtype, settings)
            reporter.group_report()
            reporter.individual_report()
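The LockFile around the directory setup serializes folder creation across concurrent invocations (e.g. cluster array jobs), while check_folder tolerates folders that already exist. A plausible dependency-free sketch of such a guard, assuming the real mriqc helper behaves like os.makedirs with exist_ok (it may retry or log differently):

import os

def check_folder(folder):
    """Create folder if missing; tolerate concurrent creation."""
    os.makedirs(folder, exist_ok=True)  # sketch only, not mriqc's code
    return folder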
Example No. 22
def create_workflow(opts):
    import logging
    from nipype import config as ncfg
    from fmriprep.utils import make_folder
    from fmriprep.viz.reports import run_reports
    from fmriprep.workflows.base import base_workflow_enumerator

    settings = {
        'bids_root': op.abspath(opts.bids_dir),
        'write_graph': opts.write_graph,
        'nthreads': opts.nthreads,
        'mem_mb': opts.mem_mb,
        'debug': opts.debug,
        'ants_nthreads': opts.ants_nthreads,
        'skull_strip_ants': opts.skull_strip_ants,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'workflow_type': opts.workflow_type,
        'skip_native': opts.skip_native
    }

    # set up logger
    logger = logging.getLogger('cli')

    if opts.debug:
        settings['ants_t1-mni_settings'] = 't1-mni_registration_test'
        logger.setLevel(logging.DEBUG)

    log_dir = op.join(settings['output_dir'], 'log')
    derivatives = op.join(settings['output_dir'], 'derivatives')

    # Check and create output and working directories
    # Using make_folder to prevent https://github.com/poldracklab/mriqc/issues/111
    make_folder(settings['output_dir'])
    make_folder(settings['work_dir'])
    make_folder(derivatives)
    make_folder(log_dir)

    logger.addHandler(logging.FileHandler(op.join(log_dir, 'run_workflow')))

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir,
                      'remove_unnecessary_outputs': False}
    })

    # nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}
            if settings['mem_mb']:
                plugin_settings['plugin_args']['memory_gb'] = settings['mem_mb']/1024

    if settings['ants_nthreads'] == 0:
        settings['ants_nthreads'] = cpu_count()

    # Determine subjects to be processed
    subject_list = opts.participant_label

    if subject_list is None or not subject_list:
        subject_list = [op.basename(subdir)[4:] for subdir in glob.glob(
            op.join(settings['bids_root'], 'sub-*'))]

    logger.info('Subject list: %s', ', '.join(subject_list))

    # Build main workflow and run
    preproc_wf = base_workflow_enumerator(subject_list, task_id=opts.task_id,
                                          settings=settings)
    preproc_wf.base_dir = settings['work_dir']
    preproc_wf.run(**plugin_settings)

    if opts.write_graph:
        preproc_wf.write_graph(graph2use="colored", format='svg',
                               simple_form=True)

    run_reports(settings['output_dir'])
Example No. 23
def main():
    """Entry point"""
    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    g_input = parser.add_argument_group('Inputs')
    g_input.add_argument('-B', '--bids-root', action='store', default=os.getcwd())
    g_input.add_argument('-i', '--input-folder', action='store')
    g_input.add_argument('-S', '--subject-id', nargs='*', action='store')
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    # g_input.add_argument('-d', '--data-type', action='store', nargs='*',
    #                      choices=['anat', 'func'], default=['anat', 'func'])
    g_input.add_argument('-v', '--version', action='store_true', default=False,
                         help='Show current structural_dhcp_mriqc version')

    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--save-memory', action='store_true', default=False,
                         help='Save as much memory as possible')
    g_input.add_argument('--ants-settings', action='store',
                         help='path to JSON file with settings for ANTS')


    g_input.add_argument('--qc-measures', action='store',
                         help='path to JSON file with qc measures')
    g_input.add_argument('--dhcp-measures', action='store',
                         help='path to JSON file with dhcp measurements')


    g_outputs = parser.add_argument_group('Outputs')
    g_outputs.add_argument('-o', '--output-dir', action='store')
    g_outputs.add_argument('-w', '--work-dir', action='store', default=op.join(os.getcwd(), 'work'))

    opts = parser.parse_args()

    bids_root = op.abspath(opts.bids_root)
    if opts.input_folder is not None:
        warn('The --input-folder flag is deprecated, please use -B instead', DeprecationWarning)

        if bids_root == os.getcwd():
            bids_root = op.abspath(opts.input_folder)

    if opts.version:
        print('structural_dhcp_mriqc version ' + __version__)
        exit(0)

    settings = {'bids_root': bids_root,
                'output_dir': os.getcwd(),
                'write_graph': opts.write_graph,
                'save_memory': opts.save_memory,
                'nthreads': opts.nthreads}

    if opts.output_dir:
        settings['output_dir'] = op.abspath(opts.output_dir)

    if not op.exists(settings['output_dir']):
        os.makedirs(settings['output_dir'])

    settings['work_dir'] = op.abspath(opts.work_dir)
    if not op.exists(settings['work_dir']):
        os.makedirs(settings['work_dir'])

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = settings['work_dir'] + '_log'
    if not op.exists(log_dir):
        os.makedirs(log_dir)

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir}
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    dtype = 'anat'
    settings['qc_measures'] = opts.qc_measures
    settings['dhcp_measures'] = opts.dhcp_measures

    # if settings['qc_measures'] == None:
    #     workflow = mwc.ms_anat_dhcp(subject_id=opts.subject_id, session_id=opts.session_id,
    #                        run_id=opts.run_id, settings=settings)
    #     workflow.base_dir = settings['work_dir']
    #     if settings.get('write_graph', False):
    #         workflow.write_graph()

    #     workflow.run(**plugin_settings)
    # else:
    #     print("skipping qc calculations")
    

    workflow = mwc.ms_anat_dhcp(subject_id=opts.subject_id, session_id=opts.session_id,
                           run_id=opts.run_id, settings=settings)
    workflow.base_dir = settings['work_dir']
    workflow.run(**plugin_settings)

    if opts.subject_id is None:
        workflow_report(dtype, settings)
Example No. 24
def main():
    """Entry point"""
    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    g_input = parser.add_argument_group('Inputs')
    g_input.add_argument('-B', '--bids-root', action='store', default=os.getcwd())
    g_input.add_argument('-i', '--input-folder', action='store')
    g_input.add_argument('-S', '--subject-id', nargs='*', action='store')
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('-d', '--data-type', action='store', nargs='*',
                         choices=['anat', 'func'], default=['anat', 'func'])
    g_input.add_argument('-v', '--version', action='store_true', default=False,
                         help='Show current mriqc version')

    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--test-run', action='store_true', default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--save-memory', action='store_true', default=False,
                         help='Save as much memory as possible')
    g_input.add_argument('--hmc-afni', action='store_true', default=False,
                         help='Use AFNI 3dvolreg for head motion correction (HMC) and '
                              'frame displacement (FD) estimation')
    g_input.add_argument('--ants-settings', action='store',
                         help='path to JSON file with settings for ANTS')


    g_outputs = parser.add_argument_group('Outputs')
    g_outputs.add_argument('-o', '--output-dir', action='store')
    g_outputs.add_argument('-w', '--work-dir', action='store', default=op.join(os.getcwd(), 'work'))

    opts = parser.parse_args()

    bids_root = op.abspath(opts.bids_root)
    if opts.input_folder is not None:
        warn('The --input-folder flag is deprecated, please use -B instead', DeprecationWarning)

        if bids_root == os.getcwd():
            bids_root = op.abspath(opts.input_folder)

    if opts.version:
        print('mriqc version ' + __version__)
        exit(0)

    settings = {'bids_root': bids_root,
                'output_dir': os.getcwd(),
                'write_graph': opts.write_graph,
                'save_memory': opts.save_memory,
                'hmc_afni': opts.hmc_afni,
                'nthreads': opts.nthreads}

    if opts.output_dir:
        settings['output_dir'] = op.abspath(opts.output_dir)

    if not op.exists(settings['output_dir']):
        os.makedirs(settings['output_dir'])

    settings['work_dir'] = op.abspath(opts.work_dir)

    with LockFile(settings['work_dir']):
        if not op.exists(settings['work_dir']):
            os.makedirs(settings['work_dir'])

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = settings['work_dir'] + '_log'
    if not op.exists(log_dir):
        os.makedirs(log_dir)

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir}
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['nthreads'] == 0:
            settings['nthreads'] = cpu_count()

        if settings['nthreads'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['nthreads']}

    for dtype in opts.data_type:
        ms_func = getattr(mwc, 'ms_' + dtype)
        workflow = ms_func(subject_id=opts.subject_id, session_id=opts.session_id,
                           run_id=opts.run_id, settings=settings)
        if workflow is None:
            LOGGER.warning('No %s scans were found in %s', dtype, settings['bids_root'])
            continue

        workflow.base_dir = settings['work_dir']
        if settings.get('write_graph', False):
            workflow.write_graph()

        if not opts.test_run:
            workflow.run(**plugin_settings)

        if opts.subject_id is None and not opts.test_run:
            workflow_report(dtype, settings)
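All of these entry points share the same plugin-selection pattern: default to Linear, switch to MultiProc when more than one process is available, and let a --use-plugin file override everything. Factored out for clarity (the wrapper function is ours, for illustration):

from multiprocessing import cpu_count

def choose_plugin(nthreads, mem_gb=None):
    """Return nipype plugin settings following the pattern above."""
    nthreads = nthreads or cpu_count()  # 0 means "use all CPUs"
    if nthreads == 1:
        return {'plugin': 'Linear'}
    plugin_args = {'n_procs': nthreads}
    if mem_gb:
        plugin_args['memory_gb'] = mem_gb
    return {'plugin': 'MultiProc', 'plugin_args': plugin_args}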
Example No. 25
def parse_args(args=None, namespace=None):
    """Parse args and run further checks on the command line."""
    from logging import DEBUG
    from ..utils.bids import collect_bids_data

    parser = _build_parser()
    opts = parser.parse_args(args, namespace)
    config.execution.log_level = int(max(25 - 5 * opts.verbose_count, DEBUG))
    config.from_dict(vars(opts))
    config.loggers.init()

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import load as loadyml

        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        _plugin = plugin_settings.get("plugin")
        if _plugin:
            config.nipype.plugin = _plugin
            config.nipype.plugin_args = plugin_settings.get("plugin_args", {})
            config.nipype.nprocs = config.nipype.plugin_args.get(
                "nprocs", config.nipype.nprocs)

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    if 1 < config.nipype.nprocs < config.nipype.omp_nthreads:
        config.loggers.cli.warning(
            "Per-process threads (--omp-nthreads=%d) exceed total "
            "threads (--nthreads/--n_cpus=%d)",
            config.nipype.omp_nthreads,
            config.nipype.nprocs,
        )

    bids_dir = config.execution.bids_dir
    output_dir = config.execution.output_dir
    work_dir = config.execution.work_dir
    version = config.environment.version

    # Ensure input and output folders are not the same
    if output_dir == bids_dir:
        parser.error(
            "The selected output folder is the same as the input BIDS folder. "
            "Please modify the output path (suggestion: %s)."
            % (bids_dir / "derivatives" / ("mriqc-%s" % version.split("+")[0])))

    if bids_dir in work_dir.parents:
        parser.error(
            "The selected working directory is a subdirectory of the input BIDS folder. "
            "Please modify the output path.")

    # Validate inputs
    # if not opts.skip_bids_validation:
    #     from ..utils.bids import validate_input_dir

    #     build_log.info(
    #         "Making sure the input data is BIDS compliant (warnings can be ignored in most "
    #         "cases)."
    #     )
    #     validate_input_dir(
    #         config.environment.exec_env, opts.bids_dir, opts.participant_label
    #     )

    # Setup directories
    config.execution.log_dir = output_dir / "logs"
    # Check and create output and working directories
    config.execution.log_dir.mkdir(exist_ok=True, parents=True)
    output_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Force initialization of the BIDSLayout
    config.execution.init()
    all_subjects = config.execution.layout.get_subjects()
    if config.execution.participant_label is None:
        config.execution.participant_label = all_subjects

    participant_label = set(config.execution.participant_label)
    missing_subjects = participant_label - set(all_subjects)
    if missing_subjects:
        parser.error(
            "One or more participant labels were not found in the BIDS directory: "
            f"{', '.join(missing_subjects)}.")

    config.execution.participant_label = sorted(participant_label)

    # Handle analysis_level
    analysis_level = set(config.workflow.analysis_level)
    if not config.execution.participant_label:
        analysis_level.add("group")
    config.workflow.analysis_level = list(analysis_level)

    # List of files to be run
    bids_filters = {
        "participant_label": config.execution.participant_label,
        "session": config.execution.session_id,
        "run": config.execution.run_id,
        "task": config.execution.task_id,
        "bids_type": config.execution.modalities,
    }
    config.workflow.inputs = {
        mod: files
        for mod, files in collect_bids_data(
            config.execution.layout, **bids_filters).items()
        if files
    }

    # Check the query is not empty
    if not list(config.workflow.inputs.values()):
        _j = "\n *"
        parser.error(f"""\
Querying BIDS dataset at <{config.execution.bids_dir}> got an empty result.
Please, check out your currently set filters:
{_j.join([''] + [': '.join((k, str(v))) for k, v in bids_filters.items()])}""")

    # Check no DWI or others are sneaked into MRIQC
    unknown_mods = set(config.workflow.inputs.keys()) - set(
        ("T1w", "T2w", "bold"))
    if unknown_mods:
        parser.error("MRIQC is unable to process the following modalities: "
                     f'{", ".join(unknown_mods)}.')

    # Estimate the biggest file size
    config.workflow.biggest_file_gb = _get_biggest_file_size_gb(
        [i for sublist in config.workflow.inputs.values() for i in sublist])
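The listing ends by sizing the largest input file so the workflow can budget memory per node. A plausible sketch of the _get_biggest_file_size_gb helper it calls (the real implementation lives elsewhere in MRIQC and may differ):

import os

def _get_biggest_file_size_gb(files):
    """Largest file size among `files`, in gigabytes (assumes a non-empty list)."""
    return max(os.path.getsize(f) for f in files) / (1024 ** 3)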
Example No. 26
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows smriprep to enforce
    a hard-limited memory-scope.

    """
    from shutil import copyfile
    from os import cpu_count
    import uuid
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf

    import json
    from bids import BIDSLayout
    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_smriprep_wf

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running sMRIPrep version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.

    {spaces}
    """.format

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = opts.bids_dir.resolve()
    layout = BIDSLayout(str(bids_dir), validate=False)
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)

    bids_filters = json.loads(
        opts.bids_filter_file.read_text()) if opts.bids_filter_file else None

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nprocs = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nprocs is None or opts.nprocs is not None:
        nprocs = opts.nprocs
        if nprocs is None or nprocs < 1:
            nprocs = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nprocs

    if opts.mem_gb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nprocs - 1 if nprocs > 1 else cpu_count(), 8)

    if 1 < nprocs < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'available CPUs (--nprocs/--ncpus=%d)', omp_nthreads, nprocs)

    # Set up directories
    output_dir = opts.output_dir.resolve()
    log_dir = output_dir / 'smriprep' / 'logs'
    work_dir = opts.work_dir.resolve()

    # Check and create output and working directories
    log_dir.mkdir(parents=True, exist_ok=True)
    work_dir.mkdir(parents=True, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        from niworkflows.reports import generate_reports

        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 str(output_dir),
                                                 run_uuid,
                                                 packagename="smriprep")
        return retval

    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid,
                 spaces=opts.output_spaces))

    # Build main workflow
    retval['workflow'] = init_smriprep_wf(
        debug=opts.sloppy,
        fast_track=opts.fast_track,
        freesurfer=opts.run_reconall,
        fs_subjects_dir=opts.fs_subjects_dir,
        hires=opts.hires,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        run_uuid=run_uuid,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_mode=opts.skull_strip_mode,
        skull_strip_template=opts.skull_strip_template[0],
        spaces=opts.output_spaces,
        subject_list=subject_list,
        work_dir=str(work_dir),
        bids_filters=bids_filters,
    )
    retval['return_code'] = 0

    boilerplate = retval['workflow'].visit_desc()
    (log_dir / 'CITATION.md').write_text(boilerplate)
    logger.log(
        25, 'Works derived from this sMRIPrep execution should '
        'include the following boilerplate:\n\n%s', boilerplate)

    # Generate HTML file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('smriprep',
              'data/boilerplate.bib'), '--filter', 'pandoc-citeproc',
        '--metadata', 'pagetitle="sMRIPrep citation boilerplate"',
        str(log_dir / 'CITATION.md'), '-o',
        str(log_dir / 'CITATION.html')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.html file:\n%s',
                       ' '.join(cmd))

    # Generate LaTex file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('smriprep', 'data/boilerplate.bib'), '--natbib',
        str(log_dir / 'CITATION.md'), '-o',
        str(log_dir / 'CITATION.tex')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.tex file:\n%s',
                       ' '.join(cmd))
    else:
        copyfile(pkgrf('smriprep', 'data/boilerplate.bib'),
                 str(log_dir / 'CITATION.bib'))
    return retval
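The docstring above says build_workflow takes picklable inputs and a retval dictionary precisely so it can run inside a multiprocessing.Process, letting the parent reclaim the graph-construction memory when the child exits. A minimal sketch of that calling pattern, assuming a Manager dict is used for retval:

from multiprocessing import Manager, Process

def build_workflow_isolated(opts):
    """Run build_workflow in a child process and collect its retval."""
    with Manager() as mgr:
        retval = mgr.dict()
        proc = Process(target=build_workflow, args=(opts, retval))
        proc.start()
        proc.join()
        return dict(retval)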
Example No. 27
def init_mriqc(opts, retval):
    """Build the workflow enumerator"""

    from bids.grabbids import BIDSLayout
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow

    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow

    retval['workflow'] = None
    retval['plugin_settings'] = None

    # Build settings dict
    bids_dir = Path(opts.bids_dir).expanduser()
    output_dir = Path(opts.output_dir).expanduser()

    # Number of processes
    n_procs = opts.n_procs or cpu_count()

    settings = {
        'bids_dir': bids_dir.resolve(),
        'output_dir': output_dir.resolve(),
        'work_dir': opts.work_dir.expanduser().resolve(),
        'write_graph': opts.write_graph,
        'n_procs': n_procs,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    if opts.dsname:
        settings['dataset_name'] = opts.dsname

    log_dir = settings['output_dir'] / 'logs'

    # Create directories
    log_dir.mkdir(parents=True, exist_ok=True)
    settings['work_dir'].mkdir(parents=True, exist_ok=True)

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': str(log_dir), 'log_to_file': True},
        'execution': {
            'crashdump_dir': str(log_dir), 'crashfile_format': 'txt',
            'resource_monitor': opts.profile},
    })

    # Plugin configuration
    plugin_settings = {}
    if n_procs == 1:
        plugin_settings['plugin'] = 'Linear'

        if settings['ants_nthreads'] == 0:
            settings['ants_nthreads'] = 1
    else:
        plugin_settings['plugin'] = 'MultiProc'
        plugin_settings['plugin_args'] = {'n_procs': n_procs}
        if opts.mem_gb:
            plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

        if settings['ants_nthreads'] == 0:
            # always leave one extra thread for non ANTs work,
            # don't use more than 8 threads - the speed up is minimal
            settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)

    # Overwrite options if --use-plugin provided
    if opts.use_plugin and opts.use_plugin.exists():
        from yaml import safe_load
        with opts.use_plugin.open() as pfile:
            plugin_settings.update(safe_load(pfile))

    # Process data types
    modalities = opts.modalities

    layout = BIDSLayout(str(settings['bids_dir']),
                        exclude=['derivatives', 'sourcedata'])
    dataset = collect_bids_data(
        layout,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
        bids_type=modalities,
    )

    workflow = Workflow(name='workflow_enumerator')
    workflow.base_dir = settings['work_dir']

    wf_list = []
    subject_list = []
    for mod in modalities:
        if dataset[mod]:
            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))
            subject_list += dataset[mod]

    retval['subject_list'] = subject_list
    if not wf_list:
        retval['return_code'] = 1
        return retval

    workflow.add_nodes(wf_list)
    retval['plugin_settings'] = plugin_settings
    retval['workflow'] = workflow
    retval['return_code'] = 0
    return retval
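For reference, the file consumed by ``--use-plugin`` above is plain YAML mirroring the ``plugin_settings`` dict built in this function. A self-contained sketch (the values are illustrative) round-trips such a file through ``yaml.safe_load``:

from yaml import safe_load

# Hypothetical contents of a --use-plugin file: any argument accepted by
# the MultiProc plugin is valid under plugin_args.
PLUGIN_YAML = """\
plugin: MultiProc
plugin_args:
  n_procs: 8
  memory_gb: 32
"""

plugin_settings = safe_load(PLUGIN_YAML)
assert plugin_settings['plugin'] == 'MultiProc'
assert plugin_settings['plugin_args']['n_procs'] == 8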
Example No. 28
def create_workflow(opts):
    """Build workflow"""
    import logging
    from fmriprep.utils import make_folder
    from fmriprep.viz.reports import run_reports
    from fmriprep.workflows.base import init_fmriprep_wf

    errno = 0

    # set up logger
    logger = logging.getLogger('cli')

    if opts.debug:
        logger.setLevel(logging.DEBUG)

    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma is True and str(opts.template) != 'MNI152NLin2009cAsym':
        raise RuntimeError('ERROR: if use_aroma is set, the template must be set '
                           'to MNI152NLin2009cAsym not %s' % opts.template)

    run_uuid = strftime('%Y%m%d-%H%M%S_') + str(uuid.uuid4())

    # Check and create output and working directories
    # Using make_folder to prevent https://github.com/poldracklab/mriqc/issues/111
    make_folder(opts.output_dir)
    make_folder(opts.work_dir)

    # nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    nthreads = opts.nthreads
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
    else:
        # Setup multiprocessing
        nthreads = opts.nthreads
        if nthreads == 0:
            nthreads = cpu_count()

        if nthreads > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': nthreads}
            if opts.mem_mb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb/1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        print('Per-process threads (--omp-nthreads={:d}) cannot exceed total '
              'threads (--nthreads/--n_cpus={:d})'.format(omp_nthreads, nthreads))
        sys.exit(1)

    if 'template' not in opts.output_space and (opts.use_syn_sdc or opts.force_syn):
        logger.warning('SyN SDC correction requires T1 to MNI registration, but '
                       '"template" is not specified in "--output-space" arguments')
        if opts.force_syn:
            sys.exit(1)

    # Determine subjects to be processed
    subject_list = opts.participant_label

    if subject_list is None or not subject_list:
        bids_dir = op.abspath(opts.bids_dir)
        subject_list = [op.basename(subdir)[4:]
                        for subdir in glob.glob(op.join(bids_dir, 'sub-*'))]
        if not subject_list:
            print('Could not find subjects in {}\n'
                  'If you are using Docker for Mac or Docker for Windows, you '
                  'may need to adjust your "File sharing" preferences.'.format(bids_dir))
            sys.exit(1)
    else:
        subject_list = [sub[4:] if sub.startswith('sub-') else sub for sub in subject_list]

    logger.info('Subject list: %s', ', '.join(subject_list))

    # Build main workflow and run
    reportlets_dir = op.join(op.abspath(opts.work_dir), 'reportlets')
    output_dir = op.abspath(opts.output_dir)
    bids_dir = op.abspath(opts.bids_dir)
    fmriprep_wf = init_fmriprep_wf(subject_list=subject_list,
                                   task_id=opts.task_id,
                                   run_uuid=run_uuid,
                                   ignore=opts.ignore,
                                   debug=opts.debug,
                                   anat_only=opts.anat_only,
                                   omp_nthreads=omp_nthreads,
                                   skull_strip_ants=opts.skull_strip_ants,
                                   reportlets_dir=reportlets_dir,
                                   output_dir=output_dir,
                                   bids_dir=bids_dir,
                                   freesurfer=opts.freesurfer,
                                   output_spaces=opts.output_space,
                                   template=opts.template,
                                   output_grid_ref=opts.output_grid_reference,
                                   hires=opts.hires,
                                   bold2t1w_dof=opts.bold2t1w_dof,
                                   fmap_bspline=opts.fmap_bspline,
                                   fmap_demean=opts.fmap_no_demean,
                                   use_syn=opts.use_syn_sdc,
                                   force_syn=opts.force_syn,
                                   use_aroma=opts.use_aroma,
                                   ignore_aroma_err=opts.ignore_aroma_denoising_errors)

    fmriprep_wf.base_dir = op.abspath(opts.work_dir)

    if opts.reports_only:
        if opts.write_graph:
            fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                    simple_form=True)

        for subject_label in subject_list:
            run_reports(reportlets_dir,
                        output_dir,
                        subject_label, run_uuid=run_uuid)
        sys.exit()

    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg',
                                simple_form=True)

    report_errors = 0
    for subject_label in subject_list:
        report_errors += run_reports(reportlets_dir,
                                     output_dir,
                                     subject_label, run_uuid=run_uuid)
    if errno == 1:
        assert report_errors > 0

    sys.exit(errno)
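The ``omp_nthreads`` fallback above encodes a simple heuristic, restated in the code's own comments elsewhere: spare one core for the orchestrating process and never give ANTs/ITK more than 8 threads, since the speed-up beyond that is minimal. A small, self-contained restatement of that rule (the helper name is illustrative):

from multiprocessing import cpu_count

def default_omp_nthreads(nthreads):
    """Default per-process threads: one core spared, capped at 8."""
    return min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

assert default_omp_nthreads(2) == 1    # two cores: one for the scheduler
assert default_omp_nthreads(16) == 8   # large machines: capped at 8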
Example No. 29
def build_qsiprep_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows qsiprep to enforce
    a hard-limited memory-scope.

    """
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf
    from bids import BIDSLayout
    from nipype import logging, config as ncfg
    from ..__about__ import __version__
    from ..workflows.base import init_qsiprep_wf
    from ..utils.bids import collect_participants
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running qsiprep version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    bids_dir = opts.bids_dir.resolve()
    output_dir = opts.output_dir.resolve()
    work_dir = opts.work_dir.resolve()

    retval['return_code'] = 1
    retval['workflow'] = None
    retval['bids_dir'] = str(bids_dir)
    retval['work_dir'] = str(work_dir)
    retval['output_dir'] = str(output_dir)

    if output_dir == bids_dir:
        logger.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).',
            bids_dir / 'derivatives' / ('qsiprep-%s' % __version__.split('+')[0]))
        retval['return_code'] = 1
        return retval

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
    retval['run_uuid'] = run_uuid

    # First check that bids_dir looks like a BIDS folder
    layout = BIDSLayout(str(bids_dir), validate=False)
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)
    retval['subject_list'] = subject_list

    output_spaces = opts.output_space or []

    force_spatial_normalization = (
        opts.force_spatial_normalization or 'template' in output_spaces)

    if 'template' in output_spaces:
        logger.warning("Using 'template' as an output space is no longer supported.")
        output_spaces = ["T1w"]

    # Check output_space
    if not force_spatial_normalization and (opts.use_syn_sdc or opts.force_syn):
        msg = [
            'SyN SDC correction requires T1 to MNI registration.',
            'Adding T1w-based normalization'
        ]
        force_spatial_normalization = True
        logger.warning(' '.join(msg))

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
    retval['plugin_settings'] = plugin_settings
    logger.info('Running with omp_nthreads=%d, nthreads=%d', omp_nthreads, nthreads)

    # Set up directories
    log_dir = output_dir / 'qsiprep' / 'logs'
    # Check and create output and working directories
    output_dir.mkdir(exist_ok=True, parents=True)
    log_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': (opts.stop_on_first_crash
                                    or opts.work_dir is None),
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir,
                                                 work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(
            version=__version__,
            bids_dir=bids_dir,
            subject_list=subject_list,
            uuid=run_uuid))

    retval['workflow'] = init_qsiprep_wf(
        subject_list=subject_list,
        run_uuid=run_uuid,
        work_dir=work_dir,
        output_dir=str(output_dir),
        ignore=opts.ignore,
        hires=False,
        freesurfer=opts.do_reconall,
        debug=opts.sloppy,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        b0_threshold=opts.b0_threshold,
        combine_all_dwis=opts.combine_all_dwis,
        distortion_group_merge=opts.distortion_group_merge,
        dwi_denoise_window=opts.dwi_denoise_window,
        unringing_method=opts.unringing_method,
        dwi_no_biascorr=opts.dwi_no_biascorr,
        no_b0_harmonization=opts.no_b0_harmonization,
        denoise_before_combining=opts.denoise_before_combining,
        write_local_bvecs=opts.write_local_bvecs,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        force_spatial_normalization=force_spatial_normalization,
        output_spaces=output_spaces,
        output_resolution=opts.output_resolution,
        template=opts.template,
        bids_dir=bids_dir,
        motion_corr_to=opts.b0_motion_corr_to,
        hmc_transform=opts.hmc_transform,
        hmc_model=opts.hmc_model,
        eddy_config=opts.eddy_config,
        shoreline_iters=opts.shoreline_iters,
        impute_slice_threshold=opts.impute_slice_threshold,
        b0_to_t1w_transform=opts.b0_to_t1w_transform,
        intramodal_template_iters=opts.intramodal_template_iters,
        intramodal_template_transform=opts.intramodal_template_transform,
        prefer_dedicated_fmaps=opts.prefer_dedicated_fmaps,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'qsiprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()
    (logs_path / 'CITATION.md').write_text(boilerplate)
    logger.log(
        25, 'Works derived from this qsiprep execution should '
        'include the following boilerplate:\n\n%s', boilerplate)

    # Generate HTML file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('qsiprep', 'data/boilerplate.bib'), '--filter',
        'pandoc-citeproc',
        str(logs_path / 'CITATION.md'), '-o',
        str(logs_path / 'CITATION.html')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.html file:\n%s',
                       ' '.join(cmd))

    # Generate LaTex file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('qsiprep', 'data/boilerplate.bib'), '--natbib',
        str(logs_path / 'CITATION.md'), '-o',
        str(logs_path / 'CITATION.tex')
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.tex file:\n%s',
                       ' '.join(cmd))
    return retval
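As the docstring notes, ``build_qsiprep_workflow`` communicates only through the pickleable ``retval`` mapping so that a parent process can build the workflow in an isolated child and reclaim all the memory the build allocated when the child exits. A hedged sketch of such a driver (illustrative only; the real CLI entry point may differ):

from multiprocessing import Manager, Process

def run_isolated(builder, opts):
    """Run a workflow builder in a child process; return its retval dict."""
    with Manager() as mgr:
        retval = mgr.dict()
        proc = Process(target=builder, args=(opts, retval))
        proc.start()
        proc.join()
        return dict(retval)  # copy out before the manager shuts down

# e.g.: workflow = run_isolated(build_qsiprep_workflow, opts)['workflow']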
Example No. 30
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows fmriprep to enforce
    a hard-limited memory-scope.

    """
    from bids import BIDSLayout

    from nipype import logging as nlogging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from niworkflows.reports import generate_reports
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf

    build_log = nlogging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    bids_dir = opts.bids_dir.resolve()
    output_dir = opts.output_dir.resolve()
    work_dir = opts.work_dir.resolve()

    retval['return_code'] = 1
    retval['workflow'] = None
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)

    if output_dir == bids_dir:
        build_log.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).', bids_dir /
            'derivatives' / ('fmriprep-%s' % __version__.split('+')[0]))
        retval['return_code'] = 1
        return retval

    output_spaces = parse_spaces(opts)

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
    retval['run_uuid'] = run_uuid

    # First check that bids_dir looks like a BIDS folder
    layout = BIDSLayout(str(bids_dir),
                        validate=False,
                        ignore=("code", "stimuli", "sourcedata", "models",
                                "derivatives", re.compile(r'^\.')))
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)
    retval['subject_list'] = subject_list

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        build_log.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
    retval['plugin_settings'] = plugin_settings

    # Set up directories
    log_dir = output_dir / 'fmriprep' / 'logs'
    # Check and create output and working directories
    output_dir.mkdir(exist_ok=True, parents=True)
    log_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    # Called with reports only
    if opts.reports_only:
        build_log.log(25, 'Running --reports-only on participants %s',
                      ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 output_dir,
                                                 work_dir,
                                                 run_uuid,
                                                 packagename='fmriprep')
        return retval

    # Build main workflow
    build_log.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    retval['workflow'] = init_fmriprep_wf(
        anat_only=opts.anat_only,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        bold2t1w_dof=opts.bold2t1w_dof,
        cifti_output=opts.cifti_output,
        debug=opts.sloppy,
        dummy_scans=opts.dummy_scans,
        echo_idx=opts.echo_idx,
        err_on_aroma_warn=opts.error_on_aroma_warnings,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        force_syn=opts.force_syn,
        freesurfer=opts.run_reconall,
        hires=opts.hires,
        ignore=opts.ignore,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        medial_surface_nan=opts.medial_surface_nan,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        output_spaces=output_spaces,
        run_uuid=run_uuid,
        regressors_all_comps=opts.return_all_components,
        regressors_fd_th=opts.fd_spike_threshold,
        regressors_dvars_th=opts.dvars_spike_threshold,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_template=opts.skull_strip_template,
        subject_list=subject_list,
        t2s_coreg=opts.t2s_coreg,
        task_id=opts.task_id,
        use_aroma=opts.use_aroma,
        use_bbr=opts.use_bbr,
        use_syn=opts.use_syn_sdc,
        work_dir=str(work_dir),
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        citation_files = {
            ext: logs_path / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }
        # To please git-annex users and also to guarantee consistency
        # among different renderings of the same file, first remove any
        # existing one
        for citation_file in citation_files.values():
            try:
                citation_file.unlink()
            except FileNotFoundError:
                pass

        citation_files['md'].write_text(boilerplate)
        build_log.log(
            25, 'Works derived from this fMRIPrep execution should '
            'include the following boilerplate:\n\n%s', boilerplate)
    return retval
Example No. 31
def build_workflow(opts, retval):
    from bids import BIDSLayout
    from nipype import logging as nlogging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from niworkflows.reports import generate_reports
    from ..__about__ import __version__
    from time import strftime
    import uuid
    from ..workflows.base import init_base_wf

    build_log = nlogging.getLogger('nipype.workflow')

    INIT_MSG = """
    #     Running atlasTransform version {version}:
    #       * BIDS dataset path: {bids_dir}.
    #       * Participant list: {subject_list}.
    #       * Run identifier: {uuid}.
    #     """.format

    pass

    bids_dir = opts.bids_dir.resolve()
    output_dir = opts.output_dir.resolve()
    work_dir = opts.work_dir.resolve()

    retval['return_code'] = 1
    retval['workflow'] = None
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)

    if output_dir == bids_dir:
        build_log.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).', bids_dir /
            'derivatives' / ('atlasTransform-%s' % __version__.split('+')[0]))
        retval['return_code'] = 1
        return retval

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
    retval['run_uuid'] = run_uuid

    # First check that bids_dir looks like a BIDS folder
    layout = BIDSLayout(str(bids_dir),
                        validate=False,
                        ignore=("code", "stimuli", "sourcedata", "models",
                                "derivatives", re.compile(r'^\.')))
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)
    retval['subject_list'] = subject_list

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load
        with open(opts.use_plugin) as f:
            plugin_settings = safe_load(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        build_log.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
    retval['plugin_settings'] = plugin_settings

    # Set up directories
    log_dir = output_dir / 'atlasTransform' / 'logs'
    # Check and create output and working directories
    output_dir.mkdir(exist_ok=True, parents=True)
    log_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    # Called with reports only
    if opts.reports_only:
        build_log.log(25, 'Running --reports-only on participants %s',
                      ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 output_dir,
                                                 work_dir,
                                                 run_uuid,
                                                 packagename='atlasTransform')
        return retval

    # Build main workflow
    build_log.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    retval['workflow'] = init_base_wf(
        opts=opts,
        layout=layout,
        run_uuid=run_uuid,
        subject_list=subject_list,
        work_dir=str(work_dir),
        output_dir=str(output_dir),
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'atlasTransform' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        citation_files = {
            ext: logs_path / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }
        # To please git-annex users and also to guarantee consistency
        # among different renderings of the same file, first remove any
        # existing one
        for citation_file in citation_files.values():
            try:
                citation_file.unlink()
            except FileNotFoundError:
                pass

        citation_files['md'].write_text(boilerplate)
        build_log.log(
            25, 'Works derived from this atlasTransform execution should '
            'include the following boilerplate:\n\n%s', boilerplate)
    return retval