Example #1
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function, which has pickleable inputs and an output
    dictionary (``retval``) so that it can be isolated in a
    ``multiprocessing.Process``, allowing smriprep to enforce
    a hard memory limit.

    """
    from shutil import copyfile
    from os import cpu_count
    import uuid
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf

    import json
    from bids import BIDSLayout
    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_smriprep_wf

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running sMRIPrep version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.

    {spaces}
    """.format

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = opts.bids_dir.resolve()
    layout = BIDSLayout(str(bids_dir), validate=False)
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)

    bids_filters = json.loads(
        opts.bids_filter_file.read_text()) if opts.bids_filter_file else None

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }
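
    # For reference, a ``--use-plugin`` YAML file mirrors the default
    # dictionary above. A hypothetical example (values are illustrative):
    #
    #   plugin: MultiProc
    #   plugin_args:
    #     n_procs: 8
    #     memory_gb: 32
    #     raise_insufficient: false
    #     maxtasksperchild: 1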

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nprocs = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nprocs is None or opts.nprocs is not None:
        nprocs = opts.nprocs
        if nprocs is None or nprocs < 1:
            nprocs = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nprocs

    if opts.mem_gb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nprocs - 1 if nprocs > 1 else cpu_count(), 8)

    if 1 < nprocs < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'available CPUs (--nprocs/--ncpus=%d)', omp_nthreads, nprocs)
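
    # Worked example of the logic above: with ``--nprocs 8`` and
    # ``--omp-nthreads 0`` (auto), omp_nthreads = min(8 - 1, 8) = 7, holding
    # one CPU back for orchestration; with nprocs == 1 it falls back to
    # min(cpu_count(), 8).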

    # Set up directories
    output_dir = opts.output_dir.resolve()
    log_dir = output_dir / 'smriprep' / 'logs'
    work_dir = opts.work_dir.resolve()

    # Check and create output and working directories
    log_dir.mkdir(parents=True, exist_ok=True)
    work_dir.mkdir(parents=True, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        from niworkflows.reports import generate_reports

        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 str(output_dir),
                                                 run_uuid,
                                                 packagename="smriprep")
        return retval

    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid,
                 spaces=opts.output_spaces))

    # Build main workflow
    retval['workflow'] = init_smriprep_wf(
        debug=opts.sloppy,
        fast_track=opts.fast_track,
        freesurfer=opts.run_reconall,
        fs_subjects_dir=opts.fs_subjects_dir,
        hires=opts.hires,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        run_uuid=run_uuid,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_mode=opts.skull_strip_mode,
        skull_strip_template=opts.skull_strip_template[0],
        spaces=opts.output_spaces,
        subject_list=subject_list,
        work_dir=str(work_dir),
        bids_filters=bids_filters,
    )
    retval['return_code'] = 0

    boilerplate = retval['workflow'].visit_desc()
    (log_dir / 'CITATION.md').write_text(boilerplate)
    logger.log(
        25, 'Works derived from this sMRIPrep execution should '
        'include the following boilerplate:\n\n%s', boilerplate)

    # Generate HTML file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('smriprep', 'data/boilerplate.bib'),
        '--filter', 'pandoc-citeproc',
        '--metadata', 'pagetitle="sMRIPrep citation boilerplate"',
        str(log_dir / 'CITATION.md'),
        '-o', str(log_dir / 'CITATION.html'),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.html file:\n%s',
                       ' '.join(cmd))

    # Generate LaTeX file resolving citations
    cmd = [
        'pandoc', '-s', '--bibliography',
        pkgrf('smriprep', 'data/boilerplate.bib'),
        '--natbib',
        str(log_dir / 'CITATION.md'),
        '-o', str(log_dir / 'CITATION.tex'),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning('Could not generate CITATION.tex file:\n%s',
                       ' '.join(cmd))
    else:
        copyfile(pkgrf('smriprep', 'data/boilerplate.bib'),
                 str(log_dir / 'CITATION.bib'))
    return retval
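
How this function is typically driven, per the ``multiprocessing.Process``
isolation described in the docstring. A minimal sketch, not verbatim upstream
code (``get_parser`` is a hypothetical stand-in for the tool's CLI parser):

def main():
    from multiprocessing import Manager, Process

    opts = get_parser().parse_args()
    with Manager() as mgr:
        retval = mgr.dict()
        # Build in a child process so its memory is reclaimed once the
        # workflow object has been pickled back through ``retval``.
        proc = Process(target=build_workflow, args=(opts, retval))
        proc.start()
        proc.join()
        retcode = proc.exitcode or retval.get('return_code', 0)
        workflow = retval.get('workflow', None)
        plugin_settings = retval.get('plugin_settings', None)
    if retcode != 0 or workflow is None:
        return retcode
    workflow.run(**plugin_settings)
    return 0
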
Example #2
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function, which has pickleable inputs and an output
    dictionary (``retval``) so that it can be isolated in a
    ``multiprocessing.Process``, allowing fmriprep to enforce
    a hard memory limit.

    """
    import re
    import uuid
    from os import cpu_count
    from pathlib import Path
    from time import strftime

    from bids import BIDSLayout

    from nipype import logging as nlogging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from niworkflows.reports import generate_reports
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf

    build_log = nlogging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    bids_dir = opts.bids_dir.resolve()
    output_dir = opts.output_dir.resolve()
    work_dir = opts.work_dir.resolve()

    retval['return_code'] = 1
    retval['workflow'] = None
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)

    if output_dir == bids_dir:
        build_log.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).', bids_dir /
            'derivatives' / ('fmriprep-%s' % __version__.split('+')[0]))
        retval['return_code'] = 1
        return retval

    # ``parse_spaces`` is a module-level helper of the original fmriprep CLI
    # that turns the --output-spaces arguments into a list of target spaces.
    output_spaces = parse_spaces(opts)

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
    retval['run_uuid'] = run_uuid

    # First check that bids_dir looks like a BIDS folder
    layout = BIDSLayout(str(bids_dir),
                        validate=False,
                        ignore=("code", "stimuli", "sourcedata", "models",
                                "derivatives", re.compile(r'^\.')))
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)
    retval['subject_list'] = subject_list

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        build_log.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
    retval['plugin_settings'] = plugin_settings

    # Set up directories
    log_dir = output_dir / 'fmriprep' / 'logs'
    # Check and create output and working directories
    output_dir.mkdir(exist_ok=True, parents=True)
    log_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    # Called with reports only
    if opts.reports_only:
        build_log.log(25, 'Running --reports-only on participants %s',
                      ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 output_dir,
                                                 work_dir,
                                                 run_uuid,
                                                 packagename='fmriprep')
        return retval

    # Build main workflow
    build_log.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    retval['workflow'] = init_fmriprep_wf(
        anat_only=opts.anat_only,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        bold2t1w_dof=opts.bold2t1w_dof,
        cifti_output=opts.cifti_output,
        debug=opts.sloppy,
        dummy_scans=opts.dummy_scans,
        echo_idx=opts.echo_idx,
        err_on_aroma_warn=opts.error_on_aroma_warnings,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        force_syn=opts.force_syn,
        freesurfer=opts.run_reconall,
        hires=opts.hires,
        ignore=opts.ignore,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        medial_surface_nan=opts.medial_surface_nan,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        output_spaces=output_spaces,
        run_uuid=run_uuid,
        regressors_all_comps=opts.return_all_components,
        regressors_fd_th=opts.fd_spike_threshold,
        regressors_dvars_th=opts.dvars_spike_threshold,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_template=opts.skull_strip_template,
        subject_list=subject_list,
        t2s_coreg=opts.t2s_coreg,
        task_id=opts.task_id,
        use_aroma=opts.use_aroma,
        use_bbr=opts.use_bbr,
        use_syn=opts.use_syn_sdc,
        work_dir=str(work_dir),
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        citation_files = {
            ext: logs_path / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }
        # To please git-annex users and also to guarantee consistency
        # among different renderings of the same file, first remove any
        # existing one
        for citation_file in citation_files.values():
            try:
                citation_file.unlink()
            except FileNotFoundError:
                pass

        citation_files['md'].write_text(boilerplate)
        build_log.log(
            25, 'Works derived from this fMRIPrep execution should '
            'include the following boilerplate:\n\n%s', boilerplate)
    return retval
Example #3
def build_workflow(config_file, retval):
    """Create the Nipype Workflow that supports the whole execution graph."""
    from niworkflows.utils.bids import collect_participants, check_pipeline_version
    from niworkflows.utils.misc import check_valid_fs_license
    from niworkflows.reports import generate_reports
    from .. import config
    from ..utils.misc import check_deps
    from ..workflows.base import init_fmriprep_wf

    config.load(config_file)
    build_log = config.loggers.workflow

    output_dir = config.execution.output_dir
    version = config.environment.version

    retval["return_code"] = 1
    retval["workflow"] = None

    # warn if older results exist: check for dataset_description.json in output folder
    msg = check_pipeline_version(
        version, output_dir / "fmriprep-rodents" / "dataset_description.json")
    if msg is not None:
        build_log.warning(msg)

    # Please note this is the input folder's dataset_description.json
    dset_desc_path = config.execution.bids_dir / "dataset_description.json"
    if dset_desc_path.exists():
        from hashlib import sha256

        desc_content = dset_desc_path.read_bytes()
        config.execution.bids_description_hash = sha256(
            desc_content).hexdigest()

    # First check that bids_dir looks like a BIDS folder
    subject_list = collect_participants(
        config.execution.layout,
        participant_label=config.execution.participant_label)

    # Called with reports only
    if config.execution.reports_only:
        from pkg_resources import resource_filename as pkgrf

        build_log.log(25, "Running --reports-only on participants %s",
                      ", ".join(subject_list))
        retval["return_code"] = generate_reports(
            subject_list,
            config.execution.output_dir,
            config.execution.run_uuid,
            config=pkgrf("fmriprep_rodents", "data/reports-spec.yml"),
            packagename="fmriprep-rodents",
        )
        return retval

    # Build main workflow
    init_msg = f"""
    Running fMRIPREP version {config.environment.version}:
      * BIDS dataset path: {config.execution.bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {config.execution.run_uuid}.
      * Output spaces: {config.execution.output_spaces}."""

    if config.execution.anat_derivatives:
        init_msg += f"""
      * Anatomical derivatives: {config.execution.anat_derivatives}."""

    if config.execution.fs_subjects_dir:
        init_msg += f"""
      * Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}."""
    build_log.log(25, init_msg)

    retval["workflow"] = init_fmriprep_wf()

    # Check for FS license after building the workflow
    if not check_valid_fs_license():
        build_log.critical("""\
ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \
license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \
2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \
(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html"""
                           )
        retval["return_code"] = 126  # 126 == Command invoked cannot execute.
        return retval

    # Check workflow for missing commands
    missing = check_deps(retval["workflow"])
    if missing:
        build_log.critical(
            "Cannot run fMRIPrep. Missing dependencies:%s",
            "\n\t* ".join(
                [""] +
                [f"{cmd} (Interface: {iface})" for iface, cmd in missing]),
        )
        retval["return_code"] = 127  # 127 == command not found.
        return retval

    config.to_filename(config_file)
    build_log.info(
        "fMRIPrep workflow graph with %d nodes built successfully.",
        len(retval["workflow"]._get_all_nodes()),
    )
    retval["return_code"] = 0
    return retval
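
For context, ``check_deps`` (imported above from ``..utils.misc``) walks the
built workflow and collects command-line tools missing from ``PATH``. A
minimal sketch of such a helper, assuming the same private
``Workflow._get_all_nodes`` API these examples already rely on:

def check_deps(workflow):
    """Return sorted (interface, command) pairs whose executables are absent."""
    from nipype.utils.filemanip import which
    return sorted(
        (node.interface.__class__.__name__, node.interface._cmd)
        for node in workflow._get_all_nodes()
        if (hasattr(node.interface, '_cmd')
            and which(node.interface._cmd.split()[0]) is None))
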
Example #4
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function, which has pickleable inputs and an output
    dictionary (``retval``) so that it can be isolated in a
    ``multiprocessing.Process``, allowing fmriprep to enforce
    a hard memory limit.

    """
    import os
    import os.path as op
    import uuid
    from os import cpu_count
    from pathlib import Path
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf
    from shutil import copyfile

    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    output_spaces = opts.output_space or []

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym'
                           or 'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--use-aroma" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").')

    if opts.cifti_output and (opts.template != 'MNI152NLin2009cAsym'
                              or 'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--cifti-output" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").')

    # Check output_space
    if 'template' not in output_spaces and (opts.use_syn_sdc
                                            or opts.force_syn):
        msg = [
            'SyN SDC correction requires T1 to MNI registration, but '
            '"template" is not specified in "--output-space" arguments.',
            'Option --use-syn will be cowardly dismissed.'
        ]
        if opts.force_syn:
            output_spaces.append('template')
            msg[1] = (
                ' Since --force-syn has been requested, "template" has been added to'
                ' the "--output-space" list.')
        logger.warning(' '.join(msg))

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = os.path.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = bids_dir
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir,
                                                 work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    template_out_grid = opts.template_resampling_grid
    if opts.output_grid_reference is not None:
        logger.warning(
            'Option --output-grid-reference is deprecated, please use '
            '--template-resampling-grid')
        template_out_grid = template_out_grid or opts.output_grid_reference
    if opts.debug:
        logger.warning('Option --debug is deprecated and has no effect')

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        echo_idx=opts.echo_idx,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.sloppy,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        output_spaces=output_spaces,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        cifti_output=opts.cifti_output,
        template_out_grid=template_out_grid,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        (logs_path / 'CITATION.md').write_text(boilerplate)
        logger.log(
            25, 'Works derived from this fMRIPrep execution should '
            'include the following boilerplate:\n\n%s', boilerplate)

        # Generate HTML file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('fmriprep', 'data/boilerplate.bib'),
            '--filter', 'pandoc-citeproc',
            '--metadata', 'pagetitle="fMRIPrep citation boilerplate"',
            str(logs_path / 'CITATION.md'),
            '-o', str(logs_path / 'CITATION.html'),
        ]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.html file:\n%s',
                           ' '.join(cmd))

        # Generate LaTeX file resolving citations
        cmd = [
            'pandoc', '-s', '--bibliography',
            pkgrf('fmriprep', 'data/boilerplate.bib'), '--natbib',
            str(logs_path / 'CITATION.md'), '-o',
            str(logs_path / 'CITATION.tex')
        ]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.tex file:\n%s',
                           ' '.join(cmd))
        else:
            copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                     (logs_path / 'CITATION.bib'))

    return retval
Example #5
def build_workflow(config_file, retval):
    """Create the Nipype Workflow that supports the whole execution graph."""
    from niworkflows.utils.bids import collect_participants, check_pipeline_version
    from niworkflows.reports import generate_reports
    from .. import config
    from ..utils.misc import check_deps
    from ..workflows.base import init_fmriprep_wf

    config.load(config_file)
    build_log = config.loggers.workflow

    output_dir = config.execution.output_dir
    version = config.environment.version

    retval["return_code"] = 1
    retval["workflow"] = None

    # warn if older results exist: check for dataset_description.json in output folder
    msg = check_pipeline_version(
        version, output_dir / "fmriprep" / "dataset_description.json")
    if msg is not None:
        build_log.warning(msg)

    # Please note this is the input folder's dataset_description.json
    dset_desc_path = config.execution.bids_dir / "dataset_description.json"
    if dset_desc_path.exists():
        from hashlib import sha256

        desc_content = dset_desc_path.read_bytes()
        config.execution.bids_description_hash = sha256(
            desc_content).hexdigest()

    # First check that bids_dir looks like a BIDS folder
    subject_list = collect_participants(
        config.execution.layout,
        participant_label=config.execution.participant_label)

    # Called with reports only
    if config.execution.reports_only:
        from pkg_resources import resource_filename as pkgrf

        build_log.log(25, "Running --reports-only on participants %s",
                      ", ".join(subject_list))
        retval["return_code"] = generate_reports(
            subject_list,
            config.execution.output_dir,
            config.execution.run_uuid,
            config=pkgrf("fmriprep", "data/reports-spec.yml"),
            packagename="fmriprep",
        )
        return retval

    # Build main workflow
    init_msg = f"""
    Running fMRIPREP version {config.environment.version}:
      * BIDS dataset path: {config.execution.bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {config.execution.run_uuid}.
      * Output spaces: {config.execution.output_spaces}."""

    if config.execution.anat_derivatives:
        init_msg += f"""
      * Anatomical derivatives: {config.execution.anat_derivatives}."""

    if config.execution.fs_subjects_dir:
        init_msg += f"""
      * Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}."""
    build_log.log(25, init_msg)

    retval["workflow"] = init_fmriprep_wf()

    # Check workflow for missing commands
    missing = check_deps(retval["workflow"])
    if missing:
        build_log.critical(
            "Cannot run fMRIPrep. Missing dependencies:%s",
            "\n\t* ".join(
                [""] +
                [f"{cmd} (Interface: {iface})" for iface, cmd in missing]),
        )
        retval["return_code"] = 127  # 127 == command not found.
        return retval

    config.to_filename(config_file)
    build_log.info(
        "fMRIPrep workflow graph with %d nodes built successfully.",
        len(retval["workflow"]._get_all_nodes()),
    )
    retval["return_code"] = 0
    return retval
Example #6
def build_workflow(config_file, retval):
    """Create the Nipype Workflow that supports the whole execution graph."""
    from niworkflows.utils.bids import collect_participants, check_pipeline_version
    from niworkflows.reports import generate_reports
    from .. import config
    from ..utils.misc import check_deps
    from ..workflows.base import init_fmriprep_wf

    config.load(config_file)
    build_log = config.loggers.workflow

    output_dir = config.execution.output_dir
    version = config.environment.version

    retval['return_code'] = 1
    retval['workflow'] = None

    # warn if older results exist: check for dataset_description.json in output folder
    msg = check_pipeline_version(
        version, output_dir / 'fmriprep' / 'dataset_description.json')
    if msg is not None:
        build_log.warning(msg)

    # Please note this is the input folder's dataset_description.json
    dset_desc_path = config.execution.bids_dir / 'dataset_description.json'
    if dset_desc_path.exists():
        from hashlib import sha256
        desc_content = dset_desc_path.read_bytes()
        config.execution.bids_description_hash = sha256(
            desc_content).hexdigest()

    # First check that bids_dir looks like a BIDS folder
    subject_list = collect_participants(
        config.execution.layout,
        participant_label=config.execution.participant_label)

    # Called with reports only
    if config.execution.reports_only:
        from pkg_resources import resource_filename as pkgrf

        build_log.log(25, 'Running --reports-only on participants %s',
                      ', '.join(subject_list))
        retval['return_code'] = generate_reports(subject_list,
                                                 config.execution.output_dir,
                                                 config.execution.work_dir,
                                                 config.execution.run_uuid,
                                                 config=pkgrf(
                                                     'fmriprep',
                                                     'data/reports-spec.yml'),
                                                 packagename='fmriprep')
        return retval

    # Build main workflow
    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
      * Output spaces: {spaces}.
    """.format
    build_log.log(
        25,
        INIT_MSG(version=config.environment.version,
                 bids_dir=config.execution.bids_dir,
                 subject_list=subject_list,
                 uuid=config.execution.run_uuid,
                 spaces=config.execution.output_spaces))

    retval['workflow'] = init_fmriprep_wf()

    # Check workflow for missing commands
    missing = check_deps(retval['workflow'])
    if missing:
        build_log.critical(
            "Cannot run fMRIPrep. Missing dependencies:%s",
            "\n\t* ".join([""] + [
                "{} (Interface: {})".format(cmd, iface)
                for iface, cmd in missing
            ]))
        retval['return_code'] = 127  # 127 == command not found.
        return retval

    config.to_filename(config_file)
    build_log.info("fMRIPrep workflow graph with %d nodes built successfully.",
                   len(retval['workflow']._get_all_nodes()))
    retval['return_code'] = 0
    return retval
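
The config-file variants (Examples #3, #5 and #6) expect the parent process to
have serialized all settings before spawning the child. A minimal sketch of
that round trip, assuming the ``config`` module API used above (the config
file path is illustrative):

def main():
    from multiprocessing import Manager, Process
    from fmriprep import config

    config_file = (config.execution.work_dir / config.execution.run_uuid
                   / 'config.toml')
    config_file.parent.mkdir(parents=True, exist_ok=True)
    config.to_filename(config_file)  # the parent writes settings to disk ...
    with Manager() as mgr:
        retval = mgr.dict()
        # ... and the child reloads them via config.load(config_file)
        proc = Process(target=build_workflow, args=(str(config_file), retval))
        proc.start()
        proc.join()
        retcode = proc.exitcode or retval.get('return_code', 0)
    return retcode
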
Example #7
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function, which has pickleable inputs and an output
    dictionary (``retval``) so that it can be isolated in a
    ``multiprocessing.Process``, allowing fmriprep to enforce
    a hard memory limit.

    """
    import os
    import os.path as op
    import uuid
    from os import cpu_count
    from pathlib import Path
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf
    from shutil import copyfile

    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..viz.reports import generate_reports

    logger = logging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    output_spaces = opts.output_space or []

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym' or
                           'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--use-aroma" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").'
        )

    if opts.cifti_output and (opts.template != 'MNI152NLin2009cAsym' or
                              'template' not in output_spaces):
        output_spaces.append('template')
        logger.warning(
            'Option "--cifti-output" requires functional images to be resampled to MNI space. '
            'The argument "template" has been automatically added to the list of output '
            'spaces (option "--output-space").'
        )

    # Check output_space
    if 'template' not in output_spaces and (opts.use_syn_sdc or opts.force_syn):
        msg = ['SyN SDC correction requires T1 to MNI registration, but '
               '"template" is not specified in "--output-space" arguments.',
               'Option --use-syn will be cowardly dismissed.']
        if opts.force_syn:
            output_spaces.append('template')
            msg[1] = (' Since --force-syn has been requested, "template" has been added to'
                      ' the "--output-space" list.')
        logger.warning(' '.join(msg))

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = os.path.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['bids_dir'] = bids_dir
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir, work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(25, INIT_MSG(
        version=__version__,
        bids_dir=bids_dir,
        subject_list=subject_list,
        uuid=run_uuid)
    )

    template_out_grid = opts.template_resampling_grid
    if opts.output_grid_reference is not None:
        logger.warning(
            'Option --output-grid-reference is deprecated, please use '
            '--template-resampling-grid')
        template_out_grid = template_out_grid or opts.output_grid_reference
    if opts.debug:
        logger.warning('Option --debug is deprecated and has no effect')

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        echo_idx=opts.echo_idx,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.sloppy,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        output_spaces=output_spaces,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        cifti_output=opts.cifti_output,
        template_out_grid=template_out_grid,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        aroma_melodic_dim=opts.aroma_melodic_dimensionality,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'fmriprep' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        (logs_path / 'CITATION.md').write_text(boilerplate)
        logger.log(25, 'Works derived from this fMRIPrep execution should '
                   'include the following boilerplate:\n\n%s', boilerplate)

        # Generate HTML file resolving citations
        cmd = ['pandoc', '-s', '--bibliography',
               pkgrf('fmriprep', 'data/boilerplate.bib'),
               '--filter', 'pandoc-citeproc',
               str(logs_path / 'CITATION.md'),
               '-o', str(logs_path / 'CITATION.html')]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.html file:\n%s',
                           ' '.join(cmd))

        # Generate LaTeX file resolving citations
        cmd = ['pandoc', '-s', '--bibliography',
               pkgrf('fmriprep', 'data/boilerplate.bib'),
               '--natbib', str(logs_path / 'CITATION.md'),
               '-o', str(logs_path / 'CITATION.tex')]
        try:
            check_call(cmd, timeout=10)
        except (FileNotFoundError, CalledProcessError, TimeoutExpired):
            logger.warning('Could not generate CITATION.tex file:\n%s',
                           ' '.join(cmd))
        else:
            copyfile(pkgrf('fmriprep', 'data/boilerplate.bib'),
                     (logs_path / 'CITATION.bib'))

    return retval
Example #8
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function, which has pickleable inputs and an output
    dictionary (``retval``) so that it can be isolated in a
    ``multiprocessing.Process``, allowing smriprep to enforce
    a hard memory limit.

    """
    from shutil import copyfile
    from os import cpu_count
    import uuid
    from time import strftime
    from subprocess import check_call, CalledProcessError, TimeoutExpired
    from pkg_resources import resource_filename as pkgrf

    import json
    from bids import BIDSLayout
    from nipype import logging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from ..__about__ import __version__
    from ..workflows.base import init_smriprep_wf

    logger = logging.getLogger("nipype.workflow")

    INIT_MSG = """
    Running sMRIPrep version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.

    {spaces}
    """.format

    # Set up some instrumental utilities
    run_uuid = "%s_%s" % (strftime("%Y%m%d-%H%M%S"), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = opts.bids_dir.resolve()
    layout = BIDSLayout(str(bids_dir), validate=False)
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)

    bids_filters = (json.loads(opts.bids_filter_file.read_text())
                    if opts.bids_filter_file else None)

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6

        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault("plugin_args", {})
    else:
        # Defaults
        plugin_settings = {
            "plugin": "MultiProc",
            "plugin_args": {
                "raise_insufficient": False,
                "maxtasksperchild": 1,
            },
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nprocs = plugin_settings["plugin_args"].get("n_procs")
    # Permit overriding plugin config with specific CLI options
    if nprocs is None or opts.nprocs is not None:
        nprocs = opts.nprocs
        if nprocs is None or nprocs < 1:
            nprocs = cpu_count()
        plugin_settings["plugin_args"]["n_procs"] = nprocs

    if opts.mem_gb:
        plugin_settings["plugin_args"]["memory_gb"] = opts.mem_gb

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nprocs - 1 if nprocs > 1 else cpu_count(), 8)

    if 1 < nprocs < omp_nthreads:
        logger.warning(
            "Per-process threads (--omp-nthreads=%d) exceed total "
            "available CPUs (--nprocs/--ncpus=%d)",
            omp_nthreads,
            nprocs,
        )

    # Set up directories
    output_dir = opts.output_dir.resolve()
    log_dir = output_dir / "smriprep" / "logs"
    work_dir = opts.work_dir.resolve()

    # Check and create output and working directories
    log_dir.mkdir(parents=True, exist_ok=True)
    work_dir.mkdir(parents=True, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        "logging": {
            "log_directory": str(log_dir),
            "log_to_file": True
        },
        "execution": {
            "crashdump_dir": str(log_dir),
            "crashfile_format": "txt",
            "get_linked_libs": False,
            "stop_on_first_crash": opts.stop_on_first_crash,
        },
        "monitoring": {
            "enabled": opts.resource_monitor,
            "sample_frequency": "0.5",
            "summary_append": True,
        },
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval["return_code"] = 0
    retval["plugin_settings"] = plugin_settings
    retval["bids_dir"] = str(bids_dir)
    retval["output_dir"] = str(output_dir)
    retval["work_dir"] = str(work_dir)
    retval["subject_list"] = subject_list
    retval["run_uuid"] = run_uuid
    retval["workflow"] = None

    # Called with reports only
    if opts.reports_only:
        from niworkflows.reports import generate_reports

        logger.log(25, "Running --reports-only on participants %s",
                   ", ".join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
        retval["return_code"] = generate_reports(subject_list,
                                                 str(output_dir),
                                                 run_uuid,
                                                 packagename="smriprep")
        return retval

    output_spaces = opts.output_spaces
    if not output_spaces.is_cached():
        output_spaces.checkpoint()

    logger.log(
        25,
        INIT_MSG(
            version=__version__,
            bids_dir=bids_dir,
            subject_list=subject_list,
            uuid=run_uuid,
            spaces=output_spaces,
        ),
    )

    # Build main workflow
    retval["workflow"] = init_smriprep_wf(
        debug=opts.sloppy,
        fast_track=opts.fast_track,
        freesurfer=opts.run_reconall,
        fs_subjects_dir=opts.fs_subjects_dir,
        hires=opts.hires,
        layout=layout,
        longitudinal=opts.longitudinal,
        low_mem=opts.low_mem,
        omp_nthreads=omp_nthreads,
        output_dir=str(output_dir),
        run_uuid=run_uuid,
        skull_strip_fixed_seed=opts.skull_strip_fixed_seed,
        skull_strip_mode=opts.skull_strip_mode,
        skull_strip_template=opts.skull_strip_template[0],
        spaces=output_spaces,
        subject_list=subject_list,
        work_dir=str(work_dir),
        bids_filters=bids_filters,
    )
    retval["return_code"] = 0

    boilerplate = retval["workflow"].visit_desc()
    (log_dir / "CITATION.md").write_text(boilerplate)
    logger.log(
        25,
        "Works derived from this sMRIPrep execution should "
        "include the following boilerplate:\n\n%s",
        boilerplate,
    )

    # Generate HTML file resolving citations
    cmd = [
        "pandoc",
        "-s",
        "--bibliography",
        pkgrf("smriprep", "data/boilerplate.bib"),
        "--citeproc",
        "--metadata",
        'pagetitle="sMRIPrep citation boilerplate"',
        str(log_dir / "CITATION.md"),
        "-o",
        str(log_dir / "CITATION.html"),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning("Could not generate CITATION.html file:\n%s",
                       " ".join(cmd))

    # Generate LaTeX file resolving citations
    cmd = [
        "pandoc",
        "-s",
        "--bibliography",
        pkgrf("smriprep", "data/boilerplate.bib"),
        "--natbib",
        str(log_dir / "CITATION.md"),
        "-o",
        str(log_dir / "CITATION.tex"),
    ]
    try:
        check_call(cmd, timeout=10)
    except (FileNotFoundError, CalledProcessError, TimeoutExpired):
        logger.warning("Could not generate CITATION.tex file:\n%s",
                       " ".join(cmd))
    else:
        copyfile(pkgrf("smriprep", "data/boilerplate.bib"),
                 str(log_dir / "CITATION.bib"))
    return retval
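
Note the citation commands differ across the sMRIPrep variants: Example #1
uses the external ``pandoc-citeproc`` filter (pandoc < 2.11), while Example #8
uses the built-in ``--citeproc`` flag (pandoc >= 2.11). A hedged sketch for
picking the right arguments at runtime, assuming ``pandoc --version`` keeps
its usual first-line format:

def citeproc_args():
    """Return pandoc citation arguments matching the installed version."""
    from subprocess import check_output
    first_line = check_output(['pandoc', '--version']).decode().splitlines()[0]
    major, minor = (int(v) for v in first_line.split()[-1].split('.')[:2])
    if (major, minor) >= (2, 11):
        return ['--citeproc']
    return ['--filter', 'pandoc-citeproc']
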
Example #9
def build_workflow(opts, retval):
    """Create the Nipype Workflow that supports the whole execution graph."""
    import re
    import uuid
    from os import cpu_count
    from pathlib import Path
    from time import strftime

    from bids import BIDSLayout
    from nipype import logging as nlogging, config as ncfg
    from niworkflows.utils.bids import collect_participants
    from niworkflows.reports import generate_reports
    from ..__about__ import __version__
    from ..workflows.base import init_base_wf

    build_log = nlogging.getLogger('nipype.workflow')

    INIT_MSG = """
    Running atlasTransform version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    bids_dir = opts.bids_dir.resolve()
    output_dir = opts.output_dir.resolve()
    work_dir = opts.work_dir.resolve()

    retval['return_code'] = 1
    retval['workflow'] = None
    retval['bids_dir'] = str(bids_dir)
    retval['output_dir'] = str(output_dir)
    retval['work_dir'] = str(work_dir)

    if output_dir == bids_dir:
        build_log.error(
            'The selected output folder is the same as the input BIDS folder. '
            'Please modify the output path (suggestion: %s).', bids_dir /
            'derivatives' / ('atlasTransform-%s' % __version__.split('+')[0]))
        retval['return_code'] = 1
        return retval

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())
    retval['run_uuid'] = run_uuid

    # First check that bids_dir looks like a BIDS folder
    layout = BIDSLayout(str(bids_dir),
                        validate=False,
                        ignore=("code", "stimuli", "sourcedata", "models",
                                "derivatives", re.compile(r'^\.')))
    subject_list = collect_participants(
        layout, participant_label=opts.participant_label)
    retval['subject_list'] = subject_list

    # Load base plugin_settings from file if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # yaml.load requires a Loader on PyYAML >= 6
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
        plugin_settings.setdefault('plugin_args', {})
    else:
        # Defaults
        plugin_settings = {
            'plugin': 'MultiProc',
            'plugin_args': {
                'raise_insufficient': False,
                'maxtasksperchild': 1,
            }
        }

    # Resource management options
    # Note that we're making strong assumptions about valid plugin args
    # This may need to be revisited if people try to use batch plugins
    nthreads = plugin_settings['plugin_args'].get('n_procs')
    # Permit overriding plugin config with specific CLI options
    if nthreads is None or opts.nthreads is not None:
        nthreads = opts.nthreads
        if nthreads is None or nthreads < 1:
            nthreads = cpu_count()
        plugin_settings['plugin_args']['n_procs'] = nthreads

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        build_log.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)
    retval['plugin_settings'] = plugin_settings

    # Set up directories
    log_dir = output_dir / 'atlasTransform' / 'logs'
    # Check and create output and working directories
    output_dir.mkdir(exist_ok=True, parents=True)
    log_dir.mkdir(exist_ok=True, parents=True)
    work_dir.mkdir(exist_ok=True, parents=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': str(log_dir),
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': str(log_dir),
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    # Called with reports only
    if opts.reports_only:
        build_log.log(25, 'Running --reports-only on participants %s',
                      ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list,
                                                 output_dir,
                                                 work_dir,
                                                 run_uuid,
                                                 packagename='atlasTransform')
        return retval

    # Build main workflow
    build_log.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    retval['workflow'] = init_base_wf(
        opts=opts,
        layout=layout,
        run_uuid=run_uuid,
        subject_list=subject_list,
        work_dir=str(work_dir),
        output_dir=str(output_dir),
    )
    retval['return_code'] = 0

    logs_path = Path(output_dir) / 'atlasTransform' / 'logs'
    boilerplate = retval['workflow'].visit_desc()

    if boilerplate:
        citation_files = {
            ext: logs_path / ('CITATION.%s' % ext)
            for ext in ('bib', 'tex', 'md', 'html')
        }
        # To please git-annex users and also to guarantee consistency
        # among different renderings of the same file, first remove any
        # existing one
        for citation_file in citation_files.values():
            try:
                citation_file.unlink()
            except FileNotFoundError:
                pass

        citation_files['md'].write_text(boilerplate)
        build_log.log(
            25, 'Works derived from this atlasTransform execution should '
            'include the following boilerplate:\n\n%s', boilerplate)
    return retval