Example #1
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    # stdlib imports added so the snippet is self-contained; ``get_parser``
    # is assumed to be defined in the same module
    import os.path as op
    from multiprocessing import cpu_count

    from .. import logging
    from .. import __version__  # assumed to be exported by the package
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level: no -v -> 30 (WARNING), -v -> 20 (INFO),
    # -vv -> 15, -vvv -> 10 (DEBUG), floored at 1
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt'
        },
    })
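    # ``crashfile_format: txt`` stores crash reports as plain-text tracebacks
    # instead of pickled files, so failures can be inspected with any editor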

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # safe_load avoids executing arbitrary YAML tags
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
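        # A minimal plugin file might look like this (hypothetical values):
        #
        #   plugin: MultiProc
        #   plugin_args:
        #     n_procs: 8
        #     memory_gb: 16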
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod,
                            settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod,
                                          settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    # alias the stdlib logger to avoid shadowing the
                    # package-level ``logging`` imported above
                    import logging as std_logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args'][
                        'status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = std_logging.getLogger('callback')
                    logger.setLevel(std_logging.DEBUG)
                    handler = std_logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning('Anonymized quality metrics will be submitted'
                                ' to MRIQC\'s metrics repository.'
                                ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path,
                                         cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label,
            settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod,
                     out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv,
                       mod,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise RuntimeError(
                "No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
Example #2
def build_workflow(opts, retval):
    """
    Create the Nipype Workflow that supports the whole execution
    graph, given the inputs.

    All the checks and the construction of the workflow are done
    inside this function that has pickleable inputs and output
    dictionary (``retval``) to allow isolation using a
    ``multiprocessing.Process`` that allows fmriprep to enforce
    a hard-limited memory-scope.

    """
    # stdlib imports added so the snippet is self-contained
    import os
    import os.path as op
    import uuid
    from multiprocessing import cpu_count
    from time import strftime

    from niworkflows.nipype import logging, config as ncfg
    from ..info import __version__
    from ..workflows.base import init_fmriprep_wf
    from ..utils.bids import collect_participants
    from ..viz.reports import generate_reports

    logger = logging.getLogger('workflow')

    INIT_MSG = """
    Running fMRIPREP version {version}:
      * BIDS dataset path: {bids_dir}.
      * Participant list: {subject_list}.
      * Run identifier: {uuid}.
    """.format

    # Validity of some inputs
    # ERROR check if use_aroma was specified, but the correct template was not
    if opts.use_aroma and (opts.template != 'MNI152NLin2009cAsym'
                           or 'template' not in opts.output_space):
        raise RuntimeError(
            'ERROR: --use-aroma requires functional images to be resampled to '
            'MNI152NLin2009cAsym.\n'
            '\t--template must be set to "MNI152NLin2009cAsym" (was: "{}")\n'
            '\t--output-space list must include "template" (was: "{}")'.format(
                opts.template, ' '.join(opts.output_space)))
    # Check output_space
    if 'template' not in opts.output_space and (opts.use_syn_sdc
                                                or opts.force_syn):
        msg = ('SyN SDC correction requires T1 to MNI registration, but '
               '"template" is not specified in "--output-space" arguments')
        if opts.force_syn:
            raise RuntimeError(msg)
        logger.warning(msg)

    # Set up some instrumental utilities
    run_uuid = '%s_%s' % (strftime('%Y%m%d-%H%M%S'), uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = op.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Setting up MultiProc
    nthreads = opts.nthreads
    if nthreads < 1:
        nthreads = cpu_count()

    # ``raise_insufficient=False`` lets MultiProc start even when a node
    # requests more resources than are available; ``maxtasksperchild=1``
    # recycles each worker after one task to curb memory growth
    plugin_settings = {
        'plugin': 'MultiProc',
        'plugin_args': {
            'n_procs': nthreads,
            'raise_insufficient': False,
            'maxtasksperchild': 1,
        }
    }

    if opts.mem_mb:
        plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    # Overload plugin_settings if --use-plugin
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # safe_load avoids executing arbitrary YAML tags
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)
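        # e.g., with --nthreads=8 this yields min(8 - 1, 8) = 7, leaving one
        # thread free for other work and capping at 8 threads per process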

    if 1 < nthreads < omp_nthreads:
        logger.warning(
            'Per-process threads (--omp-nthreads=%d) exceed total '
            'threads (--nthreads/--n_cpus=%d)', omp_nthreads, nthreads)

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir or 'work')  # Set work/ as default

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'get_linked_libs': False,
            'stop_on_first_crash': opts.stop_on_first_crash or opts.work_dir is None,
        },
        'monitoring': {
            'enabled': opts.resource_monitor,
            'sample_frequency': '0.5',
            'summary_append': True,
        }
    })

    if opts.resource_monitor:
        ncfg.enable_resource_monitor()

    retval['return_code'] = 0
    retval['plugin_settings'] = plugin_settings
    retval['output_dir'] = output_dir
    retval['work_dir'] = work_dir
    retval['subject_list'] = subject_list
    retval['run_uuid'] = run_uuid
    retval['workflow'] = None

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s',
                   ', '.join(subject_list))
        if opts.run_uuid is not None:
            run_uuid = opts.run_uuid
            retval['run_uuid'] = run_uuid
        retval['return_code'] = generate_reports(subject_list, output_dir,
                                                 work_dir, run_uuid)
        return retval

    # Build main workflow
    logger.log(
        25,
        INIT_MSG(version=__version__,
                 bids_dir=bids_dir,
                 subject_list=subject_list,
                 uuid=run_uuid))

    retval['workflow'] = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.debug,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        t2s_coreg=opts.t2s_coreg,
        omp_nthreads=omp_nthreads,
        skull_strip_template=opts.skull_strip_template,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.run_reconall,
        output_spaces=opts.output_space,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        output_grid_ref=opts.output_grid_reference,
        hires=opts.hires,
        use_bbr=opts.use_bbr,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )
    retval['return_code'] = 0
    return retval
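
# A minimal driver sketch (an assumption, not part of the original source):
# the docstring of ``build_workflow`` explains that it is meant to run inside
# a ``multiprocessing.Process`` so that workflow construction cannot inflate
# the parent's memory footprint. A ``Manager`` dict carries the pickleable
# return values across the process boundary.
def _run_build_workflow_isolated(opts):
    """Hypothetical driver illustrating the isolation pattern."""
    from multiprocessing import Manager, Process

    with Manager() as mgr:
        retval = mgr.dict()
        proc = Process(target=build_workflow, args=(opts, retval))
        proc.start()
        proc.join()
        retval = dict(retval)  # copy values out before the manager shuts down

    workflow = retval.get('workflow')
    if retval.get('return_code', 1) == 0 and workflow is not None:
        workflow.run(**retval['plugin_settings'])
    return retval.get('return_code', 1)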
Example #3
def create_workflow(opts):
    """Build workflow"""
    # stdlib imports added so the snippet is self-contained
    import os
    import os.path as op
    import sys
    import uuid
    from multiprocessing import cpu_count
    from time import strftime

    from niworkflows.nipype import config as ncfg, logging as nlogging
    from ..info import __version__  # assumed location, as in build_workflow above
    from ..viz.reports import run_reports
    from ..workflows.base import init_fmriprep_wf
    from ..utils.bids import collect_participants

    # ``INIT_MSG`` (see build_workflow above) and ``logger`` are assumed
    # module-level names in the original source; the logger is re-created here
    logger = nlogging.getLogger('workflow')

    # Set up some instrumental utilities
    errno = 0
    run_uuid = strftime('%Y%m%d-%H%M%S_') + str(uuid.uuid4())

    # First check that bids_dir looks like a BIDS folder
    bids_dir = op.abspath(opts.bids_dir)
    subject_list = collect_participants(
        bids_dir, participant_label=opts.participant_label)

    # Nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}
    nthreads = opts.nthreads
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # safe_load avoids executing arbitrary YAML tags
        with open(opts.use_plugin) as f:
            plugin_settings = loadyml(f)
    else:
        # Setup multiprocessing (nthreads was already read from opts above)
        if nthreads == 0:
            nthreads = cpu_count()

        if nthreads > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': nthreads}
            if opts.mem_mb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_mb / 1024

    omp_nthreads = opts.omp_nthreads
    if omp_nthreads == 0:
        omp_nthreads = min(nthreads - 1 if nthreads > 1 else cpu_count(), 8)

    if 1 < nthreads < omp_nthreads:
        raise RuntimeError(
            'Per-process threads (--omp-nthreads={:d}) cannot exceed total '
            'threads (--nthreads/--n_cpus={:d})'.format(omp_nthreads, nthreads))

    # Set up directories
    output_dir = op.abspath(opts.output_dir)
    log_dir = op.join(output_dir, 'fmriprep', 'logs')
    work_dir = op.abspath(opts.work_dir)

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    # Called with reports only
    if opts.reports_only:
        logger.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list))
        report_errors = [
            run_reports(op.join(work_dir, 'reportlets'), output_dir, subject_label,
                        run_uuid=run_uuid)
            for subject_label in subject_list]
        sys.exit(int(sum(report_errors) > 0))

    # Build main workflow
    logger.log(25, INIT_MSG(
        version=__version__,
        subject_list=subject_list,
        uuid=run_uuid)
    )

    fmriprep_wf = init_fmriprep_wf(
        subject_list=subject_list,
        task_id=opts.task_id,
        run_uuid=run_uuid,
        ignore=opts.ignore,
        debug=opts.debug,
        low_mem=opts.low_mem,
        anat_only=opts.anat_only,
        longitudinal=opts.longitudinal,
        omp_nthreads=omp_nthreads,
        skull_strip_ants=True,
        work_dir=work_dir,
        output_dir=output_dir,
        bids_dir=bids_dir,
        freesurfer=opts.freesurfer,
        output_spaces=opts.output_space,
        template=opts.template,
        medial_surface_nan=opts.medial_surface_nan,
        output_grid_ref=opts.output_grid_reference,
        hires=opts.hires,
        bold2t1w_dof=opts.bold2t1w_dof,
        fmap_bspline=opts.fmap_bspline,
        fmap_demean=opts.fmap_no_demean,
        use_syn=opts.use_syn_sdc,
        force_syn=opts.force_syn,
        use_aroma=opts.use_aroma,
        ignore_aroma_err=opts.ignore_aroma_denoising_errors,
    )

    if opts.write_graph:
        fmriprep_wf.write_graph(graph2use="colored", format='svg', simple_form=True)

    try:
        fmriprep_wf.run(**plugin_settings)
    except RuntimeError as e:
        if "Workflow did not execute cleanly" in str(e):
            errno = 1
        else:
            raise

    # Generate reports phase
    report_errors = [run_reports(
        op.join(work_dir, 'reportlets'), output_dir, subject_label, run_uuid=run_uuid)
        for subject_label in subject_list]

    if sum(report_errors):
        logger.warning('Errors occurred while generating reports for participants: %s.',
                       ', '.join(['%s (%d)' % (subid, err)
                                  for subid, err in zip(subject_list, report_errors)]))

    errno += sum(report_errors)
    sys.exit(int(errno > 0))
Example #4
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    # stdlib/helper imports added so the snippet is self-contained;
    # ``get_parser`` is assumed to be defined in the same module
    import json
    import os.path as op
    from collections import OrderedDict
    from multiprocessing import cpu_count
    from pkg_resources import resource_filename as pkgrf

    from .. import logging
    from .. import __version__  # assumed to be exported by the package
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder, generate_csv  # generate_csv location assumed
    from ..classifier.helper import CVHelper  # import location assumed

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'ants_float': opts.ants_float,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    # Folder checks and file logging are disabled in this variant:
    # check_folder(log_dir)
    # check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        # 'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {
            # 'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'resource_monitor': opts.profile,
        },
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('utils').setLevel(log_level)

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # safe_load avoids executing arbitrary YAML tags
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')
        else:
            msg = 'Error reading BIDS directory ({}), or the dataset is not ' \
                  'BIDS-compliant.'

            if opts.participant_label or opts.session_id or opts.run_id or opts.task_id:
                msg = 'The combination of supplied labels'
                if opts.participant_label is not None:
                    msg += ' (--participant_label {})'.format(" ".join(opts.participant_label))
                if opts.session_id is not None:
                    msg += ' (--session-id {})'.format(" ".join(opts.session_id))
                if opts.run_id is not None:
                    msg += ' (--run-id {})'.format(" ".join(opts.run_id))
                if opts.task_id is not None:
                    msg += ' (--task-id {})'.format(" ".join(opts.task_id))

                msg += ' did not result in matches within the BIDS directory ({}).'

            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')


    # Apply the packaged classifier to the participant-level IQMs
    for mod in modalities:
            dataframe, order, jsonfiles = generate_csv(settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, settings['output_dir'])
                continue

            if 'jsonfile' in order:
                order.remove('jsonfile')

            base_name = 'mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-loso'
            load_classifier = pkgrf(
                'mriqc',
                'data/mclf_run-20170724-191452_mod-rfc_ver-0.9.7-rc8_class-2_cv-'
                'loso_data-all_estimator.pklz')

            cvhelper = CVHelper(load_clf=load_classifier, n_jobs=-1,
                                rate_label=['rater_1'], basename=base_name)
            
            # ``predict`` is assumed to return a (probabilities, labels) pair;
            # column 1 of the probability array is the positive-class probability
            prediction = cvhelper.predict(dataframe[order])
            dataframe['y_prob'] = prediction[0][:, 1]
            dataframe['y_pred'] = prediction[1]

            dataframe.index = jsonfiles

            # Write the predictions back into each IQM-JSON file in place
            for jsonfile in dataframe.index.values:
                with open(jsonfile, 'r+') as json_file:
                    json_dict = json.load(json_file, object_pairs_hook=OrderedDict)
                    json_dict['y_prob'] = float(dataframe.loc[jsonfile, 'y_prob'])
                    json_dict['y_pred'] = float(dataframe.loc[jsonfile, 'y_pred'])
                    json_file.seek(0)
                    json.dump(json_dict, json_file, separators=(',', ':'),
                              sort_keys=True, indent=4)
                    json_file.truncate()
Example #5
def main():
    """Entry point"""
    from niworkflows.nipype import config as ncfg, logging as nlog
    from niworkflows.nipype.pipeline.engine import Workflow

    # stdlib imports added so the snippet is self-contained; ``get_parser``
    # is assumed to be defined in the same module
    import os.path as op
    from multiprocessing import cpu_count

    from .. import logging
    from .. import __version__  # assumed to be exported by the package
    from ..utils.bids import collect_bids_data
    from ..workflows.core import build_workflow
    from ..utils.misc import check_folder

    # Run parser
    opts = get_parser().parse_args()

    # Retrieve logging level: no -v -> 30 (WARNING), -v -> 20 (INFO),
    # -vv -> 15, -vvv -> 10 (DEBUG), floored at 1
    log_level = int(max(3 - opts.verbose_count, 0) * 10)
    if opts.verbose_count > 1:
        log_level = int(max(25 - 5 * opts.verbose_count, 1))

    logging.getLogger().setLevel(log_level)
    log = logging.getLogger('mriqc.cli')

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = opts.n_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'hmc_fsl': opts.hmc_fsl,
        'fft_spikes_detector': opts.fft_spikes_detector,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing,
        'float32': opts.float32,
        'ica': opts.ica,
        'no_sub': opts.no_sub,
        'email': opts.email,
        'fd_thres': opts.fd_thres,
        'webapi_url': opts.webapi_url,
        'webapi_port': opts.webapi_port,
        'upload_strict': opts.upload_strict,
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' % ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {'log_directory': log_dir, 'log_to_file': True},
        'execution': {'crashdump_dir': log_dir, 'crashfile_format': 'txt'},
    })

    # Set nipype logging level
    nlog.getLogger('workflow').setLevel(log_level)
    nlog.getLogger('interface').setLevel(log_level)
    nlog.getLogger('filemanip').setLevel(log_level)

    callback_log_path = None
    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import safe_load as loadyml  # safe_load avoids executing arbitrary YAML tags
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = cpu_count()

        if settings['ants_nthreads'] == 0:
            if settings['n_procs'] > 1:
                # always leave one extra thread for non ANTs work,
                # don't use more than 8 threads - the speed up is minimal
                settings['ants_nthreads'] = min(settings['n_procs'] - 1, 8)
            else:
                settings['ants_nthreads'] = 1

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}
            if opts.mem_gb:
                plugin_settings['plugin_args']['memory_gb'] = opts.mem_gb

    # Process data types
    modalities = opts.modalities

    dataset = collect_bids_data(
        settings['bids_dir'],
        modalities=modalities,
        participant_label=opts.participant_label,
        session=opts.session_id,
        run=opts.run_id,
        task=opts.task_id,
    )

    # Set up participant level
    if 'participant' in analysis_levels:
        log.info('Participant level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for mod in modalities:
            if not dataset[mod]:
                log.warning('No %s scans were found in %s', mod, settings['bids_dir'])
                continue

            wf_list.append(build_workflow(dataset[mod], mod, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                if plugin_settings['plugin'] == 'MultiProc' and opts.profile:
                    # alias the stdlib logger to avoid shadowing the
                    # package-level ``logging`` imported above
                    import logging as std_logging
                    from niworkflows.nipype.pipeline.plugins.callback_log import log_nodes_cb
                    plugin_settings['plugin_args']['status_callback'] = log_nodes_cb
                    callback_log_path = op.join(log_dir, 'run_stats.log')
                    logger = std_logging.getLogger('callback')
                    logger.setLevel(std_logging.DEBUG)
                    handler = std_logging.FileHandler(callback_log_path)
                    logger.addHandler(handler)

                # Warn about submitting measures BEFORE
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics will be submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                # run MRIQC
                workflow.run(**plugin_settings)

                # Warn about submitting measures AFTER
                if not settings['no_sub']:
                    log.warning(
                        'Anonymized quality metrics have been submitted'
                        ' to MRIQC\'s metrics repository.'
                        ' Use --no-sub to disable submission.')

                if callback_log_path is not None:
                    from niworkflows.nipype.utils.draw_gantt_chart import generate_gantt_chart
                    generate_gantt_chart(callback_log_path, cores=settings['n_procs'])
        else:
            msg = """\
Error reading BIDS directory ({}), or the dataset is not \
BIDS-compliant."""
            if opts.participant_label is not None:
                msg = """\
None of the supplied labels (--participant_label) matched with the \
participants found in the BIDS directory ({})."""
            raise RuntimeError(msg.format(settings['bids_dir']))

        log.info('Participant level finished successfully.')

    # Set up group level
    if 'group' in analysis_levels:
        from ..reports import group_html
        from ..utils.misc import generate_csv  # , generate_pred

        log.info('Group level started...')
        log.info(
            'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
            __version__, ', '.join(analysis_levels), opts.participant_label, settings)

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

        n_group_reports = 0
        for mod in modalities:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], mod)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                log.warning(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', mod, derivatives_dir)
                continue

            log.info('Summary CSV table for the %s data generated (%s)', mod, out_csv)

            # out_pred = generate_pred(derivatives_dir, settings['output_dir'], mod)
            # if out_pred is not None:
            #     log.info('Predicted QA CSV table for the %s data generated (%s)',
            #                    mod, out_pred)

            out_html = op.join(reports_dir, mod + '_group.html')
            group_html(out_csv, mod,
                       csv_failed=op.join(settings['output_dir'], 'failed_' + mod + '.csv'),
                       out_file=out_html)
            log.info('Group-%s report generated (%s)', mod, out_html)
            n_group_reports += 1

        if n_group_reports == 0:
            raise RuntimeError("No data found. No group level reports were generated.")

        log.info('Group level finished successfully.')
Example #6
def main():
    """Entry point"""
    # stdlib imports added so the snippet is self-contained; ``get_parser``
    # and ``init_nuisance_regression_wf`` are assumed module-level names
    import os
    import uuid
    from glob import glob
    from time import strftime

    from niworkflows.nipype import config as ncfg

    # Get command-line arguments
    opts = get_parser().parse_args()

    # Use the processing options to name the output directory
    if opts.smooth:
        smooth_name = '_{}mm'.format(opts.smooth)
    else:
        smooth_name = ''

    if opts.low_pass:
        lp_name = '_{}Hz'.format(opts.low_pass)
    else:
        lp_name = ''

    if opts.regfilt:
        regfilt_name = '_regfilt'
    else:
        regfilt_name = ''

    # Set up main directories
    output_dir = os.path.abspath(opts.output_dir)
    result_dir = os.path.join(
        output_dir, 'NuisanceRegression{}{}{}'.format(smooth_name, lp_name,
                                                      regfilt_name))
    log_dir = os.path.join(result_dir, 'logs')
    work_dir = os.path.abspath(opts.work_dir)

    # Check and create output and working directories
    os.makedirs(output_dir, exist_ok=True)
    os.makedirs(log_dir, exist_ok=True)
    os.makedirs(work_dir, exist_ok=True)

    # Set up some instrumental utilities
    run_uuid = strftime('%Y%m%d-%H%M%S_') + str(uuid.uuid4())

    # Nipype config (logs and execution)
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir,
            'crashfile_format': 'txt',
            'parameterize_dirs': False,  # long parameterizations are hashed, keeping paths short
        },
    })

    # Nipype plugin configuration
    plugin_settings = {'plugin': 'Linear'}

    # only for a subset of subjects
    if opts.participant_label:
        subject_list = opts.participant_label
    # for all subjects
    else:
        # ``glob`` already returns paths prefixed with ``deriv_pipe_dir``, so
        # test them directly and take the label from the basename
        subject_dirs = [
            directory
            for directory in glob(os.path.join(opts.deriv_pipe_dir, "sub-*"))
            if os.path.isdir(directory)
        ]
        subject_list = [
            os.path.basename(subject_dir).split("-")[-1]
            for subject_dir in subject_dirs
        ]

    nuisance_regression_wf = init_nuisance_regression_wf(
        confound_names=opts.confounds,
        deriv_pipe_dir=opts.deriv_pipe_dir,
        exclude_variant=opts.exclude_variant,
        low_pass=opts.low_pass,
        regfilt=opts.regfilt,
        res=opts.res,
        result_dir=result_dir,
        run_id=opts.run,
        run_uuid=run_uuid,
        ses_id=opts.ses,
        smooth=opts.smooth,
        space=opts.space,
        subject_list=subject_list,
        task_id=opts.task_id,
        variant=opts.variant,
        work_dir=work_dir,
    )
    if opts.graph:
        nuisance_regression_wf.write_graph(graph2use='colored',
                                           dotfilename=os.path.join(
                                               work_dir, 'graph_colored.dot'))

    try:
        nuisance_regression_wf.run(**plugin_settings)
    except RuntimeError:
        print('ERROR!')
        raise