Example #1
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*'):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')
    if subject is None or len(subject) == 0:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)] for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'task_id', 'model_id'],
                                         outfields=['anat', 'bold', 'behav',
                                                    'contrasts']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                        'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                        'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                  'run%03d/cond*.txt'),
                                        'contrasts': ('models/model%03d/'
                                                      'task_contrasts.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id', 'task_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'task_id', 'run_id']],
                                       'contrasts': [['model_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
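    # Worked example (hypothetical values): with hpcutoff = 120 s and TR = 2 s,
    # this passes 120 / (2 * 2) = 30 volumes to the preproc highpass input.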
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        # read as str (not object/bytes) so the task-label comparison below
        # also works under Python 3
        contrast_def = np.genfromtxt(contrast_file, dtype=str)
        if len(contrast_def.shape) == 1:
            contrast_def = contrast_def[None, :]
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts
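    # For illustration (hypothetical file contents): a task_contrasts.txt row like
    #   task001 faces-vs-houses 1 -1
    # yields, for conds = ['faces', 'houses'],
    #   ['faces-vs-houses', 'T', ['cond001', 'cond002'], [1.0, -1.0]]
    # before the automatic per-condition t-contrasts are appended.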

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav):
        # import inside the function body: nipype serializes connect/Function
        # helpers and executes them in isolation
        from nipype.external import six
        out_behav = []
        if isinstance(behav, six.string_types):
            behav = [behav]
        for val in behav:
            if not isinstance(val, list):
                out_behav.append([val])
            else:
                out_behav.append(val)
        return out_behav

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, ('behav', check_behav_list), modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(files):
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles
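    # Shape illustration (hypothetical names): for two runs with two copes each,
    #   sort_copes([['r1_c1', 'r1_c2'], ['r2_c1', 'r2_c2']])
    # returns [['r1_c1', 'r2_c1'], ['r1_c2', 'r2_c2']], i.e. one list per
    # contrast combined across runs, which is what the fixed-effects flow expects.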

    def num_copes(files):
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                       ('outputspec.dof_file',
                                        'inputspec.dof_files'),
                                       (('outputspec.varcopes',
                                         sort_copes),
                                        'inputspec.varcopes'),
                                       (('outputspec.copes', num_copes),
                                        'l2model.num_copes'),
                                       ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
    registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
    registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
    registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                     output_names=['out_files', 'splits'],
                                     function=merge_files),
                        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                                 [('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats
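    # Round-trip illustration (hypothetical lists): merge_files(['c1', 'c2'],
    # ['v1', 'v2'], ['z1', 'z2']) gives out_files = ['c1', 'c2', 'v1', 'v2',
    # 'z1', 'z2'] and splits = [2, 2, 2]; split_files(out_files, splits) then
    # recovers the three lists, so registration can warp everything in one pass.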

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')


    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                                 [('res4d', 'res4d'),
                                  ('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ('tstats', 'tstats')])
                                 ])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')

    """
    Set processing parameters
    """

    hpcutoff = 120.
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
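A minimal invocation sketch of the factory above; the dataset path, subject label, working directory, and plugin settings are hypothetical, and the nipype imports used by the function are assumed to be in scope:

wf = analyze_openfmri_dataset(data_dir='/data/ds000114',  # hypothetical layout
                              subject=['sub001'],
                              model_id=1,
                              task_id=[1],
                              output_dir='/output/l1')
wf.base_dir = '/scratch/nipype'  # working directory for intermediates
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})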
Example #2
def analyze_openfmri_dataset(
    data_dir,
    subject=None,
    model_id=None,
    task_id=None,
    output_dir=None,
    subj_prefix="*",
    hpcutoff=120.0,
    use_derivatives=True,
    fwhm=6.0,
    subjects_dir=None,
    target=None,
):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol="first")
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node("plot_motion"), "out_file", preproc.get_node("outputspec"), "motion_plots")

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=["subject_id", "model_id", "task_id"]), name="infosource")
    if subject is None or len(subject) == 0:
        infosource.iterables = [("subject_id", subjects), ("model_id", [model_id]), ("task_id", task_id)]
    else:
        infosource.iterables = [
            ("subject_id", [subjects[subjects.index(subj)] for subj in subject]),
            ("model_id", [model_id]),
            ("task_id", task_id),
        ]

    subjinfo = pe.Node(
        niu.Function(
            input_names=["subject_id", "base_dir", "task_id", "model_id"],
            output_names=["run_id", "conds", "TR"],
            function=get_subjectinfo,
        ),
        name="subjectinfo",
    )
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """
    contrast_file = os.path.join(data_dir, "models", "model%03d" % model_id, "task_contrasts.txt")
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=["subject_id", "run_id", "task_id", "model_id"],
                outfields=["anat", "bold", "behav", "contrasts"],
            ),
            name="datasource",
        )
    else:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=["subject_id", "run_id", "task_id", "model_id"], outfields=["anat", "bold", "behav"]
            ),
            name="datasource",
        )
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = "*"

    if has_contrast:
        datasource.inputs.field_template = {
            "anat": "%s/anatomy/T1_001.nii.gz",
            "bold": "%s/BOLD/task%03d_r*/bold.nii.gz",
            "behav": ("%s/model/model%03d/onsets/task%03d_" "run%03d/cond*.txt"),
            "contrasts": ("models/model%03d/" "task_contrasts.txt"),
        }
        datasource.inputs.template_args = {
            "anat": [["subject_id"]],
            "bold": [["subject_id", "task_id"]],
            "behav": [["subject_id", "model_id", "task_id", "run_id"]],
            "contrasts": [["model_id"]],
        }
    else:
        datasource.inputs.field_template = {
            "anat": "%s/anatomy/T1_001.nii.gz",
            "bold": "%s/BOLD/task%03d_r*/bold.nii.gz",
            "behav": ("%s/model/model%03d/onsets/task%03d_" "run%03d/cond*.txt"),
        }
        datasource.inputs.template_args = {
            "anat": [["subject_id"]],
            "bold": [["subject_id", "task_id"]],
            "behav": [["subject_id", "model_id", "task_id", "run_id"]],
        }

    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name="openfmri")
    wf.connect(infosource, "subject_id", subjinfo, "subject_id")
    wf.connect(infosource, "model_id", subjinfo, "model_id")
    wf.connect(infosource, "task_id", subjinfo, "task_id")
    wf.connect(infosource, "subject_id", datasource, "subject_id")
    wf.connect(infosource, "model_id", datasource, "model_id")
    wf.connect(infosource, "task_id", datasource, "task_id")
    wf.connect(subjinfo, "run_id", datasource, "run_id")
    wf.connect([(datasource, preproc, [("bold", "inputspec.func")])])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)

    gethighpass = pe.Node(
        niu.Function(input_names=["TR", "hpcutoff"], output_names=["highpass"], function=get_highpass),
        name="gethighpass",
    )
    wf.connect(subjinfo, "TR", gethighpass, "TR")
    wf.connect(gethighpass, "highpass", preproc, "inputspec.highpass")

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os

        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, "rt") as fp:
                contrast_def.extend([np.array(row.split()) for row in fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != "task%03d" % task_id:
                continue
            con = [row[1], "T", ["cond%03d" % (i + 1) for i in range(len(conds))], row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, "T", ["cond%03d" % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(
        niu.Function(
            input_names=["contrast_file", "task_id", "conds"], output_names=["contrasts"], function=get_contrasts
        ),
        name="contrastgen",
    )

    art = pe.MapNode(
        interface=ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            norm_threshold=1,
            zintensity_threshold=3,
            parameter_source="FSL",
            mask_type="file",
        ),
        iterfield=["realigned_files", "realignment_parameters", "mask_file"],
        name="art",
    )

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = "secs"

    def check_behav_list(behav, run_id, conds):
        from nipype.external import six
        import numpy as np

        num_conds = len(conds)
        if isinstance(behav, six.string_types):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        # integer division: reshape requires int dimensions under Python 3
        return behav_array.reshape(num_elements // num_conds, num_conds).tolist()
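    # Reshape illustration (hypothetical paths): with conds = ['c1', 'c2'] and
    # behav flattened to ['r1_c1.txt', 'r1_c2.txt', 'r2_c1.txt', 'r2_c2.txt'],
    # the result is one row of event files per run:
    #   [['r1_c1.txt', 'r1_c2.txt'], ['r2_c1.txt', 'r2_c2.txt']]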

    reshape_behav = pe.Node(
        niu.Function(input_names=["behav", "run_id", "conds"], output_names=["behav"], function=check_behav_list),
        name="reshape_behav",
    )

    wf.connect(subjinfo, "TR", modelspec, "time_repetition")
    wf.connect(datasource, "behav", reshape_behav, "behav")
    wf.connect(subjinfo, "run_id", reshape_behav, "run_id")
    wf.connect(subjinfo, "conds", reshape_behav, "conds")
    wf.connect(reshape_behav, "behav", modelspec, "event_files")

    wf.connect(subjinfo, "TR", modelfit, "inputspec.interscan_interval")
    wf.connect(subjinfo, "conds", contrastgen, "conds")
    if has_contrast:
        wf.connect(datasource, "contrasts", contrastgen, "contrast_file")
    else:
        contrastgen.inputs.contrast_file = ""
    wf.connect(infosource, "task_id", contrastgen, "task_id")
    wf.connect(contrastgen, "contrasts", modelfit, "inputspec.contrasts")

    wf.connect(
        [
            (
                preproc,
                art,
                [
                    ("outputspec.motion_parameters", "realignment_parameters"),
                    ("outputspec.realigned_files", "realigned_files"),
                    ("outputspec.mask", "mask_file"),
                ],
            ),
            (
                preproc,
                modelspec,
                [
                    ("outputspec.highpassed_files", "functional_runs"),
                    ("outputspec.motion_parameters", "realignment_parameters"),
                ],
            ),
            (art, modelspec, [("outlier_files", "outlier_files")]),
            (modelspec, modelfit, [("session_info", "inputspec.session_info")]),
            (preproc, modelfit, [("outputspec.highpassed_files", "inputspec.functional_data")]),
        ]
    )

    # Compute TSNR on realigned data, regressing out polynomials up to order 2.
    # (MapNode, Node, Function, TSNR, median and imports come from module-level
    # imports/definitions in the original script, not shown in this snippet.)
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=["in_file"], name="tsnr")
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(
        Function(input_names=["in_files"], output_names=["median_file"], function=median, imports=imports),
        name="median",
    )
    wf.connect(tsnr, "detrended_file", calc_median, "in_files")

    """
    Reorder the copes so that now it combines across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np

        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        # integer division: reshape requires int dimensions under Python 3
        outcopes = all_copes.reshape(len(all_copes) // num_copes, num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(len(all_varcopes) // num_copes, num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(
        niu.Function(
            input_names=["copes", "varcopes", "contrasts"],
            output_names=["copes", "varcopes", "n_runs"],
            function=sort_copes,
        ),
        name="cope_sorter",
    )

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, "contrasts", cope_sorter, "contrasts")
    wf.connect(
        [
            (preproc, fixed_fx, [(("outputspec.mask", pickfirst), "flameo.mask_file")]),
            (modelfit, cope_sorter, [("outputspec.copes", "copes")]),
            (modelfit, cope_sorter, [("outputspec.varcopes", "varcopes")]),
            (
                cope_sorter,
                fixed_fx,
                [("copes", "inputspec.copes"), ("varcopes", "inputspec.varcopes"), ("n_runs", "l2model.num_copes")],
            ),
            (modelfit, fixed_fx, [("outputspec.dof_file", "inputspec.dof_files")]),
        ]
    )

    wf.connect(calc_median, "median_file", registration, "inputspec.mean_image")
    if subjects_dir:
        wf.connect(infosource, "subject_id", registration, "inputspec.subject_id")
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image("MNI152_T1_2mm_brain.nii.gz")
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, "anat", registration, "inputspec.anatomical_image")
        registration.inputs.inputspec.target_image = fsl.Info.standard_image("MNI152_T1_2mm.nii.gz")
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image("MNI152_T1_2mm_brain.nii.gz")
        registration.inputs.inputspec.config_file = "T1_2_MNI152_2mm"

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(
        niu.Function(
            input_names=["copes", "varcopes", "zstats"], output_names=["out_files", "splits"], function=merge_files
        ),
        name="merge_files",
    )
    wf.connect(
        [
            (
                fixed_fx.get_node("outputspec"),
                mergefunc,
                [("copes", "copes"), ("varcopes", "varcopes"), ("zstats", "zstats")],
            )
        ]
    )
    wf.connect(mergefunc, "out_files", registration, "inputspec.source_files")

    def split_files(in_files, splits):
        copes = in_files[: splits[0]]
        varcopes = in_files[splits[0] : (splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]) :]
        return copes, varcopes, zstats

    splitfunc = pe.Node(
        niu.Function(
            input_names=["in_files", "splits"], output_names=["copes", "varcopes", "zstats"], function=split_files
        ),
        name="split_files",
    )
    wf.connect(mergefunc, "splits", splitfunc, "splits")
    wf.connect(registration, "outputspec.transformed_files", splitfunc, "in_files")

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True), iterfield=["in_file"], name="get_aparc_means")
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node("outputspec"), "copes", get_roi_mean, "in_file")
        wf.connect(registration, "outputspec.aparc", get_roi_mean, "segmentation_file")

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True), iterfield=["in_file"], name="get_aparc_tsnr")
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, "tsnr_file", get_roi_tsnr, "in_file")
        wf.connect(registration, "outputspec.aparc", get_roi_tsnr, "segmentation_file")

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [("_subject_id_%s_" % subject_id, "")]
        subs.append(("_model_id_%d" % model_id, "model%03d" % model_id))
        subs.append(("task_id_%d/" % task_id, "/task%03d_" % task_id))
        subs.append(("bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp", "mean"))
        subs.append(("bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt", "affine"))

        for i in range(len(conds)):
            subs.append(("_flameo%d/cope1." % i, "cope%02d." % (i + 1)))
            subs.append(("_flameo%d/varcope1." % i, "varcope%02d." % (i + 1)))
            subs.append(("_flameo%d/zstat1." % i, "zstat%02d." % (i + 1)))
            subs.append(("_flameo%d/tstat1." % i, "tstat%02d." % (i + 1)))
            subs.append(("_flameo%d/res4d." % i, "res4d%02d." % (i + 1)))
            subs.append(("_warpall%d/cope1_warp." % i, "cope%02d." % (i + 1)))
            subs.append(("_warpall%d/varcope1_warp." % (len(conds) + i), "varcope%02d." % (i + 1)))
            subs.append(("_warpall%d/zstat1_warp." % (2 * len(conds) + i), "zstat%02d." % (i + 1)))
            subs.append(("_warpall%d/cope1_trans." % i, "cope%02d." % (i + 1)))
            subs.append(("_warpall%d/varcope1_trans." % (len(conds) + i), "varcope%02d." % (i + 1)))
            subs.append(("_warpall%d/zstat1_trans." % (2 * len(conds) + i), "zstat%02d." % (i + 1)))
            subs.append(("__get_aparc_means%d/" % i, "/cope%02d_" % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(("__get_aparc_tsnr%d/" % i, "/run%02d_" % run_num))
            subs.append(("__art%d/" % i, "/run%02d_" % run_num))
            subs.append(("__dilatemask%d/" % i, "/run%02d_" % run_num))
            subs.append(("__realign%d/" % i, "/run%02d_" % run_num))
            subs.append(("__modelgen%d/" % i, "/run%02d_" % run_num))
        subs.append(("/model%03d/task%03d/" % (model_id, task_id), "/"))
        subs.append(("/model%03d/task%03d_" % (model_id, task_id), "/"))
        subs.append(("_bold_dtype_mcf_bet_thresh_dil", "_mask"))
        subs.append(("_output_warped_image", "_anat2target"))
        subs.append(("median_flirt_brain_mask", "median_brain_mask"))
        subs.append(("median_bbreg_brain_mask", "median_brain_mask"))
        return subs

    subsgen = pe.Node(
        niu.Function(
            input_names=["subject_id", "conds", "run_id", "model_id", "task_id"],
            output_names=["substitutions"],
            function=get_subs,
        ),
        name="subsgen",
    )
    wf.connect(subjinfo, "run_id", subsgen, "run_id")

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, "subject_id", datasink, "container")
    wf.connect(infosource, "subject_id", subsgen, "subject_id")
    wf.connect(infosource, "model_id", subsgen, "model_id")
    wf.connect(infosource, "task_id", subsgen, "task_id")
    wf.connect(contrastgen, "contrasts", subsgen, "conds")
    wf.connect(subsgen, "substitutions", datasink, "substitutions")
    wf.connect(
        [
            (
                fixed_fx.get_node("outputspec"),
                datasink,
                [
                    ("res4d", "res4d"),
                    ("copes", "copes"),
                    ("varcopes", "varcopes"),
                    ("zstats", "zstats"),
                    ("tstats", "tstats"),
                ],
            )
        ]
    )
    wf.connect(
        [
            (
                modelfit.get_node("modelgen"),
                datasink,
                [
                    ("design_cov", "qa.model"),
                    ("design_image", "qa.model.@matrix_image"),
                    ("design_file", "qa.model.@matrix"),
                ],
            )
        ]
    )
    wf.connect(
        [
            (
                preproc,
                datasink,
                [
                    ("outputspec.motion_parameters", "qa.motion"),
                    ("outputspec.motion_plots", "qa.motion.plots"),
                    ("outputspec.mask", "qa.mask"),
                ],
            )
        ]
    )
    wf.connect(registration, "outputspec.mean2anat_mask", datasink, "qa.mask.mean2anat")
    wf.connect(art, "norm_files", datasink, "qa.art.@norm")
    wf.connect(art, "intensity_files", datasink, "qa.art.@intensity")
    wf.connect(art, "outlier_files", datasink, "qa.art.@outlier_files")
    wf.connect(registration, "outputspec.anat2target", datasink, "qa.anat2target")
    wf.connect(tsnr, "tsnr_file", datasink, "qa.tsnr.@map")
    if subjects_dir:
        wf.connect(registration, "outputspec.min_cost_file", datasink, "qa.mincost")
        wf.connect([(get_roi_tsnr, datasink, [("avgwf_txt_file", "qa.tsnr"), ("summary_file", "qa.tsnr.@summary")])])
        wf.connect(
            [(get_roi_mean, datasink, [("avgwf_txt_file", "copes.roi"), ("summary_file", "copes.roi.@summary")])]
        )
    wf.connect(
        [(splitfunc, datasink, [("copes", "copes.mni"), ("varcopes", "varcopes.mni"), ("zstats", "zstats.mni")])]
    )
    wf.connect(calc_median, "median_file", datasink, "mean")
    wf.connect(registration, "outputspec.transformed_mean", datasink, "mean.mni")
    wf.connect(registration, "outputspec.func2anat_transform", datasink, "xfm.mean2anat")
    wf.connect(registration, "outputspec.anat2target_transform", datasink, "xfm.anat2target")

    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {"dgamma": {"derivs": use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Example #3
modelspec = pe.Node(interface=model.SpecifyModel(),
                    name="modelspec")
modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = TR
modelspec.inputs.high_pass_filter_cutoff = 100
modelspec.inputs.subject_info = [Bunch(conditions=['Visual', 'Auditory'],
                                       onsets=[list(range(0, int(180 * TR), 60)),
                                               list(range(0, int(180 * TR), 90))],
                                       durations=[[30], [45]],
                                       amplitudes=None,
                                       tmod=None,
                                       pmod=None,
                                       regressor_names=None,
                                       regressors=None)]

modelfit = create_modelfit_workflow(f_contrasts=True)
modelfit.inputs.inputspec.interscan_interval = TR
modelfit.inputs.inputspec.model_serial_correlations = True
modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
cont1 = ['Visual>Baseline', 'T', ['Visual', 'Auditory'], [1, 0]]
cont2 = ['Auditory>Baseline', 'T', ['Visual', 'Auditory'], [0, 1]]
cont3 = ['Task', 'F', [cont1, cont2]]
modelfit.inputs.inputspec.contrasts = [cont1, cont2, cont3]
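# Contrast specification format: T-contrasts are [name, 'T', [condition names],
# [weights]]; F-contrasts combine T-contrasts as [name, 'F', [tcon1, tcon2, ...]],
# which is how cont3 above tests the Visual and Auditory effects jointly.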

registration = create_reg_workflow()
registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

"""
Set up complete workflow
Example #4
def create_workflow(contrasts, combine_runs=True):

    level1_workflow = pe.Workflow(name='level1flow')
    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            #'funcmasks',
            'fwhm',  # smoothing
            'highpass',
            'funcs',
            'event_log',
            'motion_parameters',
            'motion_outlier_files',
            'ref_func',
            'ref_funcmask',
        ]),
        name="inputspec")

    def remove_runs_missing_funcs(in_files, in_funcs):
        import os
        import re

        # if input.synchronize = True, then in_files and in_funcs would be
        # single strings; this node expects lists
        assert not isinstance(in_files, str), "in_files must be list"
        assert not isinstance(in_funcs, str), "in_funcs must be list"

        has_func = set()
        for f in in_funcs:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from func "%s"'
                    % f)
            has_func.add((sub, ses, run))

        files = []
        for f in in_files:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError as e:
                raise RuntimeError(
                    'Could not process "sub-*_", "ses-*_", or "run-*_" from event file "%s"'
                    % f)
            if (sub, ses, run) in has_func:
                files.append(f)
        return files
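    # Filtering illustration (hypothetical BIDS-style names):
    #   in_funcs = ['sub-01_ses-01_run-01_bold.nii.gz']
    #   in_files = ['sub-01_ses-01_run-01_events.tsv',
    #               'sub-01_ses-01_run-02_events.tsv']
    # keeps only the run-01 events file, since run-02 has no matching func.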

    input_events = pe.Node(
        interface=niu.Function(input_names=['in_files', 'in_funcs'],
                               output_names=['out_files'],
                               function=remove_runs_missing_funcs),
        name='input_events',
    )

    level1_workflow.connect([
        (inputnode, input_events, [
            ('funcs', 'in_funcs'),
            ('event_log', 'in_files'),
        ]),
    ])

    # -------------------------------------------------------------------
    #            /~_ _  _  _  _. _   _ . _  _ |. _  _
    #            \_/(/_| |(/_| |(_  |_)||_)(/_||| |(/_
    #                               |   |
    # -------------------------------------------------------------------
    """
    Preliminaries
    -------------
    Setup any package specific configuration. The output file format for FSL
    routines is being set to compressed NIFTI.
    """

    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    modelfit = fslflows.create_modelfit_workflow()

    if combine_runs:
        fixed_fx = fslflows.create_fixed_effects_flow()
    else:
        fixed_fx = None
    """
    Artifact detection is done in preprocessing workflow.
    """
    """
    Add model specification nodes between the preprocessing and modelfitting
    workflows.
    """
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    """
    Set up first-level workflow
    ---------------------------
    """

    def sort_copes(files):
        """Sort by copes and the runs, i.e.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        assert not isinstance(files[0], str)

        numcopes = len(files[0])
        assert numcopes > 1

        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    if fixed_fx is not None:
        level1_workflow.connect([
            (inputnode, fixed_fx, [('ref_funcmask', 'flameo.mask_file')
                                   ]),  # To-do: use reference mask!!!
            (modelfit, fixed_fx, [
                (('outputspec.copes', sort_copes), 'inputspec.copes'),
                ('outputspec.dof_file', 'inputspec.dof_files'),
                (('outputspec.varcopes', sort_copes), 'inputspec.varcopes'),
                (('outputspec.copes', num_copes), 'l2model.num_copes'),
            ])
        ])

    # -------------------------------------------------------------------
    #          /~\  _|_ _   _|_
    #          \_/|_|| |_)|_||
    #                  |
    # -------------------------------------------------------------------
    # Datasink

    outputfiles = pe.Node(nio.DataSink(base_directory=ds_root,
                                       container='derivatives/modelfit',
                                       parameterization=True),
                          name="output_files")

    # Use the following DataSink output substitutions
    outputfiles.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        # ('/mask/', '/'),
        # ('_preproc_flirt_thresh.nii.gz', '_transformedmask.nii.gz'),
        # ('_preproc_volreg_unwarped.nii.gz', '_preproc.nii.gz'),
        # ('_preproc_flirt_unwarped.nii.gz', '_preproc-mask.nii.gz'),
        # ('/_mc_method_afni3dvolreg/', '/'),
        # ('/funcs/', '/'),
        # ('/funcmasks/', '/'),
        # ('preproc_volreg.nii.gz', 'preproc.nii.gz'),
        ('/_mc_method_afni3dAllinSlices/', '/'),
    ]
    # Put result into a BIDS-like format
    outputfiles.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),

        # (r'/_addmean[0-9]+/', r'/func/'),
        # (r'/_dilatemask[0-9]+/', r'/func/'),
        # (r'/_funcbrain[0-9]+/', r'/func/'),
        # (r'/_maskfunc[0-9]+/', r'/func/'),
        # (r'/_mc[0-9]+/', r'/func/'),
        # (r'/_meanfunc[0-9]+/', r'/func/'),
        # (r'/_outliers[0-9]+/', r'/func/'),
        # (r'/_undistort_masks[0-9]+/', r'/func/'),
        # (r'/_undistort[0-9]+/', r'/func/'),
    ]
    level1_workflow.connect([
        (modelfit, outputfiles, [
            (('outputspec.copes', sort_copes), 'copes'),
            ('outputspec.dof_file', 'dof_files'),
            (('outputspec.varcopes', sort_copes), 'varcopes'),
        ]),
    ])
    if fixed_fx is not None:
        level1_workflow.connect([
            (fixed_fx, outputfiles, [
                ('outputspec.res4d', 'fx.res4d'),
                ('outputspec.copes', 'fx.copes'),
                ('outputspec.varcopes', 'fx.varcopes'),
                ('outputspec.zstats', 'fx.zstats'),
                ('outputspec.tstats', 'fx.tstats'),
            ]),
        ])

    # -------------------------------------------------------------------
    #          (~   _  _  _. _ _  _  _ _|_   _ _  _  _. |`. _
    #          (_><|_)(/_| || | |(/_| | |   _\|_)(/_(_|~|~|(_
    #              |                          |
    # -------------------------------------------------------------------
    #    """
    #    Experiment specific components
    #    ------------------------------
    #    """
    """
    Use the get_node function to retrieve an internal node by name. Then set the
    iterables on this node to perform two different extents of smoothing.
    """

    featinput = level1_workflow.get_node('modelfit.inputspec')
    # featinput.iterables = ('fwhm', [5., 10.])
    featinput.inputs.fwhm = 2.0

    hpcutoff_s = 50.  # FWHM in seconds
    TR = 2.5
    hpcutoff_nvol = hpcutoff_s / TR  # FWHM in volumes

    # Use Python 3 for processing. See code/requirements.txt for pip packages.
    featinput.inputs.highpass = hpcutoff_nvol / 2.355  # Gaussian sigma in volumes
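    # Worked numbers (given the values above): 50 s / 2.5 s = 20 volumes, and
    # 20 / 2.355 ≈ 8.49; dividing by 2.355 = 2*sqrt(2*ln 2) converts a
    # FWHM-style cutoff into the Gaussian sigma that FSL's temporal filter expects.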
    """
    Setup a function that returns subject-specific information about the
    experimental paradigm. This is used by the
    :class:`nipype.modelgen.SpecifyModel` to create the information necessary
    to generate an SPM design matrix. In this tutorial, the same paradigm was used
    for every participant. Other examples of this function are available in the
    `doc/examples` folder. Note: Python knowledge required here.
    """

    # from timeevents.curvetracing import calc_curvetracing_events
    from timeevents import process_time_events

    timeevents = pe.MapNode(
        interface=process_time_events,  # calc_curvetracing_events,
        iterfield=('event_log', 'in_nvols', 'TR'),
        name='timeevents')

    def get_nvols(funcs):
        import nibabel as nib
        nvols = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            try:
                # read the 4th dimension from the image shape; avoids loading
                # voxel data via the deprecated get_data()
                nvols.append(func_img.shape[3])
            except IndexError:
                # if shape only has 3 dimensions, then it is only 1 volume
                nvols.append(1)
        return nvols

    def get_TR(funcs):
        import nibabel as nib
        TRs = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header
            try:
                TR = round(header.get_zooms()[3], 5)
            except IndexError as e:
                TR = 2.5
                print("Warning: %s did not have TR defined in the header. "
                      "Using default TR of %0.2f" % (func, TR))

            assert TR > 1
            TRs.append(TR)
        return TRs
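    # Illustration (hypothetical file): for a 4D NIfTI with 180 volumes and a
    # 2.5 s TR in its header zooms, get_nvols(['func.nii.gz']) returns [180] and
    # get_TR(['func.nii.gz']) returns [2.5]; a 3D image falls back to one volume
    # (get_nvols) and to the 2.5 s default TR (get_TR).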

    level1_workflow.connect([
        (inputnode, timeevents, [
            (('funcs', get_nvols), 'in_nvols'),
            (('funcs', get_TR), 'TR'),
        ]),
        (input_events, timeevents, [('out_files', 'event_log')]),
        (inputnode, modelspec, [('motion_parameters', 'realignment_parameters')
                                ]),
        (modelspec, modelfit, [('session_info', 'inputspec.session_info')]),
    ])

    # Ignore volumes after last good response
    filter_outliers = pe.MapNode(interface=FilterNumsTask(),
                                 name='filter_outliers',
                                 iterfield=('in_file', 'max_number'))

    level1_workflow.connect([
        (inputnode, filter_outliers, [('motion_outlier_files', 'in_file')]),
        (filter_outliers, modelspec, [('out_file', 'outlier_files')]),
        (timeevents, filter_outliers, [('out_nvols', 'max_number')]),
    ])

    def evt_info(cond_events):
        output = []

        # for each run
        for ev in cond_events:
            from nipype.interfaces.base import Bunch
            from copy import deepcopy
            names = []

            for name in ev.keys():
                if ev[name].shape[0] > 0:
                    names.append(name)

            # guard against empty event arrays so the Bunch fields stay aligned
            onsets = [
                deepcopy(ev[name].time) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            durations = [
                deepcopy(ev[name].dur) if ev[name].shape[0] > 0 else []
                for name in names
            ]
            amplitudes = [
                deepcopy(ev[name].amplitude) if ev[name].shape[0] > 0 else []
                for name in names
            ]

            run_results = Bunch(
                conditions=names,
                onsets=onsets,
                durations=durations,
                amplitudes=amplitudes)

            output.append(run_results)
        return output

    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = TR  # to-do: specify per func
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff_s

    modelfit.inputs.inputspec.interscan_interval = TR  # to-do: specify per func
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.contrasts = contrasts
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    # level1_workflow.base_dir = os.path.abspath('./workingdirs/level1flow')
    modelfit.config['execution'] = dict(crashdump_dir=os.path.abspath('.'))

    # Ignore volumes after subject has finished working for the run
    beh_roi = pe.MapNode(fsl.ExtractROI(t_min=0),
                         name='beh_roi',
                         iterfield=['in_file', 't_size'])

    level1_workflow.connect([
        (timeevents, modelspec, [
            (('out_events', evt_info), 'subject_info'),
        ]),
        (inputnode, beh_roi, [
            ('funcs', 'in_file'),
        ]),
        (timeevents, beh_roi, [
            ('out_nvols', 't_size'),
        ]),
        (beh_roi, modelspec, [
            ('roi_file', 'functional_runs'),
        ]),
        (beh_roi, modelfit, [
            ('roi_file', 'inputspec.functional_data'),
        ]),
        (beh_roi, outputfiles, [
            ('roi_file', 'roi_file'),
        ]),
        # (inputnode, datasource, [('in_data', 'base_directory')]),
        # (infosource, datasource, [('subject_id', 'subject_id')]),
        # (infosource, modelspec, [(('subject_id', subjectinfo), 'subject_info')]),
        # (datasource, preproc, [('func', 'inputspec.func')]),
    ])
    return (level1_workflow)
Example #5
from nipype.workflows.fmri.fsl import (create_featreg_preproc,
                                       create_modelfit_workflow,
                                       create_fixed_effects_flow)
"""
Preliminaries
-------------

Setup any package specific configuration. The output file format for FSL
routines is being set to compressed NIFTI.
"""

fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

level1_workflow = pe.Workflow(name='level1flow')

preproc = create_featreg_preproc(whichvol='first')

modelfit = create_modelfit_workflow()

fixed_fx = create_fixed_effects_flow()
"""
Add artifact detection and model specification nodes between the preprocessing
and modelfitting workflows.
"""

art = pe.MapNode(
    ra.ArtifactDetect(use_differences=[True, False],
                      use_norm=True,
                      norm_threshold=1,
                      zintensity_threshold=3,
                      parameter_source='FSL',
                      mask_type='file'),
    iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
    name="art")
Example #6
modelspec.inputs.high_pass_filter_cutoff = 100
modelspec.inputs.subject_info = [
    Bunch(conditions=['Visual', 'Auditory'],
          onsets=[
              list(range(0, int(180 * TR), 60)),
              list(range(0, int(180 * TR), 90))
          ],
          durations=[[30], [45]],
          amplitudes=None,
          tmod=None,
          pmod=None,
          regressor_names=None,
          regressors=None)
]

modelfit = create_modelfit_workflow(f_contrasts=True)
modelfit.inputs.inputspec.interscan_interval = TR
modelfit.inputs.inputspec.model_serial_correlations = True
modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
cont1 = ['Visual>Baseline', 'T', ['Visual', 'Auditory'], [1, 0]]
cont2 = ['Auditory>Baseline', 'T', ['Visual', 'Auditory'], [0, 1]]
cont3 = ['Task', 'F', [cont1, cont2]]
modelfit.inputs.inputspec.contrasts = [cont1, cont2, cont3]

registration = create_reg_workflow()
registration.inputs.inputspec.target_image = fsl.Info.standard_image(
    'MNI152_T1_2mm.nii.gz')
registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
    'MNI152_T1_2mm_brain.nii.gz')
registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'
"""
Example #7
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None, subj_prefix='*',
                             hpcutoff=120., use_derivatives=True,
                             fwhm=6.0, subjects_dir=None, target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, subj_prefix))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')
    if subject is None or len(subject) == 0:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)] for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold', 'behav',
                                                        'contrasts']),
                             name='datasource')
    else:
        datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                       'task_id', 'model_id'],
                                             outfields=['anat', 'bold', 'behav']),
                             name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt'),
                                            'contrasts': ('models/model%03d/'
                                                          'task_contrasts.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']],
                                           'contrasts': [['model_id']]}
    else:
        datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                            'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                      'run%03d/cond*.txt')}
        datasource.inputs.template_args = {'anat': [['subject_id']],
                                           'bold': [['subject_id', 'task_id']],
                                           'behav': [['subject_id', 'model_id',
                                                      'task_id', 'run_id']]}

    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
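
    # fslmaths' -bptf (which the preproc highpass input ultimately feeds)
    # takes its cutoff as a sigma in volumes rather than seconds, hence the
    # hpcutoff / (2 * TR) conversion: a 120 s cutoff at TR = 2 s is 30 volumes.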

    """
    Set up a basic set of contrasts: a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([np.array(row.split()) for row in fp.readlines() if row.strip()])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts
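
    # get_contrasts emits level1design-style specs, [name, 'T', cond_names,
    # weights]; e.g. a (hypothetical) row "task001 face-vs-house 1 -1" with
    # two conditions yields ['face-vs-house', 'T', ['cond001', 'cond002'],
    # [1.0, -1.0]], plus one automatic per-condition t-contrast such as
    # [conds[0], 'T', ['cond001'], [1]].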

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                           name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(num_elements // num_conds, num_conds).tolist()
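
    # E.g. with two conditions and three runs, the flattened list
    # [r1c1, r1c2, r2c1, r2c2, r3c1, r3c2] is reshaped to
    # [[r1c1, r1c2], [r2c1, r2c2], [r3c1, r3c2]]: one list of event files per
    # run, the nesting SpecifyModel expects for event_files.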

    reshape_behav = pe.Node(niu.Function(input_names=['behav', 'run_id', 'conds'],
                                       output_names=['behav'],
                                       function=check_behav_list),
                          name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    """
    Reorder the copes so that they combine across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(len(all_copes) // num_copes,
                                     num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(len(all_varcopes) // num_copes,
                                           num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs
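
    # E.g. 2 runs x 3 contrasts: the run-major list
    # [r1c1, r1c2, r1c3, r2c1, r2c2, r2c3] becomes the contrast-major
    # [[r1c1, r2c1], [r1c2, r2c2], [r1c3, r2c3]], so each fixed-effects model
    # pools one contrast across runs; n_runs sizes the l2model.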

    cope_sorter = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                    'contrasts'],
                                       output_names=['copes', 'varcopes',
                                                     'n_runs'],
                                       function=sort_copes),
                          name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
                                        'inputspec.dof_files'),
                                      ])
                ])

    wf.connect(calc_median, 'median_file', registration, 'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration, 'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'
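
    # With a FreeSurfer subjects_dir, registration works from the subject's
    # FreeSurfer anatomy, so only a brain-extracted MNI target is set (an
    # explicit 'target' overrides it); the FSL-only branch instead needs the
    # anatomical image, head and brain templates, and an FNIRT config.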

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits
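
    # Flattening copes/varcopes/zstats into one list lets the registration
    # workflow transform everything in a single pass; 'splits' records the
    # three lengths so split_files below can undo the concatenation.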

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
                                                  'zstats'],
                                   output_names=['out_files', 'splits'],
                                   function=merge_files),
                      name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                                 [('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes',
                                                   'zstats'],
                                     function=split_files),
                      name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'], name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(fixed_fx.get_node('outputspec'), 'copes', get_roi_mean, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean, 'segmentation_file')

        get_roi_tsnr = pe.MapNode(fs.SegStats(default_color_table=True),
                                  iterfield=['in_file'], name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, 'segmentation_file')

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs
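
    # These (pattern, replacement) pairs become DataSink substitutions,
    # renaming outputs such as '_flameo0/cope1.nii.gz' to 'cope01.nii.gz' so
    # sunk files are indexed by contrast/run rather than by node name.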

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds', 'run_id',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                                 [('res4d', 'res4d'),
                                  ('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ('tstats', 'tstats')])
                                 ])
    wf.connect([(modelfit.get_node('modelgen'), datasink,
                                 [('design_cov', 'qa.model'),
                                  ('design_image', 'qa.model.@matrix_image'),
                                  ('design_file', 'qa.model.@matrix'),
                                 ])])
    wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                      'qa.motion'),
                                     ('outputspec.motion_plots',
                                      'qa.motion.plots'),
                                     ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink, 'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink, 'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink, 'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file', 'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file', 'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ('zstats', 'zstats.mni'),
                  ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')

    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Exemplo n.º 8
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                             task_id=None, output_dir=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    output_dir : str
        Directory in which the datasink stores outputs
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    registration = create_reg_workflow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = sorted([path.split(os.path.sep)[-1] for path in
                       glob(os.path.join(data_dir, 'sub*'))])

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id',
                                                       'task_id']),
                         name='infosource')
    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id]),
                                ('task_id', [task_id])]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'task_id', 'model_id'],
                                         outfields=['anat', 'bold', 'behav',
                                                    'contrasts']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                'behav': ('%s/model/model%03d/onsets/task%03d_'
                                          'run%03d/cond*.txt'),
                                'contrasts': ('models/model%03d/'
                                              'task_contrasts.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id', 'task_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'task_id', 'run_id']],
                                       'contrasts': [['model_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Set up a basic set of contrasts: a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        # read as text so the 'task%03d' comparison below works on Python 3
        contrast_def = np.genfromtxt(contrast_file, dtype=str)
        if len(contrast_def.shape) == 1:
            contrast_def = contrast_def[None, :]
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                   row[2:].astype(float).tolist()]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
                                                    'task_id', 'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                           name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that they combine across runs
    """

    def sort_copes(files):
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)
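
    # Here sort_copes transposes a runs x contrasts nested list: given
    # [[r1c1, r1c2], [r2c1, r2c2]] it returns [[r1c1, r2c1], [r1c2, r2c2]],
    # while num_copes(files), i.e. the number of runs, sizes l2model.num_copes.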

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                       ('outputspec.dof_file',
                                        'inputspec.dof_files'),
                                       (('outputspec.varcopes',
                                         sort_copes),
                                        'inputspec.varcopes'),
                                       (('outputspec.copes', num_copes),
                                        'l2model.num_copes'),
                                       ])
                ])

    wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
    wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
    registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')

    def merge_files(copes, varcopes):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        return out_files, splits

    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
                                   output_names=['out_files', 'splits'],
                                   function=merge_files),
                      name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                                 [('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:]
        return copes, varcopes

    splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
                                     output_names=['copes', 'varcopes'],
                                     function=split_files),
                      name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files',
               splitfunc, 'in_files')


    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
                     'mean'))
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp_warp.' % i,
                         'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
        return subs

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
                                                'model_id', 'task_id'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                                 [('res4d', 'res4d'),
                                  ('copes', 'copes'),
                                  ('varcopes', 'varcopes'),
                                  ('zstats', 'zstats'),
                                  ('tstats', 'tstats')])
                                 ])
    wf.connect([(splitfunc, datasink,
                 [('copes', 'copes.mni'),
                  ('varcopes', 'varcopes.mni'),
                  ])])
    wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')

    """
    Set processing parameters
    """

    hpcutoff = 120.
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Exemplo n.º 9
def analyze_openfmri_dataset(data_dir,
                             subject=None,
                             model_id=None,
                             task_id=None,
                             output_dir=None,
                             subj_prefix='*',
                             hpcutoff=120.,
                             use_derivatives=True,
                             fwhm=6.0,
                             subjects_dir=None,
                             target=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    output_dir : str
        Directory in which the datasink stores outputs
    """
    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()
    if subjects_dir:
        registration = create_fs_reg_workflow()
    else:
        registration = create_reg_workflow()
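    # A FreeSurfer-based registration workflow is used when subjects_dir is
    # given; it also exposes the aparc segmentation consumed by the ROI-stats
    # nodes below. Otherwise the plain volume-based workflow is used.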
    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(
        preproc.get_node('plot_motion'), 'out_file',
        preproc.get_node('outputspec'), 'motion_plots')
    """
    Set up openfmri data specific components
    """

    subjects = sorted([
        path.split(os.path.sep)[-1]
        for path in glob(os.path.join(data_dir, subj_prefix))
    ])

    infosource = pe.Node(
        niu.IdentityInterface(fields=['subject_id', 'model_id', 'task_id']),
        name='infosource')
    if not subject:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subj)] for subj in subject]),
                                ('model_id', [model_id]),
                                ('task_id', task_id)]

    subjinfo = pe.Node(
        niu.Function(
            input_names=['subject_id', 'base_dir', 'task_id', 'model_id'],
            output_names=['run_id', 'conds', 'TR'],
            function=get_subjectinfo),
        name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir
    """
    Return data components as anat, bold and behav
    """

    contrast_file = os.path.join(data_dir, 'models', 'model%03d' % model_id,
                                 'task_contrasts.txt')
    has_contrast = os.path.exists(contrast_file)
    if has_contrast:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=['subject_id', 'run_id', 'task_id', 'model_id'],
                outfields=['anat', 'bold', 'behav', 'contrasts']),
            name='datasource')
    else:
        datasource = pe.Node(
            nio.DataGrabber(
                infields=['subject_id', 'run_id', 'task_id', 'model_id'],
                outfields=['anat', 'bold', 'behav']),
            name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'

    if has_contrast:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt'),
            'contrasts': ('models/model%03d/'
                          'task_contrasts.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']],
            'contrasts': [['model_id']]
        }
    else:
        datasource.inputs.field_template = {
            'anat': '%s/anatomy/T1_001.nii.gz',
            'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
            'behav': ('%s/model/model%03d/onsets/task%03d_'
                      'run%03d/cond*.txt')
        }
        datasource.inputs.template_args = {
            'anat': [['subject_id']],
            'bold': [['subject_id', 'task_id']],
            'behav': [['subject_id', 'model_id', 'task_id', 'run_id']]
        }

    datasource.inputs.sort_filelist = True
    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'task_id', subjinfo, 'task_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(infosource, 'task_id', datasource, 'task_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([
        (datasource, preproc, [('bold', 'inputspec.func')]),
    ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2. * TR)

    gethighpass = pe.Node(
        niu.Function(
            input_names=['TR', 'hpcutoff'],
            output_names=['highpass'],
            function=get_highpass),
        name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')
    """
    Set up a basic set of contrasts: a t-test per condition
    """

    def get_contrasts(contrast_file, task_id, conds):
        import numpy as np
        import os
        contrast_def = []
        if os.path.exists(contrast_file):
            with open(contrast_file, 'rt') as fp:
                contrast_def.extend([
                    np.array(row.split()) for row in fp.readlines()
                    if row.strip()
                ])
        contrasts = []
        for row in contrast_def:
            if row[0] != 'task%03d' % task_id:
                continue
            con = [
                row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                row[2:].astype(float).tolist()
            ]
            contrasts.append(con)
        # add auto contrasts for each column
        for i, cond in enumerate(conds):
            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
            contrasts.append(con)
        return contrasts

    contrastgen = pe.Node(
        niu.Function(
            input_names=['contrast_file', 'task_id', 'conds'],
            output_names=['contrasts'],
            function=get_contrasts),
        name='contrastgen')

    art = pe.MapNode(
        interface=ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            norm_threshold=1,
            zintensity_threshold=3,
            parameter_source='FSL',
            mask_type='file'),
        iterfield=['realigned_files', 'realignment_parameters', 'mask_file'],
        name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    modelspec.inputs.input_units = 'secs'

    def check_behav_list(behav, run_id, conds):
        import numpy as np
        num_conds = len(conds)
        if isinstance(behav, (str, bytes)):
            behav = [behav]
        behav_array = np.array(behav).flatten()
        num_elements = behav_array.shape[0]
        return behav_array.reshape(int(num_elements / num_conds),
                                   num_conds).tolist()

    reshape_behav = pe.Node(
        niu.Function(
            input_names=['behav', 'run_id', 'conds'],
            output_names=['behav'],
            function=check_behav_list),
        name='reshape_behav')

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', reshape_behav, 'behav')
    wf.connect(subjinfo, 'run_id', reshape_behav, 'run_id')
    wf.connect(subjinfo, 'conds', reshape_behav, 'conds')
    wf.connect(reshape_behav, 'behav', modelspec, 'event_files')

    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    if has_contrast:
        wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
    else:
        contrastgen.inputs.contrast_file = ''
    wf.connect(infosource, 'task_id', contrastgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])])

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(preproc, "outputspec.realigned_files", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(CalculateMedian(), name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
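    # The median of the TSNR-detrended runs later serves as the source image
    # for registration (connected to inputspec.mean_image below).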
    """
    Reorder the copes so that they combine across runs
    """

    def sort_copes(copes, varcopes, contrasts):
        import numpy as np
        if not isinstance(copes, list):
            copes = [copes]
            varcopes = [varcopes]
        num_copes = len(contrasts)
        n_runs = len(copes)
        all_copes = np.array(copes).flatten()
        all_varcopes = np.array(varcopes).flatten()
        outcopes = all_copes.reshape(
            int(len(all_copes) / num_copes), num_copes).T.tolist()
        outvarcopes = all_varcopes.reshape(
            int(len(all_varcopes) / num_copes), num_copes).T.tolist()
        return outcopes, outvarcopes, n_runs

    cope_sorter = pe.Node(
        niu.Function(
            input_names=['copes', 'varcopes', 'contrasts'],
            output_names=['copes', 'varcopes', 'n_runs'],
            function=sort_copes),
        name='cope_sorter')

    pickfirst = lambda x: x[0]

    wf.connect(contrastgen, 'contrasts', cope_sorter, 'contrasts')
    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, cope_sorter, [('outputspec.copes', 'copes')]),
                (modelfit, cope_sorter, [('outputspec.varcopes', 'varcopes')]),
                (cope_sorter, fixed_fx, [('copes', 'inputspec.copes'),
                                         ('varcopes', 'inputspec.varcopes'),
                                         ('n_runs', 'l2model.num_copes')]),
                (modelfit, fixed_fx, [('outputspec.dof_file',
                                       'inputspec.dof_files')])])

    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    if subjects_dir:
        wf.connect(infosource, 'subject_id', registration,
                   'inputspec.subject_id')
        registration.inputs.inputspec.subjects_dir = subjects_dir
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        if target:
            registration.inputs.inputspec.target_image = target
    else:
        wf.connect(datasource, 'anat', registration,
                   'inputspec.anatomical_image')
        registration.inputs.inputspec.target_image = fsl.Info.standard_image(
            'MNI152_T1_2mm.nii.gz')
        registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image(
            'MNI152_T1_2mm_brain.nii.gz')
        registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

    def merge_files(copes, varcopes, zstats):
        out_files = []
        splits = []
        out_files.extend(copes)
        splits.append(len(copes))
        out_files.extend(varcopes)
        splits.append(len(varcopes))
        out_files.extend(zstats)
        splits.append(len(zstats))
        return out_files, splits

    mergefunc = pe.Node(
        niu.Function(
            input_names=['copes', 'varcopes', 'zstats'],
            output_names=['out_files', 'splits'],
            function=merge_files),
        name='merge_files')
    wf.connect([(fixed_fx.get_node('outputspec'), mergefunc, [
        ('copes', 'copes'),
        ('varcopes', 'varcopes'),
        ('zstats', 'zstats'),
    ])])
    wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

    def split_files(in_files, splits):
        copes = in_files[:splits[0]]
        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
        zstats = in_files[(splits[0] + splits[1]):]
        return copes, varcopes, zstats

    splitfunc = pe.Node(
        niu.Function(
            input_names=['in_files', 'splits'],
            output_names=['copes', 'varcopes', 'zstats'],
            function=split_files),
        name='split_files')
    wf.connect(mergefunc, 'splits', splitfunc, 'splits')
    wf.connect(registration, 'outputspec.transformed_files', splitfunc,
               'in_files')

    if subjects_dir:
        get_roi_mean = pe.MapNode(
            fs.SegStats(default_color_table=True),
            iterfield=['in_file'],
            name='get_aparc_means')
        get_roi_mean.inputs.avgwf_txt_file = True
        wf.connect(
            fixed_fx.get_node('outputspec'), 'copes', get_roi_mean, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_mean,
                   'segmentation_file')

        get_roi_tsnr = pe.MapNode(
            fs.SegStats(default_color_table=True),
            iterfield=['in_file'],
            name='get_aparc_tsnr')
        get_roi_tsnr.inputs.avgwf_txt_file = True
        wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
        wf.connect(registration, 'outputspec.aparc', get_roi_tsnr,
                   'segmentation_file')
    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds, run_id, model_id, task_id):
        subs = [('_subject_id_%s_' % subject_id, '')]
        subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
        subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                     'mean'))
        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
                     'affine'))

        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_warp.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('_warpall%d/cope1_trans.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_warpall%d/varcope1_trans.' % (len(conds) + i),
                         'varcope%02d.' % (i + 1)))
            subs.append(('_warpall%d/zstat1_trans.' % (2 * len(conds) + i),
                         'zstat%02d.' % (i + 1)))
            subs.append(('__get_aparc_means%d/' % i, '/cope%02d_' % (i + 1)))

        for i, run_num in enumerate(run_id):
            subs.append(('__get_aparc_tsnr%d/' % i, '/run%02d_' % run_num))
            subs.append(('__art%d/' % i, '/run%02d_' % run_num))
            subs.append(('__dilatemask%d/' % i, '/run%02d_' % run_num))
            subs.append(('__realign%d/' % i, '/run%02d_' % run_num))
            subs.append(('__modelgen%d/' % i, '/run%02d_' % run_num))
        subs.append(('/model%03d/task%03d/' % (model_id, task_id), '/'))
        subs.append(('/model%03d/task%03d_' % (model_id, task_id), '/'))
        subs.append(('_bold_dtype_mcf_bet_thresh_dil', '_mask'))
        subs.append(('_output_warped_image', '_anat2target'))
        subs.append(('median_flirt_brain_mask', 'median_brain_mask'))
        subs.append(('median_bbreg_brain_mask', 'median_brain_mask'))
        return subs

    subsgen = pe.Node(
        niu.Function(
            input_names=[
                'subject_id', 'conds', 'run_id', 'model_id', 'task_id'
            ],
            output_names=['substitutions'],
            function=get_subs),
        name='subsgen')
    wf.connect(subjinfo, 'run_id', subsgen, 'run_id')

    datasink = pe.Node(interface=nio.DataSink(), name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(infosource, 'model_id', subsgen, 'model_id')
    wf.connect(infosource, 'task_id', subsgen, 'task_id')
    wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])])
    wf.connect([(modelfit.get_node('modelgen'), datasink, [
        ('design_cov', 'qa.model'),
        ('design_image', 'qa.model.@matrix_image'),
        ('design_file', 'qa.model.@matrix'),
    ])])
    wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                      'qa.motion'),
                                     ('outputspec.motion_plots',
                                      'qa.motion.plots'),
                                     ('outputspec.mask', 'qa.mask')])])
    wf.connect(registration, 'outputspec.mean2anat_mask', datasink,
               'qa.mask.mean2anat')
    wf.connect(art, 'norm_files', datasink, 'qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'qa.anat2target')
    wf.connect(tsnr, 'tsnr_file', datasink, 'qa.tsnr.@map')
    if subjects_dir:
        wf.connect(registration, 'outputspec.min_cost_file', datasink,
                   'qa.mincost')
        wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'qa.tsnr'),
                                              ('summary_file',
                                               'qa.tsnr.@summary')])])
        wf.connect([(get_roi_mean, datasink, [('avgwf_txt_file', 'copes.roi'),
                                              ('summary_file',
                                               'copes.roi.@summary')])])
    wf.connect([(splitfunc, datasink, [
        ('copes', 'copes.mni'),
        ('varcopes', 'varcopes.mni'),
        ('zstats', 'zstats.mni'),
    ])])
    wf.connect(calc_median, 'median_file', datasink, 'mean')
    wf.connect(registration, 'outputspec.transformed_mean', datasink,
               'mean.mni')
    wf.connect(registration, 'outputspec.func2anat_transform', datasink,
               'xfm.mean2anat')
    wf.connect(registration, 'outputspec.anat2target_transform', datasink,
               'xfm.anat2target')
    """
    Set processing parameters
    """

    preproc.inputs.inputspec.fwhm = fwhm
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivatives}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    datasink.inputs.base_directory = output_dir
    return wf
Exemplo n.º 10
            focus_onset = Order1_onsets
            focus_durations = Order1_durations
            wander_onset = Order2_onsets
            wander_durations = Order2_durations
        output.insert(r, Bunch(conditions=names, onsets=[focus_onset, wander_onset],
                               durations=[focus_durations, wander_durations], regressors=None))
        return output
    ## end moral dilemma


    # workflow.connect(datasource, 'func', modelspec,'functional_runs')
    # workflow.connect(smooth, 'out_file', modelspec,'functional_runs')


    #modelfit
    modelfit = create_modelfit_workflow(name='feedback')
    modelfit.inputs.inputspec.interscan_interval = TR
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    cont1 = ['Focus>Wander', 'T', ['Focus', 'Wander'], [1, -1]]
    cont2 = ['Wander>Focus', 'T', ['Focus', 'Wander'], [-1, 1]]
    cont3 = ['Mean Focus', 'T', ['Focus'], [1]]
    cont4 = ['Mean Wander', 'T', ['Wander'], [1]]
    cont5 = ['Average Activation', 'T', ['Focus', 'Wander'], [.5, .5]]
    # cont6 = ['TaskOnset', 'T', ['TaskOnset'], [1]]

    # cont3 = ['Task', 'F', [cont1, cont2]]

    modelfit.inputs.inputspec.contrasts = [cont1, cont2, cont3, cont4, cont5]

    workflow.connect([(infosource, modelspec,
                       [(('subject_id', subjectinfo, feedbackRun), 'subject_info')])])
Exemplo n.º 11
"""
Preliminaries
-------------

Set up any package-specific configuration. The output file format for FSL
routines is set to compressed NIfTI.
"""

fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

level1_workflow = pe.Workflow(name='level1flow')

preproc = create_featreg_preproc(whichvol='first')

modelfit = create_modelfit_workflow()

fixed_fx = create_fixed_effects_flow()

"""
Add artifact detection and model specification nodes between the preprocessing
and modelfitting workflows.
"""

art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                             use_norm=True,
                                             norm_threshold=1,
                                             zintensity_threshold=3,
                                             parameter_source='FSL',
                                             mask_type='file'),
                 iterfield=['realigned_files', 'realignment_parameters',
                            'mask_file'],
                 name="art")
Exemplo n.º 12
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=None):
    """Analyzes an open fmri dataset

    Parameters
    ----------

    data_dir : str
        Path to the base data directory

    work_dir : str
        Nipype working directory (defaults to cwd)
    """

    """
    Load nipype workflows
    """

    preproc = create_featreg_preproc(whichvol='first')
    modelfit = create_modelfit_workflow()
    fixed_fx = create_fixed_effects_flow()

    """
    Remove the plotting connection so that plot iterables don't propagate
    to the model stage
    """

    preproc.disconnect(preproc.get_node('plot_motion'), 'out_file',
                       preproc.get_node('outputspec'), 'motion_plots')

    """
    Set up openfmri data specific components
    """

    subjects = [path.split(os.path.sep)[-1] for path in
                glob(os.path.join(data_dir, 'sub*'))]

    infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                       'model_id']),
                         name='infosource')
    if subject is None:
        infosource.iterables = [('subject_id', subjects),
                                ('model_id', [model_id])]
    else:
        infosource.iterables = [('subject_id',
                                 [subjects[subjects.index(subject)]]),
                                ('model_id', [model_id])]

    subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                 'task_id', 'model_id'],
                                    output_names=['run_id', 'conds', 'TR'],
                                    function=get_subjectinfo),
                       name='subjectinfo')
    subjinfo.inputs.base_dir = data_dir

    """
    Return data components as anat, bold and behav
    """

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
                                                   'model_id'],
                                         outfields=['anat', 'bold', 'behav']),
                         name='datasource')
    datasource.inputs.base_directory = data_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
                                'bold': '%s/BOLD/task001_r*/bold.nii.gz',
                                'behav': ('%s/model/model%03d/onsets/task001_'
                                          'run%03d/cond*.txt')}
    datasource.inputs.template_args = {'anat': [['subject_id']],
                                       'bold': [['subject_id']],
                                       'behav': [['subject_id', 'model_id',
                                                  'run_id']]}
    datasource.inputs.sort_filelist = True

    """
    Create meta workflow
    """

    wf = pe.Workflow(name='openfmri')
    wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
    wf.connect(infosource, 'model_id', subjinfo, 'model_id')
    wf.connect(infosource, 'subject_id', datasource, 'subject_id')
    wf.connect(infosource, 'model_id', datasource, 'model_id')
    wf.connect(subjinfo, 'run_id', datasource, 'run_id')
    wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
                ])

    def get_highpass(TR, hpcutoff):
        return hpcutoff / (2 * TR)
    gethighpass = pe.Node(niu.Function(input_names=['TR', 'hpcutoff'],
                                       output_names=['highpass'],
                                       function=get_highpass),
                          name='gethighpass')
    wf.connect(subjinfo, 'TR', gethighpass, 'TR')
    wf.connect(gethighpass, 'highpass', preproc, 'inputspec.highpass')

    """
    Setup a basic set of contrasts, a t-test per condition
    """

    def get_contrasts(base_dir, model_id, conds):
        import numpy as np
        import os
        contrast_file = os.path.join(base_dir, 'models', 'model%03d' % model_id,
                                     'task_contrasts.txt')
        contrast_def = np.genfromtxt(contrast_file, dtype=object)
        contrasts = []
        for row in contrast_def:
            con = [row[0], 'T', ['cond%03d' % i for i in range(len(conds))],
                   row[1:].astype(float).tolist()]
            contrasts.append(con)
        return contrasts
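    # task_contrasts.txt is assumed to hold one contrast per row: a contrast
    # name followed by one weight per condition, e.g. a hypothetical row
    # "task001_faces-vs-all 1 -0.5 -0.5"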

    contrastgen = pe.Node(niu.Function(input_names=['base_dir', 'model_id',
                                                    'conds'],
                                       output_names=['contrasts'],
                                       function=get_contrasts),
                          name='contrastgen')
    contrastgen.inputs.base_dir = data_dir

    art = pe.MapNode(interface=ra.ArtifactDetect(use_differences=[True, False],
                                                 use_norm=True,
                                                 norm_threshold=1,
                                                 zintensity_threshold=3,
                                                 parameter_source='FSL',
                                                 mask_type='file'),
                     iterfield=['realigned_files', 'realignment_parameters',
                                'mask_file'],
                     name="art")

    modelspec = pe.Node(interface=model.SpecifyModel(),
                        name="modelspec")
    modelspec.inputs.input_units = 'secs'

    wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
    wf.connect(datasource, 'behav', modelspec, 'event_files')
    wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
    wf.connect(subjinfo, 'conds', contrastgen, 'conds')
    wf.connect(infosource, 'model_id', contrastgen, 'model_id')
    wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')

    wf.connect([(preproc, art, [('outputspec.motion_parameters',
                                 'realignment_parameters'),
                                ('outputspec.realigned_files',
                                 'realigned_files'),
                                ('outputspec.mask', 'mask_file')]),
                (preproc, modelspec, [('outputspec.highpassed_files',
                                       'functional_runs'),
                                      ('outputspec.motion_parameters',
                                       'realignment_parameters')]),
                (art, modelspec, [('outlier_files', 'outlier_files')]),
                (modelspec, modelfit, [('session_info',
                                        'inputspec.session_info')]),
                (preproc, modelfit, [('outputspec.highpassed_files',
                                      'inputspec.functional_data')])
                ])

    """
    Reorder the copes so that they combine across runs
    """

    def sort_copes(files):
        numelements = len(files[0])
        outfiles = []
        for i in range(numelements):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles
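    # e.g. [[run1_cope1, run1_cope2], [run2_cope1, run2_cope2]] becomes
    # [[run1_cope1, run2_cope1], [run1_cope2, run2_cope2]], so each inner
    # list gathers one contrast across runs for the fixed-effects model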

    def num_copes(files):
        return len(files)

    pickfirst = lambda x: x[0]

    wf.connect([(preproc, fixed_fx, [(('outputspec.mask', pickfirst),
                                      'flameo.mask_file')]),
                (modelfit, fixed_fx, [(('outputspec.copes', sort_copes),
                                       'inputspec.copes'),
                                      ('outputspec.dof_file',
                                       'inputspec.dof_files'),
                                      (('outputspec.varcopes', sort_copes),
                                       'inputspec.varcopes'),
                                      (('outputspec.copes', num_copes),
                                       'l2model.num_copes'),
                                      ])
                ])

    """
    Connect to a datasink
    """

    def get_subs(subject_id, conds):
        subs = [('_subject_id_%s/' % subject_id, '')]
        for i in range(len(conds)):
            subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
            subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
            subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
            subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
        return subs
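    # e.g. '_flameo0/cope1.nii.gz' becomes 'cope01.nii.gz' in the sink output,
    # flattening the per-contrast flameo subdirectories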

    subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds'],
                                   output_names=['substitutions'],
                                   function=get_subs),
                      name='subsgen')

    datasink = pe.Node(interface=nio.DataSink(),
                       name="datasink")
    wf.connect(infosource, 'subject_id', datasink, 'container')
    wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
    wf.connect(subjinfo, 'conds', subsgen, 'conds')
    wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
    wf.connect([(fixed_fx.get_node('outputspec'), datasink,
                 [('res4d', 'res4d'),
                  ('copes', 'copes'),
                  ('varcopes', 'varcopes'),
                  ('zstats', 'zstats'),
                  ('tstats', 'tstats')])
                ])

    """
    Set processing parameters
    """

    hpcutoff = 120.
    subjinfo.inputs.task_id = 1
    preproc.inputs.inputspec.fwhm = 6.0
    gethighpass.inputs.hpcutoff = hpcutoff
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    if work_dir is None:
        work_dir = os.path.join(os.getcwd(), 'working')
    wf.base_dir = work_dir
    datasink.inputs.base_directory = os.path.join(work_dir, 'output')
    wf.config['execution'] = dict(crashdump_dir=os.path.join(work_dir,
                                                             'crashdumps'),
                                  stop_on_first_crash=True)
    wf.run('MultiProc', plugin_args={'n_procs': 2})


def create_workflow(contrasts, out_label, contrasts_name, hrf, fwhm, HighPass,
                    RegSpace, motion_outliers_type):
    level1_workflow = pe.Workflow(name='level1flow')
    level1_workflow.base_dir = os.path.abspath('./workingdirs/level1flow/' +
                                               contrasts_name + '/' + RegSpace)

    # ===================================================================
    #                  _____                   _
    #                 |_   _|                 | |
    #                   | |  _ __  _ __  _   _| |_
    #                   | | | '_ \| '_ \| | | | __|
    #                  _| |_| | | | |_) | |_| | |_
    #                 |_____|_| |_| .__/ \__,_|\__|
    #                             | |
    #                             |_|
    # ===================================================================

    # ------------------ Specify variables
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            'fwhm',  # smoothing
            'highpass',
            'funcs',
            'event_log',
            'motion_parameters',
            'motion_outlier_files',
            'ref_func',
            'ref_funcmask',
        ]),
        name="inputspec")

    def remove_runs_missing_funcs(in_files, in_funcs):
        import os
        import re

        assert not isinstance(in_files, str), "in_files must be a list"
        assert not isinstance(in_funcs, str), "in_funcs must be a list"

        has_func = set()
        for f in in_funcs:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError:
                raise RuntimeError('Could not parse "sub-*_", "ses-*_", '
                                   'or "run-*_" from func "%s"' % f)
            has_func.add((sub, ses, run))

        files = []
        for f in in_files:
            base = os.path.basename(f)
            try:
                sub = re.search(r'sub-([a-zA-Z0-9]+)_', base).group(1)
                ses = re.search(r'ses-([a-zA-Z0-9]+)_', base).group(1)
                run = re.search(r'run-([a-zA-Z0-9]+)_', base).group(1)
            except AttributeError:
                raise RuntimeError('Could not parse "sub-*_", "ses-*_", '
                                   'or "run-*_" from event file "%s"' % f)
            if (sub, ses, run) in has_func:
                files.append(f)
        return files
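    # e.g. an event log 'sub-01_ses-01_task-x_run-01_events.tsv' survives only
    # if some func shares the (sub, ses, run) triple ('01', '01', '01'); the
    # file name here is an illustrative BIDS-style example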

    input_events = pe.Node(
        interface=niu.Function(input_names=['in_files', 'in_funcs'],
                               output_names=['out_files'],
                               function=remove_runs_missing_funcs),
        name='input_events',
    )

    level1_workflow.connect([
        (inputnode, input_events, [
            ('funcs', 'in_funcs'),
            ('event_log', 'in_files'),
        ]),
    ])

    # -------------------------------------------------------------------
    #            /~_ _  _  _  _. _   _ . _  _ |. _  _
    #            \_/(/_| |(/_| |(_  |_)||_)(/_||| |(/_
    #                               |   |
    # -------------------------------------------------------------------
    """
    Preliminaries
    -------------
    Setup any package specific configuration. The output file format for FSL
    routines is being set to compressed NIFTI.
    """

    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    modelfit = fslflows.create_modelfit_workflow()
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    """
    Set up first-level workflow
    ---------------------------
    """

    def sort_copes(files):
        """ Sort by copes and the runs, ie.
            [[cope1_run1, cope1_run2], [cope2_run1, cope2_run2]]
        """
        assert files[0] is not str

        numcopes = len(files[0])
        assert numcopes > 1

        outfiles = []
        for i in range(numcopes):
            outfiles.insert(i, [])
            for j, elements in enumerate(files):
                outfiles[i].append(elements[i])
        return outfiles

    def num_copes(files):
        return len(files)

    # ===================================================================
    #                   ____        _               _
    #                  / __ \      | |             | |
    #                 | |  | |_   _| |_ _ __  _   _| |_
    #                 | |  | | | | | __| '_ \| | | | __|
    #                 | |__| | |_| | |_| |_) | |_| | |_
    #                  \____/ \__,_|\__| .__/ \__,_|\__|
    #                                  | |
    #                                  |_|
    # ===================================================================
    # --- LEV1 ---
    # Datasink
    outputfiles_lev1 = pe.Node(nio.DataSink(
        base_directory=ds_root,
        container=('derivatives/modelfit/' + contrasts_name + '/' + RegSpace +
                   '/level1/mo-' + motion_outliers_type),
        parameterization=True),
                               name="output_files")

    # Use the following DataSink output substitutions
    outputfiles_lev1.inputs.substitutions = [
        ('subject_id_', 'sub-'),
        ('session_id_', 'ses-'),
        ('/_mc_method_afni3dAllinSlices/', '/'),
    ]
    # Put result into a BIDS-like format
    outputfiles_lev1.inputs.regexp_substitutions = [
        (r'_ses-([a-zA-Z0-9]+)_sub-([a-zA-Z0-9]+)', r'sub-\2/ses-\1'),
        (r'_refsub([a-zA-Z0-9]+)', r''),
    ]
    level1_workflow.connect([
        (modelfit, outputfiles_lev1, [
            (('outputspec.copes', sort_copes), 'copes'),
            ('outputspec.dof_file', 'dof_files'),
            (('outputspec.varcopes', sort_copes), 'varcopes'),
        ]),
    ])

    # -------------------------------------------------------------------
    #          (~   _  _  _. _ _  _  _ _|_   _ _  _  _. |`. _
    #          (_><|_)(/_| || | |(/_| | |   _\|_)(/_(_|~|~|(_
    #              |                          |
    # -------------------------------------------------------------------
    """
    Use the get_node function to retrieve an internal node by name. Then set
    iterables on this node to perform two different extents of smoothing.
    """

    featinput = level1_workflow.get_node('modelfit.inputspec')
    featinput.inputs.fwhm = fwhm

    hpcutoff_s = HighPass  # high-pass cutoff in seconds
    TR = 2.5
    hpcutoff_nvol = hpcutoff_s / TR  # cutoff in volumes

    # Use Python3 for processing. See code/requirements.txt for pip packages.
    featinput.inputs.highpass = hpcutoff_nvol / 2.355  # Gaussian: σ in vols
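    # 2.355 ~= 2 * sqrt(2 * ln 2), the standard FWHM-to-sigma conversion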
    """
    Setup a function that returns subject-specific information about the
    experimental paradigm. This is used by the
    :class:`nipype.modelgen.SpecifyModel` to create the information necessary
    to generate an SPM design matrix. In this tutorial, the same paradigm was
    used for every participant. Other examples of this function are available
    in the `doc/examples` folder. Note: Python knowledge required here.
    """

    # from timeevents.curvetracing import calc_curvetracing_events
    from timeevents import process_time_events

    timeevents = pe.MapNode(
        interface=process_time_events,  # calc_curvetracing_events,
        iterfield=('event_log', 'in_nvols', 'TR'),
        name='timeevents')

    def get_nvols(funcs):
        import nibabel as nib
        nvols = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            try:
                # reading the shape from the header avoids loading the data
                nvols.append(func_img.shape[3])
            except IndexError:
                # a 3D image has no fourth dimension, so it is a single volume
                nvols.append(1)
        return nvols

    def get_TR(funcs):
        import nibabel as nib
        TRs = []

        if isinstance(funcs, str):
            funcs = [funcs]

        for func in funcs:
            func_img = nib.load(func)
            header = func_img.header
            try:
                TR = round(header.get_zooms()[3], 5)
            except IndexError:
                TR = 2.5
                print("Warning: %s did not have TR defined in the header. "
                      "Using default TR of %0.2f" % (func, TR))

            assert TR > 1
            TRs.append(TR)
        return TRs

    level1_workflow.connect([
        (inputnode, timeevents, [
            (('funcs', get_nvols), 'in_nvols'),
            (('funcs', get_TR), 'TR'),
        ]),
        (input_events, timeevents, [('out_files', 'event_log')]),
        (inputnode, modelspec, [('motion_parameters', 'realignment_parameters')
                                ]),
        (modelspec, modelfit, [('session_info', 'inputspec.session_info')]),
    ])

    # Ignore volumes after last good response
    filter_outliers = pe.MapNode(interface=FilterNumsTask(),
                                 name='filter_outliers',
                                 iterfield=('in_file', 'max_number'))

    level1_workflow.connect([
        (inputnode, filter_outliers, [('motion_outlier_files', 'in_file')]),
        (filter_outliers, modelspec, [('out_file', 'outlier_files')]),
        (timeevents, filter_outliers, [('out_nvols', 'max_number')]),
    ])

    def evt_info(cond_events):
        output = []

        # for each run
        for ev in cond_events:
            from nipype.interfaces.base import Bunch
            from copy import deepcopy
            names = []

            for name in ev.keys():
                if ev[name].shape[0] > 0:
                    names.append(name)

            # names only holds conditions that occurred in this run, so the
            # per-condition lists can be built without emptiness guards
            onsets = [deepcopy(ev[name].time) for name in names]
            durations = [deepcopy(ev[name].dur) for name in names]
            amplitudes = [deepcopy(ev[name].amplitude) for name in names]

            run_results = Bunch(
                conditions=names,
                onsets=onsets,
                durations=durations,
                amplitudes=amplitudes)

            output.append(run_results)
        return output
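    # evt_info yields one Bunch per run, e.g. (illustrative values only):
    # Bunch(conditions=['stim'], onsets=[[0.0, 12.5]],
    #       durations=[[1.0, 1.0]], amplitudes=[[1.0, 1.0]])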

    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = TR  # to-do: specify per func
    modelspec.inputs.high_pass_filter_cutoff = hpcutoff_s

    # Find out which HRF function we want to use
    if hrf == 'fsl_doublegamma':  # this is the default
        modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
    else:
        # use a custom HRF shape as given in the arguments; the value is a
        # path relative to the current working directory
        hrftxt = os.path.abspath(hrf)
        modelfit.inputs.inputspec.bases = {'custom': {'bfcustompath': hrftxt}}

    modelfit.inputs.inputspec.interscan_interval = TR
    modelfit.inputs.inputspec.contrasts = contrasts
    modelfit.inputs.inputspec.model_serial_correlations = True
    modelfit.inputs.inputspec.film_threshold = 1000

    modelfit.config['execution'] = dict(crashdump_dir=os.path.abspath('.'))

    # Ignore volumes after subject has finished working for the run
    beh_roi = pe.MapNode(fsl.ExtractROI(t_min=0),
                         name='beh_roi',
                         iterfield=['in_file', 't_size'])

    level1_workflow.connect([
        (timeevents, modelspec, [
            (('out_events', evt_info), 'subject_info'),
        ]),
        (inputnode, beh_roi, [
            ('funcs', 'in_file'),
        ]),
        (timeevents, beh_roi, [
            ('out_nvols', 't_size'),
        ]),
        (beh_roi, modelspec, [
            ('roi_file', 'functional_runs'),
        ]),
        (beh_roi, modelfit, [
            ('roi_file', 'inputspec.functional_data'),
        ]),
        (beh_roi, outputfiles_lev1, [
            ('roi_file', 'roi_file'),
        ]),
    ])
    return level1_workflow
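
# A minimal invocation sketch for the workflow above; every argument value is
# hypothetical and only illustrates the expected types:
# wf = create_workflow(contrasts=[('Stim>Baseline', 'T', ['stim'], [1.0])],
#                      out_label='stim', contrasts_name='stim_contrasts',
#                      hrf='fsl_doublegamma', fwhm=3.0, HighPass=50.0,
#                      RegSpace='native', motion_outliers_type='single')
# wf.run(plugin='MultiProc')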
Exemplo n.º 14
0
def create_subject_ffx_wf(
        sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols,
        lp_vols, remove_hemi, film_thresh, film_model_autocorr, use_derivs, tr,
        tcon_subtractive, cluster_threshold, cluster_thresh_frac, cluster_p,
        dilate_clusters_voxel, cond_ids, dsdir, work_basedir):
    # todo: new mapnode inputs: cluster_threshold, cluster_p
    """
    Make a workflow including preprocessing, first level, and second level GLM analysis for a given subject.
    This pipeline includes:
    - skull stripping
    - spatial smoothing
    - removing the irrelevant hemisphere
    - temporal band pass filter
    - 1st level GLM
    - averaging f-contrasts from 1st level GLM
    - clustering run-wise f-tests, dilating clusters, and returning binary roi mask
    """

    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces.fsl import BET, SUSAN, ImageMaths
    from nipype.interfaces.fsl.model import SmoothEstimate, Cluster
    from nipype.interfaces.fsl.maths import TemporalFilter, MathsCommand
    from nipype.interfaces.utility import Function
    from nipype.pipeline.engine import Workflow, Node, MapNode
    from nipype.workflows.fmri.fsl import create_modelfit_workflow
    from nipype.interfaces.fsl.maths import MultiImageMaths
    from nipype.interfaces.utility import IdentityInterface
    import sys
    from os.path import join as pjoin
    import os
    # the module's parent directory (not the .py file itself) must be on
    # sys.path for the import to work
    sys.path.insert(0, "/data/project/somato/raw/code/roi_glm")
    # TODO: don't hardcode this
    import custom_node_functions

    # set up sub-workflow
    sub_wf = Workflow(name='subject_%s_wf' % sub_id)
    # set up sub-working-directory
    subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs',
                     'subject_%s_ffx_workdir' % sub_id)
    if not os.path.exists(subwf_wd):
        os.makedirs(subwf_wd)
    sub_wf.base_dir = subwf_wd

    # Grab bold files for all four runs of one subject.
    # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2]
    grab_boldfiles = Node(Function(
        function=custom_node_functions.grab_boldfiles_subject,
        input_names=['sub_id', 'cond_ids', 'ds_dir'],
        output_names=['boldfiles']),
                          name='grab_boldfiles')
    grab_boldfiles.inputs.sub_id = sub_id
    grab_boldfiles.inputs.cond_ids = cond_ids
    grab_boldfiles.inputs.ds_dir = dsdir

    getonsets = Node(Function(
        function=custom_node_functions.grab_blocked_design_onsets_subject,
        input_names=['sub_id', 'prepped_ds_dir'],
        output_names=['blocked_design_onsets_dicts']),
                     name='getonsets')
    getonsets.inputs.sub_id = sub_id
    getonsets.inputs.prepped_ds_dir = dsdir

    # pass bold files through preprocessing pipeline
    bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),
                  iterfield=['in_file'],
                  name='bet')

    pick_mask = Node(Function(function=custom_node_functions.pick_first_mask,
                              input_names=['mask_files'],
                              output_names=['first_mask']),
                     name='pick_mask')

    # SUSAN smoothing node
    susan = MapNode(SUSAN(fwhm=spatial_fwhm,
                          brightness_threshold=susan_brightthresh),
                    iterfield=['in_file'],
                    name='susan')

    # bandpass filter node
    bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,
                                 lowpass_sigma=lp_vols / 2.3548),
                  iterfield=['in_file'],
                  name='bpf')

    # cut away hemisphere node
    if remove_hemi == 'r':
        roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'
    elif remove_hemi == 'l':
        roi_args = '-roi 0 96 0 -1 0 -1 0 -1'
    else:
        raise ValueError('did not recognize value of remove_hemi: %s' %
                         remove_hemi)

    cut_hemi_func = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_func')
    cut_hemi_func.inputs.args = roi_args

    cut_hemi_mask = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_mask')
    cut_hemi_mask.inputs.args = roi_args

    # Make Design and Contrasts for that subject
    # subject_info is a list of two Bunches, one per run, each containing
    # conditions, onsets, and durations
    designgen = Node(Function(
        input_names=['subtractive_contrast', 'blocked_design_onsets_dicts'],
        output_names=['subject_info', 'contrasts'],
        function=custom_node_functions.make_bunch_and_contrasts),
                     name='designgen')
    designgen.inputs.subtractive_contrast = tcon_subtractive

    # create 'session_info' for modelfit
    modelspec = MapNode(SpecifyModel(input_units='secs'),
                        name='modelspec',
                        iterfield=['functional_runs', 'subject_info'])
    modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr
    modelspec.inputs.time_repetition = tr

    flatten_session_infos = Node(Function(
        input_names=['nested_list'],
        output_names=['flat_list'],
        function=custom_node_functions.flatten_nested_list),
                                 name='flatten_session_infos')

    # First-level workflow
    modelfit = create_modelfit_workflow(f_contrasts=True)
    modelfit.inputs.inputspec.interscan_interval = tr
    modelfit.inputs.inputspec.film_threshold = film_thresh
    modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}

    # node that reshapes list of copes returned from modelfit
    cope_sorter = Node(Function(input_names=['copes', 'varcopes', 'contrasts'],
                                output_names=['copes', 'varcopes', 'n_runs'],
                                function=custom_node_functions.sort_copes),
                       name='cope_sorter')

    # average zfstats from both runs
    split_zfstats = Node(Function(
        function=custom_node_functions.split_zfstats_runs,
        input_names=['zfstats_list'],
        output_names=['zfstat_run1', 'zfstat_run2']),
                         name='split_zfstats')
    average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'),
                           name='mean_images')

    # estimate smoothness of 1st lvl zf-files
    smoothest = MapNode(SmoothEstimate(),
                        name='smoothest',
                        iterfield=['mask_file', 'zstat_file'])

    cluster = MapNode(Cluster(),
                      name='cluster',
                      iterfield=['in_file', 'volume', 'dlh'])
    cluster.inputs.threshold = cluster_threshold
    cluster.inputs.pthreshold = cluster_p
    cluster.inputs.fractional = cluster_thresh_frac
    cluster.inputs.no_table = True
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_size_file = True

    # dilate clusters
    dilate = MapNode(MathsCommand(args='-kernel sphere %i -dilD' %
                                  dilate_clusters_voxel),
                     iterfield=['in_file'],
                     name='dilate')

    # binarize the result to a mask
    binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),
                           iterfield=['in_file'],
                           name='binarize_roi')

    # connect preprocessing
    sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')
    sub_wf.connect(bet, 'out_file', susan, 'in_file')
    sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')
    sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')
    sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')
    # connect to 1st level model
    sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')
    sub_wf.connect(getonsets, 'blocked_design_onsets_dicts', designgen,
                   'blocked_design_onsets_dicts')
    sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')
    sub_wf.connect(modelspec, 'session_info', flatten_session_infos,
                   'nested_list')
    sub_wf.connect(flatten_session_infos, 'flat_list', modelfit,
                   'inputspec.session_info')
    sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')
    sub_wf.connect(cut_hemi_func, 'out_file', modelfit,
                   'inputspec.functional_data')
    # connect to cluster thresholding
    sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', smoothest,
                   'zstat_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', cluster,
                   'in_file')
    sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')
    sub_wf.connect(smoothest, 'volume', cluster, 'volume')
    sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')
    sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')
    # connect to averaging f-contrasts
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   split_zfstats, 'zfstats_list')
    sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')
    sub_wf.connect(split_zfstats, 'zfstat_run2', average_zfstats,
                   'operand_files')
    # redirect to outputspec
    # TODO: redirect outputspec to a datasink in the meta-workflow
    outputspec = Node(IdentityInterface(fields=[
        'threshold_file', 'index_file', 'pval_file', 'localmax_txt_file',
        'roi'
    ]),
                      name='outputspec')
    sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')
    sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    sub_wf.connect(cluster, 'localmax_txt_file', outputspec,
                   'localmax_txt_file')
    sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')

    # run subject-lvl workflow
    # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')
    # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
    # sub_wf.run(plugin='CondorDAGMan')
    # sub_wf.run()

    return sub_wf
Exemplo n.º 15
0
modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = TR
modelspec.inputs.high_pass_filter_cutoff = 100
modelspec.inputs.subject_info = [Bunch(conditions=['Control', 'Interference'],
                                       onsets=[list(range(12, 193, 60)),
                                               list(range(42, 223, 60))],
                                       # (drop first 4 TRs)
                                       # onsets=[list(range(20, 201, 60)),
                                       #         list(range(50, 231, 60))],
                                       durations=[[30], [30]],
                                       regressors=None)]
## end moral dilemma


#workflow.connect(datasource, 'func', modelspec,'functional_runs')
workflow.connect(smooth, 'out_file', modelspec, 'functional_runs')


#modelfit
modelfit = create_modelfit_workflow(name='ftest', f_contrasts=True)
modelfit.inputs.inputspec.interscan_interval = TR
modelfit.inputs.inputspec.model_serial_correlations = True
modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': True}}
cont1 = ['Control>Baseline', 'T', ['Control', 'Interference'], [1, 0]]
cont2 = ['Interference>Baseline', 'T', ['Control', 'Interference'], [0, 1]]
cont3 = ['Task', 'F', [cont1, cont2]]
cont4 = ['Interference>Control', 'T', ['Control', 'Interference'], [-1, 1]]
cont5 = ['Control>Interference', 'T', ['Control', 'Interference'], [1, -1]]

modelfit.inputs.inputspec.contrasts = [cont1, cont2, cont3, cont4, cont5]

workflow.connect(modelspec, 'session_info', modelfit, 'inputspec.session_info')

#workflow.connect(datasource, 'func', modelfit, 'inputspec.functional_data')
workflow.connect(smooth, 'out_file', modelfit, 'inputspec.functional_data')
Exemplo n.º 16
0
def create_modelfit_workflow_bfsl(name='modelfit_workflow_bfsl'):

    inputspec = pe.Node(util.IdentityInterface(fields=['functional_runs',
                                                       'bases',
                                                       'bfsl_files',
                                                       'contrasts',
                                                       'interscan_interval',
                                                       'film_threshold',
                                                       'model_serial_correlations',
                                                       'highpass_filter',
                                                       'mask',
                                                       'realignment_parameters']),
                        name='inputspec')

    workflow = pe.Workflow(name=name)

    modelfit_workflow = create_modelfit_workflow()

    # sensible defaults; all of these can be overridden via the inputspec
    inputspec.inputs.bases = {'dgamma': {'derivs': True}}
    inputspec.inputs.film_threshold = 1000
    inputspec.inputs.interscan_interval = 2.0
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.highpass_filter = 128
    for field in ['bases', 'contrasts', 'film_threshold', 'interscan_interval', 'model_serial_correlations']:
        workflow.connect(inputspec, field, modelfit_workflow, 'inputspec.%s' % field)


    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces import fsl

    specifymodel = pe.Node(SpecifyModel(), name='specifymodel')
    specifymodel.inputs.input_units = 'secs'
    
    def get_highpass_filter_cutoff(cutoff_sec, tr):
        # convert the high-pass filter period from seconds to volumes
        return float(cutoff_sec) / (tr * 2)

    get_highpass_filter_cutoff_node = pe.Node(util.Function(function=get_highpass_filter_cutoff,
                                                            input_names=['cutoff_sec', 'tr'],
                                                            output_names=['cutoff']),
                                              name='get_highpass_filter_cutoff_node')

    workflow.connect(inputspec, 'interscan_interval', get_highpass_filter_cutoff_node, 'tr')
    workflow.connect(inputspec, 'highpass_filter', get_highpass_filter_cutoff_node, 'cutoff_sec')
    workflow.connect(get_highpass_filter_cutoff_node, 'cutoff', specifymodel, 'high_pass_filter_cutoff')
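    # e.g. the default 128 s filter at a 2.0 s TR gives 128 / (2 * 2.0) = 32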
    
    workflow.connect(inputspec, 'interscan_interval', specifymodel, 'time_repetition')    
    workflow.connect(inputspec, 'bfsl_files', specifymodel, 'event_files')
    workflow.connect(inputspec, 'functional_runs', specifymodel, 'functional_runs')
    workflow.connect(inputspec, 'realignment_parameters', specifymodel, 'realignment_parameters')    
    
    workflow.connect(specifymodel, 'session_info', modelfit_workflow, 'inputspec.session_info')
    workflow.connect(inputspec, 'functional_runs', modelfit_workflow, 'inputspec.functional_data')


    fixedfx = create_fixed_effects_flow()

    workflow.connect(inputspec, 'mask', fixedfx, 'flameo.mask_file')

    def num_copes(files):
        return len(files)

    def transpose_copes(copes):    
        import numpy as np
        return np.array(copes).T.tolist()
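    # regroups run-major nesting into contrast-major nesting, e.g.
    # [[c1_r1, c2_r1], [c1_r2, c2_r2]] -> [[c1_r1, c1_r2], [c2_r1, c2_r2]]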

    workflow.connect([(modelfit_workflow, fixedfx,
                       [(('outputspec.copes', transpose_copes), 'inputspec.copes'),
                        (('outputspec.varcopes', transpose_copes), 'inputspec.varcopes'),
                        ('outputspec.dof_file', 'inputspec.dof_files'),
                        (('outputspec.copes', num_copes), 'l2model.num_copes')])])


    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         nested=True,
                         iterfield=['in_file'],
                         name='ztop',)

    fdr_workflow = create_fdr_threshold_workflow()

    workflow.connect([
                      (fixedfx, ztopval,
                       [('outputspec.zstats', 'in_file'),]),
                      (fixedfx, fdr_workflow,
                       [('outputspec.zstats', 'inputspec.z_stats'),]),
                      (ztopval, fdr_workflow,
                       [('out_file', 'inputspec.p_values'),]),
                      (inputspec, fdr_workflow,
                       [('mask', 'inputspec.mask'),]),
                      ])

    outputspec = pe.Node(util.IdentityInterface(fields=['zstats',
                                                        'level2_copes',
                                                        'level2_varcopes',
                                                        'level2_tdof',
                                                        'thresholded_z_stats']),
                         name='outputspec')

    workflow.connect(fixedfx, 'outputspec.zstats', outputspec, 'zstats')
    workflow.connect(fixedfx, 'outputspec.copes', outputspec, 'level2_copes')
    workflow.connect(fixedfx, 'outputspec.varcopes', outputspec, 'level2_varcopes')
    workflow.connect(fixedfx, 'flameo.tdof', outputspec, 'level2_tdof')
    workflow.connect(fdr_workflow, 'outputspec.thresholded_z_stats', outputspec, 'thresholded_z_stats')

    
    return workflow