Example 1
def test_modelgen1():
    tempdir = mkdtemp()
    filename1 = os.path.join(tempdir, "test1.nii")
    filename2 = os.path.join(tempdir, "test2.nii")
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2)
    s = SpecifyModel()
    s.inputs.input_units = "scans"
    set_output_units = lambda: setattr(s.inputs, "output_units", "scans")
    yield assert_raises, TraitError, set_output_units
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.0
    info = [
        Bunch(
            conditions=["cond1"],
            onsets=[[2, 50, 100, 180]],
            durations=[[1]],
            amplitudes=None,
            pmod=None,
            regressors=None,
            regressor_names=None,
            tmod=None,
        ),
        Bunch(
            conditions=["cond1"],
            onsets=[[30, 40, 100, 150]],
            durations=[[1]],
            amplitudes=None,
            pmod=None,
            regressors=None,
            regressor_names=None,
            tmod=None,
        ),
    ]
    s.inputs.subject_info = info
    res = s.run()
    yield assert_equal, len(res.outputs.session_info), 2
    yield assert_equal, len(res.outputs.session_info[0]["regress"]), 0
    yield assert_equal, len(res.outputs.session_info[0]["cond"]), 1
    yield assert_almost_equal, np.array(res.outputs.session_info[0]["cond"][0]["onset"]), np.array([12, 300, 600, 1080])
    info = [
        Bunch(conditions=["cond1"], onsets=[[2]], durations=[[1]]),
        Bunch(conditions=["cond1"], onsets=[[3]], durations=[[1]]),
    ]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_almost_equal, np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0])
    yield assert_almost_equal, np.array(res.outputs.session_info[1]["cond"][0]["duration"]), np.array([6.0])
    info = [
        Bunch(conditions=["cond1", "cond2"], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]),
        Bunch(conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]], durations=[[1, 1], [1, 1]]),
    ]
    s.inputs.subject_info = deepcopy(info)
    s.inputs.input_units = "scans"
    res = s.run()
    yield assert_almost_equal, np.array(res.outputs.session_info[0]["cond"][0]["duration"]), np.array([6.0, 6.0])
    yield assert_almost_equal, np.array(res.outputs.session_info[0]["cond"][1]["duration"]), np.array([6.0])
    yield assert_almost_equal, np.array(res.outputs.session_info[1]["cond"][1]["duration"]), np.array([6.0, 6.0])
    rmtree(tempdir)
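The nose-style yield tests above (and their near-duplicates in Examples 7 and 9) omit the module header. A minimal sketch of the imports they assume, following nipype's historical test layout (exact import locations may differ by version):

import os
from copy import deepcopy
from shutil import rmtree
from tempfile import mkdtemp

import numpy as np
from nibabel import Nifti1Image

from nipype.algorithms.modelgen import SpecifyModel
from nipype.interfaces.base import Bunch, TraitError
from nipype.testing import assert_equal, assert_raises, assert_almost_equal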
Example 2
def test_SpecifyModel_inputs():
    input_map = dict(
        event_files=dict(
            mandatory=True,
            xor=['subject_info'],
        ),
        functional_runs=dict(
            copyfile=False,
            mandatory=True,
        ),
        high_pass_filter_cutoff=dict(mandatory=True, ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        input_units=dict(mandatory=True, ),
        outlier_files=dict(copyfile=False, ),
        realignment_parameters=dict(copyfile=False, ),
        subject_info=dict(
            mandatory=True,
            xor=['event_files'],
        ),
        time_repetition=dict(mandatory=True, ),
    )
    inputs = SpecifyModel.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example 3
def test_SpecifyModel_outputs():
    output_map = dict(session_info=dict(), )
    outputs = SpecifyModel.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
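Examples 2 and 3 are autogenerated spec tests: they walk a metadata map and compare each entry against the trait definitions on the interface. One loop iteration is equivalent to a direct check like this sketch:

from nipype.algorithms.modelgen import SpecifyModel

inputs = SpecifyModel.input_spec()
# functional_runs is mandatory and is not copied into the working directory
# (metadata taken from the map in Example 2)
assert inputs.traits()['functional_runs'].mandatory is True
assert inputs.traits()['functional_runs'].copyfile is False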
Example 4
def test_SpecifyModel_inputs():
    input_map = dict(
        event_files=dict(
            mandatory=True,
            xor=['subject_info', 'event_files'],
        ),
        functional_runs=dict(
            copyfile=False,
            mandatory=True,
        ),
        high_pass_filter_cutoff=dict(mandatory=True, ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        input_units=dict(mandatory=True, ),
        outlier_files=dict(copyfile=False, ),
        realignment_parameters=dict(copyfile=False, ),
        subject_info=dict(
            mandatory=True,
            xor=['subject_info', 'event_files'],
        ),
        time_repetition=dict(mandatory=True, ),
    )
    inputs = SpecifyModel.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example 5
def test_model(base_dir, plot=False, workflow_name="test_model_wf"):

	specify_model = pe.Node(interface=SpecifyModel(), name="specify_model")
	specify_model.inputs.input_units = 'secs'
	specify_model.inputs.functional_runs = ["/home/chymera/ni_data/ofM.dr/level1/Preprocessing/_condition_ofM_subject_4011/functional_bandpass/corr_16_trans_filt.nii.gz"]
	specify_model.inputs.time_repetition = 1
	specify_model.inputs.high_pass_filter_cutoff = 0 #switch to 240
	specify_model.inputs.subject_info = subjectinfo(49.55)

	level1design = pe.Node(interface=Level1Design(), name="level1design")
	level1design.inputs.interscan_interval = 1
	level1design.inputs.bases = {'gamma': {'derivs': False}}
	level1design.inputs.model_serial_correlations = True
	level1design.inputs.contrasts = [('allStim','T', ["s1","s2","s3","s4","s5","s6"],[1,1,1,1,1,1])]

	modelgen = pe.Node(interface=FEATModel(), name='modelgen')

	test_model_wf = pe.Workflow(name=workflow_name)
	test_model_wf.base_dir = base_dir

	test_model_wf.connect([
		(specify_model,level1design,[('session_info','session_info')]),
		(level1design, modelgen, [('ev_files', 'ev_files')]),
		(level1design, modelgen, [('fsf_files', 'fsf_file')]),
		])

	# test_model_wf.run(plugin="MultiProc",  plugin_args={'n_procs' : 4})
	test_model_wf.run()
	test_model_wf.write_graph(dotfilename="graph.dot", graph2use="hierarchical", format="png")

	if plot:
		matfile = path.join(base_dir,workflow_name,"modelgen/run0.mat")
		plotmodel(matfile)
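Example 5 calls an external subjectinfo() helper that is not shown. A hypothetical sketch of what it could return, inferred from the six "s1".."s6" contrast names and the 49.55 s argument (the condition count, onsets, and durations here are assumptions):

from nipype.interfaces.base import Bunch

def subjectinfo(stim_period):
    # one Bunch per run: six stimulus conditions with evenly spaced onsets
    n_stim = 6
    return [Bunch(conditions=['s%d' % (i + 1) for i in range(n_stim)],
                  onsets=[[i * stim_period] for i in range(n_stim)],
                  durations=[[20.0]] * n_stim)]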
Example 6
def test_SpecifyModel_outputs():
    output_map = dict(session_info=dict(), )
    outputs = SpecifyModel.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example 7
def test_modelgen1():
    tempdir = mkdtemp()
    filename1 = os.path.join(tempdir, 'test1.nii')
    filename2 = os.path.join(tempdir, 'test2.nii')
    Nifti1Image(np.random.rand(10, 10, 10, 200),
                np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 200),
                np.eye(4)).to_filename(filename2)
    s = SpecifyModel()
    s.inputs.input_units = 'scans'
    set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans')
    yield assert_raises, TraitError, set_output_units
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [
        Bunch(conditions=['cond1'],
              onsets=[[2, 50, 100, 180]],
              durations=[[1]],
              amplitudes=None,
              pmod=None,
              regressors=None,
              regressor_names=None,
              tmod=None),
        Bunch(conditions=['cond1'],
              onsets=[[30, 40, 100, 150]],
              durations=[[1]],
              amplitudes=None,
              pmod=None,
              regressors=None,
              regressor_names=None,
              tmod=None)
    ]
    s.inputs.subject_info = info
    res = s.run()
    yield assert_equal, len(res.outputs.session_info), 2
    yield assert_equal, len(res.outputs.session_info[0]['regress']), 0
    yield assert_equal, len(res.outputs.session_info[0]['cond']), 1
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), np.array(
            [12, 300, 600, 1080])
    rmtree(tempdir)
Example 8
def test_modelgen1(tmpdir):
    tempdir = str(tmpdir)
    filename1 = os.path.join(tempdir, 'test1.nii')
    filename2 = os.path.join(tempdir, 'test2.nii')
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2)
    s = SpecifyModel()
    s.inputs.input_units = 'scans'
    set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans')
    with pytest.raises(TraitError):
        set_output_units()
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]], amplitudes=None,
                  pmod=None, regressors=None, regressor_names=None, tmod=None),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]], amplitudes=None,
                  pmod=None, regressors=None, regressor_names=None, tmod=None)]
    s.inputs.subject_info = info
    res = s.run()
    assert len(res.outputs.session_info) == 2
    assert len(res.outputs.session_info[0]['regress']) == 0
    assert len(res.outputs.session_info[0]['cond']) == 1
    npt.assert_almost_equal(np.array(res.outputs.session_info[0]['cond'][0]['onset']), np.array([12, 300, 600, 1080]))
    info = [Bunch(conditions=['cond1'], onsets=[[2]], durations=[[1]]),
            Bunch(conditions=['cond1'], onsets=[[3]], durations=[[1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    npt.assert_almost_equal(np.array(res.outputs.session_info[0]['cond'][0]['duration']), np.array([6.]))
    npt.assert_almost_equal(np.array(res.outputs.session_info[1]['cond'][0]['duration']), np.array([6.]))
    info = [Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]], durations=[[1, 1], [1]]),
            Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2, 4]], durations=[[1, 1], [1, 1]])]
    s.inputs.subject_info = deepcopy(info)
    s.inputs.input_units = 'scans'
    res = s.run()
    npt.assert_almost_equal(np.array(res.outputs.session_info[0]['cond'][0]['duration']), np.array([6., 6.]))
    npt.assert_almost_equal(np.array(res.outputs.session_info[0]['cond'][1]['duration']), np.array([6., ]))
    npt.assert_almost_equal(np.array(res.outputs.session_info[1]['cond'][1]['duration']), np.array([6., 6.]))
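This pytest variant assumes a header along these lines (a sketch; the tmpdir fixture provides the temporary directory, so no manual rmtree cleanup is needed):

import os
from copy import deepcopy

import numpy as np
import numpy.testing as npt
import pytest
from nibabel import Nifti1Image

from nipype.algorithms.modelgen import SpecifyModel
from nipype.interfaces.base import Bunch, TraitError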
Example 9
def test_modelgen1():
    tempdir = mkdtemp()
    filename1 = os.path.join(tempdir, 'test1.nii')
    filename2 = os.path.join(tempdir, 'test2.nii')
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 200), np.eye(4)).to_filename(filename2)
    s = SpecifyModel()
    s.inputs.input_units = 'scans'
    set_output_units = lambda: setattr(s.inputs, 'output_units', 'scans')
    yield assert_raises, TraitError, set_output_units
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]], amplitudes=None,
                  pmod=None, regressors=None, regressor_names=None, tmod=None),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]], amplitudes=None,
                  pmod=None, regressors=None, regressor_names=None, tmod=None)]
    s.inputs.subject_info = info
    res = s.run()
    yield assert_equal, len(res.outputs.session_info), 2
    yield assert_equal, len(res.outputs.session_info[0]['regress']), 0
    yield assert_equal, len(res.outputs.session_info[0]['cond']), 1
    yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][0]['onset']), np.array([12, 300, 600, 1080])
    rmtree(tempdir)
Example 10
def create_modelgen_workflow(name='modelgen', skip_specify_model=False):
    input_node = pe.Node(IdentityInterface(fields=[
        'events_file',
        'single_trial',
        'sort_by_onset',
        'exclude',
        'func_file',
        'TR',
        'confound_file',
        'which_confounds',
        'extend_motion_pars',
        'hp_filter',
    ]),
                         name='inputspec')

    output_node = pe.Node(IdentityInterface(fields=['session_info']),
                          name='outputspec')

    events_file_to_bunch = pe.MapNode(Events_file_to_bunch,
                                      iterfield=['in_file'],
                                      name='events_file_to_bunch')

    load_confounds = pe.MapNode(Load_confounds,
                                iterfield=['in_file'],
                                name='load_confounds')

    combine_evs = pe.MapNode(
        Combine_events_and_confounds,
        iterfield=['subject_info', 'confound_names', 'confounds'],
        name='combine_evs')

    modelgen_wf = pe.Workflow(name=name)
    modelgen_wf.connect(input_node, 'events_file', events_file_to_bunch,
                        'in_file')
    modelgen_wf.connect(input_node, 'single_trial', events_file_to_bunch,
                        'single_trial')
    modelgen_wf.connect(input_node, 'sort_by_onset', events_file_to_bunch,
                        'sort_by_onset')
    modelgen_wf.connect(input_node, 'exclude', events_file_to_bunch, 'exclude')

    modelgen_wf.connect(input_node, 'confound_file', load_confounds, 'in_file')
    modelgen_wf.connect(input_node, 'which_confounds', load_confounds,
                        'which_confounds')
    modelgen_wf.connect(input_node, 'extend_motion_pars', load_confounds,
                        'extend_motion_pars')
    modelgen_wf.connect(events_file_to_bunch, 'subject_info', combine_evs,
                        'subject_info')
    modelgen_wf.connect(load_confounds, 'regressor_names', combine_evs,
                        'confound_names')
    modelgen_wf.connect(load_confounds, 'regressors', combine_evs, 'confounds')

    if skip_specify_model:
        modelgen_wf.connect(combine_evs, 'subject_info', output_node,
                            'session_info')
    else:
        specify_model = pe.MapNode(
            SpecifyModel(input_units='secs'),
            iterfield=['subject_info', 'functional_runs'],
            name='specify_model')
        modelgen_wf.connect(input_node, 'hp_filter', specify_model,
                            'high_pass_filter_cutoff')
        modelgen_wf.connect(combine_evs, 'subject_info', specify_model,
                            'subject_info')
        modelgen_wf.connect(input_node, 'func_file', specify_model,
                            'functional_runs')
        modelgen_wf.connect(input_node, 'TR', specify_model, 'time_repetition')
        modelgen_wf.connect(specify_model, 'session_info', output_node,
                            'session_info')

    return modelgen_wf
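A hypothetical way to drive the factory above (all field values are placeholders, and the custom Events_file_to_bunch, Load_confounds, and Combine_events_and_confounds interfaces are assumed to be importable):

wf = create_modelgen_workflow(name='modelgen')
wf.inputs.inputspec.TR = 2.0
wf.inputs.inputspec.hp_filter = 100.0
wf.inputs.inputspec.func_file = ['sub-01_task-x_bold.nii.gz']
wf.inputs.inputspec.events_file = ['sub-01_task-x_events.tsv']
wf.run()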
Example 11
    sgfilter = Node(Function(
        input_names=['in_file', 'window_length', 'polyorder'],
        output_names=['sg_file'],
        function=savgol_filter),
                    name='sgfilter',
                    iterfield=['in_file'])

    sgfilter.inputs.window_length = analysis_info['sg_filter_window_length']
    sgfilter.inputs.polyorder = analysis_info['sg_filter_order']

    l1 = Node(
        SpecifyModel(
            subject_info=info,
            input_units='secs',
            time_repetition=analysis_info['TR'],
            high_pass_filter_cutoff=analysis_info[
                'highpass_filter']  # set to -1 for no filtering as already have done SG filtering
        ),
        name='l1')

    l1model = Node(Level1Design(interscan_interval=analysis_info['TR'],
                                bases={'dgamma': {
                                    'derivs': True
                                }},
                                model_serial_correlations=True,
                                contrasts=contrasts),
                   name='l1design')

    l1featmodel = Node(FEATModel(), name='l1model')
Example 12
    weights_vector = [float(1)] + [float(0)] * (num_regressors - 1) + [float(0)]
    contrasts = [('Condition1', 'T', condition_vector, weights_vector)]
    subject_info = [(Bunch(regressors=regressors_file_data,
                           regressor_names=['seed'] + ['nuisance'] *
                           (num_regressors - 1) + ['constant']))]
    return subject_info, contrasts


model_helper = Node(Function(input_names=['regressors_file'],
                             output_names=['subject_info', 'contrasts'],
                             function=model_helper),
                    name='model_helper')

session_info = Node(SpecifyModel(high_pass_filter_cutoff=128,
                                 input_units='secs',
                                 time_repetition=2.0),
                    name='session_info')

model_spec = Node(Level1Design(timing_units='secs',
                               interscan_interval=2.0,
                               microtime_resolution=16,
                               microtime_onset=1,
                               bases={'hrf': {
                                   'derivs': [0, 0]
                               }},
                               global_intensity_normalization='none',
                               mask_threshold=0.8,
                               model_serial_correlations='AR(1)',
                               volterra_expansion_order=2),
                  name='model_spec')
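A minimal sketch of how the three nodes above are typically chained (node and field names come from this snippet; the workflow name is made up):

import nipype.pipeline.engine as pe

wf = pe.Workflow(name='seed_glm')
wf.connect([
    (model_helper, session_info, [('subject_info', 'subject_info')]),
    (session_info, model_spec, [('session_info', 'session_info')]),
])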
Example 13
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """
    First level workflow
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]),
                        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')

    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim',
                  mem_gb=20)

    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')

    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ),
                 name='l1')

    l1model = pe.Node(fsl.Level1Design(interscan_interval=2,
                                       bases={'dgamma': {
                                           'derivs': True
                                       }},
                                       model_serial_correlations=True),
                      name='l1design')

    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                         detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni',
                              mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True,
                                          detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni',
                               mem_gb=20)

    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        return val**(1 / 3.)

    def _fwhm(fwhm):
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),

        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow
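A hypothetical invocation of the factory above (paths and IDs are placeholders; output_dir must be a pathlib.Path because the workflow joins it with the / operator, and the inputnode fields still have to be set or connected before running):

from pathlib import Path

wf = first_level_wf('fmriprep', 'sub-01', 'stopsignal', Path('/tmp/derivatives'))
wf.base_dir = '/tmp/work'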
Example 14
datasource.inputs.func_scans = [1]
datasource.inputs.template = '*'
datasource.inputs.field_template = dict(func='SID%d/Scans/Scan%d*.nii.gz',
                                        struct='SID%d/Scans/co*.nii.gz',
                                        evs='SID%d/EVfiles/RUN%d/*.txt')
datasource.inputs.template_args = dict(func=[['subject_id', 'func_scans']],
                                       struct=[['subject_id']],
                                       evs=[['subject_id', 'func_scans']])
datasource.inputs.sort_filelist = False
results = datasource.run()

print(results.outputs)

cont1 = ['Bundling-Control', 'T', ['Bundling', 'Control'], [1, -1]]

s = SpecifyModel()
s.inputs.input_units = 'secs'
s.inputs.functional_runs = results.outputs.func
s.inputs.time_repetition = 2
s.inputs.high_pass_filter_cutoff = 128.
s.inputs.event_files = results.outputs.evs
model = s.run()

level1design = Level1Design()
level1design.inputs.interscan_interval = 2.5
level1design.inputs.bases = {'dgamma': {'derivs': False}}
level1design.inputs.model_serial_correlations = False
level1design.inputs.session_info = model.outputs.session_info
level1design.inputs.contrasts = [cont1]
l1d = level1design.run()
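The event_files input used above expects simple text files, one per condition, with one to three columns (onsets, and optionally durations and amplitudes). A plausible EV file, with made-up values:

# Bundling.txt: onset  duration  amplitude
# 12.0    1.0    1
# 48.5    1.0    1
# 96.0    1.0    1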
Example 15
    def runglmperun(self, subject, trtimeinsec):
        s = SpecifyModel()
        # loop on all runs and models within each run
        modelfiles = subject._modelfiles

        for model in modelfiles:
            # Make directory results to store the results of the model
            results_dir = os.path.join(subject._path, 'model', model[0],
                                       'results', model[1])
            dir_util.mkpath(results_dir)
            os.chdir(results_dir)

            s.inputs.event_files = model[2]
            s.inputs.input_units = 'secs'
            s.inputs.functional_runs = os.path.join(subject._path, 'BOLD',
                                                    model[1],
                                                    'bold_mcf_hp.nii.gz')
            # use nibabel to get the TR from the .nii file
            s.inputs.time_repetition = trtimeinsec
            s.inputs.high_pass_filter_cutoff = 128.
            # find par file that has motion
            motionfiles = glob(
                os.path.join(subject._path, 'BOLD', model[1], "*.par"))
            s.inputs.realignment_parameters = motionfiles
            # info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], durations=[[1]]),
            #         Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])]
            #s.inputs.subject_info = None

            res = s.run()
            res.runtime.cwd
            print ">>>> preparing evs for model " + model[
                1] + "and run " + model[0]
            sessionInfo = res.outputs.session_info

            level1design = Level1Design()
            level1design.inputs.interscan_interval = trtimeinsec
            level1design.inputs.bases = {'dgamma': {'derivs': False}}
            level1design.inputs.session_info = sessionInfo
            level1design.inputs.model_serial_correlations = True
            #TODO: add contrasts to level 1 design so that I have just condition vs rest for each ev
            # TODO: Look into changing this to FILM instead of FEAT - this also has the option of setting an output directory
            # http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT/UserGuide#Contrasts
            #http://nipy.org/nipype/interfaces/generated/nipype.interfaces.fsl.model.html#filmgls
            resLevel = level1design.run()

            featModel = FEATModel()
            featModel.inputs.fsf_file = resLevel.outputs.fsf_files
            featModel.inputs.ev_files = resLevel.outputs.ev_files
            resFeat = featModel.run()

            print ">>>> creating fsf design files for  " + model[
                1] + "and run " + model[0]
            # TODO: give mask here
            glm = fsl.GLM(in_file=s.inputs.functional_runs[0],
                          design=resFeat.outputs.design_file,
                          output_type='NIFTI')

            print ">>>> running glm for  " + model[1] + "and run " + model[0]
            resGlm = glm.run()

            print ">>>> finished running  glm for  " + model[
                1] + "and run " + model[0]
Example 16
def first_level_wf(in_files, output_dir, fwhm=6.0, name='wf_1st_level'):
    workflow = pe.Workflow(name=name)
    datasource = pe.Node(niu.Function(function=_dict_ds,
                                      output_names=DATA_ITEMS),
                         name='datasource')
    datasource.inputs.in_dict = in_files
    datasource.iterables = ('sub', sorted(in_files.keys()))

    # Extract motion parameters from regressors file
    runinfo = pe.Node(niu.Function(input_names=[
        'in_file', 'events_file', 'regressors_file', 'regressors_names'
    ],
                                   function=_bids2nipypeinfo,
                                   output_names=['info', 'realign_file']),
                      name='runinfo')

    # Set the column names to be used from the confounds file
    runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
        ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]

    # SUSAN smoothing
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = fwhm

    l1_spec = pe.Node(SpecifyModel(parameter_source='FSL',
                                   input_units='secs',
                                   high_pass_filter_cutoff=100),
                      name='l1_spec')

    # l1_model creates a first-level model design
    l1_model = pe.Node(
        fsl.Level1Design(
            bases={'dgamma': {
                'derivs': True
            }},
            model_serial_correlations=True,
            #ENTER YOUR OWN CONTRAST HERE
            contrasts=[],
            # orthogonalization=orthogonality,
        ),
        name='l1_model')

    # feat_spec generates an fsf model specification file
    feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')
    # feat_fit actually runs FEAT
    feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=12)

    feat_select = pe.Node(nio.SelectFiles({
        'cope': 'stats/cope1.nii.gz',
        'pe': 'stats/pe[0-9][0-9].nii.gz',
        'tstat': 'stats/tstat1.nii.gz',
        'varcope': 'stats/varcope1.nii.gz',
        'zstat': 'stats/zstat1.nii.gz',
    }),
                          name='feat_select')

    ds_cope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                          keep_dtype=False,
                                          suffix='cope',
                                          desc='intask'),
                      name='ds_cope',
                      run_without_submitting=True)

    ds_varcope = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                             keep_dtype=False,
                                             suffix='varcope',
                                             desc='intask'),
                         name='ds_varcope',
                         run_without_submitting=True)

    ds_zstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='zstat',
                                           desc='intask'),
                       name='ds_zstat',
                       run_without_submitting=True)

    ds_tstat = pe.Node(DerivativesDataSink(base_directory=str(output_dir),
                                           keep_dtype=False,
                                           suffix='tstat',
                                           desc='intask'),
                       name='ds_tstat',
                       run_without_submitting=True)

    workflow.connect([
        (datasource, susan, [('bold', 'inputnode.in_files'),
                             ('mask', 'inputnode.mask_file')]),
        (datasource, runinfo, [('events', 'events_file'),
                               ('regressors', 'regressors_file')]),
        (susan, l1_spec, [('outputnode.smoothed_files', 'functional_runs')]),
        (datasource, l1_spec, [('tr', 'time_repetition')]),
        (datasource, l1_model, [('tr', 'interscan_interval')]),
        (datasource, ds_cope, [('bold', 'source_file')]),
        (datasource, ds_varcope, [('bold', 'source_file')]),
        (datasource, ds_zstat, [('bold', 'source_file')]),
        (datasource, ds_tstat, [('bold', 'source_file')]),
        (susan, runinfo, [('outputnode.smoothed_files', 'in_file')]),
        (runinfo, l1_spec, [('info', 'subject_info'),
                            ('realign_file', 'realignment_parameters')]),
        (l1_spec, l1_model, [('session_info', 'session_info')]),
        (l1_model, feat_spec, [('fsf_files', 'fsf_file'),
                               ('ev_files', 'ev_files')]),
        (l1_model, feat_fit, [('fsf_files', 'fsf_file')]),
        (feat_fit, feat_select, [('feat_dir', 'base_directory')]),
        (feat_select, ds_cope, [('cope', 'in_file')]),
        (feat_select, ds_varcope, [('varcope', 'in_file')]),
        (feat_select, ds_zstat, [('zstat', 'in_file')]),
        (feat_select, ds_tstat, [('tstat', 'in_file')]),
    ])
    return workflow
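The datasource above relies on a DATA_ITEMS constant and a _dict_ds helper defined elsewhere; a plausible sketch, consistent with the fields connected later in the workflow (bold, mask, events, regressors, tr):

DATA_ITEMS = ['bold', 'mask', 'events', 'regressors', 'tr']

def _dict_ds(in_dict, sub, order=('bold', 'mask', 'events', 'regressors', 'tr')):
    # return one tuple per subject so each output name in DATA_ITEMS gets a value
    return tuple(in_dict[sub][k] for k in order)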
Example 17
def test_modelgen1(tmpdir):
    filename1 = tmpdir.join("test1.nii").strpath
    filename2 = tmpdir.join("test2.nii").strpath
    Nifti1Image(np.random.rand(10, 10, 10, 200),
                np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 200),
                np.eye(4)).to_filename(filename2)
    s = SpecifyModel()
    s.inputs.input_units = "scans"
    set_output_units = lambda: setattr(s.inputs, "output_units", "scans")
    with pytest.raises(TraitError):
        set_output_units()
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.0
    info = [
        Bunch(
            conditions=["cond1"],
            onsets=[[2, 50, 100, 180]],
            durations=[[1]],
            amplitudes=None,
            pmod=None,
            regressors=None,
            regressor_names=None,
            tmod=None,
        ),
        Bunch(
            conditions=["cond1"],
            onsets=[[30, 40, 100, 150]],
            durations=[[1]],
            amplitudes=None,
            pmod=None,
            regressors=None,
            regressor_names=None,
            tmod=None,
        ),
    ]
    s.inputs.subject_info = info
    res = s.run()
    assert len(res.outputs.session_info) == 2
    assert len(res.outputs.session_info[0]["regress"]) == 0
    assert len(res.outputs.session_info[0]["cond"]) == 1
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
        np.array([12, 300, 600, 1080]),
    )
    info = [
        Bunch(conditions=["cond1"], onsets=[[2]], durations=[[1]]),
        Bunch(conditions=["cond1"], onsets=[[3]], durations=[[1]]),
    ]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
        np.array([6.0]))
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[1]["cond"][0]["duration"]),
        np.array([6.0]))
    info = [
        Bunch(conditions=["cond1", "cond2"],
              onsets=[[2, 3], [2]],
              durations=[[1, 1], [1]]),
        Bunch(
            conditions=["cond1", "cond2"],
            onsets=[[2, 3], [2, 4]],
            durations=[[1, 1], [1, 1]],
        ),
    ]
    s.inputs.subject_info = deepcopy(info)
    s.inputs.input_units = "scans"
    res = s.run()
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][0]["duration"]),
        np.array([6.0, 6.0]),
    )
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][1]["duration"]),
        np.array([6.0]))
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[1]["cond"][1]["duration"]),
        np.array([6.0, 6.0]),
    )
Example 18
def firstlevel_wf(subject_id, sink_directory, name='ds008_R2_frstlvl_wf'):

    frstlvl_wf = Workflow(name='frstlvl_wf')

    info = dict(task_mri_files=[['subject_id', 'stopsignal']],
                motion_noise_files=[['subject_id', 'filter_regressor']])

    # Create a Function node to define stimulus onsets, etc... for each subject
    subject_info = Node(Function(input_names=['subject_id'],
                                 output_names=['output'],
                                 function=subjectinfo),
                        name='subject_info')
    subject_info.inputs.ignore_exception = False
    subject_info.inputs.subject_id = subject_id

    # Create another Function node to define the contrasts for the experiment
    getcontrasts = Node(Function(input_names=['subject_id'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    getcontrasts.inputs.subject_id = subject_id

    # Create a Function node to substitute names of files created during pipeline
    getsubs = Node(Function(input_names=['subject_id', 'cons', 'info'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getsubs, 'info')
    frstlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    # Create a datasource node to get the task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=info.keys()),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    #datasource.inputs.base_directory = os.path.abspath('/scratch/PSB6351_2017/ds008_R2.0.0/preproc/')
    #datasource.inputs.field_template = dict(task_mri_files='%s/func/realigned/*%s*.nii.gz',
    #                                        motion_noise_files='%s/noise/%s*.txt')
    datasource.inputs.base_directory = os.path.abspath(
        '/scratch/PSB6351_2017/students/salo/data/preproc/')
    datasource.inputs.field_template = dict(
        task_mri_files='%s/preproc/func/smoothed/corr_*_task-%s_*_bold_bet_smooth_mask.nii.gz',
        motion_noise_files='%s/preproc/noise/%s*.txt')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    # Create a Function node to modify the motion and noise files to be single regressors
    motionnoise = Node(Function(input_names=['subjinfo', 'files'],
                                output_names=['subjinfo'],
                                function=motion_noise),
                       name='motionnoise')
    motionnoise.inputs.ignore_exception = False
    frstlvl_wf.connect(subject_info, 'output', motionnoise, 'subjinfo')
    frstlvl_wf.connect(datasource, 'motion_noise_files', motionnoise, 'files')

    # Create a specify model node
    specify_model = Node(SpecifyModel(), name='specify_model')
    specify_model.inputs.high_pass_filter_cutoff = 128.
    specify_model.inputs.ignore_exception = False
    specify_model.inputs.input_units = 'secs'
    specify_model.inputs.time_repetition = 2.
    frstlvl_wf.connect(datasource, 'task_mri_files', specify_model,
                       'functional_runs')
    frstlvl_wf.connect(motionnoise, 'subjinfo', specify_model, 'subject_info')

    # Create an InputSpec node for the modelfit node
    modelfit_inputspec = Node(IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ],
                                                mandatory_inputs=True),
                              name='modelfit_inputspec')
    modelfit_inputspec.inputs.bases = {'dgamma': {'derivs': False}}
    modelfit_inputspec.inputs.film_threshold = 0.0
    modelfit_inputspec.inputs.interscan_interval = 2.0
    modelfit_inputspec.inputs.model_serial_correlations = True
    frstlvl_wf.connect(datasource, 'task_mri_files', modelfit_inputspec,
                       'functional_data')
    frstlvl_wf.connect(getcontrasts, 'contrasts', modelfit_inputspec,
                       'contrasts')
    frstlvl_wf.connect(specify_model, 'session_info', modelfit_inputspec,
                       'session_info')

    # Create a level1 design node
    level1_design = Node(Level1Design(), name='level1_design')
    level1_design.inputs.ignore_exception = False
    frstlvl_wf.connect(modelfit_inputspec, 'interscan_interval', level1_design,
                       'interscan_interval')
    frstlvl_wf.connect(modelfit_inputspec, 'session_info', level1_design,
                       'session_info')
    frstlvl_wf.connect(modelfit_inputspec, 'contrasts', level1_design,
                       'contrasts')
    frstlvl_wf.connect(modelfit_inputspec, 'bases', level1_design, 'bases')
    frstlvl_wf.connect(modelfit_inputspec, 'model_serial_correlations',
                       level1_design, 'model_serial_correlations')

    # Create a MapNode to generate a model for each run
    generate_model = MapNode(FEATModel(),
                             iterfield=['fsf_file', 'ev_files'],
                             name='generate_model')
    generate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    generate_model.inputs.ignore_exception = False
    generate_model.inputs.output_type = 'NIFTI_GZ'
    generate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(level1_design, 'fsf_files', generate_model, 'fsf_file')
    frstlvl_wf.connect(level1_design, 'ev_files', generate_model, 'ev_files')

    # Create a MapNode to estimate the model using FILMGLS
    estimate_model = MapNode(FILMGLS(),
                             iterfield=['design_file', 'in_file', 'tcon_file'],
                             name='estimate_model')
    frstlvl_wf.connect(generate_model, 'design_file', estimate_model,
                       'design_file')
    frstlvl_wf.connect(generate_model, 'con_file', estimate_model, 'tcon_file')
    frstlvl_wf.connect(modelfit_inputspec, 'functional_data', estimate_model,
                       'in_file')

    # Create a merge node to merge the contrasts - necessary for fsl 5.0.7 and greater
    merge_contrasts = MapNode(Merge(2),
                              iterfield=['in1'],
                              name='merge_contrasts')
    frstlvl_wf.connect(estimate_model, 'zstats', merge_contrasts, 'in1')

    # Create a MapNode to transform the z2pval
    z2pval = MapNode(ImageMaths(), iterfield=['in_file'], name='z2pval')
    z2pval.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    z2pval.inputs.ignore_exception = False
    z2pval.inputs.op_string = '-ztop'
    z2pval.inputs.output_type = 'NIFTI_GZ'
    z2pval.inputs.suffix = '_pval'
    z2pval.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(merge_contrasts, ('out', pop_lambda), z2pval, 'in_file')

    # Create an outputspec node
    modelfit_outputspec = Node(IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'parameter_estimates',
        'zstats', 'design_image', 'design_file', 'design_cov', 'sigmasquareds'
    ],
                                                 mandatory_inputs=True),
                               name='modelfit_outputspec')
    frstlvl_wf.connect(estimate_model, 'copes', modelfit_outputspec, 'copes')
    frstlvl_wf.connect(estimate_model, 'varcopes', modelfit_outputspec,
                       'varcopes')
    frstlvl_wf.connect(merge_contrasts, 'out', modelfit_outputspec, 'zstats')
    frstlvl_wf.connect(z2pval, 'out_file', modelfit_outputspec, 'pfiles')
    frstlvl_wf.connect(generate_model, 'design_image', modelfit_outputspec,
                       'design_image')
    frstlvl_wf.connect(generate_model, 'design_file', modelfit_outputspec,
                       'design_file')
    frstlvl_wf.connect(generate_model, 'design_cov', modelfit_outputspec,
                       'design_cov')
    frstlvl_wf.connect(estimate_model, 'param_estimates', modelfit_outputspec,
                       'parameter_estimates')
    frstlvl_wf.connect(estimate_model, 'dof_file', modelfit_outputspec,
                       'dof_file')
    frstlvl_wf.connect(estimate_model, 'sigmasquareds', modelfit_outputspec,
                       'sigmasquareds')

    # Create a datasink node
    sinkd = Node(DataSink(), name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    frstlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')
    frstlvl_wf.connect(modelfit_outputspec, 'parameter_estimates', sinkd,
                       'modelfit.estimates')
    frstlvl_wf.connect(modelfit_outputspec, 'sigmasquareds', sinkd,
                       'modelfit.estimates.@sigsq')
    frstlvl_wf.connect(modelfit_outputspec, 'dof_file', sinkd, 'modelfit.dofs')
    frstlvl_wf.connect(modelfit_outputspec, 'copes', sinkd,
                       'modelfit.contrasts.@copes')
    frstlvl_wf.connect(modelfit_outputspec, 'varcopes', sinkd,
                       'modelfit.contrasts.@varcopes')
    frstlvl_wf.connect(modelfit_outputspec, 'zstats', sinkd,
                       'modelfit.contrasts.@zstats')
    frstlvl_wf.connect(modelfit_outputspec, 'design_image', sinkd,
                       'modelfit.design')
    frstlvl_wf.connect(modelfit_outputspec, 'design_cov', sinkd,
                       'modelfit.design.@cov')
    frstlvl_wf.connect(modelfit_outputspec, 'design_file', sinkd,
                       'modelfit.design.@matrix')
    frstlvl_wf.connect(modelfit_outputspec, 'pfiles', sinkd,
                       'modelfit.contrasts.@pstats')

    return frstlvl_wf
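The merge_contrasts-to-z2pval connection above uses a pop_lambda helper that the snippet assumes; a plausible definition, matching how it picks the first element of the merged list:

pop_lambda = lambda x: x[0]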
Example 19
#workflow.run('MultiProc') #, plugin_args={'n_procs': 3}) # doesn't work on local - not enough memory
#workflow.run() # on local

#%% set contrasts
cont1 = ['Trauma>Sad', 'T', ['trauma', 'sad'], [1, -1]]
cont2 = ['Trauma>Relax', 'T', ['trauma', 'relax'], [1, -1]]
cont3 = ['Sad>Relax', 'T', ['sad', 'relax'], [1, -1]]
cont4 = ['Trauma', 'T', ['trauma'], [1]]
cont5 = ['Sad', 'T', ['sad'], [1]]
contrasts = [cont1, cont2, cont3, cont4, cont5]


#%%
l1_spec = pe.Node(SpecifyModel(
    parameter_source='FSL',
    input_units='secs',
    high_pass_filter_cutoff=120,
    time_repetition=tr,
), name='l1_spec')

# l1_model creates a first-level model design
l1_model = pe.Node(fsl.Level1Design(
    bases={'dgamma': {'derivs': True}},
    model_serial_correlations=True,
    interscan_interval=tr,
    contrasts=contrasts,
    # orthogonalization=orthogonality,
), name='l1_model')

# feat_spec generates an fsf model specification file
feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')
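The snippet stops before the nodes are wired together; a sketch of the usual connections (consistent with Example 16 in this collection; the workflow object and the functional-runs input are assumed):

workflow = pe.Workflow(name='l1')
workflow.connect([
    (l1_spec, l1_model, [('session_info', 'session_info')]),
    (l1_model, feat_spec, [('fsf_files', 'fsf_file'),
                           ('ev_files', 'ev_files')]),
])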
Example 20
def combine_wkflw(c, prep_c=foo, name='work_dir'):
    import nipype.interfaces.utility as util  # utility
    import nipype.pipeline.engine as pe  # pypeline engine
    import nipype.interfaces.io as nio  # input/output
    from nipype.algorithms.modelgen import SpecifyModel, SpecifySparseModel
    import numpy as np
    modelflow = pe.Workflow(name=name)
    modelflow.base_dir = os.path.join(c.working_dir)

    preproc = c.datagrabber.create_dataflow()  #preproc_datagrabber(prep_c)

    #infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
    #                     name='subject_names')

    #if c.test_mode:
    #    infosource.iterables = ('subject_id', [c.subjects[0]])
    #else:
    #    infosource.iterables = ('subject_id', c.subjects)

    infosource = preproc.get_node('subject_id_iterable')
    #modelflow.connect(infosource,'subject_id',preproc,'subject_id')
    #preproc.iterables = ('fwhm', prep_c.fwhm)

    subjectinfo = pe.Node(util.Function(input_names=['subject_id'],
                                        output_names=['output']),
                          name='subjectinfo')
    subjectinfo.inputs.function_str = c.subjectinfo

    def getsubs(subject_id, cons, info, fwhm):
        #from config import getcontrasts, get_run_numbers, subjectinfo, fwhm
        subs = [('_subject_id_%s/' % subject_id, ''), ('_plot_type_', ''),
                ('_fwhm', 'fwhm'), ('_dtype_mcf_mask_mean', '_mean'),
                ('_dtype_mcf_mask_smooth_mask_gms_tempfilt',
                 '_smoothed_preprocessed'),
                ('_dtype_mcf_mask_gms_tempfilt', '_unsmoothed_preprocessed'),
                ('_dtype_mcf', '_mcf')]

        for i in range(4):
            subs.append(('_plot_motion%d' % i, ''))
            subs.append(('_highpass%d/' % i, ''))
            subs.append(('_realign%d/' % i, ''))
            subs.append(('_meanfunc2%d/' % i, ''))
        runs = range(len(info))
        for i, run in enumerate(runs):
            subs.append(('_modelestimate%d/' % i, '_run_%d_%02d_' % (i, run)))
            subs.append(('_modelgen%d/' % i, '_run_%d_%02d_' % (i, run)))
            subs.append(('_conestimate%d/' % i, '_run_%d_%02d_' % (i, run)))
        for i, con in enumerate(cons):
            subs.append(
                ('cope%d.' % (i + 1), 'cope%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('varcope%d.' % (i + 1), 'varcope%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('zstat%d.' % (i + 1), 'zstat%02d_%s.' % (i + 1, con[0])))
            subs.append(
                ('tstat%d.' % (i + 1), 'tstat%02d_%s.' % (i + 1, con[0])))
        """for i, name in enumerate(info[0].conditions):
            subs.append(('pe%d.'%(i+1), 'pe%02d_%s.'%(i+1,name)))
        for i in range(len(info[0].conditions), 256):
            subs.append(('pe%d.'%(i+1), 'others/pe%02d.'%(i+1)))"""
        for i in fwhm:
            subs.append(('_register%d/' % (i), ''))

        return subs

    get_substitutions = pe.Node(util.Function(
        input_names=['subject_id', 'cons', 'info', 'fwhm'],
        output_names=['subs'],
        function=getsubs),
                                name='getsubs')

    # create a node to create the subject info
    if not c.is_sparse:
        s = pe.Node(SpecifyModel(), name='s')
    else:
        s = pe.Node(SpecifySparseModel(
            model_hrf=c.model_hrf,
            stimuli_as_impulses=c.stimuli_as_impulses,
            use_temporal_deriv=c.use_temporal_deriv,
            volumes_in_cluster=c.volumes_in_cluster,
            scan_onset=c.scan_onset,
            scale_regressors=c.scale_regressors),
                    name='s')
        s.inputs.time_acquisition = c.ta
    s.inputs.input_units = c.input_units
    s.inputs.time_repetition = c.tr
    if c.hpcutoff < 0:
        c.hpcutoff = np.inf
    s.inputs.high_pass_filter_cutoff = c.hpcutoff
    # subjinfo = subjectinfo(subj)

    # create a node to add the traditional (MCFLIRT-derived) motion regressors to
    # the subject info
    trad_motn = pe.Node(util.Function(input_names=['subinfo', 'files'],
                                      output_names=['subinfo'],
                                      function=trad_mot),
                        name='trad_motn')

    #subjinfo = pe.Node(interface=util.Function(input_names=['subject_id','get_run_numbers'], output_names=['output'], function = c.subjectinfo), name='subjectinfo')
    #subjinfo.inputs.get_run_numbers = c.get_run_numbers
    #modelflow.connect(infosource,'subject_id',
    #                  subjinfo,'subject_id' )
    #modelflow.connect(subjinfo, 'output',
    #                  trad_motn, 'subinfo')

    #modelflow.connect(infosource, ('subject_id',subjectinfo), trad_motn, 'subinfo')
    modelflow.connect(infosource, 'subject_id', subjectinfo, 'subject_id')
    modelflow.connect(subjectinfo, 'output', trad_motn, 'subinfo')

    # create a node to add the principle components of the noise regressors to
    # the subject info
    noise_motn = pe.Node(util.Function(input_names=[
        'subinfo', 'files', 'num_noise_components', "use_compcor"
    ],
                                       output_names=['subinfo'],
                                       function=noise_mot),
                         name='noise_motn')
    noise_motn.inputs.use_compcor = c.use_compcor
    # generate first level analysis workflow
    modelfit = create_first()
    modelfit.inputs.inputspec.interscan_interval = c.interscan_interval
    modelfit.inputs.inputspec.film_threshold = c.film_threshold

    contrasts = pe.Node(util.Function(input_names=['subject_id'],
                                      output_names=['contrasts']),
                        name='getcontrasts')
    contrasts.inputs.function_str = c.contrasts

    modelflow.connect(infosource, 'subject_id', contrasts, 'subject_id')
    modelflow.connect(contrasts, 'contrasts', modelfit, 'inputspec.contrasts')

    modelfit.inputs.inputspec.bases = c.bases
    modelfit.inputs.inputspec.model_serial_correlations = True
    noise_motn.inputs.num_noise_components = prep_c.num_noise_components

    # make a data sink
    sinkd = pe.Node(nio.DataSink(), name='sinkd')
    sinkd.inputs.base_directory = os.path.join(c.sink_dir)

    modelflow.connect(infosource, 'subject_id', sinkd, 'container')
    #modelflow.connect(infosource, ('subject_id',getsubs, getcontrasts, subjectinfo, prep_c.fwhm), sinkd, 'substitutions')
    modelflow.connect(infosource, 'subject_id', get_substitutions,
                      'subject_id')
    modelflow.connect(contrasts, 'contrasts', get_substitutions, 'cons')
    modelflow.connect(subjectinfo, 'output', get_substitutions, 'info')
    get_substitutions.inputs.fwhm = prep_c.fwhm
    modelflow.connect(get_substitutions, 'subs', sinkd, 'substitutions')

    sinkd.inputs.regexp_substitutions = [
        ('mask/fwhm_%d/_threshold([0-9]*)/.*nii' % x,
         'mask/fwhm_%d/funcmask.nii' % x) for x in prep_c.fwhm
    ]
    sinkd.inputs.regexp_substitutions.append(
        ('realigned/fwhm_([0-9])/_copy_geom([0-9]*)/', 'realigned/'))
    sinkd.inputs.regexp_substitutions.append(
        ('motion/fwhm_([0-9])/', 'motion/'))
    sinkd.inputs.regexp_substitutions.append(('bbreg/fwhm_([0-9])/', 'bbreg/'))

    # make connections
    modelflow.connect(preproc, 'datagrabber.motion_parameters', trad_motn,
                      'files')
    modelflow.connect(preproc, 'datagrabber.noise_components', noise_motn,
                      'files')
    modelflow.connect(preproc, 'datagrabber.highpassed_files', s,
                      'functional_runs')
    modelflow.connect(preproc, 'datagrabber.highpassed_files', modelfit,
                      'inputspec.functional_data')
    modelflow.connect(preproc, 'datagrabber.outlier_files', s, 'outlier_files')
    modelflow.connect(trad_motn, 'subinfo', noise_motn, 'subinfo')
    modelflow.connect(noise_motn, 'subinfo', s, 'subject_info')
    modelflow.connect(s, 'session_info', modelfit, 'inputspec.session_info')
    modelflow.connect(modelfit, 'outputspec.parameter_estimates', sinkd,
                      'modelfit.estimates')
    modelflow.connect(modelfit, 'outputspec.sigmasquareds', sinkd,
                      'modelfit.estimates.@sigsq')
    modelflow.connect(modelfit, 'outputspec.dof_file', sinkd, 'modelfit.dofs')
    modelflow.connect(modelfit, 'outputspec.copes', sinkd,
                      'modelfit.contrasts.@copes')
    modelflow.connect(modelfit, 'outputspec.varcopes', sinkd,
                      'modelfit.contrasts.@varcopes')
    modelflow.connect(modelfit, 'outputspec.zstats', sinkd,
                      'modelfit.contrasts.@zstats')
    modelflow.connect(modelfit, 'outputspec.tstats', sinkd,
                      'modelfit.contrasts.@tstats')
    modelflow.connect(modelfit, 'outputspec.design_image', sinkd,
                      'modelfit.design')
    modelflow.connect(modelfit, 'outputspec.design_cov', sinkd,
                      'modelfit.design.@cov')
    modelflow.connect(modelfit, 'outputspec.design_file', sinkd,
                      'modelfit.design.@matrix')
    modelflow.connect(modelfit, 'outputspec.pfiles', sinkd,
                      'modelfit.contrasts.@pstats')
    return modelflow
Example 21
datasource.inputs.template_args = dict(func=[['subject_id', 'func_scans']],
                                       struct=[['subject_id']],
                                       evs=[['subject_id', 'func_scans']])
datasource.inputs.sort_filelist = False

#Node: Datasink - Create a datasink node to store important outputs
datasink = pe.Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = experiment_dir

#Define where the datasink input should be stored at
datasink.inputs.container = 'results/'

# Model Specs

#Use nipype.algorithms.modelgen.SpecifyModel to generate design information.
modelspec = pe.Node(interface=SpecifyModel(), name="modelspec")
modelspec.inputs.input_units = 'secs'
modelspec.inputs.time_repetition = TR
modelspec.inputs.high_pass_filter_cutoff = high_pass

#Use nipype.interfaces.fsl.Level1Design to generate a run specific fsf file for analysis
level1design = pe.Node(
    interface=fsl.Level1Design(
        interscan_interval=TR,
        model_serial_correlations=modelSerialCorrelations,
        bases={'dgamma': {
            'derivs': False
        }},
        contrasts=conts),
    name="level1design",
)
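TR, high_pass, conts, and modelSerialCorrelations are assumed to be defined earlier in the script; for instance (all values are placeholders):

TR = 2.0                        # repetition time in seconds
high_pass = 128.0               # high-pass filter cutoff in seconds
modelSerialCorrelations = True  # prewhiten with FILM
conts = [['task>rest', 'T', ['task'], [1]]]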
Example 22
def firstlevel_wf(subject_id, sink_directory, name='wmaze_frstlvl_wf'):
    # Create the frstlvl workflow
    frstlvl_wf = Workflow(name='frstlvl_wf')

    # Dictionary holding the wildcard used in datasource
    info = dict(task_mri_files=[['subject_id', 'wmaze']],
                motion_noise_files=[['subject_id']])

    # Calls the subjectinfo function with the name, onset, duration, and amplitude info
    subject_info = Node(Function(input_names=['subject_id'],
                                 output_names=['output'],
                                 function=subjectinfo),
                        name='subject_info')
    subject_info.inputs.ignore_exception = False
    subject_info.inputs.subject_id = subject_id

    # Create another Function node to define the contrasts for the experiment
    getcontrasts = Node(
        Function(
            input_names=['subject_id', 'info'],
            output_names=['contrasts'],
            # Calls the function 'get_contrasts'
            function=get_contrasts),
        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    # Receives subject_id as input
    getcontrasts.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getcontrasts, 'info')

    #### subject_info (output) ----> getcontrasts (info)

    # Create a Function node to substitute names of folders and files created during pipeline
    getsubs = Node(
        Function(
            input_names=['subject_id', 'cons', 'info'],
            output_names=['subs'],
            # Calls the function 'get_subs'
            function=get_subs),
        name='getsubs')
    getsubs.inputs.ignore_exception = False
    # Receives subject_id as input
    getsubs.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getsubs, 'info')
    frstlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    # Create a datasource node to get the task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=list(info.keys())),
                      name='datasource')
    # Indicates the string template to match (in this case, any that match the field template)
    datasource.inputs.template = '*'
    # Receives subject_id as an input
    datasource.inputs.subject_id = subject_id
    # Base directory to allow branching pathways
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/preproc/')
    datasource.inputs.field_template = dict(
        task_mri_files='%s/func/smoothed_fullspectrum/_maskfunc2*/*%s*.nii.gz',
        # Filter regressor noise files
        motion_noise_files='%s/noise/filter_regressor*.txt')
    # Inputs from the infields argument ('subject_id') that satisfy the template
    datasource.inputs.template_args = info
    # Forces DataGrabber to return data in sorted order when using wildcards
    datasource.inputs.sort_filelist = True
    # Do not ignore exceptions
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    # Function to remove last three volumes from functional data
    # Start from the first volume and end on the -3 volume
    fslroi_epi = MapNode(ExtractROI(t_min=0, t_size=197),
                         iterfield=['in_file'],
                         name='fslroi_epi')
    fslroi_epi.inputs.output_type = 'NIFTI_GZ'
    fslroi_epi.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(datasource, 'task_mri_files', fslroi_epi, 'in_file')

    # Function node to modify the motion and noise files to be single regressors
    motionnoise = Node(
        Function(
            input_names=['subjinfo', 'files'],
            output_names=['subjinfo'],
            # Calls the function 'motion_noise'
            function=motion_noise),
        name='motionnoise')
    motionnoise.inputs.ignore_exception = False
    # The bunch from subject_info function containing regressor names, onsets, durations, and amplitudes
    frstlvl_wf.connect(subject_info, 'output', motionnoise, 'subjinfo')
    frstlvl_wf.connect(datasource, 'motion_noise_files', motionnoise, 'files')

    # Makes a model specification compatible with spm/fsl designers
    # Requires subjectinfo to be received in the form of a Bunch of a list of Bunch
    specify_model = Node(SpecifyModel(), name='specify_model')
    # High-pass filter cutoff in seconds (-1.0 leaves the data unfiltered)
    specify_model.inputs.high_pass_filter_cutoff = -1.0
    specify_model.inputs.ignore_exception = False
    # input units in either 'secs' or 'scans'
    specify_model.inputs.input_units = 'secs'
    # Time between start of one volume and the start of following volume
    specify_model.inputs.time_repetition = 2.0
    # Edited data files for model -- list of 4D files
    frstlvl_wf.connect(fslroi_epi, 'roi_file', specify_model,
                       'functional_runs')
    # List of event description files in 3 column format corresponding to onsets, durations, and amplitudes
    frstlvl_wf.connect(motionnoise, 'subjinfo', specify_model, 'subject_info')

    # Basic interface class generates identity mappings
    modelfit_inputspec = Node(IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ],
                                                mandatory_inputs=True),
                              name='modelfit_inputspec')
    # Set bases to a dictionary with a second dictionary setting the value of dgamma derivatives as 'False'
    modelfit_inputspec.inputs.bases = {'dgamma': {'derivs': False}}
    # Film threshold
    modelfit_inputspec.inputs.film_threshold = 0.0
    # Interscan_interval
    modelfit_inputspec.inputs.interscan_interval = 2.0
    # Create model serial correlations for Level1Design
    modelfit_inputspec.inputs.model_serial_correlations = True
    frstlvl_wf.connect(fslroi_epi, 'roi_file', modelfit_inputspec,
                       'functional_data')
    frstlvl_wf.connect(getcontrasts, 'contrasts', modelfit_inputspec,
                       'contrasts')
    frstlvl_wf.connect(specify_model, 'session_info', modelfit_inputspec,
                       'session_info')

    # Creates a first level SPM design matrix to demonstrate contrasts and motion/noise regressors
    level1_design = MapNode(Level1Design(),
                            iterfield=['contrasts', 'session_info'],
                            name='level1_design')
    level1_design.inputs.ignore_exception = False
    # Inputs the interscan interval (in secs)
    frstlvl_wf.connect(modelfit_inputspec, 'interscan_interval', level1_design,
                       'interscan_interval')
    # Session specific information generated by ``modelgen.SpecifyModel``
    frstlvl_wf.connect(modelfit_inputspec, 'session_info', level1_design,
                       'session_info')
    # List of contrasts with each contrast being a list of the form [('name', 'stat', [condition list], [weight list], [session list])].
    # If session list is None or not provided, all sessions are used.
    frstlvl_wf.connect(modelfit_inputspec, 'contrasts', level1_design,
                       'contrasts')
    # Name of basis function and options e.g., {'dgamma': {'derivs': True}}
    frstlvl_wf.connect(modelfit_inputspec, 'bases', level1_design, 'bases')
    # Option to model serial correlations using an autoregressive estimator (order 1)
    # Setting this option is only useful in the context of the fsf file
    frstlvl_wf.connect(modelfit_inputspec, 'model_serial_correlations',
                       level1_design, 'model_serial_correlations')

    # Create a MapNode to generate a design.mat file for each run
    generate_model = MapNode(FEATModel(),
                             iterfield=['fsf_file', 'ev_files'],
                             name='generate_model')
    generate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    generate_model.inputs.ignore_exception = False
    generate_model.inputs.output_type = 'NIFTI_GZ'
    generate_model.inputs.terminal_output = 'stream'
    # File specifying the feat design spec file
    frstlvl_wf.connect(level1_design, 'fsf_files', generate_model, 'fsf_file')
    # Event spec files generated by level1design (condition information files)
    frstlvl_wf.connect(level1_design, 'ev_files', generate_model, 'ev_files')

    # Create a MapNode to estimate the model using FILMGLS -- fits the design matrix to the voxel timeseries
    estimate_model = MapNode(FILMGLS(),
                             iterfield=['design_file', 'in_file', 'tcon_file'],
                             name='estimate_model')
    estimate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    estimate_model.inputs.ignore_exception = False
    # Susan-smooth mask size
    estimate_model.inputs.mask_size = 5
    estimate_model.inputs.output_type = 'NIFTI_GZ'
    estimate_model.inputs.results_dir = 'results'
    # Smooth auto-correlation estimates
    estimate_model.inputs.smooth_autocorr = True
    estimate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(modelfit_inputspec, 'film_threshold', estimate_model,
                       'threshold')
    frstlvl_wf.connect(modelfit_inputspec, 'functional_data', estimate_model,
                       'in_file')
    # Mat file containing ascii matrix for design
    frstlvl_wf.connect(generate_model, 'design_file', estimate_model,
                       'design_file')
    # Contrast file containing contrast vectors
    frstlvl_wf.connect(generate_model, 'con_file', estimate_model, 'tcon_file')

    # Create a merge node to merge the contrasts - necessary for fsl 5.0.7 and greater
    merge_contrasts = MapNode(Merge(2),
                              iterfield=['in1'],
                              name='merge_contrasts')
    frstlvl_wf.connect(estimate_model, 'zstats', merge_contrasts, 'in1')

    # Create a MapNode to transform the z2pval
    z2pval = MapNode(ImageMaths(), iterfield=['in_file'], name='z2pval')
    z2pval.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    # Do not ignore exceptions
    z2pval.inputs.ignore_exception = False
    # Defines the operation used
    z2pval.inputs.op_string = '-ztop'
    # Set the outfile type to nii.gz
    z2pval.inputs.output_type = 'NIFTI_GZ'
    # Out-file suffix
    z2pval.inputs.suffix = '_pval'
    # Set output to stream in terminal
    z2pval.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(merge_contrasts, ('out', pop_lambda), z2pval, 'in_file')

    # Create an outputspec node using IdentityInterface() to receive information from estimate_model,
    # merge_contrasts, z2pval, generate_model, and estimate_model
    modelfit_outputspec = Node(IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'parameter_estimates',
        'zstats', 'design_image', 'design_file', 'design_cov', 'sigmasquareds'
    ],
                                                 mandatory_inputs=True),
                               name='modelfit_outputspec')
    # All lvl1 cope files
    frstlvl_wf.connect(estimate_model, 'copes', modelfit_outputspec, 'copes')
    # All lvl1 varcope files
    frstlvl_wf.connect(estimate_model, 'varcopes', modelfit_outputspec,
                       'varcopes')
    # All zstats across runs
    frstlvl_wf.connect(merge_contrasts, 'out', modelfit_outputspec, 'zstats')
    # P-value images converted from the merged z-stats
    frstlvl_wf.connect(z2pval, 'out_file', modelfit_outputspec, 'pfiles')
    # Graphical representation of design matrix
    frstlvl_wf.connect(generate_model, 'design_image', modelfit_outputspec,
                       'design_image')
    # Mat file containing ascii matrix for design
    frstlvl_wf.connect(generate_model, 'design_file', modelfit_outputspec,
                       'design_file')
    # Graphical representation of design covariance
    frstlvl_wf.connect(generate_model, 'design_cov', modelfit_outputspec,
                       'design_cov')
    # Parameter estimates for each column of the design matrix
    frstlvl_wf.connect(estimate_model, 'param_estimates', modelfit_outputspec,
                       'parameter_estimates')
    # Degrees of freedom
    frstlvl_wf.connect(estimate_model, 'dof_file', modelfit_outputspec,
                       'dof_file')
    # Summary of residuals
    frstlvl_wf.connect(estimate_model, 'sigmasquareds', modelfit_outputspec,
                       'sigmasquareds')

    # Create a datasink node to save output from multiple points in the pipeline
    sinkd = MapNode(DataSink(),
                    iterfield=[
                        'substitutions', 'modelfit.contrasts.@copes',
                        'modelfit.contrasts.@varcopes', 'modelfit.estimates',
                        'modelfit.contrasts.@zstats'
                    ],
                    name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    frstlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')
    frstlvl_wf.connect(modelfit_outputspec, 'parameter_estimates', sinkd,
                       'modelfit.estimates')
    frstlvl_wf.connect(modelfit_outputspec, 'sigmasquareds', sinkd,
                       'modelfit.estimates.@sigsq')
    frstlvl_wf.connect(modelfit_outputspec, 'dof_file', sinkd, 'modelfit.dofs')
    frstlvl_wf.connect(modelfit_outputspec, 'copes', sinkd,
                       'modelfit.contrasts.@copes')
    frstlvl_wf.connect(modelfit_outputspec, 'varcopes', sinkd,
                       'modelfit.contrasts.@varcopes')
    frstlvl_wf.connect(modelfit_outputspec, 'zstats', sinkd,
                       'modelfit.contrasts.@zstats')
    frstlvl_wf.connect(modelfit_outputspec, 'design_image', sinkd,
                       'modelfit.design')
    frstlvl_wf.connect(modelfit_outputspec, 'design_cov', sinkd,
                       'modelfit.design.@cov')
    frstlvl_wf.connect(modelfit_outputspec, 'design_file', sinkd,
                       'modelfit.design.@matrix')
    frstlvl_wf.connect(modelfit_outputspec, 'pfiles', sinkd,
                       'modelfit.contrasts.@pstats')

    return frstlvl_wf
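A short, hedged invocation of the function above; the subject ID and directories are placeholders.

# Hypothetical invocation of firstlevel_wf; all paths are illustrative only.
wf = firstlevel_wf(subject_id='WMAZE_001',
                   sink_directory='/scratch/madlab/wmaze/frstlvl')
wf.base_dir = '/scratch/madlab/wmaze/workdir'  # assumed working directory
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})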
Example no. 23
def mk_workflow_glm_feat():
    """Workflow to compute GLM in FSL

    TODO
    ----
    read events and create contrasts
    """
    input_node = Node(IdentityInterface(fields=[
        'bold',
        'events',
        'TR',
    ]),
                      name='input')

    output_node = Node(IdentityInterface(fields=[
        'zmap',
    ]), name='output')

    model = Node(interface=SpecifyModel(), name='design_matrix')
    model.inputs.bids_condition_column = 'trial_name'
    model.inputs.input_units = 'secs'
    model.inputs.high_pass_filter_cutoff = 128.
    model.inputs.parameter_source = 'FSL'

    design = Node(interface=Level1Design(), name='design')
    design.inputs.bases = {'dgamma': {'derivs': True}}
    design.inputs.model_serial_correlations = True
    design.inputs.contrasts = [  # add temporal derivative with XXXTD
        (
            'gestures',
            'T',
            ['D', 'F', 'V', 'Y'],
            [1, 1, 1, 1],
        ),
    ]
    modelgen = Node(interface=FEATModel(), name='glm')

    estimate = Node(interface=FILMGLS(), name="estimate")
    estimate.inputs.smooth_autocorr = True
    estimate.inputs.mask_size = 5
    estimate.inputs.threshold = 1000

    w = Workflow('glm_feat')
    w.connect([
        (input_node, model, [
            ('bold', 'functional_runs'),
            ('events', 'bids_event_file'),
            ('TR', 'time_repetition'),
        ]),
        (input_node, design, [('TR', 'interscan_interval')]),
        (model, design, [('session_info', 'session_info')]),
        (design, modelgen, [
            ('fsf_files', 'fsf_file'),
            ('ev_files', 'ev_files'),
        ]),
        (modelgen, estimate, [
            ('design_file', 'design_file'),
            ('con_file', 'tcon_file'),
        ]),
        (input_node, estimate, [('bold', 'in_file')]),
        (estimate, output_node, [('zstats', 'zmap')]),
    ])

    return w
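A hedged driver for the workflow above; the input values are placeholders, and in practice bold/events/TR would come from a BIDS layout.

# Hypothetical driver for mk_workflow_glm_feat; all values are placeholders.
wf = mk_workflow_glm_feat()
wf.inputs.input.bold = '/data/sub-01_task-gestures_bold.nii.gz'
wf.inputs.input.events = '/data/sub-01_task-gestures_events.tsv'
wf.inputs.input.TR = 2.0
wf.run()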
Example no. 24
def create_subject_ffx_wf(
        sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols,
        lp_vols, remove_hemi, film_thresh, film_model_autocorr, use_derivs, tr,
        tcon_subtractive, cluster_threshold, cluster_thresh_frac, cluster_p,
        dilate_clusters_voxel, cond_ids, dsdir, work_basedir):
    # todo: new mapnode inputs: cluster_threshold, cluster_p
    """
    Make a workflow including preprocessing, first level, and second level GLM analysis for a given subject.
    This pipeline includes:
    - skull stripping
    - spatial smoothing
    - removing the irrelevant hemisphere
    - temporal band pass filter
    - 1st level GLM
    - averaging f-contrasts from 1st level GLM
    - clustering run-wise f-tests, dilating clusters, and returning binary roi mask
    """

    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces.fsl import BET, SUSAN, ImageMaths
    from nipype.interfaces.fsl.model import SmoothEstimate, Cluster
    from nipype.interfaces.fsl.maths import TemporalFilter, MathsCommand
    from nipype.interfaces.utility import Function
    from nipype.pipeline.engine import Workflow, Node, MapNode
    from nipype.workflows.fmri.fsl import create_modelfit_workflow
    from nipype.interfaces.fsl.maths import MultiImageMaths
    from nipype.interfaces.utility import IdentityInterface
    import sys
    from os.path import join as pjoin
    import os
    sys.path.insert(
        0, "/data/project/somato/raw/code/roi_glm")  # sys.path entries must be directories, not .py files
    # TODO: don't hardcode this
    import custom_node_functions

    # set up sub-workflow
    sub_wf = Workflow(name='subject_%s_wf' % sub_id)
    # set up sub-working-directory
    subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs',
                     'subject_%s_ffx_workdir' % sub_id)
    if not os.path.exists(subwf_wd):
        os.makedirs(subwf_wd)
    sub_wf.base_dir = subwf_wd

    # Grab bold files for all four runs of one subject.
    # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2]
    grab_boldfiles = Node(Function(
        function=custom_node_functions.grab_boldfiles_subject,
        input_names=['sub_id', 'cond_ids', 'ds_dir'],
        output_names=['boldfiles']),
                          name='grab_boldfiles')
    grab_boldfiles.inputs.sub_id = sub_id
    grab_boldfiles.inputs.cond_ids = cond_ids
    grab_boldfiles.inputs.ds_dir = dsdir

    getonsets = Node(Function(
        function=custom_node_functions.grab_blocked_design_onsets_subject,
        input_names=['sub_id', 'prepped_ds_dir'],
        output_names=['blocked_design_onsets_dicts']),
                     name='getonsets')
    getonsets.inputs.sub_id = sub_id
    getonsets.inputs.prepped_ds_dir = dsdir

    # pass bold files through preprocessing pipeline
    bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),
                  iterfield=['in_file'],
                  name='bet')

    pick_mask = Node(Function(function=custom_node_functions.pick_first_mask,
                              input_names=['mask_files'],
                              output_names=['first_mask']),
                     name='pick_mask')

    # SUSAN smoothing node
    susan = MapNode(SUSAN(fwhm=spatial_fwhm,
                          brightness_threshold=susan_brightthresh),
                    iterfield=['in_file'],
                    name='susan')

    # bandpass filter node
    bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,
                                 lowpass_sigma=lp_vols / 2.3548),
                  iterfield=['in_file'],
                  name='bpf')

    # cut away hemisphere node
    if remove_hemi == 'r':
        roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'
    elif remove_hemi == 'l':
        roi_args = '-roi 0 96 0 -1 0 -1 0 -1'
    else:
        raise ValueError('did not recognize value of remove_hemi: %s' %
                         remove_hemi)

    cut_hemi_func = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_func')
    cut_hemi_func.inputs.args = roi_args

    cut_hemi_mask = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_mask')
    cut_hemi_mask.inputs.args = roi_args

    # Make Design and Contrasts for that subject
    # subject_info ist a list of two "Bunches", each for one run, containing conditions, onsets, durations
    designgen = Node(Function(
        input_names=['subtractive_contrast', 'blocked_design_onsets_dicts'],
        output_names=['subject_info', 'contrasts'],
        function=custom_node_functions.make_bunch_and_contrasts),
                     name='designgen')
    designgen.inputs.subtractive_contrast = tcon_subtractive

    # create 'session_info' for modelfit
    modelspec = MapNode(SpecifyModel(input_units='secs'),
                        name='modelspec',
                        iterfield=['functional_runs', 'subject_info'])
    modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr
    modelspec.inputs.time_repetition = tr

    flatten_session_infos = Node(Function(
        input_names=['nested_list'],
        output_names=['flat_list'],
        function=custom_node_functions.flatten_nested_list),
                                 name='flatten_session_infos')

    # First-level workflow
    modelfit = create_modelfit_workflow(f_contrasts=True)
    modelfit.inputs.inputspec.interscan_interval = tr
    modelfit.inputs.inputspec.film_threshold = film_thresh
    modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}

    # node that reshapes list of copes returned from modelfit
    cope_sorter = Node(Function(input_names=['copes', 'varcopes', 'contrasts'],
                                output_names=['copes', 'varcopes', 'n_runs'],
                                function=custom_node_functions.sort_copes),
                       name='cope_sorter')

    # average zfstats from both runs
    split_zfstats = Node(Function(
        function=custom_node_functions.split_zfstats_runs,
        input_names=['zfstats_list'],
        output_names=['zfstat_run1', 'zfstat_run2']),
                         name='split_zfstats')
    average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'),
                           name='mean_images')

    # estimate smoothness of 1st lvl zf-files
    smoothest = MapNode(SmoothEstimate(),
                        name='smoothest',
                        iterfield=['mask_file', 'zstat_file'])

    cluster = MapNode(Cluster(),
                      name='cluster',
                      iterfield=['in_file', 'volume', 'dlh'])
    cluster.inputs.threshold = cluster_threshold
    cluster.inputs.pthreshold = cluster_p
    cluster.inputs.fractional = cluster_thresh_frac
    cluster.inputs.no_table = True
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_size_file = True

    # dilate clusters
    dilate = MapNode(MathsCommand(args='-kernel sphere %i -dilD' %
                                  dilate_clusters_voxel),
                     iterfield=['in_file'],
                     name='dilate')

    # binarize the result to a mask
    binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),
                           iterfield=['in_file'],
                           name='binarize_roi')

    # connect preprocessing
    sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')
    sub_wf.connect(bet, 'out_file', susan, 'in_file')
    sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')
    sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')
    sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')
    # connect to 1st level model
    sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')
    sub_wf.connect(getonsets, 'blocked_design_onsets_dicts', designgen,
                   'blocked_design_onsets_dicts')
    sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')
    sub_wf.connect(modelspec, 'session_info', flatten_session_infos,
                   'nested_list')
    sub_wf.connect(flatten_session_infos, 'flat_list', modelfit,
                   'inputspec.session_info')
    sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')
    sub_wf.connect(cut_hemi_func, 'out_file', modelfit,
                   'inputspec.functional_data')
    # connect to cluster thresholding
    sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', smoothest,
                   'zstat_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', cluster,
                   'in_file')
    sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')
    sub_wf.connect(smoothest, 'volume', cluster, 'volume')
    sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')
    sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')
    # connect to averaging f-contrasts
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   split_zfstats, 'zfstats_list')
    sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')
    sub_wf.connect(split_zfstats, 'zfstat_run2', average_zfstats,
                   'operand_files')
    # redirect to outputspec
    # TODO: redirect outputspec to datasink in meta-wf
    outputspec = Node(IdentityInterface(fields=[
        'threshold_file', 'index_file', 'pval_file', 'localmax_txt_file',
        'roi'
    ]),
                      name='outputspec')
    sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')
    sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    sub_wf.connect(cluster, 'localmax_txt_file', outputspec,
                   'localmax_txt_file')
    sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')

    # run subject-lvl workflow
    # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')
    # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
    # sub_wf.run(plugin='CondorDAGMan')
    # sub_wf.run()

    return sub_wf
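A hedged invocation sketch for the factory above; every argument value is a placeholder chosen only to illustrate the call shape.

# Hypothetical call to create_subject_ffx_wf; all values are illustrative.
wf = create_subject_ffx_wf(
    sub_id='01', bet_fracthr=0.4, spatial_fwhm=3., susan_brightthresh=1000.,
    hp_vols=30., lp_vols=2., remove_hemi='r', film_thresh=0.001,
    film_model_autocorr=True, use_derivs=True, tr=2.0,
    tcon_subtractive=False, cluster_threshold=2.3, cluster_thresh_frac=True,
    cluster_p=0.05, dilate_clusters_voxel=2,
    cond_ids=['D1_D5', 'D5_D1', 'blocked1', 'blocked2'],
    dsdir='/data/project/somato/prepped',
    work_basedir='/data/project/somato/work')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})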
Example no. 25
def firstlevel_wf(subject_id, sink_directory, name='wmaze_frstlvl_wf'):
    frstlvl_wf = Workflow(name=name)  #honor the caller-supplied name

    info = dict(
        task_mri_files=[['subject_id',
                         'wmaze']],  #dictionary used in datasource
        motion_noise_files=[['subject_id']])

    #function node to call subjectinfo function with name, onset, duration, and amplitude info
    subject_info = Node(Function(input_names=['subject_id'],
                                 output_names=['output'],
                                 function=subjectinfo),
                        name='subject_info')
    subject_info.inputs.ignore_exception = False
    subject_info.inputs.subject_id = subject_id

    #function node to define contrasts
    getcontrasts = Node(Function(input_names=['subject_id', 'info'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    getcontrasts.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getcontrasts, 'info')

    #function node to substitute names of folders and files created during pipeline
    getsubs = Node(
        Function(
            input_names=['subject_id', 'cons', 'info'],
            output_names=['subs'],
            # Calls the function 'get_subs'
            function=get_subs),
        name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    frstlvl_wf.connect(subject_info, 'output', getsubs, 'info')
    frstlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    #datasource node to get task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=list(info.keys())),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/preproc/')
    datasource.inputs.field_template = dict(
        task_mri_files='%s/func/smoothed_fullspectrum/_maskfunc2*/*%s*.nii.gz',  #functional files
        motion_noise_files='%s/noise/filter_regressor??.txt')  #filter regressor noise files
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    #function node to remove last three volumes from functional data
    fslroi_epi = MapNode(
        ExtractROI(t_min=0, t_size=197),  #start from first volume and end on -3
        iterfield=['in_file'],
        name='fslroi_epi')
    fslroi_epi.inputs.output_type = 'NIFTI_GZ'
    fslroi_epi.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(datasource, 'task_mri_files', fslroi_epi, 'in_file')

    #function node to modify the motion and noise files to be single regressors
    motionnoise = Node(Function(input_names=['subjinfo', 'files'],
                                output_names=['subjinfo'],
                                function=motion_noise),
                       name='motionnoise')
    motionnoise.inputs.ignore_exception = False
    frstlvl_wf.connect(subject_info, 'output', motionnoise, 'subjinfo')
    frstlvl_wf.connect(datasource, 'motion_noise_files', motionnoise, 'files')

    #node to create model specifications compatible with spm/fsl designers (requires subjectinfo to be received in the form of a Bunch)
    specify_model = Node(SpecifyModel(), name='specify_model')
    specify_model.inputs.high_pass_filter_cutoff = -1.0  #high-pass filter cutoff in seconds (-1.0 leaves data unfiltered)
    specify_model.inputs.ignore_exception = False
    specify_model.inputs.input_units = 'secs'  #input units in either 'secs' or 'scans'
    specify_model.inputs.time_repetition = 2.0  #TR
    frstlvl_wf.connect(
        fslroi_epi, 'roi_file', specify_model,
        'functional_runs')  #edited data files for model -- list of 4D files
    #list of event description files in 3 column format corresponding to onsets, durations, and amplitudes
    frstlvl_wf.connect(motionnoise, 'subjinfo', specify_model, 'subject_info')

    #node for basic interface class generating identity mappings
    modelfit_inputspec = Node(IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ],
                                                mandatory_inputs=True),
                              name='modelfit_inputspec')
    modelfit_inputspec.inputs.bases = {'dgamma': {'derivs': False}}
    modelfit_inputspec.inputs.film_threshold = 0.0
    modelfit_inputspec.inputs.interscan_interval = 2.0
    modelfit_inputspec.inputs.model_serial_correlations = True
    frstlvl_wf.connect(fslroi_epi, 'roi_file', modelfit_inputspec,
                       'functional_data')
    frstlvl_wf.connect(getcontrasts, 'contrasts', modelfit_inputspec,
                       'contrasts')
    frstlvl_wf.connect(specify_model, 'session_info', modelfit_inputspec,
                       'session_info')

    #node for first level SPM design matrix to demonstrate contrasts and motion/noise regressors
    level1_design = MapNode(Level1Design(),
                            iterfield=['contrasts', 'session_info'],
                            name='level1_design')
    level1_design.inputs.ignore_exception = False
    frstlvl_wf.connect(modelfit_inputspec, 'interscan_interval', level1_design,
                       'interscan_interval')
    frstlvl_wf.connect(modelfit_inputspec, 'session_info', level1_design,
                       'session_info')
    frstlvl_wf.connect(modelfit_inputspec, 'contrasts', level1_design,
                       'contrasts')
    frstlvl_wf.connect(modelfit_inputspec, 'bases', level1_design, 'bases')
    frstlvl_wf.connect(modelfit_inputspec, 'model_serial_correlations',
                       level1_design, 'model_serial_correlations')

    #MapNode to generate a design.mat file for each run
    generate_model = MapNode(FEATModel(),
                             iterfield=['fsf_file', 'ev_files'],
                             name='generate_model')
    generate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    generate_model.inputs.ignore_exception = False
    generate_model.inputs.output_type = 'NIFTI_GZ'
    generate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(level1_design, 'fsf_files', generate_model, 'fsf_file')
    frstlvl_wf.connect(level1_design, 'ev_files', generate_model, 'ev_files')

    #MapNode to estimate the model using FILMGLS -- fits the design matrix to the voxel timeseries
    estimate_model = MapNode(FILMGLS(),
                             iterfield=['design_file', 'in_file', 'tcon_file'],
                             name='estimate_model')
    estimate_model.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    estimate_model.inputs.ignore_exception = False
    estimate_model.inputs.mask_size = 5  #Susan-smooth mask size
    estimate_model.inputs.output_type = 'NIFTI_GZ'
    estimate_model.inputs.results_dir = 'results'
    estimate_model.inputs.smooth_autocorr = True  #smooth auto-correlation estimates
    estimate_model.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(modelfit_inputspec, 'film_threshold', estimate_model,
                       'threshold')
    frstlvl_wf.connect(modelfit_inputspec, 'functional_data', estimate_model,
                       'in_file')
    frstlvl_wf.connect(
        generate_model, 'design_file', estimate_model,
        'design_file')  #mat file containing ascii matrix for design
    frstlvl_wf.connect(generate_model, 'con_file', estimate_model,
                       'tcon_file')  #contrast file containing contrast vectors

    #merge node to merge the contrasts - necessary for fsl 5.0.7 and greater
    merge_contrasts = MapNode(Merge(2),
                              iterfield=['in1'],
                              name='merge_contrasts')
    frstlvl_wf.connect(estimate_model, 'zstats', merge_contrasts, 'in1')

    #MapNode to transform the z2pval
    z2pval = MapNode(ImageMaths(), iterfield=['in_file'], name='z2pval')
    z2pval.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    z2pval.inputs.ignore_exception = False
    z2pval.inputs.op_string = '-ztop'  #defines the operation used
    z2pval.inputs.output_type = 'NIFTI_GZ'
    z2pval.inputs.suffix = '_pval'
    z2pval.inputs.terminal_output = 'stream'
    frstlvl_wf.connect(merge_contrasts, ('out', pop_lambda), z2pval, 'in_file')

    #outputspec node using IdentityInterface() to receive information from estimate_model, merge_contrasts, z2pval, generate_model, and estimate_model
    modelfit_outputspec = Node(IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'parameter_estimates',
        'zstats', 'design_image', 'design_file', 'design_cov', 'sigmasquareds'
    ],
                                                 mandatory_inputs=True),
                               name='modelfit_outputspec')
    frstlvl_wf.connect(estimate_model, 'copes', modelfit_outputspec,
                       'copes')  #lvl1 cope files
    frstlvl_wf.connect(estimate_model, 'varcopes', modelfit_outputspec,
                       'varcopes')  #lvl1 varcope files
    frstlvl_wf.connect(merge_contrasts, 'out', modelfit_outputspec,
                       'zstats')  #zstats across runs
    frstlvl_wf.connect(z2pval, 'out_file', modelfit_outputspec, 'pfiles')
    frstlvl_wf.connect(
        generate_model, 'design_image', modelfit_outputspec,
        'design_image')  #graphical representation of design matrix
    frstlvl_wf.connect(
        generate_model, 'design_file', modelfit_outputspec,
        'design_file')  #mat file containing ascii matrix for design
    frstlvl_wf.connect(
        generate_model, 'design_cov', modelfit_outputspec,
        'design_cov')  #graphical representation of design covariance
    frstlvl_wf.connect(estimate_model, 'param_estimates', modelfit_outputspec,
                       'parameter_estimates'
                       )  #parameter estimates for columns of design matrix
    frstlvl_wf.connect(estimate_model, 'dof_file', modelfit_outputspec,
                       'dof_file')  #degrees of freedom
    frstlvl_wf.connect(estimate_model, 'sigmasquareds', modelfit_outputspec,
                       'sigmasquareds')  #summary of residuals

    #datasink node to save output from multiple points in the pipeline
    sinkd = MapNode(DataSink(),
                    iterfield=[
                        'substitutions', 'modelfit.contrasts.@copes',
                        'modelfit.contrasts.@varcopes', 'modelfit.estimates',
                        'modelfit.contrasts.@zstats'
                    ],
                    name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    frstlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')
    frstlvl_wf.connect(modelfit_outputspec, 'parameter_estimates', sinkd,
                       'modelfit.estimates')
    frstlvl_wf.connect(modelfit_outputspec, 'sigmasquareds', sinkd,
                       'modelfit.estimates.@sigsq')
    frstlvl_wf.connect(modelfit_outputspec, 'dof_file', sinkd, 'modelfit.dofs')
    frstlvl_wf.connect(modelfit_outputspec, 'copes', sinkd,
                       'modelfit.contrasts.@copes')
    frstlvl_wf.connect(modelfit_outputspec, 'varcopes', sinkd,
                       'modelfit.contrasts.@varcopes')
    frstlvl_wf.connect(modelfit_outputspec, 'zstats', sinkd,
                       'modelfit.contrasts.@zstats')
    frstlvl_wf.connect(modelfit_outputspec, 'design_image', sinkd,
                       'modelfit.design')
    frstlvl_wf.connect(modelfit_outputspec, 'design_cov', sinkd,
                       'modelfit.design.@cov')
    frstlvl_wf.connect(modelfit_outputspec, 'design_file', sinkd,
                       'modelfit.design.@matrix')
    frstlvl_wf.connect(modelfit_outputspec, 'pfiles', sinkd,
                       'modelfit.contrasts.@pstats')

    return frstlvl_wf
Example no. 26
    # The opening of this call was cut off in the excerpt; reconstructed here
    # as an fsl ApplyMask node -- an assumption based on the argument names.
    masker = Node(fsl.ApplyMask(
            in_file=cf_files['bold'],
            out_file=cf_files['masked'],
            mask_file=cf_files['standard_mask']
            ), name='masker')


    bim = Node(afni.BlurInMask(
        out_file=cf_files['smoothed'],
        mask=cf_files['standard_mask'],
        fwhm=5.0
    ), name='bim')

    l1 = Node(SpecifyModel(
        event_files=EVfiles,
        realignment_parameters=confoundsfile,
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100
    ), name='l1')

    l1model = Node(Level1Design(
        interscan_interval=2,
        bases={'dgamma': {'derivs': True}},
        model_serial_correlations=True,
        orthogonalization=orthogonality,
        contrasts=contrasts
    ), name='l1design')

    l1featmodel = Node(FEATModel(), name='l1model')

    l1estimate = Node(FEAT(), name='l1estimate')
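The excerpt ends before any of these nodes are connected; a hedged wiring sketch follows (the workflow name is an assumption, and FEAT normally consumes the fsf file directly).

    # Hedged wiring sketch: chain the nodes defined above. The workflow name
    # 'l1_wf' is an assumption, not part of the original excerpt.
    l1_wf = Workflow(name='l1_wf')
    l1_wf.connect([
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
    ])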