Example #1
def test_L2Model_outputs():
    output_map = dict(
        design_grp=dict(),
        design_con=dict(),
        design_mat=dict(),
    )
    outputs = L2Model.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #2
def test_L2Model_outputs():
    output_map = dict(
        design_con=dict(),
        design_grp=dict(),
        design_mat=dict(),
    )
    outputs = L2Model.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #3
def test_L2Model_inputs():
    input_map = dict(
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        num_copes=dict(mandatory=True),
    )
    inputs = L2Model.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #4
def test_L2Model_inputs():
    input_map = dict(
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        num_copes=dict(mandatory=True),
    )
    inputs = L2Model.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
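The spec fields iterated over in the tests above can also be seen by running the interface directly; a minimal sketch (L2Model only writes text design files, so no FSL binaries are called):

from nipype.interfaces.fsl.model import L2Model

# One-sample fixed-effects design for three lower-level copes.
l2 = L2Model(num_copes=3)
result = l2.run()  # writes design.mat, design.con and design.grp to the cwd

# The output fields match those checked in the tests above.
print(result.outputs.design_mat, result.outputs.design_con, result.outputs.design_grp)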
Example #5
import os
from glob import glob

from nipype.pipeline.engine import Workflow, Node, MapNode
from nipype.interfaces.io import DataGrabber, DataSink
from nipype.interfaces.utility import IdentityInterface, Function
from nipype.interfaces.fsl.model import L2Model, FLAMEO
from nipype.interfaces.fsl.utils import Merge

# Helper functions used below (doublelist, num_copes, get_dofvolumes,
# get_contrasts, get_subs) are defined elsewhere in the original script.


def secondlevel_wf(subject_id, sink_directory, name='GLM1_scndlvl_wf'):
    scndlvl_wf = Workflow(name='scndlvl_wf')
    base_dir = os.path.abspath('/home/data/madlab/data/mri/wmaze/')

    contrasts = [
        'all_before_B_corr', 'all_before_B_incorr', 'all_remaining',
        'all_corr_minus_all_incorr', 'all_incorr_minus_all_corr'
    ]

    cnt_file_list = []
    for curr_contrast in contrasts:
        cnt_file_list.append(
            glob(
                os.path.join(
                    base_dir,
                    'frstlvl/model_GLM1/{0}/modelfit/contrasts/_estimate_model*/cope??_{1}.nii.gz'
                    .format(subject_id, curr_contrast))))

    dof_runs = [[], [], [], [], []]
    for i, curr_file_list in enumerate(cnt_file_list):
        if not isinstance(curr_file_list, list):
            curr_file_list = [curr_file_list]
        for curr_file in curr_file_list:
            dof_runs[i].append(
                curr_file.split('/')[-2][-1])  #grabs the estimate_model #

    info = dict(copes=[['subject_id', contrasts]],
                varcopes=[['subject_id', contrasts]],
                mask_file=[['subject_id', 'aparc+aseg_thresh']],
                dof_files=[['subject_id', dof_runs, 'dof']])

    #datasource node to get task_mri and motion-noise files
    datasource = Node(DataGrabber(infields=['subject_id'],
                                  outfields=info.keys()),
                      name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.subject_id = subject_id
    datasource.inputs.base_directory = os.path.abspath(
        '/home/data/madlab/data/mri/wmaze/')
    datasource.inputs.field_template = dict(
        copes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/cope*_%s.nii.gz',
        varcopes=
        'frstlvl/model_GLM1/%s/modelfit/contrasts/_estimate_model*/varcope*_%s.nii.gz',
        mask_file='preproc/%s/ref/_fs_threshold20/%s*_thresh.nii',
        dof_files='frstlvl/model_GLM1/%s/modelfit/dofs/_estimate_model%s/%s')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True

    #inputspec to deal with copes and varcopes doublelist issues
    fixedfx_inputspec = Node(IdentityInterface(
        fields=['copes', 'varcopes', 'dof_files'], mandatory_inputs=True),
                             name='fixedfx_inputspec')
    scndlvl_wf.connect(datasource, ('copes', doublelist), fixedfx_inputspec,
                       'copes')
    scndlvl_wf.connect(datasource, ('varcopes', doublelist), fixedfx_inputspec,
                       'varcopes')
    scndlvl_wf.connect(datasource, ('dof_files', doublelist),
                       fixedfx_inputspec, 'dof_files')

    #merge all of copes into a single matrix across subject runs
    copemerge = MapNode(Merge(), iterfield=['in_files'], name='copemerge')
    copemerge.inputs.dimension = 't'
    copemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    copemerge.inputs.ignore_exception = False
    copemerge.inputs.output_type = 'NIFTI_GZ'
    copemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'copes', copemerge, 'in_files')

    #generate DOF volume for second level
    gendofvolume = Node(Function(input_names=['dof_files', 'cope_files'],
                                 output_names=['dof_volumes'],
                                 function=get_dofvolumes),
                        name='gendofvolume')
    gendofvolume.inputs.ignore_exception = False
    scndlvl_wf.connect(fixedfx_inputspec, 'dof_files', gendofvolume,
                       'dof_files')
    scndlvl_wf.connect(copemerge, 'merged_file', gendofvolume, 'cope_files')

    #merge all of the varcopes into a single matrix across subject runs per voxel
    varcopemerge = MapNode(Merge(),
                           iterfield=['in_files'],
                           name='varcopemerge')
    varcopemerge.inputs.dimension = 't'
    varcopemerge.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    varcopemerge.inputs.ignore_exception = False
    varcopemerge.inputs.output_type = 'NIFTI_GZ'
    varcopemerge.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(fixedfx_inputspec, 'varcopes', varcopemerge, 'in_files')

    #define contrasts from the names of the copes
    getcontrasts = Node(Function(input_names=['data_inputs'],
                                 output_names=['contrasts'],
                                 function=get_contrasts),
                        name='getcontrasts')
    getcontrasts.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', doublelist), getcontrasts,
                       'data_inputs')

    #rename output files to be more descriptive
    getsubs = Node(Function(input_names=['subject_id', 'cons'],
                            output_names=['subs'],
                            function=get_subs),
                   name='getsubs')
    getsubs.inputs.ignore_exception = False
    getsubs.inputs.subject_id = subject_id
    scndlvl_wf.connect(getcontrasts, 'contrasts', getsubs, 'cons')

    #l2model node for fixed effects analysis (aka within subj across runs)
    l2model = MapNode(L2Model(), iterfield=['num_copes'], name='l2model')
    l2model.inputs.ignore_exception = False
    scndlvl_wf.connect(datasource, ('copes', num_copes), l2model, 'num_copes')

    #FLAMEO Node to run the fixed effects analysis
    flameo_fe = MapNode(FLAMEO(),
                        iterfield=[
                            'cope_file', 'var_cope_file', 'dof_var_cope_file',
                            'design_file', 't_con_file', 'cov_split_file'
                        ],
                        name='flameo_fe')
    flameo_fe.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
    flameo_fe.inputs.ignore_exception = False
    flameo_fe.inputs.log_dir = 'stats'
    flameo_fe.inputs.output_type = 'NIFTI_GZ'
    flameo_fe.inputs.run_mode = 'fe'
    flameo_fe.inputs.terminal_output = 'stream'
    scndlvl_wf.connect(varcopemerge, 'merged_file', flameo_fe, 'var_cope_file')
    scndlvl_wf.connect(l2model, 'design_mat', flameo_fe, 'design_file')
    scndlvl_wf.connect(l2model, 'design_con', flameo_fe, 't_con_file')
    scndlvl_wf.connect(l2model, 'design_grp', flameo_fe, 'cov_split_file')
    scndlvl_wf.connect(gendofvolume, 'dof_volumes', flameo_fe,
                       'dof_var_cope_file')
    scndlvl_wf.connect(datasource, 'mask_file', flameo_fe, 'mask_file')
    scndlvl_wf.connect(copemerge, 'merged_file', flameo_fe, 'cope_file')

    #outputspec node
    scndlvl_outputspec = Node(IdentityInterface(
        fields=['res4d', 'copes', 'varcopes', 'zstats', 'tstats'],
        mandatory_inputs=True),
                              name='scndlvl_outputspec')
    scndlvl_wf.connect(flameo_fe, 'res4d', scndlvl_outputspec, 'res4d')
    scndlvl_wf.connect(flameo_fe, 'copes', scndlvl_outputspec, 'copes')
    scndlvl_wf.connect(flameo_fe, 'var_copes', scndlvl_outputspec, 'varcopes')
    scndlvl_wf.connect(flameo_fe, 'zstats', scndlvl_outputspec, 'zstats')
    scndlvl_wf.connect(flameo_fe, 'tstats', scndlvl_outputspec, 'tstats')

    #datasink node
    sinkd = Node(DataSink(), name='sinkd')
    sinkd.inputs.base_directory = sink_directory
    sinkd.inputs.container = subject_id
    scndlvl_wf.connect(scndlvl_outputspec, 'copes', sinkd, 'fixedfx.@copes')
    scndlvl_wf.connect(scndlvl_outputspec, 'varcopes', sinkd,
                       'fixedfx.@varcopes')
    scndlvl_wf.connect(scndlvl_outputspec, 'tstats', sinkd, 'fixedfx.@tstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'zstats', sinkd, 'fixedfx.@zstats')
    scndlvl_wf.connect(scndlvl_outputspec, 'res4d', sinkd, 'fixedfx.@pvals')
    scndlvl_wf.connect(getsubs, 'subs', sinkd, 'substitutions')

    return scndlvl_wf
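Example #5 leans on small helper functions that the excerpt does not show. The sketches below are hypothetical stand-ins for doublelist and num_copes, inferred only from how the workflow uses them (wrapping items for MapNode iterfields and counting copes per contrast); the original definitions may differ.

def doublelist(x):
    # Hypothetical helper: ensure a list of lists so MapNode iterfields
    # iterate once per contrast.
    return [item if isinstance(item, list) else [item] for item in x]


def num_copes(files):
    # Hypothetical helper: number of first-level copes available for each
    # contrast, fed to L2Model.num_copes.
    return [len(item) if isinstance(item, list) else 1 for item in files]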
# Group-level workflow fragment: inputspec, datasource and group_wf are
# created earlier in the original script and are assumed here.
inputspec.inputs.brain_mask = '/home/data/madlab/data/mri/wmaze/wmaze_T1_template/wmaze_grptemplate_mask.nii.gz'  #anatomical group template mask
inputspec.inputs.run_mode = 'flame1'
group_wf.connect(datasource, 'copes', inputspec, 'copes')
group_wf.connect(datasource, 'varcopes', inputspec, 'varcopes')

#node to concatenate varcopes into single image across time
grp_merge_varcopes = Node(fsl.utils.Merge(), name='grp_merge_varcopes')
grp_merge_varcopes.inputs.dimension = 't'  #concatenate across time
grp_merge_varcopes.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_merge_varcopes.inputs.ignore_exception = False
grp_merge_varcopes.inputs.output_type = 'NIFTI_GZ'
grp_merge_varcopes.inputs.terminal_output = 'stream'
group_wf.connect(inputspec, 'varcopes', grp_merge_varcopes, 'in_files')

#node for group-specific level 2 model
grp_l2model = Node(L2Model(), name='grp_l2model')
grp_l2model.inputs.ignore_exception = False
group_wf.connect(inputspec, ('copes', get_len), grp_l2model, 'num_copes')

#node to concatenate copes into single image across time
grp_merge_copes = Node(fsl.utils.Merge(), name='grp_merge_copes')
grp_merge_copes.inputs.dimension = 't'  #concatenate across time
grp_merge_copes.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
grp_merge_copes.inputs.ignore_exception = False
grp_merge_copes.inputs.output_type = 'NIFTI_GZ'
grp_merge_copes.inputs.terminal_output = 'stream'
group_wf.connect(inputspec, 'copes', grp_merge_copes, 'in_files')

#node for Randomise
grp_randomise = Node(fsl.Randomise(), name='grp_randomise')
grp_randomise.inputs.environ = {'FSLOUTPUTTYPE': 'NIFTI_GZ'}
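The group-level fragment above passes the cope list through a get_len helper to set grp_l2model's num_copes. A hypothetical sketch of such a helper, assuming the copes arrive as a flat list of subject-level file paths:

def get_len(lst):
    # Hypothetical helper: number of subject-level cope images entering the
    # group model, used as L2Model.num_copes.
    return len(lst)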