Code example #1
# Auto-generated nipype trait test for the FSL BinaryMaths output spec
# (nose-style generator test from an older nipype release).
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import BinaryMaths


def test_BinaryMaths_outputs():
    output_map = dict(out_file=dict())
    outputs = BinaryMaths.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Code example #2
# Auto-generated nipype trait test for the FSL BinaryMaths input spec
# (nose-style generator test from an older nipype release).
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import BinaryMaths


def test_BinaryMaths_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        internal_datatype=dict(
            argstr='-dt %s',
            position=1,
        ),
        nan2zeros=dict(
            argstr='-nan',
            position=3,
        ),
        operand_file=dict(
            argstr='%s',
            mandatory=True,
            position=5,
            xor=['operand_value'],
        ),
        operand_value=dict(
            argstr='%.8f',
            mandatory=True,
            position=5,
            xor=['operand_file'],
        ),
        operation=dict(
            argstr='-%s',
            mandatory=True,
            position=4,
        ),
        out_file=dict(
            argstr='%s',
            genfile=True,
            hash_files=False,
            position=-2,
        ),
        output_datatype=dict(
            argstr='-odt %s',
            position=-1,
        ),
        output_type=dict(),
        terminal_output=dict(nohash=True, ),
    )
    inputs = BinaryMaths.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
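
These yield-style tests were written for the nose runner; pytest 4.0 and later
no longer collect yielding test functions. A minimal sketch of spot-checking a
single trait by hand, assuming only that nipype is installed:

# Verify two of the trait settings asserted by the generator test above.
from nipype.interfaces.fsl.maths import BinaryMaths

inputs = BinaryMaths.input_spec()
assert inputs.traits()['operation'].mandatory is True
assert inputs.traits()['operation'].argstr == '-%s'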
Code example #3
# Auto-generated nipype trait test for the FSL BinaryMaths input spec
# (variant from an older nipype release, in which terminal_output was
# still a mandatory trait).
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import BinaryMaths


def test_BinaryMaths_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        internal_datatype=dict(
            argstr='-dt %s',
            position=1,
        ),
        nan2zeros=dict(
            argstr='-nan',
            position=3,
        ),
        operand_file=dict(
            argstr='%s',
            mandatory=True,
            position=5,
            xor=['operand_value'],
        ),
        operand_value=dict(
            argstr='%.8f',
            mandatory=True,
            position=5,
            xor=['operand_file'],
        ),
        operation=dict(
            argstr='-%s',
            mandatory=True,
            position=4,
        ),
        out_file=dict(
            argstr='%s',
            genfile=True,
            hash_files=False,
            position=-2,
        ),
        output_datatype=dict(
            argstr='-odt %s',
            position=-1,
        ),
        output_type=dict(),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
    )
    inputs = BinaryMaths.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Code example #4
# Second-level (group) multiple-regression workflow for OpenfMRI data, built
# with nipype and FSL. The helpers get_taskname, l1_contrasts_num,
# get_sub_vars and run_palm, plus the work_dir variable, are assumed to be
# defined elsewhere in the surrounding script.
import os

from nipype.pipeline.engine import Workflow, Node
import nipype.interfaces.utility as util
from nipype.interfaces.utility import Function
from nipype.interfaces.io import DataGrabber, DataSink
from nipype.interfaces import fsl
from nipype.interfaces.fsl.model import (MultipleRegressDesign, FLAMEO,
                                         SmoothEstimate, Cluster)
from nipype.interfaces.fsl.utils import Merge, ImageMaths
from nipype.interfaces.fsl.maths import BinaryMaths


def group_multregress_openfmri(dataset_dir,
                               model_id=None,
                               task_id=None,
                               l1output_dir=None,
                               out_dir=None,
                               no_reversal=False,
                               plugin=None,
                               plugin_args=None,
                               flamemodel='flame1',
                               nonparametric=False,
                               use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    meta_workflow.base_dir = work_dir  # assumed defined at module scope
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(
            dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' %
                          (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(
                fields=['model_id', 'task_id', 'dataset_dir', 'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir

            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']),
                      name='grabber')
            dg.inputs.template = os.path.join(
                l1output_dir,
                'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', 'spm/', '',
                    'cope_id', ''
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', 'spm/', 'var',
                    'cope_id', '.gz'
                ]]
            else:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', '', '', 'cope_id',
                    '.gz'
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', '', 'var',
                    'cope_id', '.gz'
                ]]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]

            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')

            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'),
                                     name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

            mask_file = fsl.Info.standard_image(
                'MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame,
                           'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')

            if nonparametric:
                palm = Node(Function(input_names=[
                    'cope_file', 'design_file', 'contrast_file', 'group_file',
                    'mask_file', 'cluster_threshold'
                ],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {
                    'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G',
                    'overwrite': True
                }
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')

            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file

            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest, 'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True

            wk.connect(flame, 'zstats', cluster, 'in_file')

            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval, 'in_file')

            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(
                out_dir, 'task%03d' % task, contrast[0][0])
            sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                           ('_maths_', '_reversed_')]

            wk.connect(flame, 'zstats', sinker, 'stats')
            wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
            wk.connect(cluster, 'index_file', sinker, 'stats.@index')
            wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
            if nonparametric:
                wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

            if not no_reversal:
                zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
                zstats_reverse.inputs.operation = 'mul'
                zstats_reverse.inputs.operand_value = -1
                wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

                cluster2 = cluster.clone(name='cluster2')
                wk.connect(smoothest, 'dlh', cluster2, 'dlh')
                wk.connect(smoothest, 'volume', cluster2, 'volume')
                wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

                ztopval2 = ztopval.clone(name='ztopval2')
                wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

                wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
                wk.connect(cluster2, 'threshold_file', sinker,
                           'stats.@neg_thr')
                wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
                wk.connect(cluster2, 'localmax_txt_file', sinker,
                           'stats.@neg_localmax')
            meta_workflow.add_nodes([wk])
    return meta_workflow
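
A hypothetical invocation of the workflow above (illustrative paths; assumes
work_dir and the OpenfMRI helper functions are defined):

# Build the group workflow for task 1 of model 1, then run it.
wf = group_multregress_openfmri('/data/ds000114', model_id=1, task_id=[1],
                                l1output_dir='/output/l1',
                                out_dir='/output/group')
wf.run(plugin='MultiProc')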
Code example #5
    # Method from an Arcana/NiAnalysis-style Study class. DatasetSpec, the
    # data format objects, MRConvert, FLIRT, BinaryMaths, MultiImageMaths,
    # Smooth, fsl5_req and matlab_cite are assumed to be imported from the
    # surrounding package.
    def backwrap_to_ute_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='backwrap_to_ute',
            inputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('umap_ute', dicom_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('sute_cont_template', nifti_gz_format),
                DatasetSpec('sute_fix_template', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('sute_cont_ute', nifti_gz_format),
                DatasetSpec('sute_fix_ute', nifti_gz_format)
            ],
            desc="Moving umaps back to the UTE space",
            version=1,
            citations=[matlab_cite],
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        umap_conv = pipeline.create_node(MRConvert(), name='umap_conv')
        umap_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('umap_ute', umap_conv, 'in_file')

        zero_template_mask = pipeline.create_node(BinaryMaths(),
                                                  name='zero_template_mask',
                                                  requirements=[fsl5_req],
                                                  wall_time=3)
        pipeline.connect_input('ute1_registered', zero_template_mask,
                               'in_file')
        zero_template_mask.inputs.operation = "mul"
        zero_template_mask.inputs.operand_value = 0
        zero_template_mask.inputs.output_type = 'NIFTI_GZ'

        region_template_mask = pipeline.create_node(
            FLIRT(),
            name='region_template_mask',
            requirements=[fsl5_req],
            wall_time=5)
        region_template_mask.inputs.apply_xfm = True
        region_template_mask.inputs.bgvalue = 1
        region_template_mask.inputs.interp = 'nearestneighbour'
        region_template_mask.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(zero_template_mask, 'out_file', region_template_mask,
                         'in_file')
        pipeline.connect(echo1_conv, 'out_file', region_template_mask,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', region_template_mask,
                               'in_matrix_file')

        fill_in_umap = pipeline.create_node(MultiImageMaths(),
                                            name='fill_in_umap',
                                            requirements=[fsl5_req],
                                            wall_time=3)
        fill_in_umap.inputs.op_string = "-mul %s "
        fill_in_umap.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(region_template_mask, 'out_file', fill_in_umap,
                         'in_file')
        pipeline.connect(umap_conv, 'out_file', fill_in_umap, 'operand_files')

        sute_fix_ute_space = pipeline.create_node(FLIRT(),
                                                  name='sute_fix_ute_space',
                                                  requirements=[fsl5_req],
                                                  wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_fix_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_fix_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_fix_template', sute_fix_ute_space,
                               'in_file')
        sute_fix_ute_space.inputs.apply_xfm = True
        sute_fix_ute_space.inputs.bgvalue = 0
        sute_fix_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_cont_ute_space = pipeline.create_node(FLIRT(),
                                                   name='sute_cont_ute_space',
                                                   requirements=[fsl5_req],
                                                   wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_cont_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_cont_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_cont_template', sute_cont_ute_space,
                               'in_file')
        sute_cont_ute_space.inputs.apply_xfm = True
        sute_cont_ute_space.inputs.bgvalue = 0
        sute_cont_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_fix_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_fix_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_fix_ute_space, 'out_file',
                         sute_fix_ute_background, 'in_file')
        sute_fix_ute_background.inputs.op_string = "-add %s "
        sute_fix_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_fix_ute_background,
                         'operand_files')

        sute_cont_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_cont_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_cont_ute_space, 'out_file',
                         sute_cont_ute_background, 'in_file')
        sute_cont_ute_background.inputs.op_string = "-add %s "
        sute_cont_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_cont_ute_background,
                         'operand_files')

        smooth_sute_fix = pipeline.create_node(Smooth(),
                                               name='smooth_sute_fix',
                                               requirements=[fsl5_req],
                                               wall_time=5)
        smooth_sute_fix.inputs.sigma = 2.
        pipeline.connect(sute_fix_ute_background, 'out_file', smooth_sute_fix,
                         'in_file')

        smooth_sute_cont = pipeline.create_node(Smooth(),
                                                name='smooth_sute_cont',
                                                requirements=[fsl5_req],
                                                wall_time=5)
        smooth_sute_cont.inputs.sigma = 2.
        pipeline.connect(sute_cont_ute_background, 'out_file',
                         smooth_sute_cont, 'in_file')

        pipeline.connect_output('sute_fix_ute', smooth_sute_fix,
                                'smoothed_file')
        pipeline.connect_output('sute_cont_ute', smooth_sute_cont,
                                'smoothed_file')
        pipeline.assert_connected()

        return pipeline
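
In an Arcana/NiAnalysis-style framework this method is not called directly:
the study object constructs the pipeline on demand when the derived
'sute_cont_ute' or 'sute_fix_ute' datasets are requested, connecting the
declared inputs and outputs to the project repository.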
Code example #6
# One-sample second-level (group) analysis for OpenfMRI data. The helpers
# contrasts_num and get_len, plus the work_dir variable, are assumed to be
# defined elsewhere in the surrounding script.
import os

from nipype.pipeline.engine import Workflow, Node
import nipype.interfaces.utility as util
from nipype.interfaces.io import DataGrabber, DataSink
from nipype.interfaces import fsl
from nipype.interfaces.fsl.model import (L2Model, FLAMEO, SmoothEstimate,
                                         Cluster)
from nipype.interfaces.fsl.utils import Merge, ImageMaths
from nipype.interfaces.fsl.maths import BinaryMaths


def group_onesample_openfmri(dataset_dir,
                             model_id=None,
                             task_id=None,
                             l1output_dir=None,
                             out_dir=None,
                             no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)  # assumed defined at module scope

    info = Node(
        util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir']),
        name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(
        l1output_dir, 'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [[
        'model_id', 'task_id', '', '', 'cope_id'
    ]]
    dg.inputs.template_args['varcopes'] = [[
        'model_id', 'task_id', 'var', 'var', 'cope_id'
    ]]
    dg.iterables = ('cope_id', num_copes)

    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')

    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')

    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker,
                   'stats.@neg_localmax')

    return wk
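
As with the multiple-regression workflow, a hypothetical call (illustrative
paths; assumes work_dir and the helper functions are defined). Note that
task_id is a single task number here rather than a list:

wf = group_onesample_openfmri('/data/ds000114', model_id=1, task_id=1,
                              l1output_dir='/output/l1',
                              out_dir='/output/group')
wf.run()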
Code example #7
import sys
from nipype.interfaces import afni
from nipype.interfaces.fsl.maths import BinaryMaths

# Despike the fMRI time series of subjects with > 1 mm peak motion, then
# subtract the despiked image from the original to inspect the difference.

# Example input (fmriprep-style name):
# inputname = 'sub-05_ses-03_task-future_bold_space-MNI152NLin2009cAsym_preproc.nii.gz'

inputname = sys.argv[1]
outputname = inputname[:-7] + '_despiked.nii.gz'  # swap the '.nii.gz' suffix

# afni 3dDespike
despike = afni.Despike()
despike.inputs.in_file = inputname
despike.inputs.out_file = outputname
despike.inputs.args = '-cut 1.0 4.0'
print(despike.cmdline)
despike.run()

# subtract despiked image from the original to check the diff
subtract = BinaryMaths()
subtract.inputs.in_file = inputname
subtract.inputs.operand_file = outputname
subtract.inputs.operation = 'sub'
subtract.inputs.out_file = outputname[:-7] + '_diff.nii.gz'
print(subtract.cmdline)
subtract.run()
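
Both AFNI (for 3dDespike) and FSL (for fslmaths) must be on the PATH. A
hypothetical command-line invocation, with the preprocessed BOLD image as the
only argument:

# python despike_diff.py sub-05_ses-03_task-future_bold_space-MNI152NLin2009cAsym_preproc.nii.gz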