def make_neuromet1_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        # unidensource: for every subject, iterate over both the UNI and DEN images
        unidensource = Node(interface=IdentityInterface(fields=['uniden']),
                            name="unidensource")
        unidensource.iterables = ('uniden', ['UNI', 'DEN'])

        info = dict(t1=[['subject_id', 'subject_id', 'uniden']])

        datasource = Node(interface=DataGrabber(
            infields=['subject_id', 'uniden'], outfields=['t1']),
                          name='datasource')
        datasource.inputs.base_directory = self.w_dir
        datasource.inputs.template = '{prefix}%s/{prefix}%s.%s_mp2rage_orig.nii.gz'.format(
            prefix=self.subject_prefix)
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False
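        # For reference (with an illustrative subject_prefix of 'NeuroMet'):
        # subject_id='001' and uniden='UNI' expand the template above to
        #   NeuroMet001/NeuroMet001.UNI_mp2rage_orig.nii.gz
        # relative to base_directory. str.format fills {prefix} once at build
        # time; DataGrabber substitutes the %s placeholders per template_args
        # entry at run time.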

        sink = self.make_sink()

        segment = self.make_segment()

        mask = self.make_mask()

        neuromet = Workflow(name=self.subject_prefix, base_dir=self.temp_dir)
        neuromet.connect(infosource, 'subject_id', datasource, 'subject_id')
        neuromet.connect(unidensource, 'uniden', datasource, 'uniden')
        neuromet.connect(datasource, 't1', segment, 'ro.in_file')

        # neuromet.connect()
        neuromet.connect(segment, 'spm_tissues_split.gm', mask,
                         'sum_tissues1.in_file')
        neuromet.connect(segment, 'spm_tissues_split.wm', mask,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.csf', mask,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.gm', sink, '@gm')
        neuromet.connect(segment, 'spm_tissues_split.wm', sink, '@wm')
        neuromet.connect(segment, 'spm_tissues_split.csf', sink, '@csf')
        neuromet.connect(segment, 'seg.bias_corrected_images', sink,
                         '@biascorr')

        # neuromet.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink, '@img')
        neuromet.connect(mask, 'gen_mask.out_file', sink, '@mask')
        neuromet.connect(segment, 'ro.out_file', sink, '@ro')

        return neuromet
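# A minimal driver sketch (class and argument names are assumptions, not from
# the original source):
#
#   pipeline = NeuroMet(subject_list=['001', '002'], w_dir='/data',
#                       temp_dir='/tmp/neuromet')
#   wf = pipeline.make_neuromet1_workflow()
#   wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})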
Example 2
def test_input_wrapper():

    wf, node1, node2, node3 = make_simple_workflow()

    s_list = ['s1', 's2']
    s_node = gu.make_subject_source(s_list)

    g_node = Node(DataGrabber(infields=["foo"], outfields=["bar"]),
                  name="g_node")

    iw = gu.InputWrapper(wf, s_node, g_node, node1)

    yield nt.assert_equal, iw.wf, wf
    yield nt.assert_equal, iw.subj_node, s_node
    yield nt.assert_equal, iw.grab_node, g_node
    yield nt.assert_equal, iw.in_node, node1

    iw.connect_inputs()

    g = wf._graph
    yield nt.assert_true, s_node in g.nodes()
    yield nt.assert_true, g_node in g.nodes()
    yield nt.assert_true, (s_node, g_node) in g.edges()
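# For context, gu.make_subject_source presumably builds an iterable identity
# node along these lines (a sketch, not the actual implementation):
#
#   def make_subject_source(subject_list):
#       node = Node(IdentityInterface(fields=['subject_id']),
#                   name='subjectsource')
#       node.iterables = ('subject_id', subject_list)
#       return node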
def group_multregress_openfmri(dataset_dir,
                               model_id=None,
                               task_id=None,
                               l1output_dir=None,
                               out_dir=None,
                               no_reversal=False,
                               plugin=None,
                               plugin_args=None,
                               flamemodel='flame1',
                               nonparametric=False,
                               use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    # work_dir is expected to be defined at module level in the original script
    meta_workflow.base_dir = work_dir
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(
            dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' %
                          (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(
                fields=['model_id', 'task_id', 'dataset_dir', 'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir

            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']),
                      name='grabber')
            dg.inputs.template = os.path.join(
                l1output_dir,
                'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', 'spm/', '',
                    'cope_id', ''
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', 'spm/', 'var',
                    'cope_id', '.gz'
                ]]
            else:
                dg.inputs.template_args['copes'] = [[
                    'model_id', 'task_id', subj_list, '', '', '', 'cope_id',
                    '.gz'
                ]]
                dg.inputs.template_args['varcopes'] = [[
                    'model_id', 'task_id', subj_list, 'var', '', 'var',
                    'cope_id', '.gz'
                ]]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False
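            # Setting iterables on the grabber expands it into one copy per
            # cope id at execution time; every node downstream of dg is
            # duplicated for each value in cope_ids.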

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]

            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')

            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'),
                                     name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

            mask_file = fsl.Info.standard_image(
                'MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame,
                           'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')

            if nonparametric:
                palm = Node(Function(input_names=[
                    'cope_file', 'design_file', 'contrast_file', 'group_file',
                    'mask_file', 'cluster_threshold'
                ],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {
                    'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G',
                    'overwrite': True
                }
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')

            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file

            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest, 'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True

            wk.connect(flame, 'zstats', cluster, 'in_file')

            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval, 'in_file')

            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(
                out_dir, 'task%03d' % task, contrast[0][0])
            sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                           ('_maths_', '_reversed_')]
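            # DataSink substitutions are plain string replacements applied to
            # output paths, e.g. the iterable folder '_cope_id_3' becomes
            # 'contrast_3' in the sink tree.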

            wk.connect(flame, 'zstats', sinker, 'stats')
            wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
            wk.connect(cluster, 'index_file', sinker, 'stats.@index')
            wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
            if nonparametric:
                wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

            if not no_reversal:
                zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
                zstats_reverse.inputs.operation = 'mul'
                zstats_reverse.inputs.operand_value = -1
                wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

                cluster2 = cluster.clone(name='cluster2')
                wk.connect(smoothest, 'dlh', cluster2, 'dlh')
                wk.connect(smoothest, 'volume', cluster2, 'volume')
                wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

                ztopval2 = ztopval.clone(name='ztopval2')
                wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

                wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
                wk.connect(cluster2, 'threshold_file', sinker,
                           'stats.@neg_thr')
                wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
                wk.connect(cluster2, 'localmax_txt_file', sinker,
                           'stats.@neg_localmax')
            meta_workflow.add_nodes([wk])
    return meta_workflow
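# A minimal driver sketch (paths and ids are illustrative; work_dir must exist
# at module level):
#
#   wf = group_multregress_openfmri('/data/ds000030', model_id=1, task_id=[1],
#                                   l1output_dir='/out/l1', out_dir='/out/grp')
#   wf.run(plugin='MultiProc')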
Example 4
    derivatives_names = list(temps_dict.keys())
    derivatives_names.remove('mean')
    if 'bias_mask' in derivatives_names:
        derivatives_names.remove('bias_mask')

    temp_args = args['template_arguments']
    # Pair up the flat [key1, value1, key2, value2, ...] sequence into a dict
    # so each keyword can be associated with its list directly.
    template_arguments = {
        temp_args[::2][i]: temp_args[1::2][i]
        for i in range(len(temp_args) // 2)
    }

    data_grabber_node = Node(DataGrabber(base_directory=args['directory'],
                                         sort_filelist=True,
                                         raise_on_empty=False,
                                         infields=list(template_arguments.keys()),
                                         outfields=list(temps_dict)),
                             name='data_grabber')
    data_grabber_node.inputs.template = '*'
    data_grabber_node.inputs.raise_on_empty = True
    data_grabber_node.inputs.drop_blank_outputs = True
    data_grabber_node.inputs.field_template = temps_dict
    data_grabber_node.inputs.template_args = temp_args_dict  # presumably built elsewhere in the original script
    data_grabber_node.iterables = [
        (key,
         [thing.rstrip('\n') for thing in open(template_arguments[key], 'r')])
        for key in template_arguments.keys()
    ]
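    # Reading iterable values straight from open() leaves the file handles to
    # the garbage collector; an equivalent, more explicit form would be:
    #
    #   def read_list(path):
    #       with open(path) as fh:
    #           return [line.rstrip('\n') for line in fh]
    #
    #   data_grabber_node.iterables = [(key, read_list(path))
    #                                  for key, path in template_arguments.items()]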

template = abspath(args['template'])
Example 5
"""Modified script to run antsBrainExtraction on meningioma T1-contrast data.
"""
from glob import glob
import os.path as op
from nipype import DataGrabber, DataSink, IdentityInterface, Node, Workflow
from nipype.interfaces import ants

data_dir = '/om/user/jakubk/meningioma/data'
template_dir = '/om/user/jakubk/meningioma/ants_templates/OASIS-30_Atropos_template'

subjects = [op.split(path)[-1][:10] for path in glob(op.join(data_dir, '*.nii.gz'))]

datasource = Node(DataGrabber(infields=['subject_id'], outfields=['t1c']),
                  name='datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '*'
datasource.inputs.field_template = {'t1c': '%s*.nii.gz'}
datasource.inputs.template_args = {'t1c': [['subject_id']]}
datasource.inputs.sort_filelist = True
datasource.inputs.subject_id = subjects

seg = Node(ants.BrainExtraction(), name='seg', synchronize=True)
seg.inputs.dimension = 3
seg.inputs.keep_temporary_files = 1
seg.inputs.brain_template = op.join(template_dir, 'T_template0.nii.gz')
seg.inputs.brain_probability_mask = op.join(
    template_dir, 'T_template0_BrainCerebellumProbabilityMask.nii.gz')

# Node to save output files. This does not work. Why?
sinker = Node(DataSink(), name='sinker')
# Add container.
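# A plausible answer to the question above: a DataSink does nothing until its
# base_directory (and optionally container) are set and outputs are wired into
# it. A sketch with illustrative paths (BrainExtraction output names are from
# nipype's ants interface):
#
#   sinker.inputs.base_directory = op.join(data_dir, 'output')
#   sinker.inputs.container = 'ants_seg'
#   wf = Workflow(name='brain_extract', base_dir='/om/scratch')
#   wf.connect(datasource, 't1c', seg, 'anatomical_image')
#   wf.connect(seg, 'BrainExtractionBrain', sinker, 'seg.@brain')
#   wf.connect(seg, 'BrainExtractionMask', sinker, 'seg.@mask')
#   wf.run()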
    def make_neuromet_fs_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        mask_source = Node(interface=GetMaskValue(
            csv_file='/media/drive_s/AG/AG-Floeel-Imaging/02-User/NEUROMET2/'
                     'Structural_Analysis_fs7/List_UNI_DEN_Mask.xlsx'),
                           name='get_mask')

        # Datasource: Build subjects' filenames from IDs
        info = dict(mask=[[
            'subject_id', '', 'subject_id', 'mask', '_brain_bin.nii.gz'
        ]],
                    uni_bias_corr=[[
                        'subject_id', 'm', 'subject_id', '',
                        'UNI_mp2rage_orig_reoriented.nii'
                    ]],
                    den_ro=[[
                        'subject_id', '', 'subject_id', '',
                        'DEN_mp2rage_orig_reoriented.nii.gz'
                    ]])

        datasource = Node(interface=DataGrabber(
            infields=['subject_id', 'mask'],
            outfields=['mask', 'uni_bias_corr', 'den_ro']),
                          name='datasource')
        datasource.inputs.base_directory = self.w_dir
        datasource.inputs.template = '{pref}%s/%s{pref}%s.%s%s'.format(
            pref=self.subject_prefix)
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()

        comb_imgs = self.make_comb_imgs()

        freesurfer = self.make_freesurfer()

        neuromet_fs = Workflow(
            name='{pref}_fs'.format(pref=self.subject_prefix),
            base_dir=self.temp_dir)
        neuromet_fs.connect(infosource, 'subject_id', datasource, 'subject_id')
        neuromet_fs.connect(infosource, 'subject_id', mask_source,
                            'subject_id')
        neuromet_fs.connect(mask_source, 'mask_value', datasource, 'mask')
        neuromet_fs.connect(datasource, 'uni_bias_corr', comb_imgs,
                            'mask_uni_bias.in_file')
        neuromet_fs.connect(datasource, 'mask', comb_imgs,
                            'mask_uni_bias.mask_file')
        neuromet_fs.connect(datasource, 'den_ro', comb_imgs,
                            'uni_brain_den_surr_mas.in_file')

        neuromet_fs.connect(comb_imgs, 'uni_brain_den_surr_add.out_file',
                            freesurfer, 'fs_recon1.T1_files')
        neuromet_fs.connect(datasource, 'mask', freesurfer,
                            'fs_mriconv.in_file')

        out_dir_source = Node(interface=IdentityInterface(
            fields=['out_dir'], mandatory_inputs=True),
                              name='out_dir_source')
        out_dir_source.inputs.out_dir = self.w_dir

        copy_freesurfer_dir = Node(Function(['in_dir', 'sub_id', 'out_dir'],
                                            ['out_dir'],
                                            self.copy_freesurfer_dir),
                                   name='copy_freesurfer_dir')

        qdec = Node(interface=QDec(), name='qdec')

        adj_vol = Node(interface=AdjustVolume(
            diag_csv='/media/drive_s/AG/AG-Floeel-Imaging/02-User/NEUROMET2/'
                     'Structural_Analysis_fs7/Diagnosen.csv'),
                       name='adj_vol')

        neuromet_fs.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink,
                            '@img')
        #neuromet_fs.connect(infosource, 'subject_id', copy_freesurfer_dir, 'sub_id')
        #neuromet_fs.connect(freesurfer, 'segment_hp.subjects_dir', copy_freesurfer_dir, 'in_dir')
        neuromet_fs.connect(freesurfer, 'segment_hp.subjects_dir', sink,
                            '@recon_all')
        #neuromet_fs.connect(out_dir_source, 'out_dir', copy_freesurfer_dir, 'out_dir')

        neuromet_fs.connect(freesurfer, 'segment_hp.subject_id', qdec,
                            'devnull')
        neuromet_fs.connect(datasource, 'base_directory', qdec, 'basedir')
        neuromet_fs.connect(qdec, 'stats_directory', adj_vol,
                            'stats_directory')
        neuromet_fs.connect(qdec, 'stats_directory', sink, '@stat_dir')
        neuromet_fs.connect(adj_vol, 'adjusted_stats', sink, '@adj_stats')

        return neuromet_fs
Example 7
''' define subjects '''
#sub_list = [os.path.basename(x) for x in sorted(glob(project_dir+'/data/13*'))]
sub_list = ['131217', '131722', '132118']  # test on a few subjects


''' set up nodes '''
# set up iterables
infosource = Node(IdentityInterface(fields=['subject_id']),#, 'acq_dir', 'fwhm']),
                                    name='infosource')
infosource.iterables = [('subject_id', sub_list)]#,
                        #('acq_dir', acq_dir_list),
                        #('fwhm', fwhm_list)]

# Create DataGrabber node
dg = Node(DataGrabber(infields=['subject_id'],#, 'acq_dir'],
                      outfields=['anat', 'atlas', 'func']),
          name='datagrabber')

# Location of the dataset folder
dg.inputs.base_directory = os.path.join(project_dir, 'data')

# Necessary default parameters
dg.inputs.template = '*'
dg.inputs.sort_filelist = True

dg.inputs.template_args = {'anat': [['subject_id']],
                           'atlas' : [['subject_id']],
                           'func': [['subject_id']]}
dg.inputs.field_template = {'anat': '%s/MNINonLinear/T1w_restore.1.60.nii.gz',
                            'atlas': '%s/MNINonLinear/ROIs/Atlas_ROIs.1.60.nii.gz',
                            'func': '%s/MNINonLinear/Results/rfMRI_REST*_7T_*/'\
"""Script to run antsBrainExtraction on meningioma T1-contrast data.
"""
import os.path as op
from nipype import Node, Workflow, DataGrabber, DataSink, MapNode
from nipype.interfaces import ants

# Node to grab data.
grab = Node(DataGrabber(outfields=['t1c']), name='grabber')
grab.inputs.base_directory = op.abspath('data')
grab.inputs.template = '*.nii.gz'
grab.inputs.field_template = {'t1c': '*.nii.gz'}
grab.inputs.sort_filelist = True

# Node to run ants.BrainExtraction.
# Segments the anatomical image and should extract brain.
template_dir = op.abspath('ants_templates/OASIS-30_Atropos_template')
seg = MapNode(ants.BrainExtraction(),
              iterfield=['anatomical_image'],
              name='seg')
seg.inputs.dimension = 3
seg.inputs.keep_temporary_files = 1
seg.inputs.brain_template = op.join(template_dir, 'T_template0.nii.gz')
seg.inputs.brain_probability_mask = op.join(
    template_dir, 'T_template0_BrainCerebellumProbabilityMask.nii.gz')

# Node to save output files. This does not work. Why?
sinker = Node(DataSink(), name='sinker')
sinker.inputs.base_directory = op.abspath('antsBrainExtraction_output')

# Workflow.
wf = Workflow(name='antsBrainExtraction', base_dir='/om/scratch/Wed/jakubk')
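# Nothing is connected yet, which is why the sink stays empty. The missing
# wiring presumably looks like this (output names from nipype's
# ants.BrainExtraction interface):
#
#   wf.connect(grab, 't1c', seg, 'anatomical_image')
#   wf.connect(seg, 'BrainExtractionBrain', sinker, 'brain')
#   wf.connect(seg, 'BrainExtractionMask', sinker, 'brain.@mask')
#   wf.run()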
Example 9
from os import path as op

from nipype import DataGrabber, DataSink, Node, MapNode, Workflow
from nipype.interfaces import fsl

# Node to grab data.
grab = Node(DataGrabber(outfields=['t1', 'brain']), name='grabber')
grab.inputs.base_directory = '/om/user/jakubk/meningioma/'
grab.inputs.template = '*.nii.gz'
# Change filenames later to specify T1.
grab.inputs.field_template = {
    't1': 'data/*.nii.gz',
    'brain': 'ants_seg_output/brain/*.nii.gz'
}
grab.inputs.sort_filelist = True

fast = MapNode(fsl.FAST(), iterfield=['in_files'], name='fast')
fast.inputs.img_type = 1
fast.inputs.probability_maps = True
fast.iterables = ('number_classes', [3, 4, 5])

sinker = Node(DataSink(), name='sinker')
sinker.inputs.base_directory = op.abspath('fast_output')

# How can we iterate over original NIFTI files and extracted brains together?
# Run original NIFTI files.
wf = Workflow(name='fast_brain', base_dir='/om/scratch/Wed/jakubk/')
wf.connect(grab, 'brain', fast, 'in_files')
wf.connect(fast, 'probability_maps', sinker, 'prob')
wf.connect(fast, 'restored_image', sinker, 'restored')
wf.connect(fast, 'tissue_class_files', sinker, 'tissue_files')
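# Re: iterating over original NIFTIs and extracted brains together — one option
# is to zip the two grabbed lists into per-subject pairs and keep
# iterfield=['in_files'], since FAST accepts a list of channels per run
# (a sketch, replacing the grab->fast connection above):
#
#   from nipype import Node
#   from nipype.interfaces.utility import Function
#
#   def zip_pairs(a, b):
#       return [list(pair) for pair in zip(a, b)]
#
#   zipper = Node(Function(input_names=['a', 'b'], output_names=['pairs'],
#                          function=zip_pairs), name='zipper')
#   wf.connect(grab, 't1', zipper, 'a')
#   wf.connect(grab, 'brain', zipper, 'b')
#   wf.connect(zipper, 'pairs', fast, 'in_files')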
Example 10
def group_onesample_openfmri(dataset_dir,
                             model_id=None,
                             task_id=None,
                             l1output_dir=None,
                             out_dir=None,
                             no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)

    info = Node(
        util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir']),
        name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']),
              name='grabber')
    dg.inputs.template = os.path.join(
        l1output_dir, 'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [[
        'model_id', 'task_id', '', '', 'cope_id'
    ]]
    dg.inputs.template_args['varcopes'] = [[
        'model_id', 'task_id', 'var', 'var', 'cope_id'
    ]]
    dg.iterables = ('cope_id', num_copes)

    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')

    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')

    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest, 'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')

    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker,
                   'stats.@neg_localmax')

    return wk
    def make_neuromet2_workflow(self):

        infosource = self.make_infosource()

        # Datasource: Build subjects' filenames from IDs
        info = dict(
            mask=[['subject_id', '', 'subject_id', 'SPMbrain_bin.nii.gz']],
            uni_bias_corr=[[
                'subject_id', 'm', 'subject_id',
                'UNI_mp2rage_orig_reoriented.nii'
            ]],
            den_ro=[[
                'subject_id', '', 'subject_id',
                'DEN_mp2rage_orig_reoriented.nii.gz'
            ]])

        datasource = Node(interface=DataGrabber(
            infields=['subject_id'],
            outfields=['mask', 'uni_bias_corr', 'den_ro']),
                          name='datasource')
        datasource.inputs.base_directory = self.w_dir
        datasource.inputs.template = 'NeuroMet%s/%sNeuroMet%s.%s'
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()

        comb_imgs = self.make_comb_imgs()

        freesurfer = self.make_freesurfer()

        neuromet2 = Workflow(name='Neuromet2', base_dir=self.temp_dir)
        neuromet2.connect(infosource, 'subject_id', datasource, 'subject_id')
        neuromet2.connect(datasource, 'uni_bias_corr', comb_imgs,
                          'mask_uni_bias.in_file')
        neuromet2.connect(datasource, 'mask', comb_imgs,
                          'mask_uni_bias.mask_file')
        neuromet2.connect(datasource, 'den_ro', comb_imgs,
                          'uni_brain_den_surr_mas.in_file')

        neuromet2.connect(comb_imgs, 'uni_brain_den_surr_add.out_file',
                          freesurfer, 'fs_recon1.T1_files')
        neuromet2.connect(datasource, 'mask', freesurfer, 'fs_mriconv.in_file')

        out_dir_source = Node(interface=IdentityInterface(
            fields=['out_dir'], mandatory_inputs=True),
                              name='out_dir_source')
        out_dir_source.inputs.out_dir = self.w_dir

        copy_freesurfer_dir = Node(Function(['in_dir', 'sub_id', 'out_dir'],
                                            ['out_dir'],
                                            self.copy_freesurfer_dir),
                                   name='copy_freesurfer_dir')

        neuromet2.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink,
                          '@img')
        neuromet2.connect(infosource, 'subject_id', copy_freesurfer_dir,
                          'sub_id')
        neuromet2.connect(freesurfer, 'fs_recon3.subjects_dir',
                          copy_freesurfer_dir, 'in_dir')
        neuromet2.connect(out_dir_source, 'out_dir', copy_freesurfer_dir,
                          'out_dir')

        return neuromet2
    def make_neuromet1_workflow(self):

        infosource = self.make_infosource()

        info = dict(uni=[['subject_id', 'subject_id', 'UNI']],
                    den=[['subject_id', 'subject_id', 'DEN']])

        datasource = Node(interface=DataGrabber(infields=['subject_id'],
                                                outfields=['uni', 'den']),
                          name='datasource')
        datasource.inputs.base_directory = self.w_dir
        datasource.inputs.template = 'NeuroMet%s/NeuroMet%s.%s_mp2rage_orig.nii.gz'
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()

        segment_uni = self.make_segment_uni()

        segment_den = self.make_segment_den()

        mask_uni = self.make_mask_uni()

        mask_den = self.make_mask_den()

        neuromet = Workflow(name='NeuroMet', base_dir=self.temp_dir)
        neuromet.connect(infosource, 'subject_id', datasource, 'subject_id')
        neuromet.connect(datasource, 'uni', segment_uni, 'ro.in_file')
        neuromet.connect(datasource, 'den', segment_den, 'ro.in_file')

        # neuromet.connect()
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.gm', mask_uni,
                         'sum_tissues1.in_file')
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.wm', mask_uni,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.csf', mask_uni,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.gm', sink,
                         '@gm_uni')
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.wm', sink,
                         '@wm_uni')
        neuromet.connect(segment_uni, 'spm_tissues_split_uni.csf', sink,
                         '@csf_uni')
        neuromet.connect(segment_uni, 'seg_uni.bias_corrected_images', sink,
                         '@biascorr_uni')

        neuromet.connect(segment_den, 'spm_tissues_split_den.gm', mask_den,
                         'sum_tissues1.in_file')
        neuromet.connect(segment_den, 'spm_tissues_split_den.wm', mask_den,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment_den, 'spm_tissues_split_den.csf', mask_den,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment_den, 'spm_tissues_split_den.gm', sink,
                         '@gm_den')
        neuromet.connect(segment_den, 'spm_tissues_split_den.wm', sink,
                         '@wm_den')
        neuromet.connect(segment_den, 'spm_tissues_split_den.csf', sink,
                         '@csf_den')
        neuromet.connect(segment_den, 'seg_den.bias_corrected_images', sink,
                         '@biascorr_den')

        # neuromet.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink, '@img')
        neuromet.connect(mask_uni, 'gen_mask.out_file', sink, '@mask_uni')
        neuromet.connect(mask_den, 'gen_mask.out_file', sink, '@mask_den')
        neuromet.connect(segment_den, 'ro.out_file', sink, '@ro_den')
        # neuromet.connect(segment_uni, 'ro.out_file', sink, '@ro_uni')

        return neuromet
# Two subjects minimum
subject_list = ['control01', 'control02']
runs = [1, 2, 3, 4, 5]

# An IdentityInterface node iterates over the list of subject IDs: used to grab anatomical images
info_subj = Node(IdentityInterface(fields=['subject_id']), name="info_subj")
info_subj.iterables = [('subject_id', subject_list)]

# Same idea for run numbers: used to grab functional images
info_run = Node(IdentityInterface(fields=['run_id']), name="info_run")
info_run.iterables = [('run_id', runs)]

# DataGrabber 1 - grab_anat
data_dir = '/Users/vgonzenb/Python/nipy/fmri-rep/data/ds000171/'

grab_anat = Node(DataGrabber(infields=['subject_id'], outfields=['anat']),
                 name='grab_anat')
grab_anat.inputs.base_directory = data_dir
grab_anat.inputs.template = '*'
grab_anat.inputs.sort_filelist = True
grab_anat.inputs.template_args = {'anat': [['subject_id']]}
grab_anat.inputs.field_template = {'anat': 'sub-%s/anat/s*T1w.nii.gz'}

# DataGrabber 2 - func

grab_func = Node(DataGrabber(infields=['subject_id', 'runs'], outfields=['func']),
                 name='grab_func')
grab_func.inputs.base_directory = data_dir
grab_func.inputs.template = '*'
grab_func.inputs.sort_filelist = True
grab_func.inputs.template_args = {'func': [['subject_id', 'runs']]}
grab_func.inputs.field_template = {'func': 'sub-%s/func/sub-*run-%d_bold.nii.gz'}
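# The missing wiring presumably looks like this (workflow name is illustrative,
# assuming the usual nipype imports):
#
#   wf = Workflow(name='grab_data')
#   wf.connect(info_subj, 'subject_id', grab_anat, 'subject_id')
#   wf.connect(info_subj, 'subject_id', grab_func, 'subject_id')
#   wf.connect(info_run, 'run_id', grab_func, 'runs')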
    def make_neuromet1_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        # unidensource: for every subject, iterate over both acquisition
        # variants (the standard T1w and the UNIDEN-derived image)
        unidensource = Node(interface=IdentityInterface(
            fields=['uniden_prefix', 'uniden_suffix']),
                            name="unidensource")
        unidensource.iterables = [
            ('uniden_prefix', ['', 'derivatives/Siemens/']),
            ('uniden_suffix', ['T1w', 'desc-UNIDEN_MP2RAGE'])
        ]
        unidensource.synchronize = True

        split_sub_str = Node(Function(['subject_str'],
                                      ['subject_id', 'session_id'],
                                      self.split_subject_ses),
                             name='split_sub_str')

        info = dict(T1w=[[
            'uniden_prefix', 'subject_id', 'session_id', 'anat', 'subject_id',
            'session_id', 'uniden_suffix'
        ]])

        datasource = Node(interface=DataGrabber(infields=[
            'subject_id', 'session_id', 'uniden_prefix', 'uniden_suffix'
        ],
                                                outfields=['T1w']),
                          name='datasource')
        datasource.inputs.base_directory = self.bids_root
        datasource.inputs.template = '%ssub-NeuroMET%s/ses-0%s/%s/sub-NeuroMET%s_ses-0%s_%s.nii.gz'
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()
        segment = self.make_segment()
        mask = self.make_mask()

        neuromet = Workflow(name='NeuroMET', base_dir=self.temp_dir)
        neuromet.connect(infosource, 'subject_id', split_sub_str,
                         'subject_str')
        neuromet.connect(split_sub_str, 'subject_id', datasource, 'subject_id')
        neuromet.connect(split_sub_str, 'session_id', datasource, 'session_id')
        neuromet.connect(unidensource, 'uniden_prefix', datasource,
                         'uniden_prefix')
        neuromet.connect(unidensource, 'uniden_suffix', datasource,
                         'uniden_suffix')
        neuromet.connect(datasource, 'T1w', segment, 'ro.in_file')

        # neuromet.connect()
        neuromet.connect(segment, 'spm_tissues_split.gm', mask,
                         'sum_tissues1.in_file')
        neuromet.connect(segment, 'spm_tissues_split.wm', mask,
                         'sum_tissues1.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.csf', mask,
                         'sum_tissues2.operand_files')
        neuromet.connect(segment, 'spm_tissues_split.gm', sink, '@gm')
        neuromet.connect(segment, 'spm_tissues_split.wm', sink, '@wm')
        neuromet.connect(segment, 'spm_tissues_split.csf', sink, '@csf')
        neuromet.connect(segment, 'seg.bias_corrected_images', sink,
                         '@biascorr')
        # neuromet.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink, '@img')
        neuromet.connect(mask, 'gen_mask.out_file', sink, '@mask')
        neuromet.connect(segment, 'ro.out_file', sink, '@ro')

        return neuromet
work_dir = os.path.join('/om2/scratch/ksitek/hcp/', analysis)

''' define subjects '''
sub_list = [os.path.basename(x) for x in sorted(glob(project_dir+'/data/13*'))]
#sub_list = ['100610'] # test on one subject

''' set up nodes '''
# set up iterables
infosource = Node(IdentityInterface(fields=['subject_id']),
                                    name='infosource')
infosource.iterables = [('subject_id', sub_list)]

# Create DataGrabber node
dg = Node(DataGrabber(infields=['subject_id'],
                      outfields=['dwi_file',
                                 'bval_file',
                                 'bvec_file',
                                 'atlas',
                                 'mask']),
          name='datagrabber')

# Location of the dataset folder
dg.inputs.base_directory = project_dir

# Necessary default parameters
dg.inputs.template = '*'
dg.inputs.sort_filelist = True

dg.inputs.template_args = {'dwi_file': [['subject_id']],
                           'bval_file' : [['subject_id']],
                           'bvec_file': [['subject_id']],
                           'atlas': [['subject_id','subject_id']],
    def make_neuromet_fs_workflow(self):

        # Infosource: Iterate through subject names
        infosource = Node(interface=IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = ('subject_id', self.subject_list)

        mask_source = Node(interface=GetMaskValue(csv_file=self.mask_file),
                           name='get_mask')

        split_sub_str = Node(Function(['subject_str'],
                                      ['subject_id', 'session_id'],
                                      self.split_subject_ses),
                             name='split_sub_str')

        # Datasource: Build subjects' filenames from IDs
        info = dict(mask=[[
            'subject_id', 'session_id', 'anat', 'subject_id', 'session_id',
            'mask', 'ro_brain_bin.nii.gz'
        ]],
                    uni_bias_corr=[[
                        'subject_id', 'session_id', 'anat', 'subject_id',
                        'session_id', 'UNI', 'ro_bfcorr.nii'
                    ]],
                    den_ro=[[
                        'subject_id', 'session_id', 'anat', 'subject_id',
                        'session_id', 'UNIDEN', 'ro_bfcorr.nii'
                    ]])

        datasource = Node(interface=DataGrabber(
            infields=['subject_id', 'session_id', 'mask'],
            outfields=['mask', 'uni_bias_corr', 'den_ro']),
                          name='datasource')
        datasource.inputs.base_directory = self.derivatives_dir
        datasource.inputs.template = 'sub-NeuroMET%s/ses-0%s/%s/sub-NeuroMET%s_ses-0%s_desc-%s_%s'
        datasource.inputs.template_args = info
        datasource.inputs.sort_filelist = False

        sink = self.make_sink()

        comb_imgs = self.make_comb_imgs()

        freesurfer = self.make_freesurfer()

        neuromet_fs = Workflow(name='NeuroMET', base_dir=self.temp_dir)
        neuromet_fs.connect(infosource, 'subject_id', split_sub_str,
                            'subject_str')
        neuromet_fs.connect(split_sub_str, 'subject_id', datasource,
                            'subject_id')
        neuromet_fs.connect(split_sub_str, 'session_id', datasource,
                            'session_id')
        neuromet_fs.connect(infosource, 'subject_id', mask_source,
                            'subject_id')
        neuromet_fs.connect(mask_source, 'mask_value', datasource, 'mask')
        neuromet_fs.connect(datasource, 'uni_bias_corr', comb_imgs,
                            'mask_uni_bias.in_file')
        neuromet_fs.connect(datasource, 'mask', comb_imgs,
                            'mask_uni_bias.mask_file')
        neuromet_fs.connect(datasource, 'den_ro', comb_imgs,
                            'uni_brain_den_surr_mas.in_file')

        neuromet_fs.connect(comb_imgs, 'uni_brain_den_surr_add.out_file',
                            freesurfer, 'fs_recon1.T1_files')
        neuromet_fs.connect(datasource, 'mask', freesurfer,
                            'fs_mriconv.in_file')

        out_dir_source = Node(interface=IdentityInterface(
            fields=['out_dir'], mandatory_inputs=True),
                              name='out_dir_source')
        out_dir_source.inputs.out_dir = self.bids_root
        make_list_str = Node(interface=Merge(2), name='make_list_of_paths')
        merge_strs = Node(interface=OsPathJoin(), name='merge_sub_id_dir')

        neuromet_fs.connect(comb_imgs, 'uni_brain_den_surr_add.out_file', sink,
                            '@img')
        #neuromet_fs.connect(infosource, 'subject_id', copy_freesurfer_dir, 'sub_id')
        #neuromet_fs.connect(freesurfer, 'segment_hp.subjects_dir', copy_freesurfer_dir, 'in_dir')
        neuromet_fs.connect(freesurfer, 'segment_hp.subjects_dir',
                            make_list_str, 'in1')
        neuromet_fs.connect(freesurfer, 'segment_hp.subject_id', make_list_str,
                            'in2')
        neuromet_fs.connect(make_list_str, 'out', merge_strs, 'str_list')
        neuromet_fs.connect(merge_strs, 'out_path', sink, '@recon_all')
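        # Merge(2) here is nipype's utility Merge: it collects subjects_dir and
        # subject_id into a two-element list, and OsPathJoin (presumably a
        # custom interface wrapping os.path.join) turns that list into the path
        # of the individual recon-all subject directory for the sink.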
        #neuromet_fs.connect(out_dir_source, 'out_dir', copy_freesurfer_dir, 'out_dir')

        #ToDo:
        # 04.12.2020 QDec + adjust volumes: it hangs if qdec is in a workflow, but works as a single interface
        #neuromet_fs.connect(freesurfer, 'segment_hp.subject_id', qdec, 'devnull')
        #neuromet_fs.connect(datasource, 'base_directory', qdec, 'basedir')
        #neuromet_fs.connect(qdec, 'stats_directory', adj_vol, 'stats_directory')
        #neuromet_fs.connect(qdec, 'stats_directory', sink, '@stat_dir')
        #neuromet_fs.connect(adj_vol, 'adjusted_stats', sink, '@adj_stats')

        return neuromet_fs
Example 17
infosource_sub = Node(IdentityInterface(fields=["subject_id", "contrasts"]),
                      name="Infosource_Sub")

infosource_sub.inputs.contrasts = 1

x_dummy, dirs, y_dummy = next(os.walk(input_info_dir))
subject_list = dirs

# Split into parallel tasks per subject ID.
infosource_sub.iterables = [('subject_id', subject_list)]

## Part 2: DataGrabber:
# Get WF1 MRI and tissue files using the ID provided by infosource
datagrab_sub = Node(DataGrabber(infields=["subject_id"],
                                outfields=[
                                    "t1", "flair", "swi", "c1", "c2", "c3",
                                    "c4", "c5", "bct1", "sim"
                                ]),
                    name="DataGrabber_Sub")

datagrab_sub.inputs.base_directory = input_grab_dir

datagrab_sub.inputs.template = "*"
datagrab_sub.inputs.sort_filelist = False

# These should vary depending on the file(s) available
datagrab_sub.inputs.template_args = {
    't1': [['subject_id']],
    'flair': [['subject_id']],
    'swi': [['subject_id']],
    'c1': [['subject_id']],