Example #1
from nipype.pipeline.engine import Node, MapNode
from nipype.interfaces.utility import Function

# Excerpt from a larger pipeline: correlation3, mat3, and the helper functions
# (Integrate, get_graph, Calculate_ALFF_fALFF, Calculate_ReHo) are defined
# earlier in the original script.
correlation3.inputs.in_mat = mat3
correlation3.inputs.typeF = 'Maps'
correlation3.inputs.kind = 'correlation'

Integ2 = Node(Function(input_names=['t1', 't2', 't3'],
                       output_names=['Corre_files'],
                       function=Integrate),
              name='Correlation_files')

Graph = MapNode(Function(
    input_names=['Mat_D', 'Threshold', 'percentageConnections', 'complet'],
    output_names=['out_data', 'out_mat'],
    function=get_graph),
                name='Graph_Metricts',
                iterfield=['Mat_D'])
Graph.iterables = ("Threshold", [0.6])
Graph.inputs.percentageConnections = False  # Percentage of connections used

ALFF_fALFF = MapNode(Function(
    input_names=['slow', 'ASamplePeriod', 'Time_s', 'plots'],
    output_names=['out_mat'],
    function=Calculate_ALFF_fALFF),
                     name='ALFF_and_fALFF',
                     iterfield=['Time_s'])
ALFF_fALFF.iterables = ("slow", [2, 3, 4, 5])
ALFF_fALFF.inputs.ASamplePeriod = 1.6  # Repetition time (TR), in seconds

ReHo = Node(Function(input_names=['func', 'nneigh', 'help_reho'],
                     output_names=['out_ReHo'],
                     function=Calculate_ReHo),
            name='Regional_homogeneity')
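
The excerpt ends before these nodes are connected, so the sketch below shows one way they might be assembled into a Workflow. The wiring, the /tmp paths, and the upstream timeseries_source node (standing in for whatever step produces the per-subject time series and connectivity matrices) are assumptions for illustration, not the original pipeline; correlation3, Integ2, and ReHo would be wired in the same way.

from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.utility import IdentityInterface

# Hypothetical stand-in for the upstream step that produces per-subject
# time-series files and connectivity matrices in the real pipeline.
timeseries_source = Node(IdentityInterface(fields=['ts_files', 'mat_files']),
                         name='timeseries_source')
timeseries_source.inputs.ts_files = ['/data/sub01_ts.txt', '/data/sub02_ts.txt']      # assumed paths
timeseries_source.inputs.mat_files = ['/data/sub01_mat.npy', '/data/sub02_mat.npy']   # assumed paths

wf = Workflow(name='graph_metrics_demo', base_dir='/tmp/nipype_demo')  # assumed location
wf.connect([
    (timeseries_source, ALFF_fALFF, [('ts_files', 'Time_s')]),
    (timeseries_source, Graph, [('mat_files', 'Mat_D')]),
])
wf.run()
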
Example #2
import os.path as op

from nipype.interfaces import fsl
from nipype.interfaces.io import DataGrabber, DataSink
from nipype.pipeline.engine import Node, MapNode, Workflow

# Node to grab data.
grab = Node(DataGrabber(outfields=['t1', 'brain']), name='grabber')
grab.inputs.base_directory = '/om/user/jakubk/meningioma/'
grab.inputs.template = '*.nii.gz'
# Change filenames later to specify T1.
grab.inputs.field_template = {
    't1': 'data/*.nii.gz',
    'brain': 'ants_seg_output/brain/*.nii.gz'
}
grab.inputs.sort_filelist = True

fast = MapNode(fsl.FAST(), iterfield=['in_files'], name='fast')
fast.inputs.img_type = 1
fast.inputs.probability_maps = True
fast.iterables = ('number_classes', [3, 4, 5])

sinker = Node(DataSink(), name='sinker')
sinker.inputs.base_directory = op.abspath('fast_output')

# How can we iterate over the original NIFTI files and the extracted brains
# together? (One option using a multi-field iterfield is sketched after this
# example.)
# For now, run FAST on the extracted brains only.
wf = Workflow(name='fast_brain', base_dir='/om/scratch/Wed/jakubk/')
wf.connect(grab, 'brain', fast, 'in_files')
wf.connect(fast, 'probability_maps', sinker, 'prob')
wf.connect(fast, 'restored_image', sinker, 'restored')
wf.connect(fast, 'tissue_class_files', sinker, 'tissue_files')
wf.connect(fast, 'tissue_class_map', sinker, 'tissue_map')
wf.run(plugin='SLURM', plugin_args={'sbatch_args': '--mem=50GB'})
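
One way to process the original T1s and the extracted brains together, which the comment above asks about, is to give a MapNode more than one entry in iterfield: the node then iterates over the connected lists pairwise, so element i of 't1' is handled together with element i of 'brain' (the lists must be the same length). The sketch below only illustrates that mechanism; pair_report and its inner function are hypothetical helpers, not part of the pipeline above.

from nipype.pipeline.engine import MapNode
from nipype.interfaces.utility import Function

def report_pair(t1_file, brain_file):
    # Hypothetical helper: simply returns the paired file paths.
    return (t1_file, brain_file)

# With two fields in iterfield, the MapNode zips the two input lists and
# runs once per (t1, brain) pair.
pair_report = MapNode(Function(input_names=['t1_file', 'brain_file'],
                               output_names=['pair'],
                               function=report_pair),
                      iterfield=['t1_file', 'brain_file'],
                      name='pair_report')

wf.connect(grab, 't1', pair_report, 't1_file')
wf.connect(grab, 'brain', pair_report, 'brain_file')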