Example #1
import os
import shutil
from tempfile import mkdtemp

import nipype.interfaces.io as nio
from nipype.testing import assert_true  # imports assumed from the original (nose-era) test module


def test_s3datagrabber_communication():
    dg = nio.S3DataGrabber(infields=['subj_id', 'run_num'], outfields=['func', 'struct'])
    dg.inputs.anon = True
    dg.inputs.bucket = 'openfmri'
    dg.inputs.bucket_path = 'ds001/'
    tempdir = mkdtemp()
    dg.inputs.local_directory = tempdir
    dg.inputs.sort_filelist = True
    dg.inputs.template = '*'
    dg.inputs.field_template = dict(func='%s/BOLD/task001_%s/bold.nii.gz',
                                    struct='%s/anatomy/highres001_brain.nii.gz')
    dg.inputs.subj_id = ['sub001', 'sub002']
    dg.inputs.run_num = ['run001', 'run003']
    dg.inputs.template_args = dict(
        func=[['subj_id', 'run_num']], struct=[['subj_id']])
    res = dg.run()
    func_outfiles = res.outputs.func
    struct_outfiles = res.outputs.struct

    # check for all files
    yield assert_true, '/sub001/BOLD/task001_run001/bold.nii.gz' in func_outfiles[0]
    yield assert_true, os.path.exists(func_outfiles[0])
    yield assert_true, '/sub001/anatomy/highres001_brain.nii.gz' in struct_outfiles[0]
    yield assert_true, os.path.exists(struct_outfiles[0])
    yield assert_true, '/sub002/BOLD/task001_run003/bold.nii.gz' in func_outfiles[1]
    yield assert_true, os.path.exists(func_outfiles[1])
    yield assert_true, '/sub002/anatomy/highres001_brain.nii.gz' in struct_outfiles[1]
    yield assert_true, os.path.exists(struct_outfiles[1])

    shutil.rmtree(tempdir)
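For readers new to the DataGrabber template mechanism, the short standalone sketch below (not part of the test) shows roughly how the func field_template and template_args above expand into the S3 keys the assertions expect; the subject/run pairing mirrors the files asserted in the test.

# Illustrative only: expansion of the 'func' field_template used above.
field_template = '%s/BOLD/task001_%s/bold.nii.gz'
for subj, run in zip(['sub001', 'sub002'], ['run001', 'run003']):
    print(field_template % (subj, run))
# -> sub001/BOLD/task001_run001/bold.nii.gz
# -> sub002/BOLD/task001_run003/bold.nii.gz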
Example #2
import os

import nipype.interfaces.io as nio  # assumed import from the original test module


def test_s3datagrabber_communication(tmpdir):
    dg = nio.S3DataGrabber(infields=["subj_id", "run_num"],
                           outfields=["func", "struct"])
    dg.inputs.anon = True
    dg.inputs.bucket = "openfmri"
    dg.inputs.bucket_path = "ds001/"
    dg.inputs.local_directory = tmpdir.strpath
    dg.inputs.sort_filelist = True
    dg.inputs.template = "*"
    dg.inputs.field_template = dict(
        func="%s/BOLD/task001_%s/bold.nii.gz",
        struct="%s/anatomy/highres001_brain.nii.gz",
    )
    dg.inputs.subj_id = ["sub001", "sub002"]
    dg.inputs.run_num = ["run001", "run003"]
    dg.inputs.template_args = dict(func=[["subj_id", "run_num"]],
                                   struct=[["subj_id"]])
    res = dg.run()
    func_outfiles = res.outputs.func
    struct_outfiles = res.outputs.struct

    # check for all files
    assert (os.path.join(dg.inputs.local_directory,
                         "/sub001/BOLD/task001_run001/bold.nii.gz")
            in func_outfiles[0])
    assert os.path.exists(func_outfiles[0])
    assert (os.path.join(dg.inputs.local_directory,
                         "/sub001/anatomy/highres001_brain.nii.gz")
            in struct_outfiles[0])
    assert os.path.exists(struct_outfiles[0])
    assert (os.path.join(dg.inputs.local_directory,
                         "/sub002/BOLD/task001_run003/bold.nii.gz")
            in func_outfiles[1])
    assert os.path.exists(func_outfiles[1])
    assert (os.path.join(dg.inputs.local_directory,
                         "/sub002/anatomy/highres001_brain.nii.gz")
            in struct_outfiles[1])
    assert os.path.exists(struct_outfiles[1])
Example #3
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.fsl as fsl
import nipype.interfaces.io as io

#Wraps command **bet**
my_fsl_BET = pe.Node(interface=fsl.BET(), name='my_fsl_BET', iterfield=[''])

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["out_file", "func"]),
                              name='my_io_S3DataGrabber')

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface=io.DataSink(),
                         name='my_io_DataSink',
                         iterfield=[''])

#Wraps command **epi_reg**
my_fsl_EpiReg = pe.Node(interface=fsl.EpiReg(),
                        name='my_fsl_EpiReg',
                        iterfield=[''])

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(my_io_S3DataGrabber, "out_file", my_fsl_BET, "in_file")
analysisflow.connect(my_fsl_BET, "out_file", my_fsl_EpiReg, "t1_brain")
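The snippet above stops before the remaining connections are made. Below is a hedged sketch of how the wiring could be completed and run; the DataSink field name epi_reg_results and the base_dir are hypothetical, while the node and field names come from the code above.

#Hypothetical completion, not part of the original script
analysisflow.connect(my_io_S3DataGrabber, "func", my_fsl_EpiReg, "epi")
analysisflow.connect(my_io_S3DataGrabber, "out_file", my_fsl_EpiReg, "t1_head")
analysisflow.connect(my_fsl_EpiReg, "out_file", my_io_DataSink, "epi_reg_results")
analysisflow.base_dir = '/tmp'
analysisflow.run()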
Example #4
#This is a Nipype generator. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_17c5c70 = pe.Node(io.S3DataGrabber(outfields=['outfiles']),
                           name='NodeName_17c5c70')
NodeHash_17c5c70.inputs.bucket = 'openneuro'
NodeHash_17c5c70.inputs.sort_filelist = True
NodeHash_17c5c70.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
NodeHash_17c5c70.inputs.anon = True
NodeHash_17c5c70.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_17c5c70.inputs.local_directory = '/tmp'

#Wraps command **bet**
NodeHash_211a5f0 = pe.Node(interface=fsl.BET(), name='NodeName_211a5f0')

#Generic datasink module to store structured outputs
NodeHash_236ab50 = pe.Node(interface=io.DataSink(), name='NodeName_236ab50')
NodeHash_236ab50.inputs.base_directory = '/tmp'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_211a5f0, 'out_file', NodeHash_236ab50,
                     'BET_results')
analysisflow.connect(NodeHash_17c5c70, 'outfiles', NodeHash_211a5f0, 'in_file')
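The generated script defines WorkingDirectory but never uses it. A minimal, assumed way to execute the workflow built above:

#Assumed execution step, not part of the generated script
import os
analysisflow.base_dir = os.path.expanduser(WorkingDirectory)
analysisflow.run()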
Example #5
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import nipype.interfaces.afni as afni

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Basic interface class generates identity mappings
NodeHash_24ff4a0 = pe.Node(
    utility.IdentityInterface(fields=['sub_id', 'run_id']),
    name='NodeName_24ff4a0')
NodeHash_24ff4a0.inputs.run_id = ['run-1', 'run-2']
NodeHash_24ff4a0.iterables = [('sub_id', ['sub-01', 'sub-02'])]

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_1e88370 = pe.Node(io.S3DataGrabber(infields=['sub_id', 'run_id'],
                                            outfields=['func']),
                           name='NodeName_1e88370')
NodeHash_1e88370.inputs.bucket = 'openneuro'
NodeHash_1e88370.inputs.sort_filelist = True
NodeHash_1e88370.inputs.template = '%s/func/%s_task-simon_%s_bold.nii.gz'
NodeHash_1e88370.inputs.anon = True
NodeHash_1e88370.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_1e88370.inputs.local_directory = '/tmp'
NodeHash_1e88370.inputs.template_args = dict(
    func=[['sub_id', 'sub_id', 'run_id']])

#Wraps command **3dvolreg**
NodeHash_19153b0 = pe.MapNode(interface=afni.Volreg(),
                              name='NodeName_19153b0',
                              iterfield=['in_file'])
NodeHash_19153b0.inputs.outputtype = 'NIFTI_GZ'
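This fragment ends before any workflow is created. A hedged sketch of the typical wiring for the three nodes above (the workflow itself and its connections are assumptions; the field names are taken from the code):

#Assumed wiring, not part of the original fragment
analysisflow = pe.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_24ff4a0, 'sub_id', NodeHash_1e88370, 'sub_id')
analysisflow.connect(NodeHash_24ff4a0, 'run_id', NodeHash_1e88370, 'run_id')
analysisflow.connect(NodeHash_1e88370, 'func', NodeHash_19153b0, 'in_file')
analysisflow.run()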
Example #6
#This is a Nipype generator. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import firstlevelhelpers
import nipype.algorithms.modelgen as modelgen

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_32c4e30 = pe.Node(io.S3DataGrabber(infields=['field_template'],
                                            outfields=['func', 'events']),
                           name='NodeName_32c4e30')
NodeHash_32c4e30.inputs.anon = True
NodeHash_32c4e30.inputs.bucket = 'openneuro'
NodeHash_32c4e30.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_32c4e30.inputs.local_directory = '/tmp'
NodeHash_32c4e30.inputs.sort_filelist = True
NodeHash_32c4e30.inputs.template = '*'
NodeHash_32c4e30.inputs.template_args = dict(func=[['bold.nii.gz']],
                                             events=[['events.tsv']])
NodeHash_32c4e30.inputs.field_template = dict(
    func='sub-01/func/sub-01_task-simon_run-1_%s',
    events='sub-01/func/sub-01_task-simon_run-1_%s')

#Wraps command **bet**
NodeHash_3443a20 = pe.Node(interface=fsl.BET(), name='NodeName_3443a20')
NodeHash_3443a20.inputs.frac = 0.3
NodeHash_3443a20.inputs.mask = True
Example #7
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.interfaces.utility as utility

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
anat_from_openneuro = pe.Node(io.S3DataGrabber(outfields=["anat"]),
                              name='anat_from_openneuro')
anat_from_openneuro.inputs.bucket = 'openneuro'
anat_from_openneuro.inputs.sort_filelist = True
anat_from_openneuro.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
anat_from_openneuro.inputs.anon = True
anat_from_openneuro.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
anat_from_openneuro.inputs.local_directory = '/tmp'

#Wraps command **bet**
brain_extraction = pe.Node(interface=fsl.BET(),
                           name='brain_extraction',
                           iterfield=[''])

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
func_from_openneuro = pe.Node(io.S3DataGrabber(outfields=["func"]),
                              name='func_from_openneuro')
func_from_openneuro.inputs.bucket = 'openneuro'
func_from_openneuro.inputs.sort_filelist = True
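The functional grabber above is cut off before its remaining inputs are set. A hypothetical completion, mirroring the anatomical node above and the ds000101 functional template used elsewhere on this page:

#Hypothetical completion, not part of the original script
func_from_openneuro.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
func_from_openneuro.inputs.anon = True
func_from_openneuro.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
func_from_openneuro.inputs.local_directory = '/tmp'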
Example #8
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                           name='io_S3DataGrabber')
io_S3DataGrabber.inputs.bucket = 'openneuro'
io_S3DataGrabber.inputs.sort_filelist = True
io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
io_S3DataGrabber.inputs.anon = True
io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
fsl_BET = pe.Node(interface=fsl.BET(), name='fsl_BET', iterfield=[''])

#Generic datasink module to store structured outputs
io_DataSink = pe.Node(interface=io.DataSink(),
                      name='io_DataSink',
                      iterfield=[''])
io_DataSink.inputs.base_directory = '/tmp'

#Wraps command **3dAllineate**
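The example is truncated at the comment above. A hedged sketch of the 3dAllineate node it announces (the node name and settings are assumptions):

#Assumed node, not part of the original script
afni_Allineate = pe.Node(interface=afni.Allineate(), name='afni_Allineate')
afni_Allineate.inputs.outputtype = 'NIFTI_GZ'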
Example #9
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import firstlevelhelpers
import nipype.algorithms.modelgen as modelgen

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Basic interface class generates identity mappings
NodeHash_2c4dda0 = pe.Node(utility.IdentityInterface(fields=['sub_id']), name = 'NodeName_2c4dda0')
NodeHash_2c4dda0.inputs.sub_id = ['sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub-06', 'sub-07', 'sub-08', 'sub-09', 'sub-10', 'sub-11', 'sub-12', 'sub-13', 'sub-14', 'sub-15', 'sub-16', 'sub-17', 'sub-18', 'sub-19', 'sub-20', 'sub-21']

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_17173a00 = pe.MapNode(io.S3DataGrabber(infields=['field_template','sub_id'], outfields=['func','events','anat']), name = 'NodeName_17173a00', iterfield = ['sub_id'])
NodeHash_17173a00.inputs.anon = True
NodeHash_17173a00.inputs.bucket = 'openneuro'
NodeHash_17173a00.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_17173a00.inputs.local_directory = '/tmp'
NodeHash_17173a00.inputs.sort_filelist = True
NodeHash_17173a00.inputs.template = '*'
NodeHash_17173a00.inputs.template_args = dict(func=[['sub_id', 'sub_id']], events=[['sub_id', 'sub_id']], anat=[['sub_id', 'sub_id']])
NodeHash_17173a00.inputs.field_template = dict(func='%s/func/%s_task-simon_run-1_bold.nii.gz', events='%s/func/%s_task-simon_run-1_events.tsv', anat='%s/anat/%s_T1w.nii.gz')

#Wraps command **bet**
NodeHash_20af2180 = pe.MapNode(interface = fsl.BET(), name = 'NodeName_20af2180', iterfield = ['in_file'])
NodeHash_20af2180.inputs.frac = 0.3
NodeHash_20af2180.inputs.robust = True

#Wraps command **fast**
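Example #9 is truncated at the comment above. A hedged sketch of the FAST segmentation node it announces, mapped over subjects like the BET node (the node name is hypothetical):

#Assumed node, not part of the original script
NodeHash_fast = pe.MapNode(interface=fsl.FAST(),
                           name='NodeName_fast',
                           iterfield=['in_files'])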
Example #10
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import nipype.algorithms.modelgen as modelgen

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Basic interface class generates identity mappings
NodeHash_30ba470 = pe.Node(utility.IdentityInterface(fields=['sub_id']),
                           name='NodeName_30ba470')
NodeHash_30ba470.inputs.sub_id = [
    'sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub-06', 'sub-07', 'sub-08',
    'sub-09', 'sub-10', 'sub-11', 'sub-12', 'sub-13', 'sub-14', 'sub-15',
    'sub-16', 'sub-17', 'sub-18', 'sub-19', 'sub-20', 'sub-21'
]

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_34d5650 = pe.MapNode(io.S3DataGrabber(
    infields=['field_template', 'sub_id'],
    outfields=['func', 'events', 'anat']),
                              name='NodeName_34d5650',
                              iterfield=['sub_id'])
NodeHash_34d5650.inputs.anon = True
NodeHash_34d5650.inputs.bucket = 'openneuro'
NodeHash_34d5650.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_34d5650.inputs.local_directory = '/tmp'
NodeHash_34d5650.inputs.sort_filelist = True
NodeHash_34d5650.inputs.template = '*'
NodeHash_34d5650.inputs.template_args = dict(func=[['sub_id', 'sub_id']],
                                             events=[['sub_id', 'sub_id']],
                                             anat=[['sub_id', 'sub_id']])
NodeHash_34d5650.inputs.field_template = dict(
    func='%s/func/%s_task-simon_run-1_bold.nii.gz',
    events='%s/func/%s_task-simon_run-1_events.tsv',
    anat='%s/anat/%s_T1w.nii.gz')
Example #11
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.interfaces.afni as afni
import nipype.interfaces.ants as ants

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                           name='io_S3DataGrabber')
io_S3DataGrabber.inputs.bucket = 'openneuro'
io_S3DataGrabber.inputs.sort_filelist = True
io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
io_S3DataGrabber.inputs.anon = True
io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
fsl_BET = pe.Node(interface=fsl.BET(), name='fsl_BET', iterfield=[''])

#Wraps command **3dTshift**
afni_TShift = pe.Node(interface=afni.TShift(),
                      name='afni_TShift',
                      iterfield=[''])

#Wraps command **3dUnifize**
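The snippet stops at the comment above. A hedged sketch of the 3dUnifize node it announces (the node name and settings are assumptions):

#Assumed node, not part of the original script
afni_Unifize = pe.Node(interface=afni.Unifize(), name='afni_Unifize')
afni_Unifize.inputs.outputtype = 'NIFTI_GZ'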
Example #12
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(infields=["field_template", "subj_id"], outfields=["anat", "func"]), name = 'my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = '*'
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'
my_io_S3DataGrabber.inputs.template_args = {'anat': [['subj_id', 'subj_id']], 'func': [['subj_id', 'subj_id']]}
my_io_S3DataGrabber.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'}
my_io_S3DataGrabber.inputs.subj_id = 'sub-01'

#Wraps command **bet**
my_fsl_BET = pe.Node(interface = fsl.BET(), name='my_fsl_BET', iterfield = [''])

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface = io.DataSink(), name='my_io_DataSink', iterfield = [''])
my_io_DataSink.inputs.base_directory = '/tmp'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
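The script ends right after creating the workflow. A hedged sketch of the connections it presumably makes, modelled on the very similar Example #16 below (the DataSink field name and base_dir are illustrative):

#Assumed wiring, not part of the original script
analysisflow.connect(my_io_S3DataGrabber, "anat", my_fsl_BET, "in_file")
analysisflow.connect(my_fsl_BET, "out_file", my_io_DataSink, "BET_results")
analysisflow.base_dir = '/tmp'
analysisflow.run()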
Example #13
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["func", "anat"]),
                              name='my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = ''
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
my_fsl_BET = pe.Node(interface=fsl.BET(), name='my_fsl_BET', iterfield=[''])

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface=io.DataSink(),
                         name='my_io_DataSink',
                         iterfield=[''])
my_io_DataSink.inputs.base_directory = '/tmp'

#Wraps command **epi_reg**
my_fsl_EpiReg = pe.Node(interface=fsl.EpiReg(),
                        name='my_fsl_EpiReg')
Example #14
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds
import nipype.interfaces.utility as utility

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
DataFromOpenNeuro = pe.Node(io.S3DataGrabber(
    infields=["subj_id", "run_num", "field_template"],
    outfields=["func", "struct"]),
                            name='DataFromOpenNeuro')
DataFromOpenNeuro.inputs.bucket = 'openfmri'
DataFromOpenNeuro.inputs.sort_filelist = True
DataFromOpenNeuro.inputs.template = '*'
DataFromOpenNeuro.inputs.anon = True
DataFromOpenNeuro.inputs.bucket_path = 'ds001/'
DataFromOpenNeuro.inputs.local_directory = '/tmp'
DataFromOpenNeuro.inputs.field_template = dict(
    func='%s/BOLD/task001_%s/bold.nii.gz',
    struct='%s/anatomy/highres001_brain.nii.gz')
DataFromOpenNeuro.inputs.template_args = dict(func=[['subj_id', 'run_num']],
                                              struct=[['subj_id']])

#Wraps command **slicetimer**
SliceTimer = pe.MapNode(interface=fsl.SliceTimer(),
                        name='SliceTimer',
                        iterfield=['in_file'])
Example #15
#This is a Nipype generator. Warning, here be dragons.
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_30f69e0 = pe.Node(io.S3DataGrabber(outfields=['outfiles']),
                           name='NodeName_30f69e0')
NodeHash_30f69e0.inputs.bucket = 'openneuro'
NodeHash_30f69e0.inputs.sort_filelist = True
NodeHash_30f69e0.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
NodeHash_30f69e0.inputs.anon = True
NodeHash_30f69e0.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_30f69e0.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
NodeHash_1d000c0 = pe.Node(interface=fsl.SliceTimer(), name='NodeName_1d000c0')

#Wraps command **mcflirt**
NodeHash_22f2e80 = pe.Node(interface=fsl.MCFLIRT(), name='NodeName_22f2e80')

#Computes the time-course SNR for a time series
NodeHash_50c02c0 = pe.Node(interface=confounds.TSNR(), name='NodeName_50c02c0')
NodeHash_50c02c0.inputs.regress_poly = 3

#Wraps command **fslstats**
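The fragment stops at the fslstats comment. nipype wraps fslstats as fsl.ImageStats; a hedged sketch of such a node (the node name and op_string are purely illustrative):

#Assumed node, not part of the original script
NodeHash_stats = pe.Node(interface=fsl.ImageStats(), name='NodeName_stats')
NodeHash_stats.inputs.op_string = '-p 98'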
Example #16
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]), name = 'my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
my_fsl_BET = pe.Node(interface = fsl.BET(), name='my_fsl_BET', iterfield = [''])

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface = io.DataSink(), name='my_io_DataSink', iterfield = [''])
my_io_DataSink.inputs.base_directory = './output_dir'

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(my_io_S3DataGrabber, "outfiles", my_fsl_BET, "in_file")
analysisflow.connect(my_fsl_BET, "out_file", my_io_DataSink, "BET_results")
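Not part of the generated script: a minimal, assumed way to execute this workflow, optionally in parallel with the MultiProc plugin (the working directory is illustrative).

analysisflow.base_dir = './work'  #illustrative working directory
analysisflow.run(plugin='MultiProc')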
Example #17
import nipype.interfaces.io as nio
from nipype.interfaces.base import Undefined
from nipype.testing import assert_equal  # assumed imports from the original (nose-era) test module


def test_s3datagrabber():
    dg = nio.S3DataGrabber()
    yield assert_equal, dg.inputs.template, Undefined
    yield assert_equal, dg.inputs.local_directory, Undefined
    yield assert_equal, dg.inputs.template_args, {'outfiles': []}
Example #18
#This is a Nipype generator. Warning, here be dragons.
#!/usr/bin/env python

import sys
import nipype
import nipype.pipeline as pe

import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles", "func", "anat"]), name = 'my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = 'sub-01/anat/sub-01_T1w.nii.gz'
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **bet**
my_fsl_BET = pe.Node(interface = fsl.BET(), name='my_fsl_BET', iterfield = [''])

#Generic datasink module to store structured outputs
my_io_DataSink = pe.Node(interface = io.DataSink(), name='my_io_DataSink', iterfield = [''])
my_io_DataSink.inputs.base_directory = '/tmp'

#Wraps command **epi_reg**
my_fsl_EpiReg = pe.Node(interface = fsl.EpiReg(), name='my_fsl_EpiReg', iterfield = [''])

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(), name = 'my_io_S3DataGrabber')
Example #19
import nipype.interfaces.io as nio
from nipype.interfaces.base import Undefined  # assumed imports from the original test module


def test_s3datagrabber():
    dg = nio.S3DataGrabber()
    assert dg.inputs.template == Undefined
    assert dg.inputs.local_directory == Undefined
    assert dg.inputs.template_args == {'outfiles': []}
Example #20
import sys
import nipype
import nipype.pipeline as pe
import nipype.interfaces.utility as utility
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl

WorkingDirectory = "~/Porcupipelines/ThisStudy"

#Basic interface class generates identity mappings
NodeHash_1c35840 = pe.Node(utility.IdentityInterface(fields=['sub_id']),
                           name='NodeName_1c35840')
NodeHash_1c35840.iterables = [('sub_id', ['sub-01', 'sub-02'])]

#Generic datagrabber module that wraps around glob in an arbitrary set of paths and extracts info from each path based on arbitrary templates
NodeHash_1d9b790 = pe.Node(io.S3DataGrabber(infields=['sub_id'],
                                            outfields=['anat']),
                           name='NodeName_1d9b790')
NodeHash_1d9b790.inputs.bucket = 'openneuro'
NodeHash_1d9b790.inputs.sort_filelist = True
NodeHash_1d9b790.inputs.template = '%s/anat/%s_T1w.nii.gz'
NodeHash_1d9b790.inputs.anon = True
NodeHash_1d9b790.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
NodeHash_1d9b790.inputs.local_directory = '/tmp'
NodeHash_1d9b790.inputs.template_args = dict(anat=[['sub_id', 'sub_id']])

#Wraps command **bet**
NodeHash_28c60a0 = pe.Node(interface=fsl.BET(), name='NodeName_28c60a0')

#Generic datasink module to store structured outputs
NodeHash_308ebc0 = pe.Node(interface=io.DataSink(), name='NodeName_308ebc0')
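The fragment ends with the DataSink node. A hedged sketch of the wiring the full script presumably contains (the DataSink field name is illustrative; node and field names come from the code above):

#Assumed wiring, not part of the original fragment
analysisflow = nipype.Workflow('MyWorkflow')
analysisflow.connect(NodeHash_1c35840, 'sub_id', NodeHash_1d9b790, 'sub_id')
analysisflow.connect(NodeHash_1d9b790, 'anat', NodeHash_28c60a0, 'in_file')
analysisflow.connect(NodeHash_28c60a0, 'out_file', NodeHash_308ebc0, 'BET_results')
analysisflow.run()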