Example #1
# Assumed imports for this snippet:
import os

import nibabel as nib
import nipype.pipeline.engine as pe
from nipype.interfaces.fsl import MeanImage, Merge
from nipype.interfaces.spm import Smooth


def mni_tmplt(db_path, img_list):
    merger = pe.Node(Merge(), name='merger')
    # merger = Merge()
    # merger.inputs.merged_file = os.path.join(db_path, 'extras', 'merged.nii')
    merger.inputs.in_files = img_list
    merger.inputs.dimension = 't'
    merger.inputs.output_type = 'NIFTI'
    # merger.run()
    mean = pe.Node(MeanImage(), name='mean')
    mean.inputs.output_type = 'NIFTI'
    sm = pe.Node(Smooth(), name='sm')
    sm.inputs.fwhm = 8
    # sm.inputs.output_type = 'NIFTI'
    mean.inputs.out_file = os.path.join(db_path, 'extra', 'mean.nii')

    ppln = pe.Workflow(name='ppln')
    ppln.connect([
        (merger, mean, [('merged_file', 'in_file')]),
        (mean, sm, [('out_file', 'in_files')]),
    ])
    ppln.run()

    img = nib.load(os.path.join(db_path, 'extra', 'mean.nii'))
    scld_vox = img.get_fdata() / img.get_fdata().max()  # get_fdata(): get_data() is deprecated in nibabel
    new_img = nib.Nifti1Image(scld_vox, img.affine, img.header)
    nib.save(new_img, os.path.join(db_path, 'extra', 'st_sp_tmpl.nii'))
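A minimal invocation sketch, assuming `db_path` contains an `extra/` subfolder and `img_list` holds paths to co-registered NIfTI volumes (all values below are hypothetical):

db = '/data/study'  # hypothetical database root with an existing 'extra/' folder
imgs = ['/data/study/sub-01/anat.nii', '/data/study/sub-02/anat.nii']  # hypothetical inputs
mni_tmplt(db, imgs)  # writes extra/mean.nii and the scaled extra/st_sp_tmpl.nii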
Example #2
# Assumed imports for this snippet:
from os.path import join as opj

from nipype import Node, Workflow
from nipype.interfaces.fsl import ExtractROI
from nipype.interfaces.io import DataSink, SelectFiles
from nipype.interfaces.spm import Smooth
from nipype.interfaces.utility import IdentityInterface


def define_workflow(subject_list, run_list, experiment_dir, output_dir):
    """Build the smoothing workflow for the given subjects and runs."""
    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
                   name="extract")

    # Smooth - image smoothing
    smooth = Node(Smooth(fwhm=[8, 8, 8]), name="smooth")

    # Mask - applying mask to smoothed
    # mask_func = Node(ApplyMask(output_type='NIFTI'),
    # name="mask_func")

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'run_num']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('run_num', run_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    func_file = opj(
        'sub-{subject_id}', 'func',
        'sub-{subject_id}_task-tsl_run-{run_num}_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
    )
    templates = {'func': func_file}
    selectfiles = Node(SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")  # data_dir is assumed to be defined at module level

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-'), ('ssub', 'sub'),
                     ('_space-MNI152NLin2009cAsym_desc-preproc_', '_fwhm-8_'),
                     ('_fwhm_', ''), ('_roi', '')]
    substitutions += [('_run_num_%s' % r, '') for r in run_list]
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)  # working_dir is assumed to be defined at module level

    # Connect all components of the preprocessing workflow (spm smooth)
    preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                                ('run_num', 'run_num')]),
                     (selectfiles, extract, [('func', 'in_file')]),
                     (extract, smooth, [('roi_file', 'in_files')]),
                     (smooth, datasink, [('smoothed_files', 'preproc.@smooth')])])
    return preproc
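A hedged usage sketch: the subject and run identifiers are hypothetical, and `data_dir`/`working_dir` must exist at module level as noted above:

wf = define_workflow(subject_list=['01', '02'], run_list=['1', '2'],
                     experiment_dir='/output', output_dir='datasink')
wf.run('MultiProc', plugin_args={'n_procs': 4})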
Example #3
# Assumed imports for this snippet:
from nipype import Node
from nipype.interfaces import ants, fsl
from nipype.interfaces.spm import Smooth

# Smoothing widths to apply
fwhm = [2, 3, 4]

# main workflow

# Reorient
reorient = Node(fsl.Reorient2Std(output_type='NIFTI_GZ',
                                 ignore_exception=True),
                name='reorient')

# Bias Field Correction
N4_BFC = Node(ants.N4BiasFieldCorrection(dimension=3), name='N4_BFC')

# Smooth - image smoothing
smooth = Node(Smooth(), name="smooth")
smooth.iterables = ("fwhm", fwhm)


# coregistration Workflow
def coreg_workflow():

    # BET - Skull-stripping
    bet_anat = Node(fsl.BET(frac=0.2,
                            robust=True,
                            vertical_gradient=0.7,
                            output_type='NIFTI_GZ'),
                    name="bet_anat")

    # FAST - tissue Segmentation
    segmentation = Node(fsl.FAST(no_bias=True,
Example #4
                               ref_slice=19),
                   name="sliceTiming")

# Realign - correct for motion
realign = Node(Realign(register_to_mean=True), name="realign")

# Artifact Detection - determine which of the images in the functional series
#   are outliers. This is based on deviation in intensity or movement.
art = Node(ArtifactDetect(norm_threshold=1,
                          zintensity_threshold=3,
                          mask_type='spm_global',
                          parameter_source='SPM'),
           name="art")

# Smooth - to smooth the images with a given kernel
smooth = Node(Smooth(fwhm=smoothing_size), name="smooth")

coregister = Node(Coregister(), name='coregister')

# replaces volume transformation
normalize = Node(interface=Normalize(), name="normalize")
normalize.inputs.template = TPMLocation

print("finished nodes")
###
# Specify Preprocessing Workflow & Connect Nodes

# Create a preprocessing workflow
preproc = Workflow(name='preproc')

# Connect all components of the preprocessing workflow
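The connect call is cut off above; a hedged sketch of how these SPM nodes are commonly wired together (the connections below are an assumption, not the original code):

preproc.connect([
    (realign, coregister, [('mean_image', 'source'),
                           ('realigned_files', 'apply_to_files')]),
    (coregister, normalize, [('coregistered_source', 'source'),
                             ('coregistered_files', 'apply_to_files')]),
    (normalize, smooth, [('normalized_files', 'in_files')]),
    (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    (normalize, art, [('normalized_files', 'realigned_files')]),
])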
Example #5
# Assumed imports for this snippet:
import json
import os
from collections import OrderedDict
from os.path import join as opj
from statistics import median

import matplotlib.pyplot as plt
from matplotlib.image import imread
from nipype import Node, Workflow
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.interfaces.fsl import (BET, FAST, FLIRT, MCFLIRT,
                                   ExtractROI, Threshold)
from nipype.interfaces.io import DataSink, SelectFiles
from nipype.interfaces.spm import SliceTiming, Smooth
from nipype.interfaces.utility import IdentityInterface


def preprocessing(*argu):

    # argu is an argument vector (e.g. sys.argv); its second element
    # is the path to the JSON configuration file
    argu = argu[0]
    json_file = argu[1]

    with open(json_file, 'r') as jsonfile:
        info = json.load(jsonfile, object_pairs_hook=OrderedDict)

    subject_list = info["subject_list"]
    experiment_dir = info["experiment_dir"]
    output_dir = 'datasink'
    working_dir = 'workingdir'

    task_list = info["task_list"]

    fwhm = [*map(int, info["fwhm"])]
    TR = float(info["TR"])
    iso_size = 4
    slice_list = [*map(int, info["slice order"])]

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=int(info["dummy scans"]),
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    slicetime = Node(SliceTiming(num_slices=len(slice_list),
                                 ref_slice=int(median(slice_list)),
                                 slice_order=slice_list,
                                 time_repetition=TR,
                                 time_acquisition=TR - (TR / len(slice_list))),
                     name="slicetime")

    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="mcflirt")

    # Smooth - image smoothing
    smooth = Node(Smooth(), name="smooth")
    smooth.iterables = ("fwhm", fwhm)

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="art")

    # BET - Skullstrip anatomical Image
    bet_anat = Node(BET(frac=0.5, robust=True, output_type='NIFTI_GZ'),
                    name="bet_anat")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI_GZ'),
                        name="segmentation",
                        mem_gb=4)

    # Select WM segmentation file from segmentation output
    def get_wm(files):
        return files[-1]

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'), name="coreg_pre")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="coreg_bbr")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="applywarp")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="applywarp_mean")

    # Create a coregistration workflow
    coregwf = Workflow(name='coregwf')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow
    coregwf.connect([
        (bet_anat, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_wm),
                                    'in_file')]),
        (bet_anat, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (bet_anat, applywarp_mean, [('out_file', 'reference')]),
    ])

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'task_name']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('task_name', task_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj('sub-{subject_id}', 'anat', 'sub-{subject_id}_T1w.nii.gz')
    func_file = opj('sub-{subject_id}', 'func',
                    'sub-{subject_id}_task-{task_name}_bold.nii.gz')

    templates = {'anat': anat_file, 'func': func_file}
    selectfiles = Node(SelectFiles(templates,
                                   base_directory=info["base directory"]),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [
        ('_subject_id_', 'sub-'),
        ('_task_name_', '/task-'),
        ('_fwhm_', 'fwhm-'),
        ('_roi', ''),
        ('_mcf', ''),
        ('_st', ''),
        ('_flirt', ''),
        ('.nii_mean_reg', '_mean'),
        ('.nii.par', '.par'),
    ]
    subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]
    substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id'),
                                   ('task_name', 'task_name')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, slicetime, [('roi_file', 'in_files')]),
        (slicetime, mcflirt, [('timecorrected_files', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_anat.in_file'),
                                ('anat', 'coreg_bbr.reference')]),
        (mcflirt, coregwf, [('mean_img', 'coreg_pre.in_file'),
                            ('mean_img', 'coreg_bbr.in_file'),
                            ('mean_img', 'applywarp_mean.in_file')]),
        (mcflirt, coregwf, [('out_file', 'applywarp.in_file')]),
        (coregwf, smooth, [('applywarp.out_file', 'in_files')]),
        (mcflirt, datasink, [('par_file', 'preproc.@par')]),
        (smooth, datasink, [('smoothed_files', 'preproc.@smooth')]),
        (coregwf, datasink, [('applywarp_mean.out_file', 'preproc.@mean')]),
        (coregwf, art, [('applywarp.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (coregwf, datasink, [('coreg_bbr.out_matrix_file',
                              'preproc.@mat_file'),
                             ('bet_anat.out_file', 'preproc.@brain')]),
        (art, datasink, [('outlier_files', 'preproc.@outlier_files'),
                         ('plot_files', 'preproc.@plot_files')]),
    ])
    # Create preproc output graph
    preproc.write_graph(graph2use='colored', format='png', simple_form=True)

    # Visualize the graph
    img1 = imread(opj(preproc.base_dir, 'preproc', 'graph.png'))
    plt.imshow(img1)
    plt.xticks([]), plt.yticks([])
    plt.show()

    # Visualize the detailed graph
    preproc.write_graph(graph2use='flat', format='png', simple_form=True)
    img2 = imread(opj(preproc.base_dir, 'preproc', 'graph_detailed.png'))
    plt.imshow(img2)
    plt.xticks([]), plt.yticks([])
    plt.show()

    print("Workflow all set. Check the workflow image :)")

    response = input('Run the workflow? Enter yes or no: ')

    if response == 'yes':
        preproc.run('MultiProc', plugin_args={'n_procs': 10})
    elif response == 'no':
        print('Exiting without running the workflow.')
    else:
        raise RuntimeError('Please enter either yes or no.')
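As written, `preprocessing` expects a single argument vector (e.g. `sys.argv`) whose second element is the JSON config path. A hedged example of a matching config and call; the values are hypothetical, the keys are taken from the reads above:

# config.json
# {
#   "subject_list": ["01", "02"],
#   "experiment_dir": "/output",
#   "task_list": ["rest"],
#   "fwhm": ["4", "8"],
#   "TR": "2.0",
#   "slice order": ["1", "3", "5", "2", "4", "6"],
#   "dummy scans": "4",
#   "base directory": "/data/bids"
# }
preprocessing(['script.py', 'config.json'])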
Example #6
    def __init__(self, fwhm=[4, 8]):
        from nipype.interfaces.spm import Smooth
        from nipype import Workflow, Node

        smooth = Node(Smooth(), name="smooth")
        smooth.iterables = ("fwhm", fwhm)
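Setting `iterables` makes Nipype expand the node into one copy per listed value at execution time. A self-contained sketch of the pattern (the input path is hypothetical):

from nipype import Node, Workflow
from nipype.interfaces.spm import Smooth

smooth = Node(Smooth(in_files='/data/func.nii'), name='smooth')  # hypothetical input
smooth.iterables = ('fwhm', [[4, 4, 4], [8, 8, 8]])
wf = Workflow(name='iter_demo', base_dir='/tmp')
wf.add_nodes([smooth])
wf.run()  # runs the smooth node once per fwhm value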
Example #7
from nipype.interfaces.spm import Smooth
from glob import glob


in_files = glob('/home/julia/projects/real_data/mouse_visual/*/processed/func/*/*/func_moco.nii')

smooth = Smooth(in_files=in_files,
                fwhm=[0.145, 0.145, 0.145],
                paths=['/home/julia/software/spm/spm12'])
res = smooth.run()
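After `run()` completes, the smoothed file paths are available on the result object:

print(res.outputs.smoothed_files)  # SPM Smooth prefixes outputs with 's' by default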
Example #8
firstlev = Workflow(name='firstlev',
                    base_dir=out_dir + '/tmp')
firstlev.config['execution']['crashdump_dir'] = out_dir + '/tmp/crash_files'
firstlev.connect(infosource, 'subject_id',
                 selectderivs, 'subject_id')

'''---------------------
|   First level setup  |
---------------------'''

gunzip = MapNode(Gunzip(), name='gunzip', iterfield=['in_file'])
firstlev.connect(selectderivs, 'func', gunzip, 'in_file')

# Smooth warped functionals. Watch out: it smooths them again if you stop and rerun from here!
smooth = Node(Smooth(),
              overwrite=False,
              name="smooth")
smooth.iterables = ("fwhm", fwhmlist)
firstlev.connect(gunzip, 'out_file', smooth, 'in_files')

getsubinforuns = Node(Function(input_names=["subject_id"],
                               output_names=["subject_info"],
                               function=pick_onsets),
                      name='getsubinforuns')

modelspec = Node(SpecifySPMModel(),
                 overwrite=False,
                 name='modelspec')
modelspec.inputs.concatenate_runs = False
modelspec.inputs.input_units = 'secs'
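The `pick_onsets` helper wired into the Function node above is not shown in this example; a hypothetical stub illustrating the interface SpecifySPMModel expects (a `subject_info` list of `Bunch` objects):

from nipype.interfaces.base import Bunch

def pick_onsets(subject_id):
    # Hypothetical stub: a real implementation would look up onsets for subject_id.
    subject_info = [Bunch(conditions=['task'],
                          onsets=[[10, 40, 70]],
                          durations=[[5]])]
    return subject_info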
Example #9
    Tis3 = ((tpm_path, 3), 2, (True, False), (True, True))
    Tis4 = ((tpm_path, 4), 3, (True, False), (True, True))
    Tis5 = ((tpm_path, 5), 4, (True, False), (True, True))
    Tis6 = ((tpm_path, 6), 2, (True, False), (True, True))
    segmentation.inputs.tissues = [Tis1, Tis2, Tis3, Tis4, Tis5, Tis6]

    # Function & Node to transform the list of normalized class images to a compatible version for smoothing
    def transform_list(normalized_class_images):
        return [each[0] for each in normalized_class_images]

    list_normalized_images = pe.Node(interface=Function(input_names='normalized_class_images',
                                                        output_names='list_norm_images',
                                                        function=transform_list),
                                     name='list_normalized_images')

    # Smoothing Node & Settings
    smoothing = pe.Node(interface=Smooth(), name='smoothing')
    smoothing.inputs.fwhm = [10, 10, 10]

    # DataSink node that collects segmented, smoothed files and writes them to out_path
    datasink = pe.Node(interface=DataSink(), name='sinker')
    datasink.inputs.base_directory = vbm_out

    # Workflow and its connections
    vbm_preprocess = pe.Workflow(name="vbm_preprocess")
    vbm_preprocess.connect([(reorient, segmentation, [('out_file', 'channel_files')]),
                            (segmentation, list_normalized_images,
                             [('normalized_class_images', 'normalized_class_images')]),
                            (list_normalized_images, smoothing, [('list_norm_images', 'in_files')]),
                            (segmentation, datasink,
                             [('modulated_class_images', 'vbm_spm12'), ('native_class_images', 'vbm_spm12.@1'),
                              ('normalized_class_images', 'vbm_spm12.@2'), ('transformation_mat', 'vbm_spm12.@3')]),
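For reference, the `transform_list` helper above simply keeps the first entry of each per-class list; a tiny illustration with hypothetical SPM output names:

normalized = [['mwc1anat.nii'], ['mwc2anat.nii'], ['mwc3anat.nii']]  # hypothetical
print(transform_list(normalized))  # -> ['mwc1anat.nii', 'mwc2anat.nii', 'mwc3anat.nii']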
Example #10
# Artifact Detection - determine which of the images in the functional series
#   are outliers. This is based on deviation in intensity or movement.
art = Node(ArtifactDetect(norm_threshold=1,
                          zintensity_threshold=3,
                          mask_type='file',
                          parameter_source='SPM',
                          use_differences=[True, False]),
           name="art")

# Gunzip - unzip functional
gunzip = MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

# Smooth - to smooth the images with a given kernel
# I will not be using this if I'm doing MVPA analysis, but may use it for GLM analysis
smooth = Node(Smooth(fwhm=fwhm_size), name="smooth")

# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource = Node(FreeSurferSource(subjects_dir=fs_dir),
                run_without_submitting=True,
                name='fssource')

# BBRegister - coregister a volume to the Freesurfer anatomical
bbregister = Node(BBRegister(init='header',
                             contrast_type='t2',
                             out_fsl_file=True),
                  name='bbregister')

# Volume Transformation - transform the brainmask into functional space
applyVolTrans = Node(ApplyVolTransform(inverse=True), name='applyVolTrans')
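A hedged sketch of how these FreeSurfer nodes are typically connected; the wrapper workflow and wiring below are assumptions, not the original code:

from nipype import Workflow

wf = Workflow(name='fs_coreg_demo')  # hypothetical wrapper workflow
wf.connect([
    (fssource, applyVolTrans, [('brainmask', 'target_file')]),
    (bbregister, applyVolTrans, [('out_reg_file', 'reg_file')]),
])
# applyVolTrans.source_file (e.g. the realigned mean image) would come from
# a motion-correction node not shown in this fragment.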
Example #11
# DataSink Substitutions
substitutions = [('_subject_id', ''), ('_subject', 'subject'),  # substitutions are applied in order!
                 # Iterable nodes name their output folders after the input variable.
                 ('_in_matrix_file_', ''),
                 # Nipype encodes absolute paths in file names by replacing '/' with '..'
                 # (to avoid creating directories); strip that prefix here.
                 ('%s' % outpath_mat.replace('/', '..'), ''),
                 ('.mat', ''),
                 ('_voxel_size_', 'vox_dims_'),
                 # Output files are named after the input file, and each node appends
                 # '_interfacename'; be careful with custom nodes, as files can be
                 # overwritten or misrecognized. Rename to something readable.
                 ('anat_flirt', 'anat_tform'),
                 ('anat_resample', 'anat_rsmpl'),
                 ('_var_', 'var_')
                 ]
datasink.inputs.substitutions = substitutions

# Smooth
smooth = Node(ImageMaths(op_string='-fmean -s 2'), 
                name="smooth")
smooth_spm = Node(Smooth(fwhm=smoothing_size),
                  name="smooth_spm")  # renamed from "smooth" to avoid a duplicate node name

# Resample
resample = Node(Resample(outputtype='NIFTI',
                          resample_mode='Li'), 
                  name="resample")
# Example of turning a node into an iterable:
# nodename.iterables = ('parameter', list_of_iterables); depending on the
# parameter, the values may need to be tuples, etc.
resample.iterables = ('voxel_size', vox_lst(min_dim, max_dim, step_dim))

# Noise - self-created Function node (needs improvement)
noise = Node(interface=Function(input_names=['base_dir', 'output_dir', 'in_file', 'var'],
                                output_names=['out_file'],
                                function=snr_img),
             name="noise")
noise.iterables = ('var', var_lst(min_var, max_var, samp_var))  # list of values to iterate over
noise.inputs.base_dir = base_dir
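The `snr_img` function passed to the node above is not shown; a hypothetical skeleton, which also illustrates that Function-node callables must import everything inside the function body (only the source code is shipped to the execution node):

def snr_img(base_dir, output_dir, in_file, var):
    # Hypothetical implementation: add Gaussian noise of variance `var`.
    import os
    import nibabel as nib
    import numpy as np
    img = nib.load(in_file)
    data = img.get_fdata()
    noisy = data + np.random.normal(scale=np.sqrt(var), size=data.shape)
    out_file = os.path.join(base_dir, output_dir, 'noisy.nii.gz')
    nib.save(nib.Nifti1Image(noisy, img.affine, img.header), out_file)
    return out_file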
Example #12
# TSNR - remove polynomials 2nd order
tsnr = MapNode(TSNR(regress_poly=2),
               name='tsnr', iterfield=['in_file'])

# Artifact Detection - determine which of the images in the functional series
#   are outliers. This is based on deviation in intensity or movement.
art = Node(ArtifactDetect(norm_threshold=1,
                          zintensity_threshold=3,
                          mask_type='file',
                          parameter_source='SPM',
                          use_differences=[True, False]),
           name="art")

# Smooth - to smooth the images with a given kernel
smooth = Node(Smooth(fwhm=fwhm_size),
              name="smooth")

# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource = Node(FreeSurferSource(subjects_dir=fs_dir),
                run_without_submitting=True,
                name='fssource')

# BBRegister - coregister a volume to the Freesurfer anatomical
bbregister = Node(BBRegister(init='fsl',
                             contrast_type='t2',
                             out_fsl_file=True),
                  name='bbregister')

# Volume Transformation - transform the brainmask into functional space
applyVolTrans = Node(ApplyVolTransform(inverse=True),