Ejemplo n.º 1
0
def adjust_masks(masks):
    """Erode the white-matter probability map and pair it with the CSF map.

    Parameters
    ----------
    masks : iterable of str
        Paths to FAST partial-volume maps; after sorting, index 0 is CSF
        (pve0), index 1 is GM (pve1) and index 2 is WM (pve2).

    Returns
    -------
    list of str
        [absolute path of the eroded/binarized WM mask, CSF map path].
    """
    from os.path import abspath
    from nipype import config, logging
    config.enable_debug_mode()
    logging.update_logging(config)

    from nipype.interfaces.freesurfer.model import Binarize
    # pve0 = csf, pve1 = gm, pve2 = wm

    sorted_vols = sorted(masks)
    csf_map = sorted_vols[0]
    wm_map = sorted_vols[2]

    # Threshold the WM probability map at 0.5 and erode it by one voxel
    # to avoid partial-volume voxels at tissue boundaries.
    wm_binarize = Binarize()
    wm_binarize.inputs.in_file = wm_map
    wm_binarize.inputs.erode = 1
    wm_binarize.inputs.min = 0.5
    wm_binarize.inputs.max = 1000
    wm_binarize.inputs.binary_file = 'WM_seg.nii'
    wm_binarize.run()

    # binary_file is a relative name; resolve it against the run directory.
    return [abspath(wm_binarize.inputs.binary_file), csf_map]
Ejemplo n.º 2
0
def binarize(launcher, in_file, binary_file):
    """Binarize *in_file* (threshold >= 1, one dilation + one erosion) via *launcher*.

    Builds the mri_binarize command line with the nipype interface but
    executes it through the supplied launcher rather than nipype itself.
    """
    node = Binarize()
    node.inputs.in_file = in_file
    node.inputs.min = 1
    node.inputs.binary_file = binary_file
    node.inputs.dilate = 1
    node.inputs.erode = 1

    # Invoke the .bin executable directly instead of the FreeSurfer wrapper script.
    command = node.cmdline.replace("mri_binarize", "mri_binarize.bin")
    launcher.run(command)
Ejemplo n.º 3
0
def mri_label2vol(label,
                  subject,
                  freesurfer_dir,
                  wf_base_dir,
                  wf_name,
                  proj=(u'frac', 0, 1, 0.01),
                  identity=True,
                  tidy_up=True,
                  **kwargs):
    """Convert a FreeSurfer surface label to a volume in the subject's T1 space.

    Parameters
    ----------
    label : a FreeSurfer/surfer label object with ``name``, ``hemi`` and
        ``save`` (assumed interface — confirm against callers).
    subject : str, FreeSurfer subject id.
    freesurfer_dir : str, SUBJECTS_DIR containing ``subject``.
    wf_base_dir : str, base directory for the workflow and saved label.
    wf_name : str, workflow name (also a path component of the output).
    proj : projection argument forwarded to Label2Vol.
    identity : bool, use identity registration in Label2Vol.
    tidy_up : bool, when True the label volume is dilated/eroded and
        masked by the hemisphere's GM ribbon.

    Returns
    -------
    str : path of the resulting NIfTI volume.
    """
    # The same output file name is used by several nodes below.
    out_name = '%s-%s.nii.gz' % (label.name, label.hemi)

    wf = pe.Workflow(name=wf_name, base_dir=wf_base_dir)

    # Persist the label to disk so Label2Vol can read it.
    label_file = '%s/%s-%s.label' % (wf_base_dir, label.name, label.hemi)
    label.save(label_file)

    label2vol = pe.Node(
        Label2Vol(label_file=label_file,
                  template_file=os.path.join(freesurfer_dir, subject,
                                             'mri/T1.mgz'),
                  hemi=label.hemi,
                  proj=proj,
                  identity=identity,
                  subject_id=subject,
                  vol_label_file=out_name),
        name='label2vol')

    if tidy_up:
        # Dilate + erode closes small holes; multiplying by the ribbon
        # restricts the mask to cortical gray matter.
        mask_dilate = pe.Node(Binarize(dilate=1, erode=1, min=1),
                              name='dilate_label_vol')
        mris_calc = pe.Node(MRIsCalc(), name='mask_with_gm')
        mris_calc.inputs.in_file2 = os.path.join(
            freesurfer_dir, subject,
            'mri/{hemi}.ribbon.mgz'.format(hemi=label.hemi))
        mris_calc.inputs.action = 'mul'
        mris_calc.inputs.out_file = label.name + '-' + label.hemi + '.nii.gz'
        wf.add_nodes([label2vol, mask_dilate, mris_calc])
        wf.connect([
            (label2vol, mask_dilate, [("vol_label_file", "in_file")]),
            (mask_dilate, mris_calc, [('binary_file', 'in_file1')]),
        ])
        out_vol = '%s/%s/mask_with_gm/%s' % (wf_base_dir, wf_name, out_name)
    else:
        wf.add_nodes([label2vol])
        out_vol = '%s/%s/label2vol/%s' % (wf_base_dir, wf_name, out_name)

    wf.run()
    return out_vol
# Configuration and node definitions for an inverse-transform pipeline.
# NOTE(review): Node, MapNode, Merge, Binarize, FreeSurferSource and fs_dir
# are assumed to be imported/defined earlier in the full file — confirm.
source_id = [
    'fsaverage'
]  # name of the surface subject/space the ROIs to be transformed are in

subject_list = ['sub-01']  # create the subject_list variable

output_dir = 'output_inverse_transform_ROIs_ALPACA'  # name of norm output folder
working_dir = 'workingdir_inverse_transform_ROIs_ALPACA'  # name of norm working directory

##### Create & specify nodes to be used and connected during the normalization pipeline #####

# Concatenate BBRegister's and ANTS' transforms into a list
merge = Node(Merge(2), iterfield=['in2'], name='mergexfm')

# Binarize node - binarizes mask again after transformation
# (min=0.1 keeps any voxel with non-negligible probability after resampling)
binarize_post2ant = MapNode(Binarize(min=0.1),
                            iterfield=['in_file'],
                            name='binarize_post2ant')

# Clone shares the same Binarize configuration under a different node name.
binarize_pt2pp = binarize_post2ant.clone('binarize_pt2pp')

# FreeSurferSource - Data grabber specific for FreeSurfer data
# (one source per hemisphere; run_without_submitting avoids cluster submission)
fssource_lh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='lh'),
                   run_without_submitting=True,
                   name='fssource_lh')

fssource_rh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='rh'),
                   run_without_submitting=True,
                   name='fssource_rh')

# Transform the volumetric ROIs to the target space
Ejemplo n.º 5
0
# Node definitions for a functional preprocessing workflow.
# NOTE(review): despike, sliceTiming, realign, tsnr, art, smooth and fs_dir
# are defined elsewhere in the full file; this excerpt is partial.
# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource = Node(FreeSurferSource(subjects_dir=fs_dir),
                run_without_submitting=True,
                name='fssource')

# BBRegister - coregister a volume to the Freesurfer anatomical
bbregister = Node(BBRegister(init='header',
                             contrast_type='t2',
                             out_fsl_file=True),
                  name='bbregister')

# Volume Transformation - transform the brainmask into functional space
applyVolTrans = Node(ApplyVolTransform(inverse=True), name='applyVolTrans')

# Binarize -  binarize and dilate an image to create a brainmask
binarize = Node(Binarize(min=0.5, dilate=1, out_type='nii'), name='binarize')

### Connect the workflow
# Create a preprocessing workflow
preproc = Workflow(name='preproc')

# Connect all components of the preprocessing workflow
# NOTE(review): the connection list continues beyond this excerpt.
preproc.connect([
    (despike, sliceTiming, [('out_file', 'in_files')]),
    (sliceTiming, realign, [('timecorrected_files', 'in_files')]),
    (realign, tsnr, [('realigned_files', 'in_file')]),
    (tsnr, art, [('detrended_file', 'realigned_files')]),
    (realign, art, [('mean_image', 'mask_file'),
                    ('realignment_parameters', 'realignment_parameters')]),
    (tsnr, smooth, [('detrended_file', 'in_files')]),
    (realign, bbregister, [('mean_image', 'source_file')]),
Ejemplo n.º 6
0
                                    time_repetition=TR),
                         name='slicetime_correct')

# Motion correction
motion_correct = Node(MCFLIRT(save_plots=True, mean_vol=True),
                      name='motion_correct')

# Registration- using FLIRT
# The BOLD image is 'in_file', the anat is 'reference', the output is 'out_file'
# coreg1 estimates the transform; coreg2 applies a precomputed matrix.
coreg1 = Node(FLIRT(), name='coreg1')
coreg2 = Node(FLIRT(apply_xfm=True), name='coreg2')

# make binary mask
# structural is the 'in_file', output is 'binary_file'
# NOTE(review): mask_dilation / mask_erosion are defined elsewhere in the
# full file — presumably integer voxel counts; confirm against that code.
binarize_struct = Node(Binarize(dilate=mask_dilation,
                                erode=mask_erosion,
                                min=1),
                       name='binarize_struct')

# apply the binary mask to the functional data
# functional is 'in_file', binary mask is 'mask_file', output is 'out_file'
mask_func = Node(ApplyMask(), name='mask_func')

# Artifact detection for scrubbing/motion assessment
art = Node(
    ArtifactDetect(
        mask_type='file',
        parameter_source='FSL',
        norm_threshold=
        0.5,  #mutually exclusive with rotation and translation thresh
        zintensity_threshold=3,
Ejemplo n.º 7
0
def create_tlc_workflow(config, t1_file, freesurf_parc, flair_lesion):
    """
    Build a nipype workflow that refines a FLAIR lesion mask using FreeSurfer
    tissue parcellations, clusters the resulting lesion maps at several
    thresholds, and sinks summary statistics per threshold.

    Inputs::
        config: Dictionary with PBR configuration options. See config.py
        t1_file: full path of t1 image
        freesurf_parc: full path of aparc+aseg.mgz from freesurfer
        flair_lesion: edited binary lesion mask based on FLAIR image (can also be labeled)
    Outputs::
        nipype.pipeline.engine.Workflow object
    """

    import nipype.interfaces.ants as ants
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.fsl as fsl
    from nipype.utils.filemanip import load_json
    import os
    import numpy as np
    from nipype.interfaces.freesurfer import Binarize, MRIConvert
    from nipype.interfaces.slicer.filtering import n4itkbiasfieldcorrection as n4
    from nipype.interfaces.fsl import Reorient2Std
    from nipype.interfaces.freesurfer import SegStats


    # Derive subject identifiers from the T1 path (helpers defined elsewhere
    # in this module).
    mse = get_mseid(t1_file)
    msid = get_msid(t1_file)
    working_dir = "tlc_{0}_{1}".format(msid, mse)

    register = Workflow(name=working_dir)
    register.base_dir = config["working_directory"]

    # Single entry point carrying the four external inputs into the workflow.
    inputnode = Node(IdentityInterface(fields=["t1_image", "parc", "flair_lesion", "mse"]),
                     name="inputspec")
    inputnode.inputs.t1_image = t1_file
    inputnode.inputs.parc = freesurf_parc
    inputnode.inputs.flair_lesion = flair_lesion
    inputnode.inputs.mse = mse

    # min(x, 1) collapses any labeled lesion mask to a 0/1 binary mask.
    bin_math = Node(fsl.BinaryMaths(), name="Convert_to_binary")
    bin_math.inputs.operand_value = 1
    bin_math.inputs.operation = 'min'
    register.connect(inputnode, "flair_lesion", bin_math, "in_file")

    # Binarize the aseg labels corresponding to ventricular/CSF structures.
    binvol1 = Node(Binarize(), name="binary_ventricle")
    binvol1.inputs.match = [4, 5, 11, 14, 15, 24, 43, 44, 50, 72, 213, 31, 63]
    #binvol1.inputs.match = [4, 5, 14, 15, 24, 43, 44, 72, 213]
    # every parcellation corresponds to ventricle CSF
    #binvol1.inputs.mask_thresh = 0.5
    binvol1.inputs.binary_file = os.path.join(config["working_directory"],
                                              working_dir, "binary_ventricle", "binarize_ventricle.nii.gz")
    register.connect(inputnode, "parc", binvol1, "in_file")

    # Binarize the aseg/aparc labels corresponding to cortical gray matter.
    binvol2 = Node(Binarize(), name="binary_gray_matter")
    binvol2.inputs.match = [3, 8, 42, 47, 169, 220, 702,
                            1878, 1915, 1979, 1993, 2000, 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
                            2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026,
                            2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035,
                            772, 833, 835, 896, 925, 936, 1001, 1002, 1003, 1005, 1006, 1007, 1008, 1009, 1010, 1011,
                            1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026,
                            1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035]
    binvol2.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_gray_matter", "binarize_cortex.nii.gz")
    #binvol2.inputs.mask_thresh = 0.5
    register.connect(inputnode, "parc", binvol2, "in_file")

    # N4 bias-field correction of the T1 before any resampling.
    bias_corr = Node(n4.N4ITKBiasFieldCorrection(), name="BiasFieldCorrection")
    bias_corr.inputs.outputimage = os.path.join(config["working_directory"], working_dir,
                                                "BiasFieldCorrection", "bias_corrected.nii.gz")
    register.connect(inputnode, "t1_image", bias_corr, "inputimage")

    # Reorient both masks to standard orientation before resampling.
    reo1 = Node(Reorient2Std(), name="reorient1")
    reo2 = Node(Reorient2Std(), name="reorient2")
    register.connect(binvol1, "binary_file", reo1, "in_file")
    register.connect(binvol2, "binary_file", reo2, "in_file")

    # Resample each reoriented mask onto the bias-corrected T1 grid
    # (mri_convert_like defined elsewhere in this module).
    mri_convert1 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert1")
    mri_convert2 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert2")
    mri_convert1.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert1')
    register.connect(bias_corr, "outputimage", mri_convert1, "t1_image")
    register.connect(reo1, "out_file", mri_convert1, "reorient_mask")
    mri_convert2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert2')
    register.connect(bias_corr, "outputimage", mri_convert2, "t1_image")
    register.connect(reo2, "out_file", mri_convert2, "reorient_mask")

    # Same binarize -> reorient -> resample chain for white-matter labels.
    binvol3 = Node(Binarize(), name="binary_white_matter")
    binvol3.inputs.match = [2, 7, 16, 28, 41, 46, 60, 77, 78, 79, 251, 252, 253, 254, 255]
    #binvol3.inputs.match = [2, 7, 41, 46, 77, 78, 79]
    #binvol3.inputs.mask_thresh = 0.5
    binvol3.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_white_matter", "binarize_white_matter.nii.gz")
    register.connect(inputnode, "parc", binvol3, "in_file")
    reo3 = Node(Reorient2Std(), name="reorient3")
    register.connect(binvol3, "binary_file", reo3, "in_file")

    mri_convert3 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert3")
    mri_convert3.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert3')
    register.connect(reo3, "out_file", mri_convert3, "reorient_mask")
    register.connect(bias_corr, "outputimage", mri_convert3, "t1_image")

    # Combine tissue masks with the lesion mask at several thresholds
    # (matrix_operation defined elsewhere in this module).
    get_new_lesion = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path85', 'out_path90', 'out_path95', 'out_path100', 'out_path_combined'],
                                   function=matrix_operation), name='get_new_lesion')
    get_new_lesion.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion')
    register.connect(bias_corr, "outputimage", get_new_lesion, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion, "white_matter")


    # NOTE(review): the cluster + segstats pairs below repeat the same
    # pattern per threshold (85/90/95/100 and the intersection variants);
    # a small factory helper would remove the duplication.
    cluster85 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                     name="cluster85")
    register.connect(get_new_lesion, "out_path85", cluster85, "in_file")
    segstats85 = Node(SegStats(), name="segstats85")
    register.connect(cluster85, "index_file", segstats85, "segmentation_file")

    cluster90 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                     name="cluster90")
    register.connect(get_new_lesion, "out_path90", cluster90, "in_file")
    segstats90 = Node(SegStats(), name="segstats90")
    register.connect(cluster90, "index_file", segstats90, "segmentation_file")

    cluster95 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                     name="cluster95")
    register.connect(get_new_lesion, "out_path95", cluster95, "in_file")
    segstats95 = Node(SegStats(), name="segstats95")
    register.connect(cluster95, "index_file", segstats95, "segmentation_file")

    cluster100 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                     name="cluster100")
    register.connect(get_new_lesion, "out_path100", cluster100, "in_file")
    segstats100 = Node(SegStats(), name="segstats100")
    register.connect(cluster100, "index_file", segstats100, "segmentation_file")

    # Second combination pass (matrix_operation2 defined elsewhere) producing
    # the "intersection" lesion maps.
    get_new_lesion2 = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path90', 'out_path95', 'out_path100'],
                                   function=matrix_operation2), name='get_new_lesion2')
    get_new_lesion2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion2')
    register.connect(bias_corr, "outputimage", get_new_lesion2, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion2, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion2, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion2, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion2, "white_matter")
    cluster_intersection90 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                                 name="cluster_intersection90")
    register.connect(get_new_lesion2, "out_path90", cluster_intersection90, "in_file")
    segstats_intersection90 = Node(SegStats(), name="segstats_intersection90")
    register.connect(cluster_intersection90, "index_file", segstats_intersection90, "segmentation_file")

    cluster_intersection95 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                                 name="cluster_intersection95")
    register.connect(get_new_lesion2, "out_path95", cluster_intersection95, "in_file")
    segstats_intersection95 = Node(SegStats(), name="segstats_intersection95")
    register.connect(cluster_intersection95, "index_file", segstats_intersection95, "segmentation_file")

    cluster_intersection100 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file = True,
                                 use_mm=True),
                                 name="cluster_intersection100")
    register.connect(get_new_lesion2, "out_path100", cluster_intersection100, "in_file")
    segstats_intersection100 = Node(SegStats(), name="segstats_intersection100")
    register.connect(cluster_intersection100, "index_file", segstats_intersection100, "segmentation_file")

    # Sink all lesion maps, cluster index files and segstats summaries under
    # <output_directory>/<mse>/tlc, grouped by threshold.
    sinker = Node(DataSink(), name="sinker")
    sinker.inputs.base_directory = os.path.join(config["output_directory"], mse, "tlc")
    sinker.inputs.container = '.'
    sinker.inputs.substitutions = []

    register.connect(get_new_lesion, "out_path85", sinker, "85.@lesion85")
    register.connect(get_new_lesion, "out_path90", sinker, "90.@lesion90")
    register.connect(get_new_lesion, "out_path95", sinker, "95.@lesion95")
    register.connect(get_new_lesion, "out_path100", sinker, "100.@lesion100")
    register.connect(get_new_lesion, "out_path_combined", sinker, "@WhiteMatterCombined")
    register.connect(get_new_lesion2, "out_path90", sinker, "intersection90.@lesion90")
    register.connect(get_new_lesion2, "out_path95", sinker, "intersection95.@lesion95")
    register.connect(get_new_lesion2, "out_path100", sinker, "intersection100.@lesion100")

    register.connect(segstats85, "summary_file", sinker, "85.@summaryfile85")
    register.connect(segstats90, "summary_file", sinker, "90.@summaryfile90")
    register.connect(segstats95, "summary_file", sinker, "95.@summaryfile95")
    register.connect(segstats100, "summary_file", sinker, "100.@summaryfile100")
    register.connect(segstats_intersection90, "summary_file", sinker, "intersection90.@summaryfile90")
    register.connect(segstats_intersection95, "summary_file", sinker, "intersection95.@summaryfile95")
    register.connect(segstats_intersection100, "summary_file", sinker, "intersection100.@summaryfile100")

    register.connect(cluster85, "index_file", sinker, "85.@index_file85")
    register.connect(cluster90, "index_file", sinker, "90.@index_file90")
    register.connect(cluster95, "index_file", sinker, "95.@index_file95")
    register.connect(cluster100, "index_file", sinker, "100.@index_file100")
    register.connect(cluster_intersection90, "index_file", sinker, "intersection90.@index_file90")
    register.connect(cluster_intersection95, "index_file", sinker, "intersection95.@index_file95")
    register.connect(cluster_intersection100, "index_file", sinker, "intersection100.@index_file100")

    return register
Ejemplo n.º 8
0
# Map the FreeSurfer aseg to gray-matter tissue maps
# (aseg_to_tissuemaps defined elsewhere in the full file).
aseg_to_gm = MapNode(Function(input_names=['aseg'],
                              output_names=['gm_list'],
                              function=aseg_to_tissuemaps),
                     name='aseg_to_gm', 
                     iterfield=['aseg'])

# Relabel the FAST segmentation 
# (relabel_fast defined elsewhere; extracts WM/CSF from the FAST tissue list).
relabel_fast_seg = MapNode(Function(input_names=['fast_tissue_list'],
                                    output_names=['wm_csf'],
                                    function=relabel_fast),
                           name='relabel_fast_seg',
                           iterfield=['fast_tissue_list'])

# make brainmask by binarizing the brainmask
# (threshold at 1, then one dilation and one erosion to smooth the edge)
binarize_brain = MapNode(Binarize(min=1, 
                                  dilate=1, 
                                  erode=1), 
                         name='binarize_brain', 
                         iterfield=['in_file'])


# In[ ]:


######### Tissue segmentation workflow #########
# NOTE(review): the connection list continues beyond this excerpt.
segment_flow = Workflow(name = "segment_flow")
segment_flow.connect([(fs_source, convert_to_nii, [('brainmask','in_file')]),
                      (convert_to_nii, reorient_to_std, [('out_file', 'in_file')]),
                      (reorient_to_std, segment, [('out_file', 'in_files')]),
                      (segment, relabel_fast_seg, [('tissue_class_files', 'fast_tissue_list')]),
                      (fs_source, convert_aseg, [('aseg','in_file')]),
Ejemplo n.º 9
0
# Batch-create subcortical masks: for each subject, locate the merged
# segmentation volume and binarize the selected labels into a single mask.
subjects = pd.read_csv('/scr/ilz3/myelinconnect/subjects.csv')
subjects = list(subjects['DB'])
subjects.remove('KSMT')  # excluded subject


# Label IDs to include in the subcortical mask.
# list(range(...)) so the concatenation also works on Python 3, where
# range() returns a lazy range object rather than a list.
labels = [11, 12, 13, 16, 18] + list(range(30, 42))
templates = {'seg': '/scr/ilz3/myelinconnect/struct/seg/{subject}*seg_merged.nii.gz'}
mask_file = '/scr/ilz3/myelinconnect/struct/myelinated_thickness/subcortex_mask/%s_subcortical_mask.nii.gz'


for subject in subjects:

    # Grab this subject's merged segmentation volume.
    select = SelectFiles(templates)
    select.inputs.subject = subject
    select.run()
    seg_file = select.aggregate_outputs().seg

    # Binarize the matched labels into one mask image for this subject.
    binarize = Binarize(match=labels,
                        out_type='nii.gz')
    binarize.inputs.binary_file = mask_file % subject
    binarize.inputs.in_file = seg_file

    binarize.run()
    
    
    
    

                  ])

### mask preprocessing pipeline

# meanfuncmask - create a whole brain mask from the mean functional based on FSL's robust BET
meanfuncmask = Node(BET(mask=True,
                        no_output=False,
                        frac=0.55,
                        robust=True,
                        output_type='NIFTI',
                        out_file='meanfunc'),
                       name='meanfuncmask')

# Binarize Cortex node - creates a binary map of cortical voxel
# (aseg labels 3/42 = left/right cerebral cortex)
binarizeCortical = Node(Binarize(out_type='nii.gz',
                                    match = [3,42],
                                    binary_file='binarized_cortical.nii.gz'),
                        name='binarizeCortical')

# Binarize Subcortex node - creates a binary map of subcortical voxel
# NOTE(review): the label list continues beyond this excerpt.
binarizeSubcortical = Node(Binarize(out_type='nii.gz',
                                       match = [8,47,   # Cerebellum
                                                10,49,  # Thalamus
                                                11,50,  # Caudate
                                                12,51,  # Putamen
                                                13,52,  # Pallidum
                                                17,53,  # Hippocampus
                                                18,54,  # Amygdala
                                                26,58,  # AccumbensArea
                                                251,252,253,254,255,  # Corpus Callosum
                                                85,     # Optic Chiasm
Ejemplo n.º 11
0
    2015,  # middle temporal
]
# FreeSurfer aseg/aparc label IDs for the hippocampus-only mask.
mask_hippocampus_labels = [
    17,
    53,  # left and right hippocampus
]
# Label IDs for the medial-temporal-lobe mask (hippocampus plus
# parahippocampal and entorhinal cortex).
mask_mtl_labels = [
    17,
    53,  # left and right hippocampus
    1016,
    2016,  # parahippocampal
    1006,
    2006,  # ctx-entorhinal
]
# function: use freesurfer mri_binarize to threshold an input volume
mask_visual = MapNode(interface=Binarize(),
                      name='mask_visual',
                      iterfield=['in_file'])
# input: match instead of threshold
# (mask_visual_labels is defined earlier in the full file)
mask_visual.inputs.match = mask_visual_labels
# optimize the efficiency of the node:
# NOTE(review): this second plugin_args assignment overwrites the first, so
# the nodes/ppn request is discarded — likely both qsub options should be
# merged into a single qsub_args string.
mask_visual.plugin_args = {'qsub_args': '-l nodes=1:ppn=1', 'overwrite': True}
mask_visual.plugin_args = {'qsub_args': '-l mem=100MB', 'overwrite': True}
# function: use freesurfer mri_binarize to threshold an input volume
mask_hippocampus = MapNode(interface=Binarize(),
                           name='mask_hippocampus',
                           iterfield=['in_file'])
# input: match instead of threshold
mask_hippocampus.inputs.match = mask_hippocampus_labels
# optimize the efficiency of the node:
mask_hippocampus.plugin_args = {