def run_feat(bold_file, bold_folder, brainmask_file, feat_gen):
    from nipype.interfaces.fsl import ImageStats, FEAT, Info
    # from bm_functions import gen_default_feat_config
    from numpy import shape
    from textwrap import dedent

    fslFilename = bold_folder + 'feat.fsf'

    # Get the number of voxels in the 4D file
    statComp = ImageStats()
    statComp.inputs.in_file = bold_file
    statComp.inputs.op_string = '-v'

    numVox = int(statComp.run().outputs.out_stat[0])

    # Get the number of raw volumes
    statComp.inputs.split_4d = True

    numVol = shape(statComp.run().outputs.out_stat)[0]

    # Generate the file
    standard_T1_brain = Info.standard_image('MNI152_T1_2mm_brain')
    theString = feat_gen(bold_folder, bold_file, brainmask_file,
                         standard_T1_brain, numVox, numVol)
    with open(fslFilename, 'w') as out_file:
        out_file.write(dedent(theString))

    # Run feat using the previously manipulated config
    runFeat = FEAT(fsf_file=fslFilename)
    # Run and pass back the foldername
    return runFeat.run().outputs.feat_dir
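A minimal sketch (not part of the original source) of the kind of feat_gen callable run_feat expects: it should return a FEAT design (.fsf) as a string with the voxel/volume counts and file paths filled in. The real generator is the commented-out bm_functions.gen_default_feat_config; the handful of field names below are only illustrative of a design.fsf file.

def example_feat_gen(bold_folder, bold_file, brainmask_file,
                     standard_T1_brain, num_vox, num_vol):
    # Hypothetical template: a real design.fsf contains many more entries, and
    # brainmask_file would populate the corresponding mask field in a full template.
    return """
    set fmri(version) 6.00
    set fmri(npts) {num_vol}
    set fmri(totalVoxels) {num_vox}
    set fmri(outputdir) "{out_dir}"
    set fmri(regstandard) "{standard}"
    set feat_files(1) "{bold}"
    """.format(num_vol=num_vol, num_vox=num_vox,
               out_dir=bold_folder + 'feat',
               standard=standard_T1_brain, bold=bold_file)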
Example #2
def fslstats(infile, mask):
    from nipype.interfaces.fsl import ImageStats
    stats = ImageStats()
    stats.inputs.in_file = infile
    stats.inputs.op_string = '-k %s -M'  # mean within the ROI mask (plain '-M' would ignore mask_file)
    stats.inputs.mask_file = mask
    cout = stats.run()
    return cout.outputs.out_stat
Example #5
def fslmath_imagestats(input_file, operation, output_prefix):
    from nipype.interfaces.fsl import ImageStats
    import os

    statsM = ImageStats(in_file=input_file, op_string=operation)
    print("ImageStats [" + os.path.basename(input_file) + "]: " + statsM.cmdline)
    res = statsM.run()
    return res.outputs.out_stat
Example #6
def get_volume(mask, thresh, out_file):
    from nipype.interfaces.fsl import MathsCommand, ImageStats
    # Threshold and binarize the mask (writes out_file as a side effect)
    Mask = MathsCommand(in_file=mask,
                        args="-thr {0} -bin".format(thresh),
                        out_file=out_file)
    Mask.run()
    # fslstats -l <thresh> -V : [voxels, mm3] above the threshold
    Volume = ImageStats(in_file=mask, op_string='-l {0} -V'.format(thresh))
    Vout = Volume.run()
    outstat = Vout.outputs.out_stat
    return outstat[1]  # volume in mm3
Example #7
def get_volume(mask):
    """
    fslstats -V returns the volume [voxels, mm3];
    fslstats -M returns the mean value of non-zero voxels.
    Multiplying the mm3 volume by the mean gives the partial volume estimate.
    """
    from nipype.interfaces.fsl import ImageStats
    Volume = ImageStats(in_file=mask, op_string='-V -M')
    Vout = Volume.run()
    outstat = Vout.outputs.out_stat
    return outstat[1] * outstat[2]
Example #8
def fslstats(infile, mask):
    """
    Wrapper for fslstats. Takes an input file and extracts the mean from the specified ROI mask.
    """
    from nipype.interfaces.fsl import ImageStats
    stats = ImageStats()
    stats.inputs.in_file = infile
    stats.inputs.op_string = '-k %s -M'
    stats.inputs.mask_file = mask
    cout = stats.run()
    return cout.outputs.out_stat
Example #9
def run_invert_T1(subj, T1_inv_path, betfMRI_folder, betMRI_folder):
    import os
    from os.path import join as opj
    from nipype.interfaces.fsl import ImageStats
    print("\n    5. Calculating inverse T1")
    # fslstats -R : <min intensity> <max intensity>
    stats = ImageStats(in_file=opj(betMRI_folder, subj, 'T1_brain.nii.gz'),
                       op_string='-R')
    T1minmax = stats.run()
    stats = ImageStats(in_file=opj(betfMRI_folder, subj, 'mean_brain.nii.gz'),
                       op_string='-R')
    EPIminmax = stats.run()
    # Scale factor mapping the T1 intensity range onto the EPI intensity range
    factor = (T1minmax.outputs.out_stat[1] - T1minmax.outputs.out_stat[0]) / (
        EPIminmax.outputs.out_stat[1] - EPIminmax.outputs.out_stat[0])
    # Invert the T1: flip the sign, shift by the T1 maximum, then rescale
    os.system("fslmaths " + opj(betMRI_folder, subj, 'T1_brain.nii.gz') +
              " -mul -1" + " -add " + str(T1minmax.outputs.out_stat[1]) +
              " -mul " + str(factor) + " " +
              opj(betMRI_folder, subj, 'T1_brain_inv.nii.gz'))
Example #10
def FSLROI_Image(Image, ROI):
    """Performs fslstats to extract values from Image at the location denoted
       by ROI."""
    from nipype.interfaces.fsl import ImageStats
    stats = ImageStats(in_file=Image, op_string='-k %s -m' % ROI)
    return stats.aggregate_outputs().out_stat
Example #11
def Lesion_extractor(
    name='Lesion_Extractor',
    wf_name='Test',
    base_dir='/homes_unix/alaurent/',
    input_dir=None,
    subjects=None,
    main=None,
    acc=None,
    atlas='/homes_unix/alaurent/cbstools-public-master/atlases/brain-segmentation-prior3.0/brain-atlas-quant-3.0.8.txt'
):

    wf = Workflow(wf_name)
    wf.base_dir = base_dir

    #file = open(subjects,"r")
    #subjects = file.read().split("\n")
    #file.close()

    # Subject List
    subjectList = Node(IdentityInterface(fields=['subject_id'],
                                         mandatory_inputs=True),
                       name="subList")
    subjectList.iterables = ('subject_id', [
        sub for sub in subjects if sub != '' and sub != '\n'
    ])

    # T1w and FLAIR
    scanList = Node(DataGrabber(infields=['subject_id'],
                                outfields=['T1', 'FLAIR']),
                    name="scanList")
    scanList.inputs.base_directory = input_dir
    scanList.inputs.ignore_exception = False
    scanList.inputs.raise_on_empty = True
    scanList.inputs.sort_filelist = True
    #scanList.inputs.template = '%s/%s.nii'
    #scanList.inputs.template_args = {'T1': [['subject_id','T1*']],
    #                                 'FLAIR': [['subject_id','FLAIR*']]}
    scanList.inputs.template = '%s/anat/%s'
    scanList.inputs.template_args = {
        'T1': [['subject_id', '*_T1w.nii.gz']],
        'FLAIR': [['subject_id', '*_FLAIR.nii.gz']]
    }
    wf.connect(subjectList, "subject_id", scanList, "subject_id")

    #     # T1w and FLAIR
    #     dg = Node(DataGrabber(outfields=['T1', 'FLAIR']), name="T1wFLAIR")
    #     dg.inputs.base_directory = "/homes_unix/alaurent/LesionPipeline"
    #     dg.inputs.template = "%s/NIFTI/*.nii.gz"
    #     dg.inputs.template_args['T1']=[['7']]
    #     dg.inputs.template_args['FLAIR']=[['9']]
    #     dg.inputs.sort_filelist=True

    # Reorient Volume
    T1Conv = Node(Reorient2Std(), name="ReorientVolume")
    T1Conv.inputs.ignore_exception = False
    T1Conv.inputs.terminal_output = 'none'
    T1Conv.inputs.out_file = "T1_reoriented.nii.gz"
    wf.connect(scanList, "T1", T1Conv, "in_file")

    # Reorient Volume (2)
    T2flairConv = Node(Reorient2Std(), name="ReorientVolume2")
    T2flairConv.inputs.ignore_exception = False
    T2flairConv.inputs.terminal_output = 'none'
    T2flairConv.inputs.out_file = "FLAIR_reoriented.nii.gz"
    wf.connect(scanList, "FLAIR", T2flairConv, "in_file")

    # N3 Correction
    T1NUC = Node(N4BiasFieldCorrection(), name="N3Correction")
    T1NUC.inputs.dimension = 3
    T1NUC.inputs.environ = {'NSLOTS': '1'}
    T1NUC.inputs.ignore_exception = False
    T1NUC.inputs.num_threads = 1
    T1NUC.inputs.save_bias = False
    T1NUC.inputs.terminal_output = 'none'
    wf.connect(T1Conv, "out_file", T1NUC, "input_image")

    # N3 Correction (2)
    T2flairNUC = Node(N4BiasFieldCorrection(), name="N3Correction2")
    T2flairNUC.inputs.dimension = 3
    T2flairNUC.inputs.environ = {'NSLOTS': '1'}
    T2flairNUC.inputs.ignore_exception = False
    T2flairNUC.inputs.num_threads = 1
    T2flairNUC.inputs.save_bias = False
    T2flairNUC.inputs.terminal_output = 'none'
    wf.connect(T2flairConv, "out_file", T2flairNUC, "input_image")
    '''
    #####################
    ### PRE-NORMALIZE ###
    #####################
    To make sure there are no outlier values (negative, or very high) that would throw off the initialization steps
    '''

    # Intensity Range Normalization
    getMaxT1NUC = Node(ImageStats(op_string='-r'), name="getMaxT1NUC")
    wf.connect(T1NUC, 'output_image', getMaxT1NUC, 'in_file')

    T1NUCirn = Node(AbcImageMaths(), name="IntensityNormalization")
    T1NUCirn.inputs.op_string = "-div"
    T1NUCirn.inputs.out_file = "normT1.nii.gz"
    wf.connect(T1NUC, 'output_image', T1NUCirn, 'in_file')
    wf.connect(getMaxT1NUC, ('out_stat', getElementFromList, 1), T1NUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2NUC = Node(ImageStats(op_string='-r'), name="getMaxT2")
    wf.connect(T2flairNUC, 'output_image', getMaxT2NUC, 'in_file')

    T2NUCirn = Node(AbcImageMaths(), name="IntensityNormalization2")
    T2NUCirn.inputs.op_string = "-div"
    T2NUCirn.inputs.out_file = "normT2.nii.gz"
    wf.connect(T2flairNUC, 'output_image', T2NUCirn, 'in_file')
    wf.connect(getMaxT2NUC, ('out_stat', getElementFromList, 1), T2NUCirn,
               "op_value")
    '''
    ########################
    #### COREGISTRATION ####
    ########################
    '''

    # Optimized Automated Registration
    T2flairCoreg = Node(FLIRT(), name="OptimizedAutomatedRegistration")
    T2flairCoreg.inputs.output_type = 'NIFTI_GZ'
    wf.connect(T2NUCirn, "out_file", T2flairCoreg, "in_file")
    wf.connect(T1NUCirn, "out_file", T2flairCoreg, "reference")
    '''    
    #########################
    #### SKULL-STRIPPING ####
    #########################
    '''

    # SPECTRE
    T1ss = Node(BET(), name="SPECTRE")
    T1ss.inputs.frac = 0.45  #0.4
    T1ss.inputs.mask = True
    T1ss.inputs.outline = True
    T1ss.inputs.robust = True
    wf.connect(T1NUCirn, "out_file", T1ss, "in_file")

    # Image Calculator
    T2ss = Node(ApplyMask(), name="ImageCalculator")
    wf.connect(T1ss, "mask_file", T2ss, "mask_file")
    wf.connect(T2flairCoreg, "out_file", T2ss, "in_file")
    '''
    ####################################
    #### 2nd LAYER OF N3 CORRECTION ####
    ####################################
    This time without the skull: there was a significant amount of inhomogeneity left over.
    '''

    # N3 Correction (3)
    T1ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction3")
    T1ssNUC.inputs.dimension = 3
    T1ssNUC.inputs.environ = {'NSLOTS': '1'}
    T1ssNUC.inputs.ignore_exception = False
    T1ssNUC.inputs.num_threads = 1
    T1ssNUC.inputs.save_bias = False
    T1ssNUC.inputs.terminal_output = 'none'
    wf.connect(T1ss, "out_file", T1ssNUC, "input_image")

    # N3 Correction (4)
    T2ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction4")
    T2ssNUC.inputs.dimension = 3
    T2ssNUC.inputs.environ = {'NSLOTS': '1'}
    T2ssNUC.inputs.ignore_exception = False
    T2ssNUC.inputs.num_threads = 1
    T2ssNUC.inputs.save_bias = False
    T2ssNUC.inputs.terminal_output = 'none'
    wf.connect(T2ss, "out_file", T2ssNUC, "input_image")
    '''
    ####################################
    ####    NORMALIZE FOR MGDM      ####
    ####################################
    This normalization is a bit aggressive: it is only useful for giving MGDM
    a cropped dynamic range, and possibly harmful to further processing,
    so the unprocessed images are passed to the subsequent steps.
    '''

    # Intensity Range Normalization
    getMaxT1ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT1ssNUC")
    wf.connect(T1ssNUC, 'output_image', getMaxT1ssNUC, 'in_file')

    T1ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization3")
    T1ssNUCirn.inputs.op_string = "-div"
    T1ssNUCirn.inputs.out_file = "normT1ss.nii.gz"
    wf.connect(T1ssNUC, 'output_image', T1ssNUCirn, 'in_file')
    wf.connect(getMaxT1ssNUC, ('out_stat', getElementFromList, 1), T1ssNUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT2ssNUC")
    wf.connect(T2ssNUC, 'output_image', getMaxT2ssNUC, 'in_file')

    T2ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization4")
    T2ssNUCirn.inputs.op_string = "-div"
    T2ssNUCirn.inputs.out_file = "normT2ss.nii.gz"
    wf.connect(T2ssNUC, 'output_image', T2ssNUCirn, 'in_file')
    wf.connect(getMaxT2ssNUC, ('out_stat', getElementFromList, 1), T2ssNUCirn,
               "op_value")
    '''
    ####################################
    ####      ESTIMATE CSF PV       ####
    ####################################
    Here we try to get a better handle on CSF voxels to help the segmentation step
    '''

    # Recursive Ridge Diffusion
    CSF_pv = Node(RecursiveRidgeDiffusion(), name='estimate_CSF_pv')
    CSF_pv.plugin_args = {'sbatch_args': '--mem 6000'}
    CSF_pv.inputs.ridge_intensities = "dark"
    CSF_pv.inputs.ridge_filter = "2D"
    CSF_pv.inputs.orientation = "undefined"
    CSF_pv.inputs.ang_factor = 1.0
    CSF_pv.inputs.min_scale = 0
    CSF_pv.inputs.max_scale = 3
    CSF_pv.inputs.propagation_model = "diffusion"
    CSF_pv.inputs.diffusion_factor = 0.5
    CSF_pv.inputs.similarity_scale = 0.1
    CSF_pv.inputs.neighborhood_size = 4
    CSF_pv.inputs.max_iter = 100
    CSF_pv.inputs.max_diff = 0.001
    CSF_pv.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, CSF_pv.name),
        CSF_pv, 'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', CSF_pv, 'input_image')
    '''
    ####################################
    ####            MGDM            ####
    ####################################
    '''

    # Multi-contrast Brain Segmentation
    MGDM = Node(MGDMSegmentation(), name='MGDM')
    MGDM.plugin_args = {'sbatch_args': '--mem 7000'}
    MGDM.inputs.contrast_type1 = "Mprage3T"
    MGDM.inputs.contrast_type2 = "FLAIR3T"
    MGDM.inputs.contrast_type3 = "PVDURA"
    MGDM.inputs.save_data = True
    MGDM.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, MGDM.name), MGDM,
        'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', MGDM, 'contrast_image1')
    wf.connect(T2ssNUCirn, 'out_file', MGDM, 'contrast_image2')
    wf.connect(CSF_pv, 'ridge_pv', MGDM, 'contrast_image3')

    # Enhance Region Contrast
    ERC = Node(EnhanceRegionContrast(), name='ERC')
    ERC.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC.inputs.enhanced_region = "crwm"
    ERC.inputs.contrast_background = "crgm"
    ERC.inputs.partial_voluming_distance = 2.0
    ERC.inputs.save_data = True
    ERC.inputs.atlas_file = atlas
    wf.connect(subjectList,
               ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC.name),
               ERC, 'output_dir')
    wf.connect(T1ssNUC, 'output_image', ERC, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC, 'levelset_boundary_image')

    # Enhance Region Contrast (2)
    ERC2 = Node(EnhanceRegionContrast(), name='ERC2')
    ERC2.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC2.inputs.enhanced_region = "crwm"
    ERC2.inputs.contrast_background = "crgm"
    ERC2.inputs.partial_voluming_distance = 2.0
    ERC2.inputs.save_data = True
    ERC2.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC2.name), ERC2,
        'output_dir')
    wf.connect(T2ssNUC, 'output_image', ERC2, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC2, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC2, 'levelset_boundary_image')

    # Define Multi-Region Priors
    DMRP = Node(DefineMultiRegionPriors(), name='DefineMultRegPriors')
    DMRP.plugin_args = {'sbatch_args': '--mem 6000'}
    #DMRP.inputs.defined_region = "ventricle-horns"
    #DMRP.inputs.definition_method = "closest-distance"
    DMRP.inputs.distance_offset = 3.0
    DMRP.inputs.save_data = True
    DMRP.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, DMRP.name), DMRP,
        'output_dir')
    wf.connect(MGDM, 'segmentation', DMRP, 'segmentation_image')
    wf.connect(MGDM, 'distance', DMRP, 'levelset_boundary_image')
    '''
    ###############################################
    ####      REMOVE VENTRICLE POSTERIOR       ####
    ###############################################
    Due to topology constraints, the ventricles are often not fully segmented;
    here we add back all ventricle voxels from the posterior probability (without the topology constraints).
    '''

    # Posterior label
    PostLabel = Node(Split(), name='PosteriorLabel')
    PostLabel.inputs.dimension = "t"
    wf.connect(MGDM, 'labels', PostLabel, 'in_file')

    # Posterior proba
    PostProba = Node(Split(), name='PosteriorProba')
    PostProba.inputs.dimension = "t"
    wf.connect(MGDM, 'memberships', PostProba, 'in_file')

    # Threshold binary mask : ventricle label part 1
    VentLabel1 = Node(Threshold(), name="VentricleLabel1")
    VentLabel1.inputs.thresh = 10.5
    VentLabel1.inputs.direction = "below"
    wf.connect(PostLabel, ("out_files", getFirstElement), VentLabel1,
               "in_file")

    # Threshold binary mask : ventricle label part 2
    VentLabel2 = Node(Threshold(), name="VentricleLabel2")
    VentLabel2.inputs.thresh = 13.5
    VentLabel2.inputs.direction = "above"
    wf.connect(VentLabel1, "out_file", VentLabel2, "in_file")

    # Image calculator : ventricle proba
    VentProba = Node(ImageMaths(), name="VentricleProba")
    VentProba.inputs.op_string = "-mul"
    VentProba.inputs.out_file = "ventproba.nii.gz"
    wf.connect(PostProba, ("out_files", getFirstElement), VentProba, "in_file")
    wf.connect(VentLabel2, "out_file", VentProba, "in_file2")

    # Image calculator : remove inter ventricles
    RmInterVent = Node(ImageMaths(), name="RemoveInterVent")
    RmInterVent.inputs.op_string = "-sub"
    RmInterVent.inputs.out_file = "rmintervent.nii.gz"
    wf.connect(ERC, "region_pv", RmInterVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInterVent, "in_file2")

    # Image calculator : add horns
    AddHorns = Node(ImageMaths(), name="AddHorns")
    AddHorns.inputs.op_string = "-add"
    AddHorns.inputs.out_file = "rmvent.nii.gz"
    wf.connect(RmInterVent, "out_file", AddHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddHorns, "in_file2")

    # Image calculator : remove ventricles
    RmVent = Node(ImageMaths(), name="RemoveVentricles")
    RmVent.inputs.op_string = "-sub"
    RmVent.inputs.out_file = "rmvent.nii.gz"
    wf.connect(AddHorns, "out_file", RmVent, "in_file")
    wf.connect(VentProba, "out_file", RmVent, "in_file2")

    # Image calculator : remove internal capsule
    RmIC = Node(ImageMaths(), name="RemoveInternalCap")
    RmIC.inputs.op_string = "-sub"
    RmIC.inputs.out_file = "rmic.nii.gz"
    wf.connect(RmVent, "out_file", RmIC, "in_file")
    wf.connect(DMRP, "internal_capsule_pv", RmIC, "in_file2")

    # Intensity Range Normalization (3)
    getMaxRmIC = Node(ImageStats(op_string='-r'), name="getMaxRmIC")
    wf.connect(RmIC, 'out_file', getMaxRmIC, 'in_file')

    RmICirn = Node(AbcImageMaths(), name="IntensityNormalization5")
    RmICirn.inputs.op_string = "-div"
    RmICirn.inputs.out_file = "normRmIC.nii.gz"
    wf.connect(RmIC, 'out_file', RmICirn, 'in_file')
    wf.connect(getMaxRmIC, ('out_stat', getElementFromList, 1), RmICirn,
               "op_value")

    # Probability To Levelset : WM orientation
    WM_Orient = Node(ProbabilityToLevelset(), name='WM_Orientation')
    WM_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_Orient.name),
        WM_Orient, 'output_dir')
    wf.connect(RmICirn, 'out_file', WM_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in WM only
    WM_pvs = Node(RecursiveRidgeDiffusion(), name='PVS_in_WM')
    WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_pvs.inputs.ridge_intensities = "bright"
    WM_pvs.inputs.ridge_filter = "1D"
    WM_pvs.inputs.orientation = "orthogonal"
    WM_pvs.inputs.ang_factor = 1.0
    WM_pvs.inputs.min_scale = 0
    WM_pvs.inputs.max_scale = 3
    WM_pvs.inputs.propagation_model = "diffusion"
    WM_pvs.inputs.diffusion_factor = 1.0
    WM_pvs.inputs.similarity_scale = 1.0
    WM_pvs.inputs.neighborhood_size = 2
    WM_pvs.inputs.max_iter = 100
    WM_pvs.inputs.max_diff = 0.001
    WM_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_pvs.name),
        WM_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', WM_pvs, 'input_image')
    wf.connect(WM_Orient, 'levelset', WM_pvs, 'surface_levelset')
    wf.connect(RmICirn, 'out_file', WM_pvs, 'loc_prior')

    # Extract Lesions : extract WM PVS
    extract_WM_pvs = Node(LesionExtraction(), name='ExtractPVSfromWM')
    extract_WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WM_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WM_pvs.inputs.csf_boundary_partial_vol_dist = 3.0
    extract_WM_pvs.inputs.lesion_clust_dist = 1.0
    extract_WM_pvs.inputs.prob_min_thresh = 0.1
    extract_WM_pvs.inputs.prob_max_thresh = 0.33
    extract_WM_pvs.inputs.small_lesion_size = 4.0
    extract_WM_pvs.inputs.save_data = True
    extract_WM_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WM_pvs.name), extract_WM_pvs,
               'output_dir')
    wf.connect(WM_pvs, 'propagation', extract_WM_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WM_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WM_pvs, 'levelset_boundary_image')
    wf.connect(RmICirn, 'out_file', extract_WM_pvs, 'location_prior_image')
    '''
    2nd branch
    '''

    # Image calculator : internal capsule without ventricles
    ICwoVent = Node(ImageMaths(), name="ICWithoutVentricules")
    ICwoVent.inputs.op_string = "-sub"
    ICwoVent.inputs.out_file = "icwovent.nii.gz"
    wf.connect(DMRP, "internal_capsule_pv", ICwoVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", ICwoVent, "in_file2")

    # Image calculator : remove ventricles IC
    RmVentIC = Node(ImageMaths(), name="RmVentIC")
    RmVentIC.inputs.op_string = "-sub"
    RmVentIC.inputs.out_file = "RmVentIC.nii.gz"
    wf.connect(ICwoVent, "out_file", RmVentIC, "in_file")
    wf.connect(VentProba, "out_file", RmVentIC, "in_file2")

    # Intensity Range Normalization (4)
    getMaxRmVentIC = Node(ImageStats(op_string='-r'), name="getMaxRmVentIC")
    wf.connect(RmVentIC, 'out_file', getMaxRmVentIC, 'in_file')

    RmVentICirn = Node(AbcImageMaths(), name="IntensityNormalization6")
    RmVentICirn.inputs.op_string = "-div"
    RmVentICirn.inputs.out_file = "normRmVentIC.nii.gz"
    wf.connect(RmVentIC, 'out_file', RmVentICirn, 'in_file')
    wf.connect(getMaxRmVentIC, ('out_stat', getElementFromList, 1),
               RmVentICirn, "op_value")

    # Probability To Levelset : IC orientation
    IC_Orient = Node(ProbabilityToLevelset(), name='IC_Orientation')
    IC_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_Orient.name),
        IC_Orient, 'output_dir')
    wf.connect(RmVentICirn, 'out_file', IC_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in IC only
    IC_pvs = Node(RecursiveRidgeDiffusion(), name='RecursiveRidgeDiffusion2')
    IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_pvs.inputs.ridge_intensities = "bright"
    IC_pvs.inputs.ridge_filter = "1D"
    IC_pvs.inputs.orientation = "undefined"
    IC_pvs.inputs.ang_factor = 1.0
    IC_pvs.inputs.min_scale = 0
    IC_pvs.inputs.max_scale = 3
    IC_pvs.inputs.propagation_model = "diffusion"
    IC_pvs.inputs.diffusion_factor = 1.0
    IC_pvs.inputs.similarity_scale = 1.0
    IC_pvs.inputs.neighborhood_size = 2
    IC_pvs.inputs.max_iter = 100
    IC_pvs.inputs.max_diff = 0.001
    IC_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_pvs.name),
        IC_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', IC_pvs, 'input_image')
    wf.connect(IC_Orient, 'levelset', IC_pvs, 'surface_levelset')
    wf.connect(RmVentICirn, 'out_file', IC_pvs, 'loc_prior')

    # Extract Lesions : extract IC PVS
    extract_IC_pvs = Node(LesionExtraction(), name='ExtractPVSfromIC')
    extract_IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_IC_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_IC_pvs.inputs.csf_boundary_partial_vol_dist = 4.0
    extract_IC_pvs.inputs.lesion_clust_dist = 1.0
    extract_IC_pvs.inputs.prob_min_thresh = 0.25
    extract_IC_pvs.inputs.prob_max_thresh = 0.5
    extract_IC_pvs.inputs.small_lesion_size = 4.0
    extract_IC_pvs.inputs.save_data = True
    extract_IC_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_IC_pvs.name), extract_IC_pvs,
               'output_dir')
    wf.connect(IC_pvs, 'propagation', extract_IC_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_IC_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_IC_pvs, 'levelset_boundary_image')
    wf.connect(RmVentICirn, 'out_file', extract_IC_pvs, 'location_prior_image')
    '''
    3rd branch
    '''

    # Image calculator :
    RmInter = Node(ImageMaths(), name="RemoveInterVentricules")
    RmInter.inputs.op_string = "-sub"
    RmInter.inputs.out_file = "rminter.nii.gz"
    wf.connect(ERC2, 'region_pv', RmInter, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInter, "in_file2")

    # Image calculator :
    AddVentHorns = Node(ImageMaths(), name="AddVentHorns")
    AddVentHorns.inputs.op_string = "-add"
    AddVentHorns.inputs.out_file = "rminter.nii.gz"
    wf.connect(RmInter, 'out_file', AddVentHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddVentHorns, "in_file2")

    # Intensity Range Normalization (5)
    getMaxAddVentHorns = Node(ImageStats(op_string='-r'),
                              name="getMaxAddVentHorns")
    wf.connect(AddVentHorns, 'out_file', getMaxAddVentHorns, 'in_file')

    AddVentHornsirn = Node(AbcImageMaths(), name="IntensityNormalization7")
    AddVentHornsirn.inputs.op_string = "-div"
    AddVentHornsirn.inputs.out_file = "normAddVentHorns.nii.gz"
    wf.connect(AddVentHorns, 'out_file', AddVentHornsirn, 'in_file')
    wf.connect(getMaxAddVentHorns, ('out_stat', getElementFromList, 1),
               AddVentHornsirn, "op_value")

    # Extract Lesions : extract White Matter Hyperintensities
    extract_WMH = Node(LesionExtraction(), name='Extract_WMH')
    extract_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_WMH.inputs.lesion_clust_dist = 1.0
    extract_WMH.inputs.prob_min_thresh = 0.84
    extract_WMH.inputs.prob_max_thresh = 0.84
    extract_WMH.inputs.small_lesion_size = 4.0
    extract_WMH.inputs.save_data = True
    extract_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WMH.name), extract_WMH,
               'output_dir')
    wf.connect(ERC2, 'background_proba', extract_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_WMH2 = extract_WMH.clone(name='Extract_WMH2')
    # extract_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH2.name),extract_WMH2,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH2,'location_prior_image')
    #
    # extract_WMH3 = extract_WMH.clone(name='Extract_WMH3')
    # extract_WMH3.inputs.gm_boundary_partial_vol_dist = 3.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH3.name),extract_WMH3,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     FINDING SMALL WMHs     ####
    ####################################
    Small round WMHs near the cortex are often missed by the main algorithm, 
    so we're adding this one that takes care of them.
    '''

    # Recursive Ridge Diffusion : round WMH detection
    round_WMH = Node(RecursiveRidgeDiffusion(), name='round_WMH')
    round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    round_WMH.inputs.ridge_intensities = "bright"
    round_WMH.inputs.ridge_filter = "0D"
    round_WMH.inputs.orientation = "undefined"
    round_WMH.inputs.ang_factor = 1.0
    round_WMH.inputs.min_scale = 1
    round_WMH.inputs.max_scale = 4
    round_WMH.inputs.propagation_model = "none"
    round_WMH.inputs.diffusion_factor = 1.0
    round_WMH.inputs.similarity_scale = 0.1
    round_WMH.inputs.neighborhood_size = 4
    round_WMH.inputs.max_iter = 100
    round_WMH.inputs.max_diff = 0.001
    round_WMH.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, round_WMH.name),
        round_WMH, 'output_dir')
    wf.connect(ERC2, 'background_proba', round_WMH, 'input_image')
    wf.connect(AddVentHornsirn, 'out_file', round_WMH, 'loc_prior')

    # Extract Lesions : extract round WMH
    extract_round_WMH = Node(LesionExtraction(), name='Extract_round_WMH')
    extract_round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_round_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_round_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_round_WMH.inputs.lesion_clust_dist = 1.0
    extract_round_WMH.inputs.prob_min_thresh = 0.33
    extract_round_WMH.inputs.prob_max_thresh = 0.33
    extract_round_WMH.inputs.small_lesion_size = 6.0
    extract_round_WMH.inputs.save_data = True
    extract_round_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_round_WMH.name),
               extract_round_WMH, 'output_dir')
    wf.connect(round_WMH, 'ridge_pv', extract_round_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_round_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_round_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_round_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_round_WMH2 = extract_round_WMH.clone(name='Extract_round_WMH2')
    # extract_round_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH2.name),extract_round_WMH2,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH2,'location_prior_image')
    #
    # extract_round_WMH3 = extract_round_WMH.clone(name='Extract_round_WMH3')
    # extract_round_WMH3.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH3.name),extract_round_WMH3,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     COMBINE BOTH TYPES     ####
    ####################################
    Small round WMHs and regular WMH together before thresholding
    +
    PVS from white matter and internal capsule
    '''

    # Image calculator : WM + IC DVRS
    DVRS = Node(ImageMaths(), name="DVRS")
    DVRS.inputs.op_string = "-max"
    DVRS.inputs.out_file = "DVRS_map.nii.gz"
    wf.connect(extract_WM_pvs, 'lesion_score', DVRS, "in_file")
    wf.connect(extract_IC_pvs, "lesion_score", DVRS, "in_file2")

    # Image calculator : WMH + round
    WMH = Node(ImageMaths(), name="WMH")
    WMH.inputs.op_string = "-max"
    WMH.inputs.out_file = "WMH_map.nii.gz"
    wf.connect(extract_WMH, 'lesion_score', WMH, "in_file")
    wf.connect(extract_round_WMH, "lesion_score", WMH, "in_file2")

    #===========================================================================
    # WMH2 = Node(ImageMaths(), name="WMH2")
    # WMH2.inputs.op_string = "-max"
    # WMH2.inputs.out_file = "WMH2_map.nii.gz"
    # wf.connect(extract_WMH2,'lesion_score',WMH2,"in_file")
    # wf.connect(extract_round_WMH2,"lesion_score", WMH2, "in_file2")
    #
    # WMH3 = Node(ImageMaths(), name="WMH3")
    # WMH3.inputs.op_string = "-max"
    # WMH3.inputs.out_file = "WMH3_map.nii.gz"
    # wf.connect(extract_WMH3,'lesion_score',WMH3,"in_file")
    # wf.connect(extract_round_WMH3,"lesion_score", WMH3, "in_file2")
    #===========================================================================

    # Image calculator : multiply by boundary partial volume
    WMH_mul = Node(ImageMaths(), name="WMH_mul")
    WMH_mul.inputs.op_string = "-mul"
    WMH_mul.inputs.out_file = "final_mask.nii.gz"
    wf.connect(WMH, "out_file", WMH_mul, "in_file")
    wf.connect(MGDM, "distance", WMH_mul, "in_file2")

    #===========================================================================
    # WMH2_mul = Node(ImageMaths(), name="WMH2_mul")
    # WMH2_mul.inputs.op_string = "-mul"
    # WMH2_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH2,"out_file", WMH2_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH2_mul, "in_file2")
    #
    # WMH3_mul = Node(ImageMaths(), name="WMH3_mul")
    # WMH3_mul.inputs.op_string = "-mul"
    # WMH3_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH3,"out_file", WMH3_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH3_mul, "in_file2")
    #===========================================================================
    '''
    ##########################################
    ####      SEGMENTATION THRESHOLD      ####
    ##########################################
    A threshold of 0.5 is very conservative, because the final lesion score is the product of two probabilities.
    This needs to be optimized to a value between 0.25 and 0.5 to balance false negatives 
    (dominant at 0.5) and false positives (dominant at low values).
    '''

    # Threshold binary mask :
    DVRS_mask = Node(Threshold(), name="DVRS_mask")
    DVRS_mask.inputs.thresh = 0.25
    DVRS_mask.inputs.direction = "below"
    wf.connect(DVRS, "out_file", DVRS_mask, "in_file")

    # Threshold binary mask : 025
    WMH1_025 = Node(Threshold(), name="WMH1_025")
    WMH1_025.inputs.thresh = 0.25
    WMH1_025.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_025, "in_file")

    #===========================================================================
    # WMH2_025 = Node(Threshold(), name="WMH2_025")
    # WMH2_025.inputs.thresh = 0.25
    # WMH2_025.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_025, "in_file")
    #
    # WMH3_025 = Node(Threshold(), name="WMH3_025")
    # WMH3_025.inputs.thresh = 0.25
    # WMH3_025.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_025, "in_file")
    #===========================================================================

    # Threshold binary mask : 050
    WMH1_050 = Node(Threshold(), name="WMH1_050")
    WMH1_050.inputs.thresh = 0.50
    WMH1_050.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_050, "in_file")

    #===========================================================================
    # WMH2_050 = Node(Threshold(), name="WMH2_050")
    # WMH2_050.inputs.thresh = 0.50
    # WMH2_050.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_050, "in_file")
    #
    # WMH3_050 = Node(Threshold(), name="WMH3_050")
    # WMH3_050.inputs.thresh = 0.50
    # WMH3_050.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_050, "in_file")
    #===========================================================================

    # Threshold binary mask : 075
    WMH1_075 = Node(Threshold(), name="WMH1_075")
    WMH1_075.inputs.thresh = 0.75
    WMH1_075.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_075, "in_file")

    #===========================================================================
    # WMH2_075 = Node(Threshold(), name="WMH2_075")
    # WMH2_075.inputs.thresh = 0.75
    # WMH2_075.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_075, "in_file")
    #
    # WMH3_075 = Node(Threshold(), name="WMH3_075")
    # WMH3_075.inputs.thresh = 0.75
    # WMH3_075.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_075, "in_file")
    #===========================================================================

    ## Outputs

    DVRS_Output = Node(IdentityInterface(fields=[
        'mask', 'region', 'lesion_size', 'lesion_proba', 'boundary', 'label',
        'score'
    ]),
                       name='DVRS_Output')
    wf.connect(DVRS_mask, 'out_file', DVRS_Output, 'mask')

    WMH_output = Node(IdentityInterface(fields=[
        'mask1025', 'mask1050', 'mask1075', 'mask2025', 'mask2050', 'mask2075',
        'mask3025', 'mask3050', 'mask3075'
    ]),
                      name='WMH_output')
    wf.connect(WMH1_025, 'out_file', WMH_output, 'mask1025')
    #wf.connect(WMH2_025,'out_file',WMH_output,'mask2025')
    #wf.connect(WMH3_025,'out_file',WMH_output,'mask3025')
    wf.connect(WMH1_050, 'out_file', WMH_output, 'mask1050')
    #wf.connect(WMH2_050,'out_file',WMH_output,'mask2050')
    #wf.connect(WMH3_050,'out_file',WMH_output,'mask3050')
    wf.connect(WMH1_075, 'out_file', WMH_output, 'mask1075')
    #wf.connect(WMH2_075,'out_file',WMH_output,'mask2070')
    #wf.connect(WMH3_075,'out_file',WMH_output,'mask3075')

    return wf
Example #12
from nipype.interfaces.fsl import ImageMaths, ImageStats

# make all the segments first
for i in sublist:
    for j in seglist:

        # get segment (note out_file contains L because this example is left striatum only)
        ImageMaths(in_file=('ITC%sbig.nii.gz' % i),
                   out_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": i}),
                   op_string=('-thr %(1)s -uthr %(2)s' % {"1": j, "2": j})).run()

        # binarize
        ImageMaths(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": i}),
                   out_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": i}),
                   op_string="-bin").run()

# now calc the average DICE per segment per subject
for i in sublist:
    for j in seglist:
        # get volume A
        stats = ImageStats(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": i}), op_string='-V').run()
        volume_A = stats.outputs.out_stat[1]

        # get the intersection of this area with each other subject
        # first make a sublist of participants not including self
        sect_list = [a for a in sublist if a != i]
        Dice_list = [0] * len(sect_list)
        for k in sect_list:
            # multiply to get the intersection
            ImageMaths(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": i}),
                       in_file2=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": k}),
                       out_file=('intersect_%(1)s_L_%(2)s_%(3)s.nii.gz' % {"1": j, "2": i, "3": k}),
                       op_string="-mul").run()
            # get volume B
            stats2 = ImageStats(in_file=('%(1)s_L_%(2)s_seg.nii.gz' % {"1": j, "2": k}), op_string='-V').run()
            volume_B = stats2.outputs.out_stat[1]
            # get the volume of the intersection
            stats3 = ImageStats(in_file=('intersect_%(1)s_L_%(2)s_%(3)s.nii.gz' % {"1": j, "2": i, "3": k}), op_string='-V').run()
            volume_I = stats3.outputs.out_stat[1]
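            # Hedged completion (the original snippet is truncated here): with volumes A and B
            # and their intersection I, the standard Dice coefficient is 2*I / (A + B);
            # how the original script stored or averaged the values is not shown.
            Dice_list[sect_list.index(k)] = 2.0 * volume_I / (volume_A + volume_B)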
Example #13
def create_workflow(subject_id, outdir, file_url):
    """Create a workflow for a single participant"""

    sink_directory = os.path.join(outdir, subject_id)

    wf = Workflow(name=subject_id)

    getter = Node(Function(input_names=['url'],
                           output_names=['localfile'],
                           function=download_file),
                  name="download_url")
    getter.inputs.url = file_url

    orienter = Node(Reorient2Std(), name='reorient_brain')
    wf.connect(getter, 'localfile', orienter, 'in_file')

    better = Node(BET(), name='extract_brain')
    wf.connect(orienter, 'out_file', better, 'in_file')

    faster = Node(FAST(), name='segment_brain')
    wf.connect(better, 'out_file', faster, 'in_files')

    firster = Node(FIRST(), name='parcellate_brain')
    structures = [
        'L_Hipp', 'R_Hipp', 'L_Accu', 'R_Accu', 'L_Amyg', 'R_Amyg', 'L_Caud',
        'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', 'L_Thal', 'R_Thal'
    ]
    firster.inputs.list_of_specific_structures = structures
    wf.connect(orienter, 'out_file', firster, 'in_file')

    fslstatser = MapNode(ImageStats(),
                         iterfield=['op_string'],
                         name="compute_segment_stats")
    fslstatser.inputs.op_string = [
        '-l {thr1} -u {thr2} -v'.format(thr1=val + 0.5, thr2=val + 1.5)
        for val in range(3)
    ]
    wf.connect(faster, 'partial_volume_map', fslstatser, 'in_file')

    jsonfiler = Node(Function(
        input_names=['stats', 'seg_file', 'structure_map', 'struct_file'],
        output_names=['out_file'],
        function=toJSON),
                     name='save_json')
    structure_map = [('Background', 0), ('Left-Thalamus-Proper', 10),
                     ('Left-Caudate', 11), ('Left-Putamen', 12),
                     ('Left-Pallidum', 13), ('Left-Hippocampus', 17),
                     ('Left-Amygdala', 18), ('Left-Accumbens-area', 26),
                     ('Right-Thalamus-Proper', 49), ('Right-Caudate', 50),
                     ('Right-Putamen', 51), ('Right-Pallidum', 52),
                     ('Right-Hippocampus', 53), ('Right-Amygdala', 54),
                     ('Right-Accumbens-area', 58)]
    jsonfiler.inputs.structure_map = structure_map
    wf.connect(fslstatser, 'out_stat', jsonfiler, 'stats')
    wf.connect(firster, 'segmentation_file', jsonfiler, 'seg_file')

    sinker = Node(DataSink(), name='store_results')
    sinker.inputs.base_directory = sink_directory
    wf.connect(better, 'out_file', sinker, 'brain')
    wf.connect(faster, 'bias_field', sinker, 'segs.@bias_field')
    wf.connect(faster, 'partial_volume_files', sinker, 'segs.@partial_files')
    wf.connect(faster, 'partial_volume_map', sinker, 'segs.@partial_map')
    wf.connect(faster, 'probability_maps', sinker, 'segs.@prob_maps')
    wf.connect(faster, 'restored_image', sinker, 'segs.@restored')
    wf.connect(faster, 'tissue_class_files', sinker, 'segs.@tissue_files')
    wf.connect(faster, 'tissue_class_map', sinker, 'segs.@tissue_map')
    wf.connect(firster, 'bvars', sinker, 'parcels.@bvars')
    wf.connect(firster, 'original_segmentations', sinker, 'parcels.@origsegs')
    wf.connect(firster, 'segmentation_file', sinker, 'parcels.@segfile')
    wf.connect(firster, 'vtk_surfaces', sinker, 'parcels.@vtk')
    wf.connect(jsonfiler, 'out_file', sinker, '@stats')

    return wf
Example #14
def image_stats_wf(stat_keys, labels, name):
    """Create a workflow to calculate image statistics using fslstats

    :param stat_keys: list of keys indicating which statistics to calculate.
                      Can be any of the keys of :py:const:`pndniworkflows.postprocessing.STATS`.
    :param labels: :py:class:`list` of :py:class:`OrderedDict`, one for each label. Each
                   :py:class:`OrderedDict` must have an "index" field.
                   (e.g. ``[OrderedDict(index=1, name='Brain'), OrderedDict(index=2, name='WM')]``)
    :param name: The name of the workflow

    :return: A :py:mod:`nipype` workflow

    Workflow inputs/outputs

    :param inputspec.in_file: file on which to compute statistics
    :param inputspec.index_mask_file: label file indicating the ROIs of
                                      in_file in which to compute
                                      statistics

    :param outputspec.out_file: output tsv file

    """
    wf = pe.Workflow(name)
    stats = [STATS[key] for key in stat_keys]
    fsl_op_string = ' '.join((stat.flag for stat in stats if stat.fsl))
    stats_op_string = ' '.join((stat.flag for stat in stats if not stat.fsl))
    inputspec = pe.Node(IdentityInterface(['in_file', 'index_mask_file']),
                        'inputspec')
    if fsl_op_string:
        fslimagestats = pe.Node(ImageStats(op_string=fsl_op_string),
                                'fslimagestats')
    if stats_op_string:
        statsimagestats = pe.Node(Stats(op_string=stats_op_string),
                                  'statsimagestats')
    write = pe.Node(WriteFSLStats(), 'write')
    fsl_header = []
    stats_header = []
    for stat in stats:
        if stat.fsl:
            fsl_header += stat.names
        else:
            stats_header += stat.names
    header = fsl_header + stats_header
    write.inputs.statnames = header
    write.inputs.labels = utils.labels2dict(labels, 'name')
    outputspec = pe.Node(IdentityInterface(['out_file']), 'outputspec')
    if fsl_op_string:
        wf.connect([(inputspec, fslimagestats, [('in_file', 'in_file'),
                                                ('index_mask_file',
                                                 'index_mask_file')])])
    if stats_op_string:
        wf.connect([(inputspec, statsimagestats, [('in_file', 'in_file'),
                                                  ('index_mask_file',
                                                   'index_mask_file')])])
    if fsl_op_string and stats_op_string:
        zipper = pe.Node(
            Zipper(chunksize1=len(fsl_header), chunksize2=len(stats_header)),
            'zipper')
        wf.connect(fslimagestats, 'out_stat', zipper, 'list1')
        wf.connect(statsimagestats, 'out_stat', zipper, 'list2')
        wf.connect(zipper, 'out_list', write, 'data')
    elif fsl_op_string:
        wf.connect(fslimagestats, 'out_stat', write, 'data')
    elif stats_op_string:
        wf.connect(statsimagestats, 'out_stat', write, 'data')
    wf.connect(write, 'out_tsv', outputspec, 'out_file')
    return wf
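A usage sketch, not from the original source: the stat keys ('mean', 'volume') are assumptions about what pndniworkflows.postprocessing.STATS provides, and the file names are placeholders.

from collections import OrderedDict
statswf = image_stats_wf(['mean', 'volume'],
                         [OrderedDict(index=1, name='Brain'),
                          OrderedDict(index=2, name='WM')],
                         'stats_wf')
statswf.inputs.inputspec.in_file = 't1.nii.gz'
statswf.inputs.inputspec.index_mask_file = 'labels.nii.gz'
statswf.run()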
Example #15
def create_fatsegnet_workflow(
    subject_list,
    bids_dir,
    work_dir,
    out_dir,
    bids_templates,
    n4=False
):
    # create initial workflow
    wf = Workflow(name='workflow_fatsegnet', base_dir=work_dir)

    # use infosource to iterate workflow across subject list
    n_infosource = Node(
        interface=IdentityInterface(
            fields=['subject_id']
        ),
        name="subject_source"
        # input: 'subject_id'
        # output: 'subject_id'
    )
    # runs the node with subject_id = each element in subject_list
    n_infosource.iterables = ('subject_id', subject_list)

    # select matching files from bids_dir
    n_selectfiles = Node(
        interface=SelectFiles(
            templates=bids_templates,
            base_directory=bids_dir
        ),
        name='get_subject_data'
        # output: ['fat_composed', 'water_composed']
    )
    wf.connect([
        (n_infosource, n_selectfiles, [('subject_id', 'subject_id_p')])
    ])

    if n4:
        mn_n4_fat = Node(
            interface=N4BiasFieldCorrection(),
            iterfield=['input_image'],
            name='N4_fat',
            # output: 'output_image'
        )
        wf.connect([
            (n_selectfiles, mn_n4_fat, [('fat_composed', 'input_image')]),
        ])

        # https://nipype.readthedocs.io/en/latest/api/generated/nipype.interfaces.ants.html
        mn_n4_water = Node(
            interface=N4BiasFieldCorrection(),
            iterfield=['input_image'],
            name='N4_water',
            # output: 'output_image'
        )
        wf.connect([
            (n_selectfiles, mn_n4_water, [('water_composed', 'input_image')]),
        ])

    # scale data
    # or better: https://intensity-normalization.readthedocs.io/en/latest/utilities.html
    def scale(min_and_max):
        min_value = min_and_max[0][0]
        max_value = min_and_max[0][1]
        fsl_cmd = ""

        # set range to [0, 2pi]
        fsl_cmd += "-mul %.10f " % (4)

        return fsl_cmd

    mn_fat_stats = MapNode(
        # -R : <min intensity> <max intensity>
        interface=ImageStats(op_string='-R'),
        iterfield=['in_file'],
        name='get_stats_fat',
        # output: 'out_stat'
    )
    mn_water_stats = MapNode(
        # -R : <min intensity> <max intensity>
        interface=ImageStats(op_string='-R'),
        iterfield=['in_file'],
        name='get_stats_water',
        # output: 'out_stat'
    )

    if n4:
        wf.connect([
            (mn_n4_fat, mn_fat_stats, [('output_image', 'in_file')]),
            (mn_n4_water, mn_water_stats, [('output_image', 'in_file')])
        ])
    else:
        wf.connect([
            (n_selectfiles, mn_fat_stats, [('fat_composed', 'in_file')]),
            (n_selectfiles, mn_water_stats, [('water_composed', 'in_file')])
        ])
    
    mn_fat_scaled = Node(
        interface=ImageMaths(suffix="_scaled"),
        name='fat_scaled',
        iterfield=['in_file']
        # inputs: 'in_file', 'op_string'
        # output: 'out_file'
    )
    mn_water_scaled = Node(
        interface=ImageMaths(suffix="_scaled"),
        name='water_scaled',
        iterfield=['in_file']
        # inputs: 'in_file', 'op_string'
        # output: 'out_file'
    )
    if n4:
        wf.connect([
            (mn_n4_fat, mn_fat_scaled, [('output_image', 'in_file')]),
            (mn_n4_water, mn_water_scaled, [('output_image', 'in_file')]),
            (mn_fat_stats, mn_fat_scaled, [(('out_stat', scale), 'op_string')]),
            (mn_water_stats, mn_water_scaled, [(('out_stat', scale), 'op_string')])
        ])
    else:
        wf.connect([
            (n_selectfiles, mn_fat_scaled, [('fat_composed', 'in_file')]),
            (n_selectfiles, mn_water_scaled, [('water_composed', 'in_file')]),
            (mn_fat_stats, mn_fat_scaled, [(('out_stat', scale), 'op_string')]),
            (mn_water_stats, mn_water_scaled, [(('out_stat', scale), 'op_string')])
        ])

    # fatsegnet could work here when running on cluster, but in multiproc memory issues on GPU
    # mn_fatsegnet = MapNode(
    #         interface=fatsegnet.FatSegNetInterface(
    #             out_suffix='/afm02/Q2/Q2653/data/2021-01-18-fatsegnet-output/out_5'
    #         ),
    #         iterfield=['water_file', 'fat_file'],
    #         name='fatsegnet'
    #         # output: 'out_file'
    #     )

    # wf.connect([
    #         (mn_fat_scaled, mn_fatsegnet, [('out_file', 'fat_file')]),
    #         (mn_water_scaled, mn_fatsegnet, [('out_file', 'water_file')]),
    #     ])


    # datasink
    n_datasink_fat = Node(
        interface=DataSink(base_directory=bids_dir, 
            container=out_dir,
            parameterization=True, 
            substitutions=[('_subject_id_', '')],
            regexp_substitutions=[(r'sub-\w{5}_t1.*', 'FatImaging_F.nii.gz')]),
        name='datasink_fat'
    )    
    n_datasink_water = Node(
        interface=DataSink(base_directory=bids_dir, 
            container=out_dir,
            parameterization=True, 
            substitutions=[('_subject_id_', '')],
            regexp_substitutions=[(r'sub-\w{5}_t1.*', 'FatImaging_W.nii.gz')]),
        name='datasink_water'
    )
    # https://pythex.org/: search for sub-, then 5 numbers then _t1 and grab the rest

    wf.connect([
            (mn_fat_scaled, n_datasink_fat, [('out_file', 'preprocessed_mul4.@fat')]),
            (mn_water_scaled, n_datasink_water, [('out_file', 'preprocessed_mul4.@water')]),
        ])
    # https://nipype.readthedocs.io/en/0.11.0/users/grabbing_and_sinking.html
    # https://miykael.github.io/nipype_tutorial/notebooks/example_1stlevel.html
    # The period (.) indicates that a subfolder should be created. 
    # But if we wanted to store it in the same folder, 
    # we would use the .@ syntax. The @ tells the DataSink interface to not create the subfolder. 

    return wf
Example #16
selectfiles = Node(SelectFiles(templates, base_directory=experiment_dir),
                   name='selectfiles')

wf.connect([(infosource, selectfiles, [('subject_id', 'subject_id')])])
# </editor-fold>

# <editor-fold desc="Brain Extraction">
bet_n = MapNode(BET(frac=0.4, mask=True, robust=True),
                name='bet_node',
                iterfield=['in_file'])

wf.connect([(selectfiles, bet_n, [('mag', 'in_file')])])
# </editor-fold>

# <editor-fold desc="Scale phase data">
stats = MapNode(ImageStats(op_string='-R'),
                name='stats_node',
                iterfield=['in_file'])


def scale_to_pi(min_and_max):
    data_min = min_and_max[0][0]
    data_max = min_and_max[0][1]
    # TODO: Test at 3T with -4096 to + 4096 range
    return '-add %.10f -div %.10f -mul 6.28318530718 -sub 3.14159265359' % (
        data_min, data_max + data_min)


phs_range_n = MapNode(ImageMaths(),
                      name='phs_range_node',
                      iterfield=['in_file'])
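
The example is cut off before these nodes are wired together; a hedged sketch of the likely connections, mirroring the out_stat -> op_string pattern used in the FatSegNet workflow above (the 'phs' template key is an assumption; only 'mag' appears in the visible code):

wf.connect([(selectfiles, stats, [('phs', 'in_file')]),
            (selectfiles, phs_range_n, [('phs', 'in_file')]),
            (stats, phs_range_n, [(('out_stat', scale_to_pi), 'op_string')])])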
Example #17
fill_tissue_2.inputs.operation = "fillh"
fill_tissue_3 = Node(UnaryMaths(), name="Fill_Tissue3_Mask")
fill_tissue_3.inputs.operation = "fillh"

fill_tissue_head = Node(UnaryMaths(), name="Fill_HeadMask")
fill_tissue_head.inputs.operation = "fillh"

cos_tissue_head = Node(UnaryMaths(), name="Cos_HeadMask")
cos_tissue_head.inputs.operation = "cos"

thr_tissue_head = Node(Threshold(), name="Reversed_HeadMask")
thr_tissue_head.inputs.thresh = 0.8  #Must be above cos(1)=~0.54 to properly reverse
thr_tissue_head.inputs.args = "-bin"

#ImageStats - Mean - Std - Length on GM,WM,CSF,BG
mean_image_t1_gm = Node(ImageStats(op_string="-k %s -M "), name="Mean_T1_GM")
mean_image_t1_wm = Node(ImageStats(op_string="-k %s -M "), name="Mean_T1_WM")
mean_image_t1_csf = Node(ImageStats(op_string="-k %s -M "), name="Mean_T1_CSF")
mean_image_t1_bg = Node(ImageStats(op_string="-k %s -M "), name="Mean_T1_BG")

std_image_t1_gm = Node(ImageStats(op_string="-k %s -S "), name="STD_T1_GM")
std_image_t1_wm = Node(ImageStats(op_string="-k %s -S "), name="STD_T1_WM")
std_image_t1_csf = Node(ImageStats(op_string="-k %s -S "), name="STD_T1_CSF")
std_image_t1_bg = Node(ImageStats(op_string="-k %s -S "), name="STD_T1_BG")

size_image_t1_gm = Node(ImageStats(op_string="-k %s -V "), name="Size_T1_GM")
size_image_t1_wm = Node(ImageStats(op_string="-k %s -V "), name="Size_T1_WM")
size_image_t1_csf = Node(ImageStats(op_string="-k %s -V "), name="Size_T1_CSF")
size_image_t1_bg = Node(ImageStats(op_string="-k %s -V "), name="Size_T1_BG")

                   name='applyVolReg')

# Despike node - despike data
despike = Node(Despike(outputtype='NIFTI'),
               name='despike')

# TSNR node - remove polynomials 2nd order
tsnr = Node(TSNR(regress_poly=2),
            name='tsnr')

# Demean node - demean data
demean = Node(BinaryMaths(operation='sub'),
              name='demean')

# Standard deviation node - calculate standard deviation
standev = Node(ImageStats(op_string='-S'), name='standev')

# Z-standardize node - z-standardize data
zstandardize = Node(BinaryMaths(operation = 'div'), name='zstandardize')

# Create the MVPA preprocessing workflow
preproc_ALPACA_MVPA = Workflow(name='preproc_ALPACA_MVPA')
preproc_ALPACA_MVPA.base_dir = opj(experiment_dir, working_dir)

# Connect the MVPA preprocessing nodes to a pipeline
preproc_ALPACA_MVPA.connect([(despike, applyVolReg,[('out_file', 'source_file')]),
                 (applyVolReg, tsnr, [('transformed_file', 'in_file')]),
                 (tsnr, demean, [('detrended_file', 'in_file')]),
                 (tsnr, demean, [('mean_file', 'operand_file')]),
                 (demean, standev, [('out_file', 'in_file')]),
                 (demean, zstandardize, [('out_file', 'in_file')]),