Example #1
def test_FLIRT_outputs():
    output_map = dict(out_file=dict(),
    out_log=dict(),
    out_matrix_file=dict(),
    )
    outputs = FLIRT.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #2
def test_FLIRT_outputs():
    output_map = dict(
        out_file=dict(),
        out_log=dict(),
        out_matrix_file=dict(),
    )
    outputs = FLIRT.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #3
def Lesion_extractor(
    name='Lesion_Extractor',
    wf_name='Test',
    base_dir='/homes_unix/alaurent/',
    input_dir=None,
    subjects=None,
    main=None,
    acc=None,
    atlas='/homes_unix/alaurent/cbstools-public-master/atlases/brain-segmentation-prior3.0/brain-atlas-quant-3.0.8.txt'
):

    wf = Workflow(wf_name)
    wf.base_dir = base_dir

    #file = open(subjects,"r")
    #subjects = file.read().split("\n")
    #file.close()

    # Subject List
    subjectList = Node(IdentityInterface(fields=['subject_id'],
                                         mandatory_inputs=True),
                       name="subList")
    subjectList.iterables = ('subject_id', [
        sub for sub in subjects if sub != '' and sub != '\n'
    ])

    # T1w and FLAIR
    scanList = Node(DataGrabber(infields=['subject_id'],
                                outfields=['T1', 'FLAIR']),
                    name="scanList")
    scanList.inputs.base_directory = input_dir
    scanList.inputs.ignore_exception = False
    scanList.inputs.raise_on_empty = True
    scanList.inputs.sort_filelist = True
    #scanList.inputs.template = '%s/%s.nii'
    #scanList.inputs.template_args = {'T1': [['subject_id','T1*']],
    #                                 'FLAIR': [['subject_id','FLAIR*']]}
    scanList.inputs.template = '%s/anat/%s'
    scanList.inputs.template_args = {
        'T1': [['subject_id', '*_T1w.nii.gz']],
        'FLAIR': [['subject_id', '*_FLAIR.nii.gz']]
    }
    wf.connect(subjectList, "subject_id", scanList, "subject_id")

    #     # T1w and FLAIR
    #     dg = Node(DataGrabber(outfields=['T1', 'FLAIR']), name="T1wFLAIR")
    #     dg.inputs.base_directory = "/homes_unix/alaurent/LesionPipeline"
    #     dg.inputs.template = "%s/NIFTI/*.nii.gz"
    #     dg.inputs.template_args['T1']=[['7']]
    #     dg.inputs.template_args['FLAIR']=[['9']]
    #     dg.inputs.sort_filelist=True

    # Reorient Volume
    T1Conv = Node(Reorient2Std(), name="ReorientVolume")
    T1Conv.inputs.ignore_exception = False
    T1Conv.inputs.terminal_output = 'none'
    T1Conv.inputs.out_file = "T1_reoriented.nii.gz"
    wf.connect(scanList, "T1", T1Conv, "in_file")

    # Reorient Volume (2)
    T2flairConv = Node(Reorient2Std(), name="ReorientVolume2")
    T2flairConv.inputs.ignore_exception = False
    T2flairConv.inputs.terminal_output = 'none'
    T2flairConv.inputs.out_file = "FLAIR_reoriented.nii.gz"
    wf.connect(scanList, "FLAIR", T2flairConv, "in_file")

    # N3 Correction
    T1NUC = Node(N4BiasFieldCorrection(), name="N3Correction")
    T1NUC.inputs.dimension = 3
    T1NUC.inputs.environ = {'NSLOTS': '1'}
    T1NUC.inputs.ignore_exception = False
    T1NUC.inputs.num_threads = 1
    T1NUC.inputs.save_bias = False
    T1NUC.inputs.terminal_output = 'none'
    wf.connect(T1Conv, "out_file", T1NUC, "input_image")

    # N3 Correction (2)
    T2flairNUC = Node(N4BiasFieldCorrection(), name="N3Correction2")
    T2flairNUC.inputs.dimension = 3
    T2flairNUC.inputs.environ = {'NSLOTS': '1'}
    T2flairNUC.inputs.ignore_exception = False
    T2flairNUC.inputs.num_threads = 1
    T2flairNUC.inputs.save_bias = False
    T2flairNUC.inputs.terminal_output = 'none'
    wf.connect(T2flairConv, "out_file", T2flairNUC, "input_image")
    '''
    #####################
    ### PRE-NORMALIZE ###
    #####################
    Make sure there are no outlier values (negative, or extremely high) that could throw off the initialization steps.
    '''

    # Intensity Range Normalization
    getMaxT1NUC = Node(ImageStats(op_string='-r'), name="getMaxT1NUC")
    wf.connect(T1NUC, 'output_image', getMaxT1NUC, 'in_file')

    T1NUCirn = Node(AbcImageMaths(), name="IntensityNormalization")
    T1NUCirn.inputs.op_string = "-div"
    T1NUCirn.inputs.out_file = "normT1.nii.gz"
    wf.connect(T1NUC, 'output_image', T1NUCirn, 'in_file')
    wf.connect(getMaxT1NUC, ('out_stat', getElementFromList, 1), T1NUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2NUC = Node(ImageStats(op_string='-r'), name="getMaxT2")
    wf.connect(T2flairNUC, 'output_image', getMaxT2NUC, 'in_file')

    T2NUCirn = Node(AbcImageMaths(), name="IntensityNormalization2")
    T2NUCirn.inputs.op_string = "-div"
    T2NUCirn.inputs.out_file = "normT2.nii.gz"
    wf.connect(T2flairNUC, 'output_image', T2NUCirn, 'in_file')
    wf.connect(getMaxT2NUC, ('out_stat', getElementFromList, 1), T2NUCirn,
               "op_value")
    '''
    ########################
    #### COREGISTRATION ####
    ########################
    '''

    # Optimized Automated Registration
    T2flairCoreg = Node(FLIRT(), name="OptimizedAutomatedRegistration")
    T2flairCoreg.inputs.output_type = 'NIFTI_GZ'
    wf.connect(T2NUCirn, "out_file", T2flairCoreg, "in_file")
    wf.connect(T1NUCirn, "out_file", T2flairCoreg, "reference")
    '''    
    #########################
    #### SKULL-STRIPPING ####
    #########################
    '''

    # SPECTRE
    T1ss = Node(BET(), name="SPECTRE")
    T1ss.inputs.frac = 0.45  #0.4
    T1ss.inputs.mask = True
    T1ss.inputs.outline = True
    T1ss.inputs.robust = True
    wf.connect(T1NUCirn, "out_file", T1ss, "in_file")

    # Image Calculator
    T2ss = Node(ApplyMask(), name="ImageCalculator")
    wf.connect(T1ss, "mask_file", T2ss, "mask_file")
    wf.connect(T2flairCoreg, "out_file", T2ss, "in_file")
    '''
    ####################################
    #### 2nd LAYER OF N3 CORRECTION ####
    ####################################
    This time without the skull: a significant amount of inhomogeneity was left over.
    '''

    # N3 Correction (3)
    T1ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction3")
    T1ssNUC.inputs.dimension = 3
    T1ssNUC.inputs.environ = {'NSLOTS': '1'}
    T1ssNUC.inputs.ignore_exception = False
    T1ssNUC.inputs.num_threads = 1
    T1ssNUC.inputs.save_bias = False
    T1ssNUC.inputs.terminal_output = 'none'
    wf.connect(T1ss, "out_file", T1ssNUC, "input_image")

    # N3 Correction (4)
    T2ssNUC = Node(N4BiasFieldCorrection(), name="N3Correction4")
    T2ssNUC.inputs.dimension = 3
    T2ssNUC.inputs.environ = {'NSLOTS': '1'}
    T2ssNUC.inputs.ignore_exception = False
    T2ssNUC.inputs.num_threads = 1
    T2ssNUC.inputs.save_bias = False
    T2ssNUC.inputs.terminal_output = 'none'
    wf.connect(T2ss, "out_file", T2ssNUC, "input_image")
    '''
    ####################################
    ####    NORMALIZE FOR MGDM      ####
    ####################################
    This normalization is fairly aggressive: it is only useful for giving MGDM
    a cropped dynamic range, and possibly harmful to further processing,
    so the unprocessed images are passed to the subsequent steps.
    '''

    # Intensity Range Normalization
    getMaxT1ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT1ssNUC")
    wf.connect(T1ssNUC, 'output_image', getMaxT1ssNUC, 'in_file')

    T1ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization3")
    T1ssNUCirn.inputs.op_string = "-div"
    T1ssNUCirn.inputs.out_file = "normT1ss.nii.gz"
    wf.connect(T1ssNUC, 'output_image', T1ssNUCirn, 'in_file')
    wf.connect(getMaxT1ssNUC, ('out_stat', getElementFromList, 1), T1ssNUCirn,
               "op_value")

    # Intensity Range Normalization (2)
    getMaxT2ssNUC = Node(ImageStats(op_string='-r'), name="getMaxT2ssNUC")
    wf.connect(T2ssNUC, 'output_image', getMaxT2ssNUC, 'in_file')

    T2ssNUCirn = Node(AbcImageMaths(), name="IntensityNormalization4")
    T2ssNUCirn.inputs.op_string = "-div"
    T2ssNUCirn.inputs.out_file = "normT2ss.nii.gz"
    wf.connect(T2ssNUC, 'output_image', T2ssNUCirn, 'in_file')
    wf.connect(getMaxT2ssNUC, ('out_stat', getElementFromList, 1), T2ssNUCirn,
               "op_value")
    '''
    ####################################
    ####      ESTIMATE CSF PV       ####
    ####################################
    Here we try to get a better handle on CSF voxels to help the segmentation step
    '''

    # Recursive Ridge Diffusion
    CSF_pv = Node(RecursiveRidgeDiffusion(), name='estimate_CSF_pv')
    CSF_pv.plugin_args = {'sbatch_args': '--mem 6000'}
    CSF_pv.inputs.ridge_intensities = "dark"
    CSF_pv.inputs.ridge_filter = "2D"
    CSF_pv.inputs.orientation = "undefined"
    CSF_pv.inputs.ang_factor = 1.0
    CSF_pv.inputs.min_scale = 0
    CSF_pv.inputs.max_scale = 3
    CSF_pv.inputs.propagation_model = "diffusion"
    CSF_pv.inputs.diffusion_factor = 0.5
    CSF_pv.inputs.similarity_scale = 0.1
    CSF_pv.inputs.neighborhood_size = 4
    CSF_pv.inputs.max_iter = 100
    CSF_pv.inputs.max_diff = 0.001
    CSF_pv.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, CSF_pv.name),
        CSF_pv, 'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', CSF_pv, 'input_image')
    '''
    ####################################
    ####            MGDM            ####
    ####################################
    '''

    # Multi-contrast Brain Segmentation
    MGDM = Node(MGDMSegmentation(), name='MGDM')
    MGDM.plugin_args = {'sbatch_args': '--mem 7000'}
    MGDM.inputs.contrast_type1 = "Mprage3T"
    MGDM.inputs.contrast_type2 = "FLAIR3T"
    MGDM.inputs.contrast_type3 = "PVDURA"
    MGDM.inputs.save_data = True
    MGDM.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, MGDM.name), MGDM,
        'output_dir')
    wf.connect(T1ssNUCirn, 'out_file', MGDM, 'contrast_image1')
    wf.connect(T2ssNUCirn, 'out_file', MGDM, 'contrast_image2')
    wf.connect(CSF_pv, 'ridge_pv', MGDM, 'contrast_image3')

    # Enhance Region Contrast
    ERC = Node(EnhanceRegionContrast(), name='ERC')
    ERC.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC.inputs.enhanced_region = "crwm"
    ERC.inputs.contrast_background = "crgm"
    ERC.inputs.partial_voluming_distance = 2.0
    ERC.inputs.save_data = True
    ERC.inputs.atlas_file = atlas
    wf.connect(subjectList,
               ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC.name),
               ERC, 'output_dir')
    wf.connect(T1ssNUC, 'output_image', ERC, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC, 'levelset_boundary_image')

    # Enhance Region Contrast (2)
    ERC2 = Node(EnhanceRegionContrast(), name='ERC2')
    ERC2.plugin_args = {'sbatch_args': '--mem 7000'}
    ERC2.inputs.enhanced_region = "crwm"
    ERC2.inputs.contrast_background = "crgm"
    ERC2.inputs.partial_voluming_distance = 2.0
    ERC2.inputs.save_data = True
    ERC2.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, ERC2.name), ERC2,
        'output_dir')
    wf.connect(T2ssNUC, 'output_image', ERC2, 'intensity_image')
    wf.connect(MGDM, 'segmentation', ERC2, 'segmentation_image')
    wf.connect(MGDM, 'distance', ERC2, 'levelset_boundary_image')

    # Define Multi-Region Priors
    DMRP = Node(DefineMultiRegionPriors(), name='DefineMultRegPriors')
    DMRP.plugin_args = {'sbatch_args': '--mem 6000'}
    #DMRP.inputs.defined_region = "ventricle-horns"
    #DMRP.inputs.definition_method = "closest-distance"
    DMRP.inputs.distance_offset = 3.0
    DMRP.inputs.save_data = True
    DMRP.inputs.atlas_file = atlas
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, DMRP.name), DMRP,
        'output_dir')
    wf.connect(MGDM, 'segmentation', DMRP, 'segmentation_image')
    wf.connect(MGDM, 'distance', DMRP, 'levelset_boundary_image')
    '''
    ###############################################
    ####      REMOVE VENTRICLE POSTERIOR       ####
    ###############################################
    Due to topology constraints, the ventricles are often not fully segmented:
    here we add back all ventricle voxels from the posterior probabilities (computed without the topology constraints).
    '''

    # Posterior label
    PostLabel = Node(Split(), name='PosteriorLabel')
    PostLabel.inputs.dimension = "t"
    wf.connect(MGDM, 'labels', PostLabel, 'in_file')

    # Posterior proba
    PostProba = Node(Split(), name='PosteriorProba')
    PostProba.inputs.dimension = "t"
    wf.connect(MGDM, 'memberships', PostProba, 'in_file')

    # Threshold binary mask : ventricle label part 1
    VentLabel1 = Node(Threshold(), name="VentricleLabel1")
    VentLabel1.inputs.thresh = 10.5
    VentLabel1.inputs.direction = "below"
    wf.connect(PostLabel, ("out_files", getFirstElement), VentLabel1,
               "in_file")

    # Threshold binary mask : ventricle label part 2
    VentLabel2 = Node(Threshold(), name="VentricleLabel2")
    VentLabel2.inputs.thresh = 13.5
    VentLabel2.inputs.direction = "above"
    wf.connect(VentLabel1, "out_file", VentLabel2, "in_file")

    # Image calculator : ventricle proba
    VentProba = Node(ImageMaths(), name="VentricleProba")
    VentProba.inputs.op_string = "-mul"
    VentProba.inputs.out_file = "ventproba.nii.gz"
    wf.connect(PostProba, ("out_files", getFirstElement), VentProba, "in_file")
    wf.connect(VentLabel2, "out_file", VentProba, "in_file2")

    # Image calculator : remove inter ventricles
    RmInterVent = Node(ImageMaths(), name="RemoveInterVent")
    RmInterVent.inputs.op_string = "-sub"
    RmInterVent.inputs.out_file = "rmintervent.nii.gz"
    wf.connect(ERC, "region_pv", RmInterVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInterVent, "in_file2")

    # Image calculator : add horns
    AddHorns = Node(ImageMaths(), name="AddHorns")
    AddHorns.inputs.op_string = "-add"
    AddHorns.inputs.out_file = "rmvent.nii.gz"
    wf.connect(RmInterVent, "out_file", AddHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddHorns, "in_file2")

    # Image calculator : remove ventricles
    RmVent = Node(ImageMaths(), name="RemoveVentricles")
    RmVent.inputs.op_string = "-sub"
    RmVent.inputs.out_file = "rmvent.nii.gz"
    wf.connect(AddHorns, "out_file", RmVent, "in_file")
    wf.connect(VentProba, "out_file", RmVent, "in_file2")

    # Image calculator : remove internal capsule
    RmIC = Node(ImageMaths(), name="RemoveInternalCap")
    RmIC.inputs.op_string = "-sub"
    RmIC.inputs.out_file = "rmic.nii.gz"
    wf.connect(RmVent, "out_file", RmIC, "in_file")
    wf.connect(DMRP, "internal_capsule_pv", RmIC, "in_file2")

    # Intensity Range Normalization (3)
    getMaxRmIC = Node(ImageStats(op_string='-r'), name="getMaxRmIC")
    wf.connect(RmIC, 'out_file', getMaxRmIC, 'in_file')

    RmICirn = Node(AbcImageMaths(), name="IntensityNormalization5")
    RmICirn.inputs.op_string = "-div"
    RmICirn.inputs.out_file = "normRmIC.nii.gz"
    wf.connect(RmIC, 'out_file', RmICirn, 'in_file')
    wf.connect(getMaxRmIC, ('out_stat', getElementFromList, 1), RmICirn,
               "op_value")

    # Probability To Levelset : WM orientation
    WM_Orient = Node(ProbabilityToLevelset(), name='WM_Orientation')
    WM_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_Orient.name),
        WM_Orient, 'output_dir')
    wf.connect(RmICirn, 'out_file', WM_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in WM only
    WM_pvs = Node(RecursiveRidgeDiffusion(), name='PVS_in_WM')
    WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    WM_pvs.inputs.ridge_intensities = "bright"
    WM_pvs.inputs.ridge_filter = "1D"
    WM_pvs.inputs.orientation = "orthogonal"
    WM_pvs.inputs.ang_factor = 1.0
    WM_pvs.inputs.min_scale = 0
    WM_pvs.inputs.max_scale = 3
    WM_pvs.inputs.propagation_model = "diffusion"
    WM_pvs.inputs.diffusion_factor = 1.0
    WM_pvs.inputs.similarity_scale = 1.0
    WM_pvs.inputs.neighborhood_size = 2
    WM_pvs.inputs.max_iter = 100
    WM_pvs.inputs.max_diff = 0.001
    WM_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, WM_pvs.name),
        WM_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', WM_pvs, 'input_image')
    wf.connect(WM_Orient, 'levelset', WM_pvs, 'surface_levelset')
    wf.connect(RmICirn, 'out_file', WM_pvs, 'loc_prior')

    # Extract Lesions : extract WM PVS
    extract_WM_pvs = Node(LesionExtraction(), name='ExtractPVSfromWM')
    extract_WM_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WM_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WM_pvs.inputs.csf_boundary_partial_vol_dist = 3.0
    extract_WM_pvs.inputs.lesion_clust_dist = 1.0
    extract_WM_pvs.inputs.prob_min_thresh = 0.1
    extract_WM_pvs.inputs.prob_max_thresh = 0.33
    extract_WM_pvs.inputs.small_lesion_size = 4.0
    extract_WM_pvs.inputs.save_data = True
    extract_WM_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WM_pvs.name), extract_WM_pvs,
               'output_dir')
    wf.connect(WM_pvs, 'propagation', extract_WM_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WM_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WM_pvs, 'levelset_boundary_image')
    wf.connect(RmICirn, 'out_file', extract_WM_pvs, 'location_prior_image')
    '''
    2nd branch
    '''

    # Image calculator : internal capsule without ventricles
    ICwoVent = Node(ImageMaths(), name="ICWithoutVentricules")
    ICwoVent.inputs.op_string = "-sub"
    ICwoVent.inputs.out_file = "icwovent.nii.gz"
    wf.connect(DMRP, "internal_capsule_pv", ICwoVent, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", ICwoVent, "in_file2")

    # Image calculator : remove ventricles IC
    RmVentIC = Node(ImageMaths(), name="RmVentIC")
    RmVentIC.inputs.op_string = "-sub"
    RmVentIC.inputs.out_file = "RmVentIC.nii.gz"
    wf.connect(ICwoVent, "out_file", RmVentIC, "in_file")
    wf.connect(VentProba, "out_file", RmVentIC, "in_file2")

    # Intensity Range Normalization (4)
    getMaxRmVentIC = Node(ImageStats(op_string='-r'), name="getMaxRmVentIC")
    wf.connect(RmVentIC, 'out_file', getMaxRmVentIC, 'in_file')

    RmVentICirn = Node(AbcImageMaths(), name="IntensityNormalization6")
    RmVentICirn.inputs.op_string = "-div"
    RmVentICirn.inputs.out_file = "normRmVentIC.nii.gz"
    wf.connect(RmVentIC, 'out_file', RmVentICirn, 'in_file')
    wf.connect(getMaxRmVentIC, ('out_stat', getElementFromList, 1),
               RmVentICirn, "op_value")

    # Probability To Levelset : IC orientation
    IC_Orient = Node(ProbabilityToLevelset(), name='IC_Orientation')
    IC_Orient.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_Orient.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_Orient.name),
        IC_Orient, 'output_dir')
    wf.connect(RmVentICirn, 'out_file', IC_Orient, 'probability_image')

    # Recursive Ridge Diffusion : PVS in IC only
    IC_pvs = Node(RecursiveRidgeDiffusion(), name='RecursiveRidgeDiffusion2')
    IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    IC_pvs.inputs.ridge_intensities = "bright"
    IC_pvs.inputs.ridge_filter = "1D"
    IC_pvs.inputs.orientation = "undefined"
    IC_pvs.inputs.ang_factor = 1.0
    IC_pvs.inputs.min_scale = 0
    IC_pvs.inputs.max_scale = 3
    IC_pvs.inputs.propagation_model = "diffusion"
    IC_pvs.inputs.diffusion_factor = 1.0
    IC_pvs.inputs.similarity_scale = 1.0
    IC_pvs.inputs.neighborhood_size = 2
    IC_pvs.inputs.max_iter = 100
    IC_pvs.inputs.max_diff = 0.001
    IC_pvs.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, IC_pvs.name),
        IC_pvs, 'output_dir')
    wf.connect(ERC, 'background_proba', IC_pvs, 'input_image')
    wf.connect(IC_Orient, 'levelset', IC_pvs, 'surface_levelset')
    wf.connect(RmVentICirn, 'out_file', IC_pvs, 'loc_prior')

    # Extract Lesions : extract IC PVS
    extract_IC_pvs = Node(LesionExtraction(), name='ExtractPVSfromIC')
    extract_IC_pvs.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_IC_pvs.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_IC_pvs.inputs.csf_boundary_partial_vol_dist = 4.0
    extract_IC_pvs.inputs.lesion_clust_dist = 1.0
    extract_IC_pvs.inputs.prob_min_thresh = 0.25
    extract_IC_pvs.inputs.prob_max_thresh = 0.5
    extract_IC_pvs.inputs.small_lesion_size = 4.0
    extract_IC_pvs.inputs.save_data = True
    extract_IC_pvs.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_IC_pvs.name), extract_IC_pvs,
               'output_dir')
    wf.connect(IC_pvs, 'propagation', extract_IC_pvs, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_IC_pvs, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_IC_pvs, 'levelset_boundary_image')
    wf.connect(RmVentICirn, 'out_file', extract_IC_pvs, 'location_prior_image')
    '''
    3rd branch
    '''

    # Image calculator : remove inter-ventricular region
    RmInter = Node(ImageMaths(), name="RemoveInterVentricules")
    RmInter.inputs.op_string = "-sub"
    RmInter.inputs.out_file = "rminter.nii.gz"
    wf.connect(ERC2, 'region_pv', RmInter, "in_file")
    wf.connect(DMRP, "inter_ventricular_pv", RmInter, "in_file2")

    # Image calculator : add ventricular horns
    AddVentHorns = Node(ImageMaths(), name="AddVentHorns")
    AddVentHorns.inputs.op_string = "-add"
    AddVentHorns.inputs.out_file = "rminter.nii.gz"
    wf.connect(RmInter, 'out_file', AddVentHorns, "in_file")
    wf.connect(DMRP, "ventricular_horns_pv", AddVentHorns, "in_file2")

    # Intensity Range Normalization (5)
    getMaxAddVentHorns = Node(ImageStats(op_string='-r'),
                              name="getMaxAddVentHorns")
    wf.connect(AddVentHorns, 'out_file', getMaxAddVentHorns, 'in_file')

    AddVentHornsirn = Node(AbcImageMaths(), name="IntensityNormalization7")
    AddVentHornsirn.inputs.op_string = "-div"
    AddVentHornsirn.inputs.out_file = "normAddVentHorns.nii.gz"
    wf.connect(AddVentHorns, 'out_file', AddVentHornsirn, 'in_file')
    wf.connect(getMaxAddVentHorns, ('out_stat', getElementFromList, 1),
               AddVentHornsirn, "op_value")

    # Extract Lesions : extract White Matter Hyperintensities
    extract_WMH = Node(LesionExtraction(), name='Extract_WMH')
    extract_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_WMH.inputs.lesion_clust_dist = 1.0
    extract_WMH.inputs.prob_min_thresh = 0.84
    extract_WMH.inputs.prob_max_thresh = 0.84
    extract_WMH.inputs.small_lesion_size = 4.0
    extract_WMH.inputs.save_data = True
    extract_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_WMH.name), extract_WMH,
               'output_dir')
    wf.connect(ERC2, 'background_proba', extract_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_WMH2 = extract_WMH.clone(name='Extract_WMH2')
    # extract_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH2.name),extract_WMH2,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH2,'location_prior_image')
    #
    # extract_WMH3 = extract_WMH.clone(name='Extract_WMH3')
    # extract_WMH3.inputs.gm_boundary_partial_vol_dist = 3.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_WMH3.name),extract_WMH3,'output_dir')
    # wf.connect(ERC2,'background_proba',extract_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     FINDING SMALL WMHs     ####
    ####################################
    Small round WMHs near the cortex are often missed by the main algorithm,
    so we add this extra step to take care of them.
    '''

    # Recursive Ridge Diffusion : round WMH detection
    round_WMH = Node(RecursiveRidgeDiffusion(), name='round_WMH')
    round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    round_WMH.inputs.ridge_intensities = "bright"
    round_WMH.inputs.ridge_filter = "0D"
    round_WMH.inputs.orientation = "undefined"
    round_WMH.inputs.ang_factor = 1.0
    round_WMH.inputs.min_scale = 1
    round_WMH.inputs.max_scale = 4
    round_WMH.inputs.propagation_model = "none"
    round_WMH.inputs.diffusion_factor = 1.0
    round_WMH.inputs.similarity_scale = 0.1
    round_WMH.inputs.neighborhood_size = 4
    round_WMH.inputs.max_iter = 100
    round_WMH.inputs.max_diff = 0.001
    round_WMH.inputs.save_data = True
    wf.connect(
        subjectList,
        ('subject_id', createOutputDir, wf.base_dir, wf.name, round_WMH.name),
        round_WMH, 'output_dir')
    wf.connect(ERC2, 'background_proba', round_WMH, 'input_image')
    wf.connect(AddVentHornsirn, 'out_file', round_WMH, 'loc_prior')

    # Extract Lesions : extract round WMH
    extract_round_WMH = Node(LesionExtraction(), name='Extract_round_WMH')
    extract_round_WMH.plugin_args = {'sbatch_args': '--mem 6000'}
    extract_round_WMH.inputs.gm_boundary_partial_vol_dist = 1.0
    extract_round_WMH.inputs.csf_boundary_partial_vol_dist = 2.0
    extract_round_WMH.inputs.lesion_clust_dist = 1.0
    extract_round_WMH.inputs.prob_min_thresh = 0.33
    extract_round_WMH.inputs.prob_max_thresh = 0.33
    extract_round_WMH.inputs.small_lesion_size = 6.0
    extract_round_WMH.inputs.save_data = True
    extract_round_WMH.inputs.atlas_file = atlas
    wf.connect(subjectList, ('subject_id', createOutputDir, wf.base_dir,
                             wf.name, extract_round_WMH.name),
               extract_round_WMH, 'output_dir')
    wf.connect(round_WMH, 'ridge_pv', extract_round_WMH, 'probability_image')
    wf.connect(MGDM, 'segmentation', extract_round_WMH, 'segmentation_image')
    wf.connect(MGDM, 'distance', extract_round_WMH, 'levelset_boundary_image')
    wf.connect(AddVentHornsirn, 'out_file', extract_round_WMH,
               'location_prior_image')

    #===========================================================================
    # extract_round_WMH2 = extract_round_WMH.clone(name='Extract_round_WMH2')
    # extract_round_WMH2.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH2.name),extract_round_WMH2,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH2,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH2,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH2,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH2,'location_prior_image')
    #
    # extract_round_WMH3 = extract_round_WMH.clone(name='Extract_round_WMH3')
    # extract_round_WMH3.inputs.gm_boundary_partial_vol_dist = 2.0
    # wf.connect(subjectList,('subject_id',createOutputDir,wf.base_dir,wf.name,extract_round_WMH3.name),extract_round_WMH3,'output_dir')
    # wf.connect(round_WMH,'ridge_pv',extract_round_WMH3,'probability_image')
    # wf.connect(MGDM,'segmentation',extract_round_WMH3,'segmentation_image')
    # wf.connect(MGDM,'distance',extract_round_WMH3,'levelset_boundary_image')
    # wf.connect(AddVentHornsirn,'out_file',extract_round_WMH3,'location_prior_image')
    #===========================================================================
    '''
    ####################################
    ####     COMBINE BOTH TYPES     ####
    ####################################
    Small round WMHs and regular WMHs are combined before thresholding,
    as are the PVS maps from the white matter and the internal capsule.
    '''

    # Image calculator : WM + IC DVRS
    DVRS = Node(ImageMaths(), name="DVRS")
    DVRS.inputs.op_string = "-max"
    DVRS.inputs.out_file = "DVRS_map.nii.gz"
    wf.connect(extract_WM_pvs, 'lesion_score', DVRS, "in_file")
    wf.connect(extract_IC_pvs, "lesion_score", DVRS, "in_file2")

    # Image calculator : WMH + round
    WMH = Node(ImageMaths(), name="WMH")
    WMH.inputs.op_string = "-max"
    WMH.inputs.out_file = "WMH_map.nii.gz"
    wf.connect(extract_WMH, 'lesion_score', WMH, "in_file")
    wf.connect(extract_round_WMH, "lesion_score", WMH, "in_file2")

    #===========================================================================
    # WMH2 = Node(ImageMaths(), name="WMH2")
    # WMH2.inputs.op_string = "-max"
    # WMH2.inputs.out_file = "WMH2_map.nii.gz"
    # wf.connect(extract_WMH2,'lesion_score',WMH2,"in_file")
    # wf.connect(extract_round_WMH2,"lesion_score", WMH2, "in_file2")
    #
    # WMH3 = Node(ImageMaths(), name="WMH3")
    # WMH3.inputs.op_string = "-max"
    # WMH3.inputs.out_file = "WMH3_map.nii.gz"
    # wf.connect(extract_WMH3,'lesion_score',WMH3,"in_file")
    # wf.connect(extract_round_WMH3,"lesion_score", WMH3, "in_file2")
    #===========================================================================

    # Image calculator : multiply by boundary partial volume
    WMH_mul = Node(ImageMaths(), name="WMH_mul")
    WMH_mul.inputs.op_string = "-mul"
    WMH_mul.inputs.out_file = "final_mask.nii.gz"
    wf.connect(WMH, "out_file", WMH_mul, "in_file")
    wf.connect(MGDM, "distance", WMH_mul, "in_file2")

    #===========================================================================
    # WMH2_mul = Node(ImageMaths(), name="WMH2_mul")
    # WMH2_mul.inputs.op_string = "-mul"
    # WMH2_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH2,"out_file", WMH2_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH2_mul, "in_file2")
    #
    # WMH3_mul = Node(ImageMaths(), name="WMH3_mul")
    # WMH3_mul.inputs.op_string = "-mul"
    # WMH3_mul.inputs.out_file = "final_mask.nii.gz"
    # wf.connect(WMH3,"out_file", WMH3_mul,"in_file")
    # wf.connect(MGDM,"distance", WMH3_mul, "in_file2")
    #===========================================================================
    '''
    ##########################################
    ####      SEGMENTATION THRESHOLD      ####
    ##########################################
    A threshold of 0.5 is very conservative, because the final lesion score is the product of two probabilities.
    This needs to be optimized to a value between 0.25 and 0.5 to balance false negatives 
    (dominant at 0.5) and false positives (dominant at low values).
    '''

    # Threshold binary mask :
    DVRS_mask = Node(Threshold(), name="DVRS_mask")
    DVRS_mask.inputs.thresh = 0.25
    DVRS_mask.inputs.direction = "below"
    wf.connect(DVRS, "out_file", DVRS_mask, "in_file")

    # Threshold binary mask : 025
    WMH1_025 = Node(Threshold(), name="WMH1_025")
    WMH1_025.inputs.thresh = 0.25
    WMH1_025.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_025, "in_file")

    #===========================================================================
    # WMH2_025 = Node(Threshold(), name="WMH2_025")
    # WMH2_025.inputs.thresh = 0.25
    # WMH2_025.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_025, "in_file")
    #
    # WMH3_025 = Node(Threshold(), name="WMH3_025")
    # WMH3_025.inputs.thresh = 0.25
    # WMH3_025.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_025, "in_file")
    #===========================================================================

    # Threshold binary mask : 050
    WMH1_050 = Node(Threshold(), name="WMH1_050")
    WMH1_050.inputs.thresh = 0.50
    WMH1_050.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_050, "in_file")

    #===========================================================================
    # WMH2_050 = Node(Threshold(), name="WMH2_050")
    # WMH2_050.inputs.thresh = 0.50
    # WMH2_050.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_050, "in_file")
    #
    # WMH3_050 = Node(Threshold(), name="WMH3_050")
    # WMH3_050.inputs.thresh = 0.50
    # WMH3_050.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_050, "in_file")
    #===========================================================================

    # Threshold binary mask : 075
    WMH1_075 = Node(Threshold(), name="WMH1_075")
    WMH1_075.inputs.thresh = 0.75
    WMH1_075.inputs.direction = "below"
    wf.connect(WMH_mul, "out_file", WMH1_075, "in_file")

    #===========================================================================
    # WMH2_075 = Node(Threshold(), name="WMH2_075")
    # WMH2_075.inputs.thresh = 0.75
    # WMH2_075.inputs.direction = "below"
    # wf.connect(WMH2_mul,"out_file", WMH2_075, "in_file")
    #
    # WMH3_075 = Node(Threshold(), name="WMH3_075")
    # WMH3_075.inputs.thresh = 0.75
    # WMH3_075.inputs.direction = "below"
    # wf.connect(WMH3_mul,"out_file", WMH3_075, "in_file")
    #===========================================================================

    ## Outputs

    DVRS_Output = Node(IdentityInterface(fields=[
        'mask', 'region', 'lesion_size', 'lesion_proba', 'boundary', 'label',
        'score'
    ]),
                       name='DVRS_Output')
    wf.connect(DVRS_mask, 'out_file', DVRS_Output, 'mask')

    WMH_output = Node(IdentityInterface(fields=[
        'mask1025', 'mask1050', 'mask1075', 'mask2025', 'mask2050', 'mask2075',
        'mask3025', 'mask3050', 'mask3075'
    ]),
                      name='WMH_output')
    wf.connect(WMH1_025, 'out_file', WMH_output, 'mask1025')
    #wf.connect(WMH2_025,'out_file',WMH_output,'mask2025')
    #wf.connect(WMH3_025,'out_file',WMH_output,'mask3025')
    wf.connect(WMH1_050, 'out_file', WMH_output, 'mask1050')
    #wf.connect(WMH2_050,'out_file',WMH_output,'mask2050')
    #wf.connect(WMH3_050,'out_file',WMH_output,'mask3050')
    wf.connect(WMH1_075, 'out_file', WMH_output, 'mask1075')
    #wf.connect(WMH2_075,'out_file',WMH_output,'mask2075')
    #wf.connect(WMH3_075,'out_file',WMH_output,'mask3075')

    return wf
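
A minimal usage sketch for the workflow factory above, assuming the CBS Tools interfaces and the helper functions it relies on (createOutputDir, getElementFromList, getFirstElement) are importable from the same module; the subject-list file, workflow name, and input directory are hypothetical. The SLURM plugin matches the sbatch_args set on the heavier nodes, but any Nipype execution plugin would do.

# Hypothetical driver: build the lesion-extraction workflow for a subject list
# and submit it through Nipype's SLURM plugin.
if __name__ == '__main__':
    with open('/homes_unix/alaurent/subjects.txt') as f:  # hypothetical subject-list file
        subjects = f.read().split('\n')

    wf = Lesion_extractor(
        wf_name='PVS_WMH_run',                             # hypothetical run name
        input_dir='/homes_unix/alaurent/BIDS',             # hypothetical BIDS-like input tree
        subjects=subjects,
    )
    wf.run(plugin='SLURM')                                 # or 'MultiProc' / 'Linear' for local runs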
Example #4
def test_FLIRT_inputs():
    input_map = dict(angle_rep=dict(argstr='-anglerep %s',
    ),
    apply_isoxfm=dict(argstr='-applyisoxfm %f',
    xor=['apply_xfm'],
    ),
    apply_xfm=dict(argstr='-applyxfm',
    requires=['in_matrix_file'],
    ),
    args=dict(argstr='%s',
    ),
    bbrslope=dict(argstr='-bbrslope %f',
    min_ver='5.0.0',
    ),
    bbrtype=dict(argstr='-bbrtype %s',
    min_ver='5.0.0',
    ),
    bgvalue=dict(argstr='-setbackground %f',
    ),
    bins=dict(argstr='-bins %d',
    ),
    coarse_search=dict(argstr='-coarsesearch %d',
    units='degrees',
    ),
    cost=dict(argstr='-cost %s',
    ),
    cost_func=dict(argstr='-searchcost %s',
    ),
    datatype=dict(argstr='-datatype %s',
    ),
    display_init=dict(argstr='-displayinit',
    ),
    dof=dict(argstr='-dof %d',
    ),
    echospacing=dict(argstr='-echospacing %f',
    min_ver='5.0.0',
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    fieldmap=dict(argstr='-fieldmap %s',
    min_ver='5.0.0',
    ),
    fieldmapmask=dict(argstr='-fieldmapmask %s',
    min_ver='5.0.0',
    ),
    fine_search=dict(argstr='-finesearch %d',
    units='degrees',
    ),
    force_scaling=dict(argstr='-forcescaling',
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='-in %s',
    mandatory=True,
    position=0,
    ),
    in_matrix_file=dict(argstr='-init %s',
    ),
    in_weight=dict(argstr='-inweight %s',
    ),
    interp=dict(argstr='-interp %s',
    ),
    min_sampling=dict(argstr='-minsampling %f',
    units='mm',
    ),
    no_clamp=dict(argstr='-noclamp',
    ),
    no_resample=dict(argstr='-noresample',
    ),
    no_resample_blur=dict(argstr='-noresampblur',
    ),
    no_search=dict(argstr='-nosearch',
    ),
    out_file=dict(argstr='-out %s',
    hash_files=False,
    name_source=['in_file'],
    name_template='%s_flirt',
    position=2,
    ),
    out_log=dict(keep_extension=True,
    name_source=['in_file'],
    name_template='%s_flirt.log',
    requires=['save_log'],
    ),
    out_matrix_file=dict(argstr='-omat %s',
    hash_files=False,
    keep_extension=True,
    name_source=['in_file'],
    name_template='%s_flirt.mat',
    position=3,
    ),
    output_type=dict(),
    padding_size=dict(argstr='-paddingsize %d',
    units='voxels',
    ),
    pedir=dict(argstr='-pedir %d',
    min_ver='5.0.0',
    ),
    ref_weight=dict(argstr='-refweight %s',
    ),
    reference=dict(argstr='-ref %s',
    mandatory=True,
    position=1,
    ),
    rigid2D=dict(argstr='-2D',
    ),
    save_log=dict(),
    schedule=dict(argstr='-schedule %s',
    ),
    searchr_x=dict(argstr='-searchrx %s',
    units='degrees',
    ),
    searchr_y=dict(argstr='-searchry %s',
    units='degrees',
    ),
    searchr_z=dict(argstr='-searchrz %s',
    units='degrees',
    ),
    sinc_width=dict(argstr='-sincwidth %d',
    units='voxels',
    ),
    sinc_window=dict(argstr='-sincwindow %s',
    ),
    terminal_output=dict(mandatory=True,
    nohash=True,
    ),
    uses_qform=dict(argstr='-usesqform',
    ),
    verbose=dict(argstr='-verbose %d',
    ),
    wm_seg=dict(argstr='-wmseg %s',
    min_ver='5.0.0',
    ),
    wmcoords=dict(argstr='-wmcoords %s',
    min_ver='5.0.0',
    ),
    wmnorms=dict(argstr='-wmnorms %s',
    min_ver='5.0.0',
    ),
    )
    inputs = FLIRT.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #5
    def motion_correction_pipeline(self, **name_maps):

        if 'struct2align' in self.input_names:
            StructAlignment = True
        else:
            StructAlignment = False

        pipeline = self.new_pipeline(
            name='pet_mc',
            desc=("Given a folder with reconstructed PET data, this "
                  "pipeline will generate a motion corrected PET"
                  "image using information extracted from the MR-based "
                  "motion detection pipeline"),
            citations=[fsl_cite],
            name_maps=name_maps)

        check_pet = pipeline.add(
            'check_pet_data',
            CheckPetMCInputs(),
            inputs={
                'pet_data': ('pet_data_prepared', directory_format),
                'reference': ('ref_brain', nifti_gz_format)
            },
            requirements=[fsl_req.v('5.0.9'),
                          mrtrix_req.v('3.0rc3')])
        if self.branch('dynamic_pet_mc'):
            pipeline.connect_input('fixed_binning_mats', check_pet,
                                   'motion_mats')
        else:
            pipeline.connect_input('average_mats', check_pet, 'motion_mats')
            pipeline.connect_input('correction_factors', check_pet,
                                   'corr_factors')

        if StructAlignment:
            struct_reg = pipeline.add('ref2structural_reg',
                                      FLIRT(dof=6,
                                            cost_func='normmi',
                                            cost='normmi',
                                            output_type='NIFTI_GZ'),
                                      inputs={
                                          'reference':
                                          ('ref_brain', nifti_gz_format),
                                          'in_file':
                                          ('struct2align', nifti_gz_format)
                                      },
                                      requirements=[fsl_req.v('5.0.9')])

        if self.branch('dynamic_pet_mc'):
            pet_mc = pipeline.add('pet_mc',
                                  PetImageMotionCorrection(),
                                  inputs={
                                      'pet_image': (check_pet, 'pet_images'),
                                      'motion_mat': (check_pet, 'motion_mats'),
                                      'pet2ref_mat': (check_pet, 'pet2ref_mat')
                                  },
                                  requirements=[fsl_req.v('5.0.9')],
                                  iterfield=['pet_image', 'motion_mat'])
        else:
            pet_mc = pipeline.add(
                'pet_mc',
                PetImageMotionCorrection(),
                inputs={'corr_factor': (check_pet, 'corr_factors')},
                requirements=[fsl_req.v('5.0.9')],
                iterfield=['corr_factor', 'pet_image', 'motion_mat'])

        if StructAlignment:
            pipeline.connect(struct_reg, 'out_matrix_file', pet_mc,
                             'structural2ref_regmat')
            pipeline.connect_input('struct2align', pet_mc, 'structural_image')
        if self.parameter('PET2MNI_reg'):
            mni_reg = True
        else:
            mni_reg = False

        if self.branch('dynamic_pet_mc'):
            merge_mc = pipeline.add(
                'merge_pet_mc',
                fsl.Merge(dimension='t'),
                inputs={'in_files': (pet_mc, 'pet_mc_image')},
                requirements=[fsl_req.v('5.0.9')])

            merge_no_mc = pipeline.add(
                'merge_pet_no_mc',
                fsl.Merge(dimension='t'),
                inputs={'in_files': (pet_mc, 'pet_no_mc_image')},
                requirements=[fsl_req.v('5.0.9')])
        else:
            static_mc = pipeline.add('static_mc_generation',
                                     StaticPETImageGeneration(),
                                     inputs={
                                         'pet_mc_images':
                                         (pet_mc, 'pet_mc_image'),
                                         'pet_no_mc_images':
                                         (pet_mc, 'pet_no_mc_image')
                                     },
                                     requirements=[fsl_req.v('5.0.9')])

        merge_outputs = pipeline.add(
            'merge_outputs',
            Merge(3),
            inputs={'in1': ('mean_displacement_plot', png_format)})

        if not StructAlignment:
            cropping = pipeline.add(
                'pet_cropping',
                PETFovCropping(x_min=self.parameter('crop_xmin'),
                               x_size=self.parameter('crop_xsize'),
                               y_min=self.parameter('crop_ymin'),
                               y_size=self.parameter('crop_ysize'),
                               z_min=self.parameter('crop_zmin'),
                               z_size=self.parameter('crop_zsize')))
            if self.branch('dynamic_pet_mc'):
                pipeline.connect(merge_mc, 'merged_file', cropping,
                                 'pet_image')
            else:
                pipeline.connect(static_mc, 'static_mc', cropping, 'pet_image')

            cropping_no_mc = pipeline.add(
                'pet_no_mc_cropping',
                PETFovCropping(x_min=self.parameter('crop_xmin'),
                               x_size=self.parameter('crop_xsize'),
                               y_min=self.parameter('crop_ymin'),
                               y_size=self.parameter('crop_ysize'),
                               z_min=self.parameter('crop_zmin'),
                               z_size=self.parameter('crop_zsize')))
            if self.branch('dynamic_pet_mc'):
                pipeline.connect(merge_no_mc, 'merged_file', cropping_no_mc,
                                 'pet_image')
            else:
                pipeline.connect(static_mc, 'static_no_mc', cropping_no_mc,
                                 'pet_image')

            if mni_reg:
                if self.branch('dynamic_pet_mc'):
                    t_mean = pipeline.add(
                        'PET_temporal_mean',
                        ImageMaths(op_string='-Tmean'),
                        inputs={'in_file': (cropping, 'pet_cropped')},
                        requirements=[fsl_req.v('5.0.9')])

                reg_tmean2MNI = pipeline.add(
                    'reg2MNI',
                    AntsRegSyn(num_dimensions=3,
                               transformation='s',
                               out_prefix='reg2MNI',
                               num_threads=4,
                               ref_file=self.parameter('PET_template_MNI')),
                    wall_time=25,
                    requirements=[ants_req.v('2')])

                if self.branch('dynamic_pet_mc'):
                    pipeline.connect(t_mean, 'out_file', reg_tmean2MNI,
                                     'input_file')

                    merge_trans = pipeline.add('merge_transforms',
                                               Merge(2),
                                               inputs={
                                                   'in1': (reg_tmean2MNI,
                                                           'warp_file'),
                                                   'in2':
                                                   (reg_tmean2MNI, 'regmat')
                                               },
                                               wall_time=1)

                    apply_trans = pipeline.add(
                        'apply_trans',
                        ApplyTransforms(
                            reference_image=self.parameter('PET_template_MNI'),
                            interpolation='Linear',
                            input_image_type=3),
                        inputs={
                            'input_image': (cropping, 'pet_cropped'),
                            'transforms': (merge_trans, 'out')
                        },
                        wall_time=7,
                        mem_gb=24,
                        requirements=[ants_req.v('2')])
                    pipeline.connect(apply_trans, 'output_image',
                                     merge_outputs, 'in2')
                else:
                    pipeline.connect(cropping, 'pet_cropped', reg_tmean2MNI,
                                     'input_file')
                    pipeline.connect(reg_tmean2MNI, 'reg_file', merge_outputs,
                                     'in2')
            else:
                pipeline.connect(cropping, 'pet_cropped', merge_outputs, 'in2')
            pipeline.connect(cropping_no_mc, 'pet_cropped', merge_outputs,
                             'in3')
        else:
            if self.branch('dynamic_pet_mc'):
                pipeline.connect(merge_mc, 'merged_file', merge_outputs, 'in2')
                pipeline.connect(merge_no_mc, 'merged_file', merge_outputs,
                                 'in3')
            else:
                pipeline.connect(static_mc, 'static_mc', merge_outputs, 'in2')
                pipeline.connect(static_mc, 'static_no_mc', merge_outputs,
                                 'in3')


#         mcflirt = pipeline.add('mcflirt', MCFLIRT())
#                 'in_file': (merge_mc_ps, 'merged_file'),
#                 cost='normmi',

        copy2dir = pipeline.add('copy2dir',
                                CopyToDir(),
                                inputs={'in_files': (merge_outputs, 'out')})
        if self.branch('dynamic_pet_mc'):
            pipeline.connect_output('dynamic_motion_correction_results',
                                    copy2dir, 'out_dir')
        else:
            pipeline.connect_output('static_motion_correction_results',
                                    copy2dir, 'out_dir')
        return pipeline
Example #6
def test_FLIRT_inputs():
    input_map = dict(
        angle_rep=dict(argstr='-anglerep %s', ),
        apply_isoxfm=dict(
            argstr='-applyisoxfm %f',
            xor=['apply_xfm'],
        ),
        apply_xfm=dict(
            argstr='-applyxfm',
            requires=['in_matrix_file'],
        ),
        args=dict(argstr='%s', ),
        bbrslope=dict(
            argstr='-bbrslope %f',
            min_ver='5.0.0',
        ),
        bbrtype=dict(
            argstr='-bbrtype %s',
            min_ver='5.0.0',
        ),
        bgvalue=dict(argstr='-setbackground %f', ),
        bins=dict(argstr='-bins %d', ),
        coarse_search=dict(
            argstr='-coarsesearch %d',
            units='degrees',
        ),
        cost=dict(argstr='-cost %s', ),
        cost_func=dict(argstr='-searchcost %s', ),
        datatype=dict(argstr='-datatype %s', ),
        display_init=dict(argstr='-displayinit', ),
        dof=dict(argstr='-dof %d', ),
        echospacing=dict(
            argstr='-echospacing %f',
            min_ver='5.0.0',
        ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        fieldmap=dict(
            argstr='-fieldmap %s',
            min_ver='5.0.0',
        ),
        fieldmapmask=dict(
            argstr='-fieldmapmask %s',
            min_ver='5.0.0',
        ),
        fine_search=dict(
            argstr='-finesearch %d',
            units='degrees',
        ),
        force_scaling=dict(argstr='-forcescaling', ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='-in %s',
            mandatory=True,
            position=0,
        ),
        in_matrix_file=dict(argstr='-init %s', ),
        in_weight=dict(argstr='-inweight %s', ),
        interp=dict(argstr='-interp %s', ),
        min_sampling=dict(
            argstr='-minsampling %f',
            units='mm',
        ),
        no_clamp=dict(argstr='-noclamp', ),
        no_resample=dict(argstr='-noresample', ),
        no_resample_blur=dict(argstr='-noresampblur', ),
        no_search=dict(argstr='-nosearch', ),
        out_file=dict(
            argstr='-out %s',
            hash_files=False,
            name_source=['in_file'],
            name_template='%s_flirt',
            position=2,
        ),
        out_log=dict(
            keep_extension=True,
            name_source=['in_file'],
            name_template='%s_flirt.log',
            requires=['save_log'],
        ),
        out_matrix_file=dict(
            argstr='-omat %s',
            hash_files=False,
            keep_extension=True,
            name_source=['in_file'],
            name_template='%s_flirt.mat',
            position=3,
        ),
        output_type=dict(),
        padding_size=dict(
            argstr='-paddingsize %d',
            units='voxels',
        ),
        pedir=dict(
            argstr='-pedir %d',
            min_ver='5.0.0',
        ),
        ref_weight=dict(argstr='-refweight %s', ),
        reference=dict(
            argstr='-ref %s',
            mandatory=True,
            position=1,
        ),
        rigid2D=dict(argstr='-2D', ),
        save_log=dict(),
        schedule=dict(argstr='-schedule %s', ),
        searchr_x=dict(
            argstr='-searchrx %s',
            units='degrees',
        ),
        searchr_y=dict(
            argstr='-searchry %s',
            units='degrees',
        ),
        searchr_z=dict(
            argstr='-searchrz %s',
            units='degrees',
        ),
        sinc_width=dict(
            argstr='-sincwidth %d',
            units='voxels',
        ),
        sinc_window=dict(argstr='-sincwindow %s', ),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
        uses_qform=dict(argstr='-usesqform', ),
        verbose=dict(argstr='-verbose %d', ),
        wm_seg=dict(
            argstr='-wmseg %s',
            min_ver='5.0.0',
        ),
        wmcoords=dict(
            argstr='-wmcoords %s',
            min_ver='5.0.0',
        ),
        wmnorms=dict(
            argstr='-wmnorms %s',
            min_ver='5.0.0',
        ),
    )
    inputs = FLIRT.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #7
def hybrid_rois_registration(base_dir,
                             subject_id,
                             visit_id,
                             output_dir,
                             csf_roi,
                             wm_roi,
                             template="MNI152_T1_2mm.nii.gz",
                             dof=6,
                             output_type="NIFTI"):
    """Function to register csf and wm roi from template to diffusion space and output the MEDIAN signal within the roi. Needed for hybrid Beltrami FW fitting
        Parameters
        ----------
        base_dir : absolute path pointing at the root directory of the bids repository
        subject_id: string, the tag subject (i.e. sub-sublabel) of the subject to treat
        visit_id: the VALUE of the session tag to be treated (i.e. for ses-02, only "02" need to be entered)
        output_dir: absolute path of the directories were the indexes will be written
        csf_roi: absolute path of the roi representative of the csf. It should be in the same space as "template" and binary
        wm_roi: absolute path of the roi representative of the wm. It should be in in the same space as "template" and binary
        template: name of the file with extension of the template to be used for inverse registation of the rois. 
        output_type: ["NIFTI", "NIFTI_GZ"], the type of nifti output desired,
        ... : other parameters related to the dipy functions used
        Returns
        ----------
        csf_mean, wm_mean: floats. The median csf and wm values from the registered ROIs
        
        """

    diff_dir = "{}/{}/ses-{}/dwi".format(base_dir, subject_id, visit_id)
    anat_dir = "{}/{}/ses-{}/anat".format(base_dir, subject_id, visit_id)
    if output_type == "NIFTI":
        ext = ".nii"
    elif output_type == "NIFTI_GZ":
        ext = ".nii.gz"
    else:
        raise ValueError(
            "Output file type {} not recognized".format(output_type))
    t1_file = glob("{}/*T1w.nii*".format(anat_dir))[0]
    t1_base_name = get_base_name_all_type(t1_file)
    diff_file = glob("{}/*dwi.nii*".format(diff_dir))[0]
    diff_base_name = get_base_name_all_type(diff_file)
    fsldir_system = subprocess.check_output("echo $FSLDIR", shell=True)
    if type(fsldir_system) == bytes:
        fsl_dir = fsldir_system.decode("utf-8").rstrip()
    else:
        fsl_dir = fsldir_system.rstrip()
    template_base_name = get_base_name_all_type(template)
    template_no_underscore = "x".join(template_base_name.split("_"))
    template_abs = "{}/data/standard/{}".format(fsl_dir, template)
    flirt = FLIRT()
    #T1 to template
    print("Registering T1 onto template")
    flirt.inputs.in_file = t1_file
    flirt.inputs.reference = template_abs
    flirt.inputs.out_file = "{}/{}_ref-{}_dof-12_reg-flirt{}".format(
        output_dir, t1_base_name, template_no_underscore, ext)
    flirt.inputs.out_matrix_file = "{}/{}_ref-{}_dof-12_reg-flirt.mat".format(
        output_dir, t1_base_name, template_no_underscore)
    flirt.inputs.output_type = output_type
    flirt.run()
    #diff to t1
    print("Registering diffusion onto T1")
    flirt.inputs.in_file = diff_file
    flirt.inputs.reference = t1_file
    flirt.inputs.dof = dof
    flirt.inputs.out_file = "{}/{}_ref-t1_dof-{}_reg-flirt{}".format(
        output_dir, diff_base_name, str(dof), ext)
    flirt.inputs.out_matrix_file = "{}/{}_ref-t1_dof-{}_reg-flirt.mat".format(
        output_dir, diff_base_name, str(dof))
    flirt.inputs.output_type = output_type
    flirt.run()
    #concat and inverse matrices
    print("Concat and inverse matrix, send rois to diff")
    mat_transform = ConvertXFM()
    # Concatenate diff-to-T1 and T1-to-template into diff-to-template.
    # convert_xfm expects "-concat BtoC.mat AtoB.mat"; nipype places in_file2
    # right after -concat and in_file last, so in_file must hold the first
    # transform (diff -> T1) and in_file2 the second (T1 -> template).
    mat_transform.inputs.in_file = "{}/{}_ref-t1_dof-{}_reg-flirt.mat".format(
        output_dir, diff_base_name, str(dof))
    mat_transform.inputs.in_file2 = "{}/{}_ref-{}_dof-12_reg-flirt.mat".format(
        output_dir, t1_base_name, template_no_underscore)
    mat_transform.inputs.concat_xfm = True
    mat_transform.inputs.out_file = "{}/diff_to_{}.mat".format(
        output_dir, template_no_underscore)
    mat_transform.run()
    #inverse
    mat_transform = ConvertXFM()
    mat_transform.inputs.in_file = "{}/diff_to_{}.mat".format(
        output_dir, template_no_underscore)
    mat_transform.inputs.invert_xfm = True
    mat_transform.inputs.out_file = "{}/{}_to_diff.mat".format(
        output_dir, template_no_underscore)
    mat_transform.run()
    #send csf and wm roi to diff
    csf_roi_base_name = get_base_name_all_type(csf_roi)
    wm_roi_base_name = get_base_name_all_type(wm_roi)
    #csf
    # Fresh FLIRT instance for the apply-xfm steps, so the out_matrix_file and
    # dof left over from the diffusion-to-T1 registration are not reused here.
    flirt = FLIRT()
    flirt.inputs.in_file = csf_roi
    flirt.inputs.reference = diff_file
    flirt.inputs.apply_xfm = True
    flirt.inputs.in_matrix_file = "{}/{}_to_diff.mat".format(
        output_dir, template_no_underscore)
    flirt.inputs.interp = "nearestneighbour"
    flirt.inputs.output_type = output_type
    flirt.inputs.out_file = "{}/{}_in_diff{}".format(output_dir,
                                                     csf_roi_base_name, ext)
    flirt.run()
    #wm
    flirt.inputs.in_file = wm_roi
    flirt.inputs.reference = diff_file
    flirt.inputs.apply_xfm = True
    flirt.inputs.in_matrix_file = "{}/{}_to_diff.mat".format(
        output_dir, template_no_underscore)
    flirt.inputs.interp = "nearestneighbour"
    flirt.inputs.output_type = output_type
    flirt.inputs.out_file = "{}/{}_in_diff{}".format(output_dir,
                                                     wm_roi_base_name, ext)
    flirt.run()

    #extract values from roi
    csf_image = nb.load("{}/{}_in_diff{}".format(output_dir, csf_roi_base_name,
                                                 ext))
    csf_array = csf_image.get_fdata()
    csf_array_mask = np.where(csf_array == 1)
    wm_image = nb.load("{}/{}_in_diff{}".format(output_dir, wm_roi_base_name,
                                                ext))
    wm_array = wm_image.get_fdata()
    wm_array_mask = np.where(wm_array == 1)
    diff_image = nb.load(diff_file)
    b0_array = diff_image.get_fdata()[:, :, :, 0]
    # despite the "mean" names, these are the MEDIAN values within each ROI
    csf_mean = np.median(b0_array[csf_array_mask])
    wm_mean = np.median(b0_array[wm_array_mask])
    return (csf_mean, wm_mean)
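A hypothetical usage sketch for the function above; every path and label below is a placeholder, and it assumes FSLDIR is set and that the BIDS layout matches the globs used inside the function.

csf_med, wm_med = hybrid_rois_registration(
    base_dir="/data/bids",                          # hypothetical BIDS root
    subject_id="sub-01",                            # hypothetical subject tag
    visit_id="01",                                  # session ses-01
    output_dir="/data/derivatives/hybrid_fw",       # hypothetical output directory
    csf_roi="/data/rois/csf_mask_template.nii.gz",  # hypothetical binary ROI in template space
    wm_roi="/data/rois/wm_mask_template.nii.gz",    # hypothetical binary ROI in template space
    template="MNI152_T1_2mm.nii.gz",
    dof=6,
    output_type="NIFTI_GZ",
)
print(csf_med, wm_med)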
Example #8
    def registration_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Register T1 and T2 to the

        Parameters
        ----------
        """
        pipeline = self.create_pipeline(
            name='registration_pipeline',
            inputs=[
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('ute_echo2', dicom_format)
            ],
            outputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute2_registered', nifti_gz_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('ute_to_template_mat', text_matrix_format)
            ],
            desc="Register ute images to the template",
            version=1,
            citations=(fsl_cite),
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'

        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        echo2_conv = pipeline.create_node(MRConvert(), name='echo2_conv')
        echo2_conv.inputs.out_ext = '.nii.gz'

        pipeline.connect_input('ute_echo2', echo2_conv, 'in_file')

        # Create registration node
        registration = pipeline.create_node(FLIRT(),
                                            name='ute1_registration',
                                            requirements=[fsl5_req],
                                            wall_time=180)

        pipeline.connect(echo1_conv, 'out_file', registration, 'in_file')

        registration.inputs.reference = self.template_path
        registration.inputs.output_type = 'NIFTI_GZ'
        registration.inputs.searchr_x = [-180, 180]
        registration.inputs.searchr_y = [-180, 180]
        registration.inputs.searchr_z = [-180, 180]
        registration.inputs.bins = 256
        registration.inputs.cost_func = 'corratio'

        # Inverse matrix conversion
        convert_mat = pipeline.create_node(ConvertXFM(),
                                           name='inverse_matrix_conversion',
                                           requirements=[fsl5_req],
                                           wall_time=10)
        pipeline.connect(registration, 'out_matrix_file', convert_mat,
                         'in_file')
        convert_mat.inputs.invert_xfm = True

        # UTE_echo_2 transformation
        transform_ute2 = pipeline.create_node(ApplyXFM(),
                                              name='transform_t2',
                                              requirements=[fsl5_req],
                                              wall_time=10)
        pipeline.connect(registration, 'out_matrix_file', transform_ute2,
                         'in_matrix_file')
        pipeline.connect(echo2_conv, 'out_file', transform_ute2, 'in_file')

        transform_ute2.inputs.output_type = 'NIFTI_GZ'
        transform_ute2.inputs.reference = self.template_path
        transform_ute2.inputs.apply_xfm = True

        # Connect outputs
        pipeline.connect_output('ute1_registered', registration, 'out_file')
        pipeline.connect_output('ute_to_template_mat', registration,
                                'out_matrix_file')
        pipeline.connect_output('ute2_registered', transform_ute2, 'out_file')
        pipeline.connect_output('template_to_ute_mat', convert_mat, 'out_file')
        pipeline.assert_connected()

        return pipeline
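The pipeline above relies on a framework-specific create_pipeline/DatasetSpec API; below is a minimal plain-nipype sketch of the same FLIRT/ConvertXFM/ApplyXFM steps, with hypothetical file paths standing in for the dataset plumbing.

from nipype import Node, Workflow
from nipype.interfaces.fsl import FLIRT, ConvertXFM, ApplyXFM

wf = Workflow(name='ute_registration_sketch', base_dir='/tmp/ute_wf')  # hypothetical base_dir

# Estimate the echo1-to-template transform with the same FLIRT settings as above
reg = Node(FLIRT(reference='/path/to/template.nii.gz',    # hypothetical template
                 output_type='NIFTI_GZ',
                 searchr_x=[-180, 180], searchr_y=[-180, 180],
                 searchr_z=[-180, 180], bins=256, cost_func='corratio'),
           name='ute1_registration')
reg.inputs.in_file = '/path/to/ute_echo1.nii.gz'           # hypothetical input

# Invert the matrix to get the template-to-UTE transform
inv = Node(ConvertXFM(invert_xfm=True), name='invert_matrix')

# Resample echo2 into template space with the estimated matrix
apply2 = Node(ApplyXFM(reference='/path/to/template.nii.gz',  # hypothetical template
                       output_type='NIFTI_GZ', apply_xfm=True),
              name='transform_echo2')
apply2.inputs.in_file = '/path/to/ute_echo2.nii.gz'        # hypothetical input

wf.connect([(reg, inv, [('out_matrix_file', 'in_file')]),
            (reg, apply2, [('out_matrix_file', 'in_matrix_file')])])
# wf.run()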
Example #9
    def backwrap_to_ute_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='backwrap_to_ute',
            inputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('umap_ute', dicom_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('sute_cont_template', nifti_gz_format),
                DatasetSpec('sute_fix_template', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('sute_cont_ute', nifti_gz_format),
                DatasetSpec('sute_fix_ute', nifti_gz_format)
            ],
            desc="Moving umaps back to the UTE space",
            version=1,
            citations=(matlab_cite),
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        umap_conv = pipeline.create_node(MRConvert(), name='umap_conv')
        umap_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('umap_ute', umap_conv, 'in_file')

        zero_template_mask = pipeline.create_node(BinaryMaths(),
                                                  name='zero_template_mask',
                                                  requirements=[fsl5_req],
                                                  wall_time=3)
        pipeline.connect_input('ute1_registered', zero_template_mask,
                               'in_file')
        zero_template_mask.inputs.operation = "mul"
        zero_template_mask.inputs.operand_value = 0
        zero_template_mask.inputs.output_type = 'NIFTI_GZ'

        region_template_mask = pipeline.create_node(
            FLIRT(),
            name='region_template_mask',
            requirements=[fsl5_req],
            wall_time=5)
        region_template_mask.inputs.apply_xfm = True
        region_template_mask.inputs.bgvalue = 1
        region_template_mask.inputs.interp = 'nearestneighbour'
        region_template_mask.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(zero_template_mask, 'out_file', region_template_mask,
                         'in_file')
        pipeline.connect(echo1_conv, 'out_file', region_template_mask,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', region_template_mask,
                               'in_matrix_file')

        fill_in_umap = pipeline.create_node(MultiImageMaths(),
                                            name='fill_in_umap',
                                            requirements=[fsl5_req],
                                            wall_time=3)
        fill_in_umap.inputs.op_string = "-mul %s "
        fill_in_umap.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(region_template_mask, 'out_file', fill_in_umap,
                         'in_file')
        pipeline.connect(umap_conv, 'out_file', fill_in_umap, 'operand_files')

        sute_fix_ute_space = pipeline.create_node(FLIRT(),
                                                  name='sute_fix_ute_space',
                                                  requirements=[fsl5_req],
                                                  wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_fix_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_fix_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_fix_template', sute_fix_ute_space,
                               'in_file')
        sute_fix_ute_space.inputs.apply_xfm = True
        sute_fix_ute_space.inputs.bgvalue = 0
        sute_fix_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_cont_ute_space = pipeline.create_node(FLIRT(),
                                                   name='sute_cont_ute_space',
                                                   requirements=[fsl5_req],
                                                   wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_cont_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_cont_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_cont_template', sute_cont_ute_space,
                               'in_file')
        sute_cont_ute_space.inputs.apply_xfm = True
        sute_cont_ute_space.inputs.bgvalue = 0
        sute_cont_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_fix_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_fix_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_fix_ute_space, 'out_file',
                         sute_fix_ute_background, 'in_file')
        sute_fix_ute_background.inputs.op_string = "-add %s "
        sute_fix_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_fix_ute_background,
                         'operand_files')

        sute_cont_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_cont_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_cont_ute_space, 'out_file',
                         sute_cont_ute_background, 'in_file')
        sute_cont_ute_background.inputs.op_string = "-add %s "
        sute_cont_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_cont_ute_background,
                         'operand_files')

        smooth_sute_fix = pipeline.create_node(Smooth(),
                                               name='smooth_sute_fix',
                                               requirements=[fsl5_req],
                                               wall_time=5)
        smooth_sute_fix.inputs.sigma = 2.
        pipeline.connect(sute_fix_ute_background, 'out_file', smooth_sute_fix,
                         'in_file')

        smooth_sute_cont = pipeline.create_node(Smooth(),
                                                name='smooth_sute_cont',
                                                requirements=[fsl5_req],
                                                wall_time=5)
        smooth_sute_cont.inputs.sigma = 2.
        pipeline.connect(sute_cont_ute_background, 'out_file',
                         smooth_sute_cont, 'in_file')

        pipeline.connect_output('sute_fix_ute', smooth_sute_fix,
                                'smoothed_file')
        pipeline.connect_output('sute_cont_ute', smooth_sute_cont,
                                'smoothed_file')
        pipeline.assert_connected()

        return pipeline
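One detail worth noting from the pipeline above: fsl.Smooth takes sigma in mm, and its alternative fwhm input is converted internally using the standard Gaussian relation fwhm = sigma * 2 * sqrt(2 * ln 2), so the sigma of 2 mm used here corresponds to roughly a 4.71 mm FWHM kernel. A quick check (not part of the original pipeline):

import math

sigma = 2.0
fwhm = sigma * 2 * math.sqrt(2 * math.log(2))
print(round(fwhm, 2))  # ~4.71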
Example #10
segment = Node(FAST(no_bias=True, segments=True, number_classes=3),
               name='segment')

#Slice timing correction based on interleaved acquisition using FSL
slicetime_correct = Node(SliceTimer(interleaved=interleave,
                                    slice_direction=slice_dir,
                                    time_repetition=TR),
                         name='slicetime_correct')

# Motion correction
motion_correct = Node(MCFLIRT(save_plots=True, mean_vol=True),
                      name='motion_correct')

# Registration- using FLIRT
# The BOLD image is 'in_file', the anat is 'reference', the output is 'out_file'
coreg1 = Node(FLIRT(), name='coreg1')
coreg2 = Node(FLIRT(apply_xfm=True), name='coreg2')
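# Note (sketch, not part of the original script): coreg1 estimates the
# BOLD-to-anat matrix and coreg2 re-applies it with apply_xfm=True, so in the
# surrounding workflow (defined elsewhere in the original source) the two
# nodes would typically be wired roughly as:
#
#     workflow.connect(coreg1, 'out_matrix_file', coreg2, 'in_matrix_file')
#
# where `workflow` is an assumed nipype Workflow object not shown here.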

# make binary mask
# structural is the 'in_file', output is 'binary_file'
binarize_struct = Node(Binarize(dilate=mask_dilation,
                                erode=mask_erosion,
                                min=1),
                       name='binarize_struct')

# apply the binary mask to the functional data
# functional is 'in_file', binary mask is 'mask_file', output is 'out_file'
mask_func = Node(ApplyMask(), name='mask_func')

# Artifact detection for scrubbing/motion assessment
art = Node(