Example #1
def head_motion_correction(name='motion_correction'):
    workflow = Workflow(name)

    input_node = Node(
        niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']),
        name='input')
    output_node = Node(niu.IdentityInterface(fields=['xforms', 'movpar_file']),
                       name='outputnode')

    mcflirt = Node(fsl.MCFLIRT(save_mats=True, save_plots=True),
                   name='mcflirt')

    fsl2itk = Node(MCFLIRT2ITK(), name='fsl2itk')

    normalize_motion = Node(NormalizeMotionParams(format='FSL'),
                            name="normalize_motion")

    workflow.connect([
        (input_node, mcflirt, [('raw_ref_image', 'ref_file'),
                               ('bold_file', 'in_file')]),
        (input_node, fsl2itk, [('raw_ref_image', 'in_source'),
                               ('raw_ref_image', 'in_reference')]),
        (mcflirt, fsl2itk, [('mat_file', 'in_files')]),
        (mcflirt, normalize_motion, [('par_file', 'in_file')]),
        (fsl2itk, output_node, [('out_file', 'xforms')]),
        (normalize_motion, output_node, [('out_file', 'movpar_file')]),
    ])

    return workflow
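A minimal usage sketch (hedged; it assumes nipype plus the fmriprep/niworkflows interfaces imported by the original module, and the paths are hypothetical):

wf = head_motion_correction()
wf.base_dir = '/tmp/work'                                    # hypothetical working directory
wf.inputs.input.bold_file = 'sub-01_task-rest_bold.nii.gz'   # hypothetical BOLD series
wf.inputs.input.raw_ref_image = 'sub-01_boldref.nii.gz'      # hypothetical reference volume
wf.run()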
Example #2
def init_b1_mcf(rf_pulse=None, scale=150):
    inputnode = Node(IdentityInterface(fields=['2db1map_file', 'ref_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['b1_plus', 'b1_pulse']),
                      name='outputnode')

    b1_b1 = Node(ExtractROI(t_min=0, t_size=1), name='b1_extract_b1')
    b1_filter = Node(Filter(filter_spec='Gauss,3.0'), name='b1_filter')
    b1_mag = Node(ExtractROI(t_min=1, t_size=1), name='b1_extract_mag')

    b1_reg = Node(FLIRT(out_file='b1mag_reg.nii.gz',
                        out_matrix_file='b1mag_reg.mat'),
                  name='b1_reg')
    b1_invert = Node(ConvertXFM(invert_xfm=True), name='b1_invert')
    b1_apply = Node(FLIRT(apply_xfm=True), name='b1_reg_apply')
    b1_scale = Node(ImageMaths(op_string='-div %f' % scale), name='b1_scale')

    wf = Workflow(name='b1_prep')
    wf.connect([(inputnode, b1_b1, [('2db1map_file', 'in_file')]),
                (inputnode, b1_mag, [('2db1map_file', 'in_file')]),
                (inputnode, b1_reg, [('ref_file', 'in_file')]),
                (inputnode, b1_apply, [('ref_file', 'reference')]),
                (b1_mag, b1_reg, [('roi_file', 'reference')]),
                (b1_reg, b1_invert, [('out_matrix_file', 'in_file')]),
                (b1_invert, b1_apply, [('out_file', 'in_matrix_file')]),
                (b1_b1, b1_filter, [('roi_file', 'in_file')]),
                (b1_filter, b1_apply, [('out_file', 'in_file')]),
                (b1_apply, b1_scale, [('out_file', 'in_file')]),
                (b1_scale, outputnode, [('out_file', 'b1_plus')])])
    if rf_pulse:
        b1_rf = Node(RFProfile(rf=rf_pulse, out_file='b1_rf.nii.gz'),
                     name='b1_rf')
        wf.connect([(b1_scale, b1_rf, [('out_file', 'in_file')]),
                    (b1_rf, outputnode, [('out_file', 'b1_pulse')])])
    return wf
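A hedged usage sketch (QUIT-style interfaces as imported by the original module; paths are hypothetical). Note that the '2db1map_file' field starts with a digit, so it cannot be set with plain attribute syntax:

wf = init_b1_mcf(scale=150)
wf.base_dir = '/tmp/work'                                     # hypothetical
setattr(wf.inputs.inputnode, '2db1map_file', 'b1map.nii.gz')  # field name is not a valid identifier
wf.inputs.inputnode.ref_file = 't1w.nii.gz'                   # hypothetical registration target
wf.run()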
Example #3
def run(output_dir: str, pipeline_name: str, fmri_file: str, conf_raw: str,
        conf_json: str):
    pipeline = load_pipeline_from_json(get_pipeline_path(pipeline_name))
    workflow = Workflow(name="test_workflow", base_dir=output_dir)
    conf_node = Node(Confounds(pipeline=pipeline,
                               conf_raw=conf_raw,
                               conf_json=conf_json,
                               subject="test",
                               task="test",
                               session="test",
                               output_dir=output_dir),
                     name="Confprep")
    denoising_node = Node(Denoise(pipeline=pipeline,
                                  task="test",
                                  output_dir=output_dir),
                          name="Denoise")
    if not is_IcaAROMA(pipeline):
        smoothing_node = Node(Smooth(fmri_prep=fmri_file,
                                     output_directory=output_dir),
                              name="Smooth")
        workflow.connect([(smoothing_node, denoising_node, [("fmri_smoothed",
                                                             "fmri_prep")])])
    else:
        denoising_node.inputs.fmri_prep_aroma = fmri_file
    workflow.connect([(conf_node, denoising_node, [("conf_prep", "conf_prep")])
                      ])
    workflow.run()
Example #4
def workflow_corr_ieeg_fmri(PARAMETERS, FREESURFER_PATH):

    input = Node(IdentityInterface(
        fields=['subject', 'T1w', 'bold', 'ieeg', 'electrodes']),
                 name='input')

    output = Node(function_corr, name='output')
    output.inputs.pvalue = PARAMETERS['corr']['pvalue']

    w_fmri = workflow_fmri(PARAMETERS['fmri'], FREESURFER_PATH)
    w_ieeg = workflow_ieeg(PARAMETERS['ieeg'])

    w = Workflow('grvx')

    w.connect(input, 'ieeg', w_ieeg, 'input.ieeg')
    w.connect(input, 'electrodes', w_ieeg, 'input.electrodes')

    w.connect(input, 'subject', w_fmri, 'input.subject')
    w.connect(input, 'T1w', w_fmri, 'input.T1w')
    w.connect(input, 'bold', w_fmri, 'input.bold')
    w.connect(input, 'electrodes', w_fmri, 'input.electrodes')

    w.connect(w_ieeg, 'ecog_compare.tsv_compare', output, 'ecog_file')
    w.connect(w_fmri, 'at_elec.fmri_vals', output, 'fmri_file')

    return w
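A hedged sketch of driving this parent workflow (PARAMETERS must carry the 'corr', 'fmri', and 'ieeg' keys used above; paths are hypothetical):

w = workflow_corr_ieeg_fmri(PARAMETERS, '/opt/freesurfer/subjects')  # hypothetical FreeSurfer dir
w.base_dir = '/tmp/grvx_work'       # hypothetical
w.inputs.input.subject = 'sub-01'   # hypothetical subject label
w.run(plugin='MultiProc')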
Example #5
def workflow_ieeg(PARAMETERS):

    input = Node(IdentityInterface(fields=['ieeg', 'electrodes']), name='input')

    node_read = Node(function_ieeg_read, name='read')
    node_read.inputs.conditions = PARAMETERS['read']['conditions']
    node_read.inputs.minimalduration = PARAMETERS['read']['minimalduration']

    node_preprocess = MapNode(function_ieeg_preprocess, name='preprocess', iterfield=['ieeg', ])
    node_preprocess.inputs.duration = PARAMETERS['preprocess']['duration']
    node_preprocess.inputs.reref = PARAMETERS['preprocess']['reref']
    node_preprocess.inputs.offset = PARAMETERS['preprocess']['offset']

    node_frequency = MapNode(function_ieeg_powerspectrum, name='powerspectrum', iterfield=['ieeg', ])
    node_frequency.inputs.method = PARAMETERS['powerspectrum']['method']
    node_frequency.inputs.taper = PARAMETERS['powerspectrum']['taper']
    node_frequency.inputs.duration = PARAMETERS['powerspectrum']['duration']

    node_compare = Node(function_ieeg_compare, name='ecog_compare')
    node_compare.inputs.frequency = PARAMETERS['ecog_compare']['frequency']
    node_compare.inputs.baseline = PARAMETERS['ecog_compare']['baseline']
    node_compare.inputs.method = PARAMETERS['ecog_compare']['method']
    node_compare.inputs.measure = PARAMETERS['ecog_compare']['measure']

    w = Workflow('ieeg')

    w.connect(input, 'ieeg', node_read, 'ieeg')
    w.connect(input, 'electrodes', node_read, 'electrodes')
    w.connect(node_read, 'ieeg', node_preprocess, 'ieeg')
    w.connect(node_preprocess, 'ieeg', node_frequency, 'ieeg')
    w.connect(node_frequency, 'ieeg', node_compare, 'in_files')

    return w
Example #6
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['T1_skullstrip'])
        self.set_output(
            ['affine_anat_2_atlas', 'warp_anat_2_atlas', 'anat_atlas'])

        # define datasink substitutions
        self.set_subs([
            ('_calc_calc_calc_calc_calc', ''),
            ('_Warped', '_atlas'),
        ])
        self.set_resubs([
            (r'sub-(?P<subject>\w+_)', r'anat/sub-\g<subject>'),  # place file under anat folder
        ])

        # create the output name for the registration
        self.create_prefix = Node(Function(input_names=['filename'],
                                           output_names=['basename'],
                                           function=get_prefix),
                                  name='create_prefix')

        # Register to Atlas
        self.register = Node(
            ants.RegistrationSynQuick(num_threads=settings['num_threads']),
            name='atlasregister')
        self.register.inputs.fixed_image = set_atlas_path(
            settings['atlas'])  # get atlas image
        self.register.n_procs = settings['num_threads']
Example #7
def create_templates_2func_workflow(threshold=0.5,
                                    name='templates_2func_workflow'):
    templates_2func_workflow = Workflow(name=name)

    # Input Node
    inputspec = Node(utility.IdentityInterface(fields=[
        'func_file',
        'premat',
        'warp',
        'templates',
    ]),
                     name='inputspec')

    # Get the overall EPI to MNI warp
    func_2mni_warp = Node(fsl.ConvertWarp(), name='func_2mni_warp')
    func_2mni_warp.inputs.reference = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')

    # Calculate the inverse warp
    mni_2func_warp = Node(fsl.InvWarp(), name='mni_2func_warp')

    # Transform MNI templates to EPI space
    templates_2func_apply = MapNode(fsl.ApplyWarp(),
                                    iterfield=['in_file'],
                                    name='templates_2func_apply')

    # Threshold templates
    templates_threshold = MapNode(
        fsl.ImageMaths(op_string='-thr {0} -bin'.format(threshold)),
        iterfield=['in_file'],
        name='templates_threshold')

    # Output Node
    outputspec = Node(utility.IdentityInterface(
        fields=['templates_2func_files', 'func_2mni_warp']),
                      name='outputspec')

    # Connect the workflow nodes
    templates_2func_workflow.connect(inputspec, 'premat', func_2mni_warp,
                                     'premat')
    templates_2func_workflow.connect(inputspec, 'warp', func_2mni_warp,
                                     'warp1')
    templates_2func_workflow.connect(inputspec, 'func_file', mni_2func_warp,
                                     'reference')
    templates_2func_workflow.connect(func_2mni_warp, 'out_file',
                                     mni_2func_warp, 'warp')
    templates_2func_workflow.connect(inputspec, 'templates',
                                     templates_2func_apply, 'in_file')
    templates_2func_workflow.connect(inputspec, 'func_file',
                                     templates_2func_apply, 'ref_file')
    templates_2func_workflow.connect(mni_2func_warp, 'inverse_warp',
                                     templates_2func_apply, 'field_file')
    templates_2func_workflow.connect(templates_2func_apply, 'out_file',
                                     templates_threshold, 'in_file')
    templates_2func_workflow.connect(func_2mni_warp, 'out_file', outputspec,
                                     'func_2mni_warp')
    templates_2func_workflow.connect(templates_threshold, 'out_file',
                                     outputspec, 'templates_2func_files')

    return templates_2func_workflow
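Usage sketch (assuming FSL is configured; all inputs below are hypothetical):

wf = create_templates_2func_workflow(threshold=0.5)
wf.base_dir = '/tmp/work'                               # hypothetical
wf.inputs.inputspec.func_file = 'func.nii.gz'           # hypothetical EPI series
wf.inputs.inputspec.premat = 'func2struct.mat'          # hypothetical FLIRT matrix
wf.inputs.inputspec.warp = 'struct2mni_warp.nii.gz'     # hypothetical FNIRT warp
wf.inputs.inputspec.templates = ['template1.nii.gz']    # hypothetical template list
wf.run()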
Example #8
def create_workflow_hrfpattern_7T(glm='spm'):
    input_node = Node(IdentityInterface(fields=[
        'bold',
        'events',
        't2star_fov',
        't2star_whole',
        't1w',
    ]),
                      name='input')

    coreg_tstat = Node(interface=FLIRT(), name='realign_result_to_anat')
    coreg_tstat.inputs.apply_xfm = True

    w = Workflow('hrf_7T')

    w_preproc = create_workflow_preproc_spm()
    if glm == 'spm':
        w_hrfpattern = create_workflow_hrfpattern_spm()
    elif glm == 'fsl':
        w_hrfpattern = create_workflow_hrfpattern_fsl()
    w_coreg = create_workflow_coreg_epi2t1w()

    w.connect(input_node, 'bold', w_preproc, 'input.bold')
    w.connect(input_node, 'events', w_hrfpattern, 'input.events')
    w.connect(input_node, 't2star_fov', w_coreg, 'input.t2star_fov')
    w.connect(input_node, 't2star_whole', w_coreg, 'input.t2star_whole')
    w.connect(input_node, 't1w', w_coreg, 'input.t1w')
    w.connect(input_node, 't1w', coreg_tstat, 'reference')
    w.connect(w_preproc, 'realign.realigned_files', w_hrfpattern, 'input.bold')
    w.connect(w_preproc, 'realign.mean_image', w_coreg, 'input.bold_mean')

    w.connect(w_hrfpattern, 'output.T_image', coreg_tstat, 'in_file')
    w.connect(w_coreg, 'output.mat_epi2t1w', coreg_tstat, 'in_matrix_file')

    return w
Example #9
def create_workflow_to_resample_baw_files(name="ResampleBAWOutputs"):
    """
    This function...
    :param name:
    :return:
    """
    workflow = Workflow(name)
    inputs_to_resample = ["t1_file", "t2_file", "hncma_file", "abc_file"]
    other_inputs = ["reference_file", "acpc_transform"]
    label_maps = ["hncma_file", "abc_file"]
    input_spec = Node(IdentityInterface(inputs_to_resample + other_inputs),
                      name="input_spec")
    output_spec = Node(IdentityInterface(inputs_to_resample),
                       name="output_spec")
    for input in inputs_to_resample:
        node = Node(BRAINSResample(), "Resample_{0}".format(input))
        node.inputs.pixelType = "short"
        node.inputs.inverseTransform = True
        node.inputs.outputVolume = input + ".nii.gz"
        if input in label_maps:
            node.inputs.interpolationMode = "NearestNeighbor"
        workflow.connect([
            (input_spec, node, [("reference_file", "referenceVolume"),
                                ("acpc_transform", "warpTransform"),
                                ("{0}".format(input), "inputVolume")]),
            (node, output_spec, [("outputVolume", "{0}".format(input))])
        ])
    return workflow
Example #10
def init_mpm_wf(me_params, mtsat_params):
    inputnode = Node(IdentityInterface(fields=['pdw_file', 't1w_file', 'mtw_file',
                                               'pdw_cal', 't1w_cal', 'mtw_cal']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['pd_map', 'r1_map', 'r2s_map', 'mtsat_map']),
                      name='outputnode')

    bet = Node(BET(mask=True, no_output=True), name='brain_mask')
    mpm = Node(MPMR2s(sequence=me_params, verbose=True), name='MPM_R2s')
    mtsat = Node(MTSat(sequence=mtsat_params, verbose=True), name='MPM_MTSat')

    wf = Workflow(name='Multi-Parametric-Mapping')
    wf.connect([(inputnode, bet, [('t1w_file', 'in_file')]),
                (inputnode, mpm, [('pdw_file', 'pdw_file'),
                                  ('t1w_file', 't1w_file'),
                                  ('mtw_file', 'mtw_file')]),
                (bet, mpm, [('mask_file', 'mask_file')]),
                (mpm, mtsat, [('s0_pdw', 'pdw_file'),
                              ('s0_t1w', 't1w_file'),
                              ('s0_mtw', 'mtw_file')]),
                (bet, mtsat, [('mask_file', 'mask_file')]),
                (mpm, outputnode, [('r2s_map', 'r2s_map')]),
                (mtsat, outputnode, [('s0_map', 'pd_map'),
                                     ('r1_map', 'r1_map'),
                                     ('delta_map', 'mtsat_map')])])
    return wf
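A hedged usage sketch (BET, MPMR2s, and MTSat as imported by the original module; the sequence dicts and paths are hypothetical placeholders):

me_params = {'MPMR2s': {}}     # hypothetical multi-echo sequence spec
mtsat_params = {'MTSat': {}}   # hypothetical MT-sat sequence spec
wf = init_mpm_wf(me_params, mtsat_params)
wf.base_dir = '/tmp/work'      # hypothetical
wf.inputs.inputnode.pdw_file = 'pdw.nii.gz'   # hypothetical
wf.inputs.inputnode.t1w_file = 't1w.nii.gz'   # hypothetical
wf.inputs.inputnode.mtw_file = 'mtw.nii.gz'   # hypothetical
wf.run()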
Example #11
def workflow_ieeg(parameters):
    node_read = Node(function_ieeg_read, name='read')
    node_read.inputs.active_conditions = parameters['ieeg']['read']['active_conditions']
    node_read.inputs.baseline_conditions = parameters['ieeg']['read']['baseline_conditions']
    node_read.inputs.minimalduration = parameters['ieeg']['read']['minimalduration']

    node_preprocess = MapNode(function_ieeg_preprocess, name='preprocess', iterfield=['ieeg', ])
    node_preprocess.inputs.duration = parameters['ieeg']['preprocess']['duration']
    node_preprocess.inputs.reref = parameters['ieeg']['preprocess']['reref']
    node_preprocess.inputs.offset = parameters['ieeg']['preprocess']['offset']

    node_frequency = MapNode(function_ieeg_powerspectrum, name='powerspectrum', iterfield=['ieeg', ])
    node_frequency.inputs.method = parameters['ieeg']['powerspectrum']['method']
    node_frequency.inputs.taper = parameters['ieeg']['powerspectrum']['taper']
    node_frequency.inputs.halfbandwidth = parameters['ieeg']['powerspectrum']['halfbandwidth']
    node_frequency.inputs.duration = parameters['ieeg']['powerspectrum']['duration']

    node_compare = Node(function_ieeg_compare, name='ecog_compare')
    node_compare.iterables = (
        'frequency', parameters['ieeg']['ecog_compare']['frequency_bands'],
        )
    node_compare.inputs.baseline = parameters['ieeg']['ecog_compare']['baseline']
    node_compare.inputs.method = parameters['ieeg']['ecog_compare']['method']
    node_compare.inputs.measure = parameters['ieeg']['ecog_compare']['measure']

    node_compare_allfreq = Node(function_ieeg_compare_allfreq, name='ecog_compare_allfreq')

    w = Workflow('ieeg')

    w.connect(node_read, 'ieeg', node_preprocess, 'ieeg')
    w.connect(node_preprocess, 'ieeg', node_frequency, 'ieeg')
    w.connect(node_frequency, 'ieeg', node_compare, 'in_files')
    w.connect(node_frequency, 'ieeg', node_compare_allfreq, 'in_files')

    return w
Example #12
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['refimg', 'T1_skullstrip'])
        self.set_output(['affine_func_2_anat', 'warp_func_2_anat'])

        # define datasink substitutions
        self.set_subs([
            ('_calc_calc_calc_calc_calc', ''),
            ('_roi', '_reference'),
            ('_unwarped_Warped', '_unwarped'),
            ('_masked_calc', '_skullstrip'),
            ('_Warped', '_anat'),
        ])

        # Skullstrip the EPI image
        self.epi_skullstrip = Node(fsl.BET(), name='epi_skullstrip')
        self.epi_automask = Node(afni.Automask(args='-overwrite',
                                               outputtype='NIFTI_GZ'),
                                 name='epi_automask')
        self.epi_3dcalc = Node(afni.Calc(expr='c*or(a,b)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               name='epi_3dcalc')

        # create the output name for the registration
        self.create_prefix = Node(Function(input_names=['filename'],
                                           output_names=['basename'],
                                           function=get_prefix),
                                  name='create_prefix')

        # align func to anat
        self.align_func_2_anat = Node(ants.Registration(
            num_threads=settings['num_threads'],
            collapse_output_transforms=False,
            initial_moving_transform_com=1,
            write_composite_transform=True,
            initialize_transforms_per_stage=True,
            transforms=['Rigid', 'Affine'],
            transform_parameters=[(0.1, ), (0.1, )],
            metric=['MI', 'MI'],
            metric_weight=[1, 1],
            radius_or_number_of_bins=[32, 32],
            sampling_strategy=['Regular', 'Regular'],
            sampling_percentage=[0.25, 0.25],
            convergence_threshold=[1.e-6, 1.e-8],
            convergence_window_size=[10, 10],
            smoothing_sigmas=[[3, 2, 1, 0], [2, 1, 0]],
            sigma_units=['vox', 'vox'],
            shrink_factors=[[8, 4, 2, 1], [4, 2, 1]],
            number_of_iterations=[[1000, 500, 250, 100], [500, 250, 100]],
            use_estimate_learning_rate_once=[False, True],
            use_histogram_matching=False,
            verbose=True,
            output_warped_image=True),
                                      name='align_func_2_anat')
        self.align_func_2_anat.n_procs = settings['num_threads']
Example #13
def create_bbregister_workflow(name="bbregister",
                               contrast_type="t2",
                               partial_brain=False,
                               init_with="fsl"):
    """Find a linear transformation to align the EPI file with the anatomy."""
    in_fields = ["subject_id", "timeseries"]
    if partial_brain:
        in_fields.append("whole_brain_template")
    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Take the mean over time to get a target volume
    meanvol = MapNode(fsl.MeanImage(), "in_file", "meanvol")

    # Do a rough skullstrip using BET
    skullstrip = MapNode(fsl.BET(), "in_file", "bet")

    # Estimate the registration to Freesurfer conformed space
    func2anat = MapNode(
        fs.BBRegister(contrast_type=contrast_type,
                      init=init_with,
                      epi_mask=True,
                      registered_file=True,
                      out_reg_file="func2anat_tkreg.dat",
                      out_fsl_file="func2anat_flirt.mat"), "source_file",
        "func2anat")

    # Make an image for quality control on the registration
    report = MapNode(CoregReport(), "in_file", "coreg_report")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["tkreg_mat", "flirt_mat", "report"]),
                      "outputs")

    bbregister = Workflow(name=name)

    # Connect the registration
    bbregister.connect([
        (inputnode, func2anat, [("subject_id", "subject_id")]),
        (inputnode, report, [("subject_id", "subject_id")]),
        (inputnode, meanvol, [("timeseries", "in_file")]),
        (meanvol, skullstrip, [("out_file", "in_file")]),
        (skullstrip, func2anat, [("out_file", "source_file")]),
        (func2anat, report, [("registered_file", "in_file")]),
        (func2anat, outputnode, [("out_reg_file", "tkreg_mat")]),
        (func2anat, outputnode, [("out_fsl_file", "flirt_mat")]),
        (report, outputnode, [("out_file", "report")]),
    ])

    # Possibly connect the full_fov image
    if partial_brain:
        bbregister.connect([
            (inputnode, func2anat, [("whole_brain_template",
                                     "intermediate_file")]),
        ])

    return bbregister
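Usage sketch (hedged; it assumes FreeSurfer is configured with SUBJECTS_DIR set, and note that the input node here is named "inputs"):

wf = create_bbregister_workflow(contrast_type='t2', init_with='fsl')
wf.base_dir = '/tmp/work'                      # hypothetical
wf.inputs.inputs.subject_id = 'subj01'         # hypothetical FreeSurfer subject
wf.inputs.inputs.timeseries = ['run1.nii.gz']  # hypothetical EPI runs (MapNodes iterate)
wf.run()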
Example #14
def get_ants_cmd_normalize_T1_MNI():
    """Prepare a Workflow that normalizes a T1 image to MNI space by
    calling the antsRegistration command line directly.

    Returns
    -------
    wf : nipype Workflow
    """
    from nipype import Workflow, Node
    from nipype.interfaces import utility
    from nipype.interfaces.base import CommandLine
    from nipype.interfaces.io import DataGrabber

    # Define workflow
    wf = Workflow(name='Normalize_Struct2MNI_cmd', base_dir='')

    # Set the INPUT node
    node_input = Node(utility.IdentityInterface(fields=[
        'T1_img',
        'MNI_ref_img',
    ]),
        name='input_node')

    # Read the command file (antsRegistration parameters, without --metric);
    # the original snippet reads it for reference but never wires it in
    with open("T12mni_ants_command.txt") as file:
        cmd = file.read()
        print(cmd)

    # Grabber that hands the image paths to the command line; the original
    # snippet used this node without defining it, so the fields are a guess
    node_grabber = Node(DataGrabber(infields=['arg1', 'arg2'],
                                    outfields=['outfiles']),
                        name='grabber_node')

    node_T12mni_cmd = Node(CommandLine(
        command='antsRegistration',
        environ={'DISPLAY': ':1'}
    ),
        name='T12mni_cmd_node')

    node_output = Node(utility.IdentityInterface(fields=[
        'struct2MNI_warp',
        'struct2MNI_img'
    ]),
        name='output_node')

    wf.connect([
        # inputs
        (node_input, node_grabber, [("T1_img", "arg2")]),
        (node_input, node_grabber, [("MNI_ref_img", "arg1")]),
        # connections
        (node_grabber, node_T12mni_cmd, [("outfiles", "args")]),
        # yield relevant data to the output node; a bare CommandLine has no
        # named outputs, so these connections assume a subclass whose output
        # spec defines composite_transform and warped_image
        (node_T12mni_cmd, node_output, [("composite_transform", "struct2MNI_warp")]),
        (node_T12mni_cmd, node_output, [("warped_image", "struct2MNI_img")]),
    ])

    return wf
Example #15
def get_ants_normalize_T1_MNI():
    """Prepare Workflow to 
    Parameters
    ----------
    
    Returns
    -------
 
    """
    from os import path, environ
    from nipype import Workflow, Node
    from nipype.interfaces import ants, utility

    #Defines workflow
    wf = Workflow(name='Normalize_Struct2MNI', base_dir='')

    #Setting INPUT node...
    node_input = Node(utility.IdentityInterface(fields=[
        'T1_img',
        'MNI_ref_img',
    ]),
                      name='input_node')

    # Are the inputs given this way?
    node_T12mni = Node(ants.Registration(
        transforms=['Rigid', 'Affine', 'SyN'],
        shrink_factors=[[8, 4, 2, 1], [8, 4, 2, 1], [8, 4, 2, 1]],
        smoothing_sigmas=[[3, 2, 1, 0], [3, 2, 1, 0], [3, 2, 1, 0]],
        radius_or_number_of_bins=[32] * 3,
        metric=['MI'] * 3,
        transform_parameters=[(0.1, ), (0.1, ), (0.1, 3, 0)],
        number_of_iterations=[[1000, 500, 250, 100], [1000, 500, 250, 100],
                              [1000, 500, 250, 100]],
        write_composite_transform=True,
        metric_weight=[1] * 3,
    ),
                       name='T12mni_node')

    # Using apply is recommended
    node_output = Node(utility.IdentityInterface(
        fields=['struct2MNI_warp', 'struct2MNI_img']),
                       name='output_node')

    wf.connect([
        #inputs
        (node_input, node_T12mni, [("T1_img", "moving_image")]),
        (node_input, node_T12mni, [("MNI_ref_img", "fixed_image")]),
        # yield relevant data to the output node
        (node_T12mni, node_output, [("composite_transform", "struct2MNI_warp")]),
        (node_T12mni, node_output, [("warped_image", "struct2MNI_img")]),
    ])

    return wf
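Usage sketch (hedged; ANTs must be on the PATH, and the paths below are hypothetical):

wf = get_ants_normalize_T1_MNI()
wf.base_dir = '/tmp/work'                                   # hypothetical
wf.inputs.input_node.T1_img = 'T1w.nii.gz'                  # hypothetical moving image
wf.inputs.input_node.MNI_ref_img = 'MNI152_T1_1mm.nii.gz'   # hypothetical fixed image
wf.run()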
Example #16
def create_reg_workflow(name="reg",
                        space="mni",
                        regtype="model",
                        method="fsl",
                        residual=False,
                        cross_exp=False):
    """Flexibly register files into one of several common spaces."""

    # Define the input fields flexibly
    if regtype == "model":
        fields = ["copes", "varcopes", "sumsquares"]
    elif regtype == "timeseries":
        fields = ["timeseries"]

    if cross_exp:
        fields.extend(["first_rigid"])

    fields.extend(["means", "masks", "rigids"])

    if space == "mni":
        fields.extend(["affine", "warpfield"])
    else:
        fields.extend(["tkreg_rigid"])

    inputnode = Node(IdentityInterface(fields), "inputnode")

    # Grab the correct interface class dynamically
    interface_name = "{}{}Registration".format(space.upper(),
                                               regtype.capitalize())
    reg_interface = globals()[interface_name]
    transform = Node(reg_interface(method=method), "transform")

    # Sanity check on inputs
    if regtype == "model" and residual:
        raise ValueError("residual and regtype=model does not make sense")

    # Set the kind of timeseries
    if residual:
        transform.inputs.residual = True

    outputnode = Node(IdentityInterface(["out_files"]), "outputnode")

    # Define the workflow
    regflow = Workflow(name=name)

    # Connect the inputs programmatically
    for field in fields:
        regflow.connect(inputnode, field, transform, field)

    # The transform node only ever has one output
    regflow.connect(transform, "out_files", outputnode, "out_files")

    return regflow, inputnode, outputnode
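Because the function returns the workflow together with its input and output nodes, callers can wire it straight into a parent workflow; a hedged sketch (the parent, source, and sink nodes are hypothetical):

regflow, reg_in, reg_out = create_reg_workflow(space='mni', regtype='timeseries')
# parent.connect(source, 'timeseries', reg_in, 'timeseries')
# parent.connect(reg_out, 'out_files', sink, 'reg.@files')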
Example #17
def define_workflow(subject_list, run_list, experiment_dir, output_dir):
    """run the smooth workflow given subject and runs"""
    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
                   name="extract")

    # Smooth - image smoothing
    smooth = Node(Smooth(fwhm=[8, 8, 8]), name="smooth")

    # Mask - applying mask to smoothed
    # mask_func = Node(ApplyMask(output_type='NIFTI'),
    # name="mask_func")

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id', 'run_num']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list),
                            ('run_num', run_list)]

    # SelectFiles - to grab the data (an alternative to DataGrabber)
    func_file = opj(
        'sub-{subject_id}', 'func',
        'sub-{subject_id}_task-tsl_run-{run_num}_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
    )
    templates = {'func': func_file}
    selectfiles = Node(SelectFiles(templates, base_directory=data_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-'), ('ssub', 'sub'),
                     ('_space-MNI152NLin2009cAsym_desc-preproc_', '_fwhm-8_'),
                     ('_fwhm_', ''), ('_roi', '')]
    substitutions += [('_run_num_%s' % r, '') for r in run_list]
    datasink.inputs.substitutions = substitutions

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the preprocessing workflow (spm smooth)
    preproc.connect([(infosource, selectfiles, [('subject_id', 'subject_id'),
                                                ('run_num', 'run_num')]),
                     (selectfiles, extract, [('func', 'in_file')]),
                     (extract, smooth, [('roi_file', 'in_files')]),
                     (smooth, datasink, [('smoothed_files', 'preproc.@smooth')
                                         ])])
    return preproc
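Usage sketch (hedged; note that data_dir and working_dir are assumed to be module-level globals, and the arguments below are hypothetical):

preproc = define_workflow(subject_list=['01', '02'],   # hypothetical subjects
                          run_list=['1', '2'],         # hypothetical runs
                          experiment_dir='/data/exp',  # hypothetical
                          output_dir='datasink')
preproc.run('MultiProc', plugin_args={'n_procs': 4})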
Example #18
def workflow_spec(name="{workflow_name}", exp_info=None):
    """Return a Nipype workflow for MR processing.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information
    """
    workflow = Workflow(name)

    if exp_info is None:
        exp_info = fitz.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = [""]  # "timeseries"]

    inputnode = Node(IdentityInterface(in_fields), "inputs")
    """
    # Define Actual Nipype Nodes, Workflows, etc.
    # e.g. The start of an example SPM preproc workflow
    # --------------------------------------------------

    slicetiming = pe.Node(interface=spm.SliceTiming(), name="slicetiming")
    slicetiming.inputs.ref_slice = 1
    realign = pe.Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True
    """
    # Example connections (commented out: the nodes above exist only as a
    # sketch in the docstring, so wiring them here would fail)
    # workflow.connect([
    #     (inputnode, slicetiming,
    #         [('timeseries', 'in_files')]),
    #     (slicetiming, realign,
    #         [('timecorrected_files', 'in_files')]),
    # ])

    output_fields = [""]  # realigned_files", "realignment_parameters"]

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    # workflow.connect([
    #     (realign, outputnode,
    #         [("realigned_files", "realigned_files"),
    #          ("realignment_parameters", "realignment_parameters")]),
    # ])

    # Return the workflow itself and input and output nodes.
    return workflow, inputnode, outputnode
Example #19
def create_machine_learning_workflow(name="CreateEdgeProbabilityMap",
                                     resample=True,
                                     plugin_args=None):
    """
    This function...
    :param name:
    :param resample:
    :param plugin_args:
    :return:
    """
    workflow = Workflow(name)
    input_spec = Node(IdentityInterface([
        "rho", "phi", "theta", "posteriors", "t1_file", "acpc_transform",
        "gm_classifier_file", "wm_classifier_file"
    ]),
                      name="input_spec")

    predict_edge_probability = Node(PredictEdgeProbability(),
                                    name="PredictEdgeProbability")
    if plugin_args:
        predict_edge_probability.plugin_args = plugin_args
    workflow.connect([(input_spec, predict_edge_probability,
                       [("t1_file", "t1_file"),
                        ("gm_classifier_file", "gm_classifier_file"),
                        ("wm_classifier_file", "wm_classifier_file")])])

    if resample:
        collect_features = Node(CollectFeatureFiles(),
                                name="CollectFeatureFiles")
        collect_features.inputs.inverse_transform = True
        workflow.connect([(input_spec, collect_features,
                           [("rho", "rho"), ("phi", "phi"), ("theta", "theta"),
                            ("posteriors", "posterior_files"),
                            ("t1_file", "reference_file"),
                            ("acpc_transform", "transform_file")])])

        workflow.connect([(collect_features, predict_edge_probability,
                           [("feature_files", "additional_files")])])
    else:
        print("workflow not yet created")
        # TODO: create workflow that does not resample the input images
        return

    output_spec = Node(IdentityInterface(
        ["gm_probability_map", "wm_probability_map"]),
                       name="output_spec")
    workflow.connect(predict_edge_probability, "gm_edge_probability",
                     output_spec, "gm_probability_map")
    workflow.connect(predict_edge_probability, "wm_edge_probability",
                     output_spec, "wm_probability_map")

    return workflow
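Usage sketch (hedged; plugin_args is forwarded to the prediction node, so its keys depend on the execution plugin and the values below are hypothetical):

wf = create_machine_learning_workflow(
    resample=True,
    plugin_args={'qsub_args': '-l h_vmem=8G'})  # hypothetical SGE settings
wf.base_dir = '/tmp/work'                       # hypothetical
# set wf.inputs.input_spec.* (t1_file, classifier files, etc.) before wf.run()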
Example #20
    def test_execute(self, lyman_dir, execdir):

        info = frontend.info(lyman_dir=lyman_dir)

        def f(x):
            return x**2

        assert f(2) == 4

        n1 = Node(Function("x", "y", f), "n1")
        n2 = Node(Function("x", "y", f), "n2")

        wf = Workflow("test", base_dir=info.cache_dir)
        wf.connect(n1, "y", n2, "x")
        wf.inputs.n1.x = 2

        cache_dir = execdir.join("cache").join("test")

        class args(object):
            graph = False
            n_procs = 1
            debug = False
            clear_cache = True
            execute = True

        frontend.execute(wf, args, info)
        assert not cache_dir.exists()

        args.debug = True
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.debug = False
        info.remove_cache = False
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.execute = False
        res = frontend.execute(wf, args, info)
        assert res is None

        args.execute = True
        fname = str(execdir.join("graph").join("workflow.dot"))
        args.graph = fname
        res = frontend.execute(wf, args, info)
        assert res == fname[:-4] + ".svg"

        args.graph = True
        args.stage = "preproc"
        res = frontend.execute(wf, args, info)
        assert res == cache_dir.join("preproc.svg")
Example #21
def create_slicetime_workflow(name="slicetime",
                              TR=2,
                              slice_order="up",
                              interleaved=False):

    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    if isinstance(interleaved, str) and interleaved.lower() == "siemens":

        sliceorder = MapNode(SiemensSliceOrder(), "in_file", "sliceorder")
        slicetimer_set_interleaved = False
        slicetimer_iterfields = ["in_file", "custom_order"]

    elif isinstance(interleaved, bool):

        sliceorder = None
        slicetimer_set_interleaved = interleaved
        slicetimer_iterfields = ["in_file"]

    else:

        raise ValueError("interleaved must be True, False, or 'siemens'")

    slicetimer = MapNode(fsl.SliceTimer(time_repetition=TR),
                         slicetimer_iterfields, "slicetime")

    if slicetimer_set_interleaved:
        slicetimer.inputs.interleaved = True

    if slice_order == "down":
        slicetimer.inputs.index_dir = True
    elif slice_order != "up":
        raise ValueError("slice_order must be 'up' or 'down'")

    outputnode = Node(IdentityInterface(["timeseries"]), "outputs")

    slicetime = Workflow(name)
    slicetime.connect([
        (inputnode, slicetimer, [("timeseries", "in_file")]),
        (slicetimer, outputnode, [("slice_time_corrected_file", "timeseries")
                                  ]),
    ])

    if sliceorder is not None:
        slicetime.connect([
            (inputnode, sliceorder, [("timeseries", "in_file")]),
            (sliceorder, slicetimer, [("out_file", "custom_order")]),
        ])

    return slicetime
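Usage sketch (hedged; the input node is named "inputs", and with interleaved='siemens' the timeseries should be a list because the slice-timing nodes are MapNodes):

st = create_slicetime_workflow(TR=2.0, slice_order='up', interleaved='siemens')
st.base_dir = '/tmp/work'                      # hypothetical
st.inputs.inputs.timeseries = ['run1.nii.gz']  # hypothetical run list
st.run()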
Example #22
def embed_metadata_from_dicoms(bids_options, item_dicoms, outname,
                               outname_bids, prov_file, scaninfo, tempdirs,
                               with_prov):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids_options
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov

    Returns
    -------

    """
    from nipype import Node, Function
    tmpdir = tempdirs(prefix='embedmeta')

    # Ensure that paths are absolute
    item_dicoms = list(map(op.abspath, item_dicoms))

    embedfunc = Node(Function(input_names=[
        'dcmfiles',
        'niftifile',
        'infofile',
        'bids_info',
    ],
                              function=embed_dicom_and_nifti_metadata),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.bids_info = load_json(
        op.abspath(outname_bids)) if (bids_options is not None) else None
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()

    lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s", scaninfo,
              item_dicoms[0], outname)
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file, format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
Example #23
def register_t1_2_standard_node(metric, metric_weight, transforms,
                                smoothing_sigmas, shrink_factors,
                                number_of_iterations, transform_parameters):

    reg = Node(ants.Registration(metric=metric,
                                 metric_weight=metric_weight,
                                 transforms=transforms,
                                 smoothing_sigmas=smoothing_sigmas,
                                 shrink_factors=shrink_factors,
                                 number_of_iterations=number_of_iterations,
                                 transform_parameters=transform_parameters,
                                 radius_or_number_of_bins=[32] * 3,
                                 output_transform_prefix="output_",
                                 dimension=3,
                                 write_composite_transform=True,
                                 collapse_output_transforms=False,
                                 initialize_transforms_per_stage=False,
                                 sampling_strategy=['Random', 'Random', None],
                                 sampling_percentage=[0.05, 0.05, None],
                                 convergence_threshold=[1.e-8, 1.e-9, 1.e-10],
                                 convergence_window_size=[20] * 3,
                                 sigma_units=['vox'] * 3,
                                 output_warped_image='output_warped_image.nii.gz'),
               name='registration_node')

    return reg
Example #24
def run_freesurfer(subject_id, T1_images, subjects_dir, T2_image=None):
    """Run freesurfer, convert to nidm and extract stats
    """
    from nipype.interfaces import freesurfer as fs
    from nipype import Node
    from fs_dir_to_graph import to_graph
    from query_convert_fs_stats import get_collections, process_collection

    recon = Node(fs.ReconAll(), name='recon')
    recon.inputs.T1_files = T1_images
    recon.inputs.subject_id = subject_id
    recon.inputs.subjects_dir = subjects_dir
    recon.inputs.openmp = 4
    if T2_image:
        recon.inputs.T2_file = T2_image
    recon.base_dir = os.path.abspath(os.path.join('working', subject_id))

    results = recon.run()
    provgraph = results.provenance
    newgraph = to_graph(
        os.path.join(results.outputs.subjects_dir, results.outputs.subject_id))
    provgraph.add_bundle(newgraph)
    provgraph.rdf().serialize('test1.ttl', format='turtle')
    results = get_collections(provgraph.rdf())
    collections = []
    for row in results:
        collections.append(str(row[0]))
    if len(collections) > 1:
        raise ValueError('More than one freesurfer directory collection found')
    provgraph, termsrdf = process_collection(provgraph, collections.pop())
    rdfgraph = provgraph.rdf() + termsrdf
    return provgraph, rdfgraph
Example #25
    def register_T1_to_simnibs(self):

        ### run FLIRT registration if it has not been run before
        dest_img = os.path.join(self.mesh_dir, 'm2m_' + self.subject,
                                'T1fs_conform.nii.gz')
        if not os.path.exists(
                os.path.join(self.wf_base_dir, 'T1_to_simnibs_registration')):

            flirt = Node(FLIRT(), name='flirt')
            flirt.inputs.in_file = os.path.join(self.mesh_dir,
                                                'm2m_' + self.subject,
                                                'T1fs.nii.gz')
            flirt.inputs.reference = dest_img
            flirt.inputs.out_file = 'T1_in_Simnibs.nii.gz'
            flirt.inputs.out_matrix_file = 'T12Simnibs.mat'
            flirt.inputs.searchr_x = [-180, 180]
            flirt.inputs.searchr_y = [-180, 180]
            flirt.inputs.searchr_z = [-180, 180]

            wf = Workflow(name='T1_to_simnibs_registration',
                          base_dir=self.wf_base_dir)
            wf.add_nodes([flirt])
            wf.run()

        ## path to registration file
        t12simnibs_reg = os.path.join(self.wf_base_dir,
                                      'T1_to_simnibs_registration', 'flirt',
                                      'T12Simnibs.mat')

        return t12simnibs_reg
Example #26
def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """ """
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths

    dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_dir = dicom_dir
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))

    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(
            op.join(convertnode.base_dir, convertnode.name, 'provenance.ttl'),
            prov_file)

    return eg, prov_file
Example #27
def NodeJoinFeatures():
    node = Node(Function(
        function=joinFeatures,
        input_names=["data", "prefix", "output_dir", "confName", "kindConn"],
        output_names=["graphFeatures"]),
                name="JoinFeatures")
    return node
Example #28
def NodePandasAdj2Nx():

    node = Node(Function(function=pandasAdj2Nx,
                         input_names=["df"],
                         output_names=["graph"]),
                name="Pandas2Graph")
    return node
Example #29
def run_bet(
        skip_existing: bool = True
):
    full_pattern = os.path.join(DATA_DIR, PATTERN)
    scans = glob.iglob(full_pattern, recursive=True)
    for scan in scans:
        print(f'\nCurrent series: {scan}')
        if skip_existing:
            print('Checking for existing skull-stripping output...', end='\t')
        dest = get_default_destination(scan)
        if skip_existing and os.path.isfile(dest):
            print('\u2714')
            continue
        print('\u2718')
        print('Running skull-stripping with BET...')
        try:
            bet = Node(BET(robust=True), name='bet_node')
            bet.inputs.in_file = scan
            bet.inputs.out_file = dest
            bet.run()
            print('\u2714\tDone!')
        except Exception as e:
            print('\u2718')
            print(e.args)
            break
Example #30
def create_workflow_temporalpatterns_7T(subjects, runs):


    input_node = Node(IdentityInterface(fields=[
        'bold',
        'events',
        't2star_fov',
        't2star_whole',
        't1w',
        ]), name='input')

    coreg_tstat = MapNode(
        interface=FLIRT(), name='realign_result_to_anat',
        iterfield=['in_file', ])
    coreg_tstat.inputs.apply_xfm = True

    w = Workflow('temporalpatterns_7T')

    w_preproc = create_workflow_preproc_spm()
    w_spatialobject = create_workflow_temporalpatterns_fsl()
    w_coreg = create_workflow_coreg_epi2t1w()

    w.connect(input_node, 'bold', w_preproc, 'input.bold')
    w.connect(input_node, 'events', w_spatialobject, 'input.events')
    w.connect(input_node, 't2star_fov', w_coreg, 'input.t2star_fov')
    w.connect(input_node, 't2star_whole', w_coreg, 'input.t2star_whole')
    w.connect(input_node, 't1w', w_coreg, 'input.t1w')
    w.connect(input_node, 't1w', coreg_tstat, 'reference')
    w.connect(w_preproc, 'realign.realigned_files', w_spatialobject, 'input.bold')
    w.connect(w_preproc, 'realign.mean_image', w_coreg, 'input.bold_mean')

    w.connect(w_spatialobject, 'output.T_image', coreg_tstat, 'in_file')
    w.connect(w_coreg, 'output.mat_epi2t1w', coreg_tstat, 'in_matrix_file')

    return w
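Usage sketch (hedged; note that the subjects and runs arguments are not used inside the function body above, and the paths are hypothetical):

w = create_workflow_temporalpatterns_7T(subjects=['sub-01'], runs=['1'])
w.base_dir = '/tmp/work'                 # hypothetical
w.inputs.input.bold = ['run1.nii.gz']    # hypothetical BOLD runs
w.run()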