Example no. 1
def workflow_ieeg(parameters):
    node_read = Node(function_ieeg_read, name='read')
    node_read.inputs.active_conditions = parameters['ieeg']['read']['active_conditions']
    node_read.inputs.baseline_conditions = parameters['ieeg']['read']['baseline_conditions']
    node_read.inputs.minimalduration = parameters['ieeg']['read']['minimalduration']

    node_preprocess = MapNode(function_ieeg_preprocess, name='preprocess', iterfield=['ieeg', ])
    node_preprocess.inputs.duration = parameters['ieeg']['preprocess']['duration']
    node_preprocess.inputs.reref = parameters['ieeg']['preprocess']['reref']
    node_preprocess.inputs.offset = parameters['ieeg']['preprocess']['offset']

    node_frequency = MapNode(function_ieeg_powerspectrum, name='powerspectrum', iterfield=['ieeg', ])
    node_frequency.inputs.method = parameters['ieeg']['powerspectrum']['method']
    node_frequency.inputs.taper = parameters['ieeg']['powerspectrum']['taper']
    node_frequency.inputs.halfbandwidth = parameters['ieeg']['powerspectrum']['halfbandwidth']
    node_frequency.inputs.duration = parameters['ieeg']['powerspectrum']['duration']

    node_compare = Node(function_ieeg_compare, name='ecog_compare')
    node_compare.iterables = (
        'frequency', parameters['ieeg']['ecog_compare']['frequency_bands'],
        )
    node_compare.inputs.baseline = parameters['ieeg']['ecog_compare']['baseline']
    node_compare.inputs.method = parameters['ieeg']['ecog_compare']['method']
    node_compare.inputs.measure = parameters['ieeg']['ecog_compare']['measure']

    node_compare_allfreq = Node(function_ieeg_compare_allfreq, name='ecog_compare_allfreq')

    w = Workflow('ieeg')

    w.connect(node_read, 'ieeg', node_preprocess, 'ieeg')
    w.connect(node_preprocess, 'ieeg', node_frequency, 'ieeg')
    w.connect(node_frequency, 'ieeg', node_compare, 'in_files')
    w.connect(node_frequency, 'ieeg', node_compare_allfreq, 'in_files')

    return w
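
Because `iterables` is set on the compare node, Nipype expands `ecog_compare` into one copy per frequency band at run time; an illustrative parameter value (band edges hypothetical):

parameters['ieeg']['ecog_compare']['frequency_bands'] = [(65, 95), (30, 60)]
# -> two parameterized runs of 'ecog_compare', one per band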
Example no. 2
def run_freesurfer(subject_id, T1_images, subjects_dir, T2_image=None):
    """Run freesurfer, convert to nidm and extract stats
    """
    import os
    from nipype.interfaces import freesurfer as fs
    from nipype import Node
    from fs_dir_to_graph import to_graph
    from query_convert_fs_stats import get_collections, process_collection

    recon = Node(fs.ReconAll(), name='recon')
    recon.inputs.T1_files = T1_images
    recon.inputs.subject_id = subject_id
    recon.inputs.subjects_dir = subjects_dir
    recon.inputs.openmp = 4
    if T2_image:
        recon.inputs.T2_file = T2_image
    recon.base_dir = os.path.abspath(os.path.join('working', subject_id))

    results = recon.run()
    provgraph = results.provenance
    newgraph = to_graph(
        os.path.join(results.outputs.subjects_dir, results.outputs.subject_id))
    provgraph.add_bundle(newgraph)
    provgraph.rdf().serialize('test1.ttl', format='turtle')
    results = get_collections(provgraph.rdf())
    collections = []
    for row in results:
        collections.append(str(row[0]))
    if len(collections) > 1:
        raise ValueError('More than one freesurfer directory collection found')
    provgraph, termsrdf = process_collection(provgraph, collections.pop())
    rdfgraph = provgraph.rdf() + termsrdf
    return provgraph, rdfgraph
Example no. 3
def workflow_corr_ieeg_fmri(PARAMETERS, FREESURFER_PATH):

    input = Node(IdentityInterface(
        fields=['subject', 'T1w', 'bold', 'ieeg', 'electrodes']),
                 name='input')

    output = Node(function_corr, name='output')
    output.inputs.pvalue = PARAMETERS['corr']['pvalue']

    w_fmri = workflow_fmri(PARAMETERS['fmri'], FREESURFER_PATH)
    w_ieeg = workflow_ieeg(PARAMETERS['ieeg'])

    w = Workflow('grvx')

    w.connect(input, 'ieeg', w_ieeg, 'input.ieeg')
    w.connect(input, 'electrodes', w_ieeg, 'input.electrodes')

    w.connect(input, 'subject', w_fmri, 'input.subject')
    w.connect(input, 'T1w', w_fmri, 'input.T1w')
    w.connect(input, 'bold', w_fmri, 'input.bold')
    w.connect(input, 'electrodes', w_fmri, 'input.electrodes')

    w.connect(w_ieeg, 'ecog_compare.tsv_compare', output, 'ecog_file')
    w.connect(w_fmri, 'at_elec.fmri_vals', output, 'fmri_file')

    return w
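
A minimal usage sketch (paths illustrative; the node and field names come from the `input` IdentityInterface above), showing how the combined workflow might be driven:

w = workflow_corr_ieeg_fmri(PARAMETERS, FREESURFER_PATH)
w.base_dir = '/tmp/grvx_work'  # hypothetical working directory
w.inputs.input.subject = 'sub-01'
w.inputs.input.T1w = '/data/sub-01/anat/sub-01_T1w.nii.gz'
w.inputs.input.bold = '/data/sub-01/func/sub-01_bold.nii.gz'
w.inputs.input.ieeg = '/data/sub-01/ieeg/sub-01_ieeg.eeg'
w.inputs.input.electrodes = '/data/sub-01/ieeg/sub-01_electrodes.tsv'
w.run()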
Example no. 4
def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """ """
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms)) # absolute paths

    dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))

    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(op.join(convertnode.base_dir,
                              convertnode.name,
                              'provenance.ttl'),
                      prov_file)

    return eg, prov_file
Example no. 5
def init_b1_mcf(rf_pulse=None, scale=150):
    inputnode = Node(IdentityInterface(fields=['2db1map_file', 'ref_file']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['b1_plus', 'b1_pulse']),
                      name='outputnode')

    b1_b1 = Node(ExtractROI(t_min=0, t_size=1), name='b1_extract_b1')
    b1_filter = Node(Filter(filter_spec='Gauss,3.0'), name='b1_filter')
    b1_mag = Node(ExtractROI(t_min=1, t_size=1), name='b1_extract_mag')

    b1_reg = Node(FLIRT(out_file='b1mag_reg.nii.gz',
                        out_matrix_file='b1mag_reg.mat'),
                  name='b1_reg')
    b1_invert = Node(ConvertXFM(invert_xfm=True), name='b1_invert')
    b1_apply = Node(FLIRT(apply_xfm=True), name='b1_reg_apply')
    b1_scale = Node(ImageMaths(op_string='-div %f' % scale), name='b1_scale')

    wf = Workflow(name='b1_prep')
    wf.connect([(inputnode, b1_b1, [('2db1map_file', 'in_file')]),
                (inputnode, b1_mag, [('2db1map_file', 'in_file')]),
                (inputnode, b1_reg, [('ref_file', 'in_file')]),
                (inputnode, b1_apply, [('ref_file', 'reference')]),
                (b1_mag, b1_reg, [('roi_file', 'reference')]),
                (b1_reg, b1_invert, [('out_matrix_file', 'in_file')]),
                (b1_invert, b1_apply, [('out_file', 'in_matrix_file')]),
                (b1_b1, b1_filter, [('roi_file', 'in_file')]),
                (b1_filter, b1_apply, [('out_file', 'in_file')]),
                (b1_apply, b1_scale, [('out_file', 'in_file')]),
                (b1_scale, outputnode, [('out_file', 'b1_plus')])])
    if rf_pulse:
        b1_rf = Node(RFProfile(rf=rf_pulse, out_file='b1_rf.nii.gz'),
                     name='b1_rf')
        wf.connect([(b1_scale, b1_rf, [('out_file', 'in_file')]),
                    (b1_rf, outputnode, [('out_file', 'b1_pulse')])])
    return wf
Example no. 7
def workflow_ieeg(PARAMETERS):

    input = Node(IdentityInterface(fields=['ieeg', 'electrodes']), name='input')

    node_read = Node(function_ieeg_read, name='read')
    node_read.inputs.conditions = PARAMETERS['read']['conditions']
    node_read.inputs.minimalduration = PARAMETERS['read']['minimalduration']

    node_preprocess = MapNode(function_ieeg_preprocess, name='preprocess', iterfield=['ieeg', ])
    node_preprocess.inputs.duration = PARAMETERS['preprocess']['duration']
    node_preprocess.inputs.reref = PARAMETERS['preprocess']['reref']
    node_preprocess.inputs.offset = PARAMETERS['preprocess']['offset']

    node_frequency = MapNode(function_ieeg_powerspectrum, name='powerspectrum', iterfield=['ieeg', ])
    node_frequency.inputs.method = PARAMETERS['powerspectrum']['method']
    node_frequency.inputs.taper = PARAMETERS['powerspectrum']['taper']
    node_frequency.inputs.duration = PARAMETERS['powerspectrum']['duration']

    node_compare = Node(function_ieeg_compare, name='ecog_compare')
    node_compare.inputs.frequency = PARAMETERS['ecog_compare']['frequency']
    node_compare.inputs.baseline = PARAMETERS['ecog_compare']['baseline']
    node_compare.inputs.method = PARAMETERS['ecog_compare']['method']
    node_compare.inputs.measure = PARAMETERS['ecog_compare']['measure']

    w = Workflow('ieeg')

    w.connect(input, 'ieeg', node_read, 'ieeg')
    w.connect(input, 'electrodes', node_read, 'electrodes')
    w.connect(node_read, 'ieeg', node_preprocess, 'ieeg')
    w.connect(node_preprocess, 'ieeg', node_frequency, 'ieeg')
    w.connect(node_frequency, 'ieeg', node_compare, 'in_files')

    return w
Example no. 8
def run_bet(
        skip_existing: bool = True
):
    full_pattern = os.path.join(DATA_DIR, PATTERN)
    scans = glob.iglob(full_pattern, recursive=True)
    for scan in scans:
        print(f'\nCurrent series: {scan}')
        if skip_existing:
            print('Checking for existing skull-stripping output...', end='\t')
        dest = get_default_destination(scan)
        if skip_existing and os.path.isfile(dest):
            print('\u2714')
            continue
        print('\u2718')
        print('Running skull-stripping with BET...')
        try:
            bet = Node(BET(robust=True), name='bet_node')
            bet.inputs.in_file = scan
            bet.inputs.out_file = dest
            bet.run()
            print('\u2714\tDone!')
        except Exception as e:
            print('\u2718')
            print(e.args)
            break
Example no. 9
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['T1_skullstrip'])
        self.set_output(
            ['affine_anat_2_atlas', 'warp_anat_2_atlas', 'anat_atlas'])

        # define datasink substitutions
        self.set_subs([
            ('_calc_calc_calc_calc_calc', ''),
            ('_Warped', '_atlas'),
        ])
        self.set_resubs([
            # place file under anat folder
            (r'sub-(?P<subject>\w+_)', r'anat/sub-\g<subject>'),
        ])

        # create the output name for the registration
        self.create_prefix = Node(Function(input_names=['filename'],
                                           output_names=['basename'],
                                           function=get_prefix),
                                  name='create_prefix')

        # Register to Atlas
        self.register = Node(
            ants.RegistrationSynQuick(num_threads=settings['num_threads']),
            name='atlasregister')
        self.register.inputs.fixed_image = set_atlas_path(
            settings['atlas'])  # get atlas image
        self.register.n_procs = settings['num_threads']
Example no. 10
def embed_metadata_from_dicoms(bids_options, item_dicoms, outname,
                               outname_bids, prov_file, scaninfo, tempdirs,
                               with_prov):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids_options
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov

    Returns
    -------

    """
    from nipype import Node, Function
    tmpdir = tempdirs(prefix='embedmeta')

    # We need to assure that paths are absolute if they are relative
    item_dicoms = list(map(op.abspath, item_dicoms))

    embedfunc = Node(Function(input_names=[
        'dcmfiles',
        'niftifile',
        'infofile',
        'bids_info',
    ],
                              function=embed_dicom_and_nifti_metadata),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.bids_info = load_json(
        op.abspath(outname_bids)) if (bids_options is not None) else None
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()

    lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s", scaninfo,
              item_dicoms[0], outname)
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file, format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
Example no. 11
def nipype_convert(item_dicoms, prefix, with_prov, bids, tmpdir):
    """ """
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths

    dicom_dir = op.dirname(item_dicoms[0]) if item_dicoms else None

    convertnode = Node(Dcm2niix(), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_dir = dicom_dir
    convertnode.inputs.out_filename = op.basename(op.dirname(prefix))

    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(
            op.join(convertnode.base_dir, convertnode.name, 'provenance.ttl'),
            prov_file)

    return eg, prov_file
Example no. 12
def create_workflow_to_resample_baw_files(name="ResampleBAWOutputs"):
    """
    This function...
    :param name:
    :return:
    """
    workflow = Workflow(name)
    inputs_to_resample = ["t1_file", "t2_file", "hncma_file", "abc_file"]
    other_inputs = ["reference_file", "acpc_transform"]
    label_maps = ["hncma_file", "abc_file"]
    input_spec = Node(IdentityInterface(inputs_to_resample + other_inputs),
                      name="input_spec")
    output_spec = Node(IdentityInterface(inputs_to_resample),
                       name="output_spec")
    for field in inputs_to_resample:
        node = Node(BRAINSResample(), "Resample_{0}".format(field))
        node.inputs.pixelType = "short"
        node.inputs.inverseTransform = True
        node.inputs.outputVolume = field + ".nii.gz"
        if field in label_maps:
            node.inputs.interpolationMode = "NearestNeighbor"
        workflow.connect([
            (input_spec, node, [("reference_file", "referenceVolume"),
                                ("acpc_transform", "warpTransform"),
                                (field, "inputVolume")]),
            (node, output_spec, [("outputVolume", field)])
        ])
    return workflow
Example no. 13
def init_mpm_wf(me_params, mtsat_params):
    inputnode = Node(IdentityInterface(fields=['pdw_file', 't1w_file', 'mtw_file',
                                               'pdw_cal', 't1w_cal', 'mtw_cal']),
                     name='inputnode')
    outputnode = Node(IdentityInterface(fields=['pd_map', 'r1_map', 'r2s_map', 'mtsat_map']),
                      name='outputnode')

    bet = Node(BET(mask=True, no_output=True), name='brain_mask')
    mpm = Node(MPMR2s(sequence=me_params, verbose=True), name='MPM_R2s')
    mtsat = Node(MTSat(sequence=mtsat_params, verbose=True), name='MPM_MTSat')

    wf = Workflow(name='Multi-Parametric-Mapping')
    wf.connect([(inputnode, bet, [('t1w_file', 'in_file')]),
                (inputnode, mpm, [('pdw_file', 'pdw_file'),
                                  ('t1w_file', 't1w_file'),
                                  ('mtw_file', 'mtw_file')]),
                (bet, mpm, [('mask_file', 'mask_file')]),
                (mpm, mtsat, [('s0_pdw', 'pdw_file'),
                              ('s0_t1w', 't1w_file'),
                              ('s0_mtw', 'mtw_file')]),
                (bet, mtsat, [('mask_file', 'mask_file')]),
                (mpm, outputnode, [('r2s_map', 'r2s_map')]),
                (mtsat, outputnode, [('s0_map', 'pd_map'),
                                     ('r1_map', 'r1_map'),
                                     ('delta_map', 'mtsat_map')])])
    return wf
Example no. 14
def create_templates_2func_workflow(threshold=0.5,
                                    name='templates_2func_workflow'):
    templates_2func_workflow = Workflow(name=name)

    # Input Node
    inputspec = Node(utility.IdentityInterface(fields=[
        'func_file',
        'premat',
        'warp',
        'templates',
    ]),
                     name='inputspec')

    # Get the overal EPI to MNI warp
    func_2mni_warp = Node(fsl.ConvertWarp(), name='func_2mni_warp')
    func_2mni_warp.inputs.reference = fsl.Info.standard_image(
        'MNI152_T1_2mm.nii.gz')

    # Calculate the inverse warp
    mni_2func_warp = Node(fsl.InvWarp(), name='mni_2func_warp')

    # Transform MNI templates to EPI space
    templates_2func_apply = MapNode(fsl.ApplyWarp(),
                                    iterfield=['in_file'],
                                    name='templates_2func_apply')

    # Threshold templates
    templates_threshold = MapNode(
        fsl.ImageMaths(op_string='-thr {0} -bin'.format(threshold)),
        iterfield=['in_file'],
        name='templates_threshold')

    # Output Node
    outputspec = Node(utility.IdentityInterface(
        fields=['templates_2func_files', 'func_2mni_warp']),
                      name='outputspec')

    # Connect the workflow nodes
    templates_2func_workflow.connect(inputspec, 'premat', func_2mni_warp,
                                     'premat')
    templates_2func_workflow.connect(inputspec, 'warp', func_2mni_warp,
                                     'warp1')
    templates_2func_workflow.connect(inputspec, 'func_file', mni_2func_warp,
                                     'reference')
    templates_2func_workflow.connect(func_2mni_warp, 'out_file',
                                     mni_2func_warp, 'warp')
    templates_2func_workflow.connect(inputspec, 'templates',
                                     templates_2func_apply, 'in_file')
    templates_2func_workflow.connect(inputspec, 'func_file',
                                     templates_2func_apply, 'ref_file')
    templates_2func_workflow.connect(mni_2func_warp, 'inverse_warp',
                                     templates_2func_apply, 'field_file')
    templates_2func_workflow.connect(templates_2func_apply, 'out_file',
                                     templates_threshold, 'in_file')
    templates_2func_workflow.connect(func_2mni_warp, 'out_file', outputspec,
                                     'func_2mni_warp')
    templates_2func_workflow.connect(templates_threshold, 'out_file',
                                     outputspec, 'templates_2func_files')

    return templates_2func_workflow
Example no. 15
def head_motion_correction(name='motion_correction'):
    workflow = Workflow(name)

    input_node = Node(
        niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']),
        name='input')
    output_node = Node(niu.IdentityInterface(fields=['xforms', 'movpar_file']),
                       name='outputnode')

    mcflirt = Node(fsl.MCFLIRT(save_mats=True, save_plots=True),
                   name='mcflirt')

    fsl2itk = Node(MCFLIRT2ITK(), name='fsl2itk')

    normalize_motion = Node(NormalizeMotionParams(format='FSL'),
                            name="normalize_motion")

    workflow.connect([
        (input_node, mcflirt, [('raw_ref_image', 'ref_file'),
                               ('bold_file', 'in_file')]),
        (input_node, fsl2itk, [('raw_ref_image', 'in_source'),
                               ('raw_ref_image', 'in_reference')]),
        (mcflirt, fsl2itk, [('mat_file', 'in_files')]),
        (mcflirt, normalize_motion, [('par_file', 'in_file')]),
        (fsl2itk, output_node, [('out_file', 'xforms')]),
        (normalize_motion, output_node, [('out_file', 'movpar_file')]),
    ])

    return workflow
Example no. 16
def run(output_dir: str, pipeline_name: str, fmri_file: str, conf_raw: str,
        conf_json: str):
    pipeline = load_pipeline_from_json(get_pipeline_path(pipeline_name))
    workflow = Workflow(name="test_workflow", base_dir=output_dir)
    conf_node = Node(Confounds(pipeline=pipeline,
                               conf_raw=conf_raw,
                               conf_json=conf_json,
                               subject="test",
                               task="test",
                               session="test",
                               output_dir=output_dir),
                     name="Confprep")
    denoising_node = Node(Denoise(pipeline=pipeline,
                                  task="test",
                                  output_dir=output_dir),
                          name="Denoise")
    if not is_IcaAROMA(pipeline):
        smoothing_node = Node(Smooth(fmri_prep=fmri_file,
                                     output_directory=output_dir),
                              name="Smooth")
        workflow.connect([(smoothing_node, denoising_node, [("fmri_smoothed",
                                                             "fmri_prep")])])
    else:
        denoising_node.inputs.fmri_prep_aroma = fmri_file
    workflow.connect([(conf_node, denoising_node, [("conf_prep", "conf_prep")])])
    workflow.run()
Example no. 17
def test_neuropythy_atlas():

    n = Node(function_neuropythy_atlas, 'atlas')
    n.base_dir = str(ANALYSIS_PATH)
    n.inputs.subject_id = 'sub-delft'
    n.inputs.subjects_dir = str(FREESURFER_PATH)
    n.run()
Example no. 18
def create_workflow_hrfpattern_7T(glm='spm'):
    input_node = Node(IdentityInterface(fields=[
        'bold',
        'events',
        't2star_fov',
        't2star_whole',
        't1w',
    ]),
                      name='input')

    coreg_tstat = Node(interface=FLIRT(), name='realign_result_to_anat')
    coreg_tstat.inputs.apply_xfm = True

    w = Workflow('hrf_7T')

    w_preproc = create_workflow_preproc_spm()
    if glm == 'spm':
        w_hrfpattern = create_workflow_hrfpattern_spm()
    elif glm == 'fsl':
        w_hrfpattern = create_workflow_hrfpattern_fsl()
    else:
        raise ValueError("glm must be 'spm' or 'fsl'")
    w_coreg = create_workflow_coreg_epi2t1w()

    w.connect(input_node, 'bold', w_preproc, 'input.bold')
    w.connect(input_node, 'events', w_hrfpattern, 'input.events')
    w.connect(input_node, 't2star_fov', w_coreg, 'input.t2star_fov')
    w.connect(input_node, 't2star_whole', w_coreg, 'input.t2star_whole')
    w.connect(input_node, 't1w', w_coreg, 'input.t1w')
    w.connect(input_node, 't1w', coreg_tstat, 'reference')
    w.connect(w_preproc, 'realign.realigned_files', w_hrfpattern, 'input.bold')
    w.connect(w_preproc, 'realign.mean_image', w_coreg, 'input.bold_mean')

    w.connect(w_hrfpattern, 'output.T_image', coreg_tstat, 'in_file')
    w.connect(w_coreg, 'output.mat_epi2t1w', coreg_tstat, 'in_matrix_file')

    return w
Example no. 19
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['refimg', 'T1_skullstrip'])
        self.set_output(['affine_func_2_anat', 'warp_func_2_anat'])

        # define datasink substitutions
        self.set_subs([
            ('_calc_calc_calc_calc_calc', ''),
            ('_roi', '_reference'),
            ('_unwarped_Warped', '_unwarped'),
            ('_masked_calc', '_skullstrip'),
            ('_Warped', '_anat'),
        ])

        # Skullstrip the EPI image
        self.epi_skullstrip = Node(fsl.BET(), name='epi_skullstrip')
        self.epi_automask = Node(afni.Automask(args='-overwrite',
                                               outputtype='NIFTI_GZ'),
                                 name='epi_automask')
        self.epi_3dcalc = Node(afni.Calc(expr='c*or(a,b)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               name='epi_3dcalc')

        # create the output name for the registration
        self.create_prefix = Node(Function(input_names=['filename'],
                                           output_names=['basename'],
                                           function=get_prefix),
                                  name='create_prefix')

        # align func to anat
        self.align_func_2_anat = Node(ants.Registration(
            num_threads=settings['num_threads'],
            collapse_output_transforms=False,
            initial_moving_transform_com=1,
            write_composite_transform=True,
            initialize_transforms_per_stage=True,
            transforms=['Rigid', 'Affine'],
            transform_parameters=[(0.1, ), (0.1, )],
            metric=['MI', 'MI'],
            metric_weight=[1, 1],
            radius_or_number_of_bins=[32, 32],
            sampling_strategy=['Regular', 'Regular'],
            sampling_percentage=[0.25, 0.25],
            convergence_threshold=[1.e-6, 1.e-8],
            convergence_window_size=[10, 10],
            smoothing_sigmas=[[3, 2, 1, 0], [2, 1, 0]],
            sigma_units=['vox', 'vox'],
            shrink_factors=[[8, 4, 2, 1], [4, 2, 1]],
            number_of_iterations=[[1000, 500, 250, 100], [500, 250, 100]],
            use_estimate_learning_rate_once=[False, True],
            use_histogram_matching=False,
            verbose=True,
            output_warped_image=True),
                                      name='align_func_2_anat')
        self.align_func_2_anat.n_procs = settings['num_threads']
Example no. 20
def nipype_convert(item_dicoms,
                   prefix,
                   with_prov,
                   bids_options,
                   tmpdir,
                   dcmconfig=None):
    """
    Converts DICOMs grouped from heuristic using Nipype's Dcm2niix interface.

    Parameters
    ----------
    item_dicoms : List
        DICOM files to convert
    prefix : String
        Heuristic output path
    with_prov : Bool
        Store provenance information
    bids_options : List or None
        If not None then output BIDS sidecar JSONs
        List may contain bids specific options
    tmpdir : Directory
        Conversion working directory
    dcmconfig : File (optional)
        JSON file used for additional Dcm2niix configuration
    """
    import nipype
    if with_prov:
        from nipype import config
        config.enable_provenance()
    from nipype import Node
    from nipype.interfaces.dcm2nii import Dcm2niix

    item_dicoms = list(map(op.abspath, item_dicoms))  # absolute paths

    fromfile = dcmconfig if dcmconfig else None
    if fromfile:
        lgr.info("Using custom config file %s", fromfile)

    convertnode = Node(Dcm2niix(from_file=fromfile), name='convert')
    convertnode.base_dir = tmpdir
    convertnode.inputs.source_names = item_dicoms
    convertnode.inputs.out_filename = prefix

    if nipype.__version__.split('.')[0] == '0':
        # deprecated since 1.0, might be needed(?) before
        convertnode.inputs.terminal_output = 'allatonce'
    else:
        convertnode.terminal_output = 'allatonce'
    convertnode.inputs.bids_format = bids_options is not None
    eg = convertnode.run()

    # prov information
    prov_file = prefix + '_prov.ttl' if with_prov else None
    if prov_file:
        safe_copyfile(
            op.join(convertnode.base_dir, convertnode.name, 'provenance.ttl'),
            prov_file)

    return eg, prov_file
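
A hedged call sketch for the converter above (paths illustrative; per the docstring, an empty list for `bids_options` enables BIDS sidecar output with default options):

eg, prov_file = nipype_convert(
    item_dicoms=['/data/dicom/001.dcm', '/data/dicom/002.dcm'],
    prefix='/out/sub-01/func/sub-01_task-rest_bold',
    with_prov=False,
    bids_options=[],
    tmpdir='/tmp/dcm2niix_work')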
Example no. 21
def bids_node(parameters):
    bids = Node(BIDS, name='bids')
    bids.inputs.bids_dir = parameters['paths']['input']
    subjects = [
        sub.stem[4:] for sub in parameters['paths']['input'].glob('sub-*')
    ]
    bids.iterables = ('subject', subjects)
    return bids
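
Since `parameters['paths']['input']` is used with `.glob()` and `.stem`, it is presumably a `pathlib.Path`; a sketch of the expected mapping (BIDS root illustrative):

from pathlib import Path

parameters = {'paths': {'input': Path('/data/bids')}}
bids = bids_node(parameters)  # one iteration per sub-* entry found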
Example no. 22
def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids,
                               prov_file, scaninfo, tempdirs, with_prov,
                               min_meta):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov
    min_meta

    Returns
    -------

    """
    from nipype import Node, Function
    tmpdir = tempdirs(prefix='embedmeta')

    embedfunc = Node(Function(input_names=[
        'dcmfiles', 'niftifile', 'infofile', 'bids_info', 'force', 'min_meta'
    ],
                              output_names=['outfile', 'meta'],
                              function=embed_nifti),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.min_meta = min_meta
    if bids:
        embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids))
    else:
        embedfunc.inputs.bids_info = None
    embedfunc.inputs.force = True
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file, format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
        os.chdir(cwd)
Example no. 23
def create_bbregister_workflow(name="bbregister",
                               contrast_type="t2",
                               partial_brain=False,
                               init_with="fsl"):
    """Find a linear transformation to align the EPI file with the anatomy."""
    in_fields = ["subject_id", "timeseries"]
    if partial_brain:
        in_fields.append("whole_brain_template")
    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Take the mean over time to get a target volume
    meanvol = MapNode(fsl.MeanImage(), "in_file", "meanvol")

    # Do a rough skullstrip using BET
    skullstrip = MapNode(fsl.BET(), "in_file", "bet")

    # Estimate the registration to Freesurfer conformed space
    func2anat = MapNode(
        fs.BBRegister(contrast_type=contrast_type,
                      init=init_with,
                      epi_mask=True,
                      registered_file=True,
                      out_reg_file="func2anat_tkreg.dat",
                      out_fsl_file="func2anat_flirt.mat"), "source_file",
        "func2anat")

    # Make an image for quality control on the registration
    report = MapNode(CoregReport(), "in_file", "coreg_report")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["tkreg_mat", "flirt_mat", "report"]),
                      "outputs")

    bbregister = Workflow(name=name)

    # Connect the registration
    bbregister.connect([
        (inputnode, func2anat, [("subject_id", "subject_id")]),
        (inputnode, report, [("subject_id", "subject_id")]),
        (inputnode, meanvol, [("timeseries", "in_file")]),
        (meanvol, skullstrip, [("out_file", "in_file")]),
        (skullstrip, func2anat, [("out_file", "source_file")]),
        (func2anat, report, [("registered_file", "in_file")]),
        (func2anat, outputnode, [("out_reg_file", "tkreg_mat")]),
        (func2anat, outputnode, [("out_fsl_file", "flirt_mat")]),
        (report, outputnode, [("out_file", "report")]),
    ])

    # Possibly connect the full_fov image
    if partial_brain:
        bbregister.connect([
            (inputnode, func2anat, [("whole_brain_template",
                                     "intermediate_file")]),
        ])

    return bbregister
Example no. 24
def get_ants_cmd_normalize_T1_MNI():
    """Prepare Workflow to normalize a T1 image to MNI space, calling the
    antsRegistration command line directly.
    Parameters
    ----------

    Returns
    -------

    """
    from nipype import Workflow, Node
    from nipype.interfaces import utility
    from nipype.interfaces.base import CommandLine
    from nipype.interfaces.io import DataGrabber

    # Defines workflow
    wf = Workflow(name='Normalize_Struct2MNI_cmd', base_dir='')

    # Setting INPUT node...
    node_input = Node(utility.IdentityInterface(fields=[
        'T1_img',
        'MNI_ref_img',
    ]),
        name='input_node')

    # Reading command file (including ants-registration parameters,
    # not including --metric)
    with open("T12mni_ants_command.txt") as file:
        cmd = file.read()
        print(cmd)

    # NOTE: the original snippet referenced node_grabber without defining it;
    # a DataGrabber over the two input images is reconstructed here.
    node_grabber = Node(DataGrabber(infields=['arg1', 'arg2'],
                                    outfields=['out_args'],
                                    sort_filelist=False),
                        name='grabber_node')

    node_T12mni_cmd = Node(CommandLine(
        command='antsRegistration',
        environ={'DISPLAY': ':1'}
    ),
        name='T12mni_cmd_node')

    node_output = Node(utility.IdentityInterface(fields=[
        'struct2MNI_warp',
        'struct2MNI_img'
    ]),
        name='output_node')

    wf.connect([
        # inputs
        (node_input, node_grabber, [("T1_img", "arg2")]),
        (node_input, node_grabber, [("MNI_ref_img", "arg1")]),
        # connections
        (node_grabber, node_T12mni_cmd, [("out_args", "args")]),
        # yield relevant data to output node (these fields presume a
        # customized interface; a bare CommandLine exposes no named outputs)
        (node_T12mni_cmd, node_output,
         [("composite_transform", "struct2MNI_warp")]),
        (node_T12mni_cmd, node_output, [("warped_image", "struct2MNI_img")]),
    ])

    return wf
Example no. 25
def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(freesurfer.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(freesurfer.SampleToSurface(
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat",
                                         "surf_mask"]), "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj,
            [("zstat_file", "source_file")]),
        (inputnode, maskproj,
            [("mask_file", "source_file")]),
        (hemisource, zstatproj,
            [("mni_hemi", "hemi")]),
        (hemisource, maskproj,
            [("mni_hemi", "hemi")]),
        (zstatproj, outputnode,
            [("out_file", "surf_zstat")]),
        (maskproj, outputnode,
            [("out_file", "surf_mask")]),
        ])

    return proj
Example no. 26
def get_ants_normalize_T1_MNI():
    """Prepare Workflow to 
    Parameters
    ----------
    
    Returns
    -------
 
    """
    from os import path, environ
    from nipype import Workflow, Node
    from nipype.interfaces import ants, utility

    #Defines workflow
    wf = Workflow(name='Normalize_Struct2MNI', base_dir='')

    #Setting INPUT node...
    node_input = Node(utility.IdentityInterface(fields=[
        'T1_img',
        'MNI_ref_img',
    ]),
                      name='input_node')

    # Are the inputs given like this?
    node_T12mni = Node(ants.Registration(
        transforms=['Rigid', 'Affine', 'SyN'],
        shrink_factors=[[8, 4, 2, 1], [8, 4, 2, 1], [8, 4, 2, 1]],
        smoothing_sigmas=[[3, 2, 1, 0], [3, 2, 1, 0], [3, 2, 1, 0]],
        radius_or_number_of_bins=[32] * 3,
        metric=['MI'] * 3,
        transform_parameters=[(0.1, ), (0.1, ), (0.1, 3, 0)],
        number_of_iterations=[[1000, 500, 250, 100], [1000, 500, 250, 100],
                              [1000, 500, 250, 100]],
        write_composite_transform=True,
        metric_weight=[1] * 3,
    ),
                       name='T12mni_node')

    # Using apply is recommended
    node_output = Node(utility.IdentityInterface(
        fields=['struct2MNI_warp', 'struct2MNI_img']),
                       name='output_node')

    wf.connect([
        #inputs
        (node_input, node_T12mni, [("T1_img", "moving_image")]),
        (node_input, node_T12mni, [("MNI_ref_img", "fixed_image")]),
        # yield relevant data to output node
        (node_T12mni, node_output,
         [("composite_transform", "struct2MNI_warp")]),
        (node_T12mni, node_output, [("warped_image", "struct2MNI_img")]),
    ])

    return wf
Example no. 27
def create_reg_workflow(name="reg",
                        space="mni",
                        regtype="model",
                        method="fsl",
                        residual=False,
                        cross_exp=False):
    """Flexibly register files into one of several common spaces."""

    # Define the input fields flexibly
    if regtype == "model":
        fields = ["copes", "varcopes", "sumsquares"]
    elif regtype == "timeseries":
        fields = ["timeseries"]

    if cross_exp:
        fields.extend(["first_rigid"])

    fields.extend(["means", "masks", "rigids"])

    if space == "mni":
        fields.extend(["affine", "warpfield"])
    else:
        fields.extend(["tkreg_rigid"])

    inputnode = Node(IdentityInterface(fields), "inputnode")

    # Grab the correct interface class dynamically
    interface_name = "{}{}Registration".format(space.upper(),
                                               regtype.capitalize())
    reg_interface = globals()[interface_name]
    transform = Node(reg_interface(method=method), "transform")

    # Sanity check on inputs
    if regtype == "model" and residual:
        raise ValueError("residual and regtype=model does not make sense")

    # Set the kind of timeseries
    if residual:
        transform.inputs.residual = True

    outputnode = Node(IdentityInterface(["out_files"]), "outputnode")

    # Define the workflow
    regflow = Workflow(name=name)

    # Connect the inputs programmatically
    for field in fields:
        regflow.connect(inputnode, field, transform, field)

    # The transform node only ever has one output
    regflow.connect(transform, "out_files", outputnode, "out_files")

    return regflow, inputnode, outputnode
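
For instance, with space="mni" and regtype="model" the dynamic lookup builds the class name below (the registration classes themselves are assumed to be defined in the enclosing module):

interface_name = "{}{}Registration".format("mni".upper(), "model".capitalize())
print(interface_name)  # -> "MNIModelRegistration"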
Example no. 28
def workflow_spec(name="{workflow_name}", exp_info=None):
    """Return a Nipype workflow for MR processing.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information
    """
    workflow = Workflow(name)

    if exp_info is None:
        exp_info = fitz.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = [""]  # "timeseries"]

    inputnode = Node(IdentityInterface(in_fields), "inputs")
    """
    # Define Actual Nipype Nodes, Workflows, etc.
    # e.g. The start of an example SPM preproc workflow
    # --------------------------------------------------

    slicetiming = pe.Node(interface=spm.SliceTiming(), name="slicetiming")
    slicetiming.inputs.ref_slice = 1
    realign = pe.Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True
    """
    workflow.connect([
        """
        (inputnode, slicetiming,
            [('timeseries', 'in_files')]),
        (slicetiming, realign,
            [('timecorrected_files', 'in_files')]),
        """
    ])

    output_fields = [""]  # realigned_files", "realignment_parameters"]

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    workflow.connect([
        """
        (realign, outputnode,
            [("realigned_files", "realigned_files"),
             ("realignment_parameters", "realignment_parameters")]),
        """
    ])

    # Return the workflow itself and input and output nodes.
    return workflow, inputnode, outputnode
Example no. 29
    def test_grab_entities_subjects(self):
        subject_outputs = [{'subject': '{:02d}'.format(i)} for i in range(1, 14)]
        bl = Node(BIDSGrab(bids_dir=TEST_DATASETS['NEW_FMRIPREP_DUMMY']), name="TestBIDSGrab")
        outs = bl.run()
        bl_subjects = [{'subject': element['subject']} for element in outs.outputs.entities]
        self.assertEqual(bl_subjects, subject_outputs)


# if __name__ == '__main__':
#     import cProfile
#     bl = Node(BIDSGrab(bids_dir=TEST_DATASETS['NEW_FMRIPREP_DUMMY']), name="TestBIDSGrab")
#     bl.run()
#     cProfile.run('bl.run()')
Example no. 30
    def test_execute(self, lyman_dir, execdir):

        info = frontend.info(lyman_dir=lyman_dir)

        def f(x):
            return x**2

        assert f(2) == 4

        n1 = Node(Function("x", "y", f), "n1")
        n2 = Node(Function("x", "y", f), "n2")

        wf = Workflow("test", base_dir=info.cache_dir)
        wf.connect(n1, "y", n2, "x")
        wf.inputs.n1.x = 2

        cache_dir = execdir.join("cache").join("test")

        class args(object):
            graph = False
            n_procs = 1
            debug = False
            clear_cache = True
            execute = True

        frontend.execute(wf, args, info)
        assert not cache_dir.exists()

        args.debug = True
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.debug = False
        info.remove_cache = False
        frontend.execute(wf, args, info)
        assert cache_dir.exists()

        args.execute = False
        res = frontend.execute(wf, args, info)
        assert res is None

        args.execute = True
        fname = str(execdir.join("graph").join("workflow.dot"))
        args.graph = fname
        res = frontend.execute(wf, args, info)
        assert res == fname[:-4] + ".svg"

        args.graph = True
        args.stage = "preproc"
        res = frontend.execute(wf, args, info)
        assert res == cache_dir.join("preproc.svg")
Example no. 31
def create_slicetime_workflow(name="slicetime",
                              TR=2,
                              slice_order="up",
                              interleaved=False):

    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    if isinstance(interleaved, str) and interleaved.lower() == "siemens":

        sliceorder = MapNode(SiemensSliceOrder(), "in_file", "sliceorder")
        slicetimer_set_interleaved = False
        slicetimer_iterfields = ["in_file", "custom_order"]

    elif isinstance(interleaved, bool):

        sliceorder = None
        slicetimer_set_interleaved = interleaved
        slicetimer_iterfields = ["in_file"]

    else:

        raise ValueError("interleaved must be True, False, or 'siemens'")

    slicetimer = MapNode(fsl.SliceTimer(time_repetition=TR),
                         slicetimer_iterfields, "slicetime")

    if slicetimer_set_interleaved:
        slicetimer.inputs.interleaved = True

    if slice_order == "down":
        slicetimer.inputs.index_dir = True
    elif slice_order != "up":
        raise ValueError("slice_order must be 'up' or 'down'")

    outputnode = Node(IdentityInterface(["timeseries"]), "outputs")

    slicetime = Workflow(name)
    slicetime.connect([
        (inputnode, slicetimer, [("timeseries", "in_file")]),
        (slicetimer, outputnode, [("slice_time_corrected_file", "timeseries")]),
    ])

    if sliceorder is not None:
        slicetime.connect([
            (inputnode, sliceorder, [("timeseries", "in_file")]),
            (sliceorder, slicetimer, [("out_file", "custom_order")]),
        ])

    return slicetime
Example no. 32
def run_bet(T1_image, workdir):
    """Run freesurfer, convert to nidm and extract stats
    """
    from nipype import fsl
    from nipype import Node
    from fs_dir_to_graph import to_graph
    from query_convert_fs_stats import get_collections, process_collection

    strip = Node(fsl.BET(), name='skullstripper')
    strip.inputs.in_file = T1_image
    strip.base_dir = workdir

    results = strip.run()
    provgraph = results.provenance
    return provgraph
Example no. 33
def rawdataChecker(input_file):
    # If the input is a single DCM-file instead of a multi-dim-NifTI, we have to fetch all the other files in the series
    if input_file.endswith('.dcm'):
        from nipype.interfaces.io import DataFinder
        from os import path
        from nipype import Node

        # Setup a datafinder to find the paths to the specific DICOM files
        t1FinderNode = Node(DataFinder(), name='t1Finder')
        t1FinderNode.inputs.match_regex = r'.*\.dcm'
        t1FinderNode.inputs.root_paths = path.split(input_file)[0]

        return t1FinderNode.run().outputs.out_paths
    else:
        return input_file  # If other datatype just return the same path
Example no. 36
def create_fs_reg_workflow(name="registration"):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

    ::

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space

    Example
    -------
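
    A usage sketch with illustrative values (not part of the original source)::

        reg = create_fs_reg_workflow()
        reg.inputs.inputspec.subject_id = 'sub-01'
        reg.inputs.inputspec.subjects_dir = '/data/subjects'
        reg.inputs.inputspec.mean_image = 'mean_func.nii.gz'
        reg.inputs.inputspec.source_files = ['cope1.nii.gz']
        reg.inputs.inputspec.target_image = 'MNI152_T1_2mm_brain.nii.gz'
        reg.run()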

    """

    register = Workflow(name=name)

    inputnode = Node(
        interface=IdentityInterface(
            fields=["source_files", "mean_image", "subject_id", "subjects_dir", "target_image"]
        ),
        name="inputspec",
    )

    outputnode = Node(
        interface=IdentityInterface(
            fields=[
                "func2anat_transform",
                "out_reg_file",
                "anat2target_transform",
                "transforms",
                "transformed_mean",
                "transformed_files",
                "min_cost_file",
                "anat2target",
                "aparc",
                "mean2anat_mask",
            ]
        ),
        name="outputspec",
    )

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name="fssource")
    fssource.run_without_submitting = True
    register.connect(inputnode, "subject_id", fssource, "subject_id")
    register.connect(inputnode, "subjects_dir", fssource, "subjects_dir")

    convert = Node(freesurfer.MRIConvert(out_type="nii"), name="convert")
    register.connect(fssource, "T1", convert, "in_file")

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(registered_file=True), name="bbregister")
    bbregister.inputs.init = "fsl"
    bbregister.inputs.contrast_type = "t2"
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, "subject_id", bbregister, "subject_id")
    register.connect(inputnode, "mean_image", bbregister, "source_file")
    register.connect(inputnode, "subjects_dir", bbregister, "subjects_dir")

    # Create a mask of the median coregistered to the anatomical image
    mean2anat_mask = Node(fsl.BET(mask=True), name="mean2anat_mask")
    register.connect(bbregister, "registered_file", mean2anat_mask, "in_file")

    """
    use aparc+aseg's brain mask
    """

    binarize = Node(freesurfer.Binarize(min=0.5, out_type="nii.gz", dilate=1),
                    name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file")

    stripper = Node(fsl.ApplyMask(), name="stripper")
    register.connect(binarize, "binary_file", stripper, "mask_file")
    register.connect(convert, "out_file", stripper, "in_file")

    """
    Apply inverse transform to aparc file
    """
    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True, interp="nearest"), name="aparc_inverse_transform")
    register.connect(inputnode, "subjects_dir", aparcxfm, "subjects_dir")
    register.connect(bbregister, "out_reg_file", aparcxfm, "reg_file")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), aparcxfm, "target_file")
    register.connect(inputnode, "mean_image", aparcxfm, "source_file")

    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = Node(C3dAffineTool(), name="convert2itk")
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(bbregister, "out_fsl_file", convert2itk, "transform_file")
    register.connect(inputnode, "mean_image", convert2itk, "source_file")
    register.connect(stripper, "out_file", convert2itk, "reference_file")

    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    #https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = Node(ants.Registration(), name="antsRegister")
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ["Rigid", "Affine", "SyN"]
    reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[100, 30, 20]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ["Mattes"] * 2 + [["Mattes", "CC"]]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ["Regular"] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.0e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ["vox"] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.args = "--float"
    reg.inputs.output_warped_image = "output_warped_image.nii.gz"
    reg.inputs.num_threads = 4
    reg.plugin_args = {"qsub_args": "-pe orte 4", "sbatch_args": "--mem=6G -c 4"}
    register.connect(stripper, "out_file", reg, "moving_image")
    register.connect(inputnode, "target_image", reg, "fixed_image")

    """
    Concatenate the affine and ants transforms into a list
    """

    pickfirst = lambda x: x[0]

    merge = Node(Merge(2), iterfield=["in2"], name="mergexfm")
    register.connect(convert2itk, "itk_transform", merge, "in2")
    register.connect(reg, ("composite_transform", pickfirst), merge, "in1")

    """
    Transform the mean image. First to anatomical and then to target
    """
    warpmean = Node(ants.ApplyTransforms(), name="warpmean")
    warpmean.inputs.input_image_type = 0
    warpmean.inputs.interpolation = "Linear"
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.inputs.terminal_output = "file"
    warpmean.inputs.args = "--float"
    # warpmean.inputs.num_threads = 4
    # warpmean.plugin_args = {'sbatch_args': '--mem=4G -c 4'}

    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(), iterfield=["input_image"],
                      name="warpall")
    warpall.inputs.input_image_type = 0
    warpall.inputs.interpolation = "Linear"
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = "file"
    warpall.inputs.args = "--float"
    warpall.inputs.num_threads = 2
    warpall.plugin_args = {"sbatch_args": "--mem=6G -c 2"}

    """
    Assign all the output files
    """

    register.connect(warpmean, "output_image", outputnode, "transformed_mean")
    register.connect(warpall, "output_image", outputnode, "transformed_files")

    register.connect(inputnode, "target_image", warpmean, "reference_image")
    register.connect(inputnode, "mean_image", warpmean, "input_image")
    register.connect(merge, "out", warpmean, "transforms")
    register.connect(inputnode, "target_image", warpall, "reference_image")
    register.connect(inputnode, "source_files", warpall, "input_image")
    register.connect(merge, "out", warpall, "transforms")

    """
    Assign all the output files
    """

    register.connect(reg, "warped_image", outputnode, "anat2target")
    register.connect(aparcxfm, "transformed_file", outputnode, "aparc")
    register.connect(bbregister, "out_fsl_file", outputnode, "func2anat_transform")
    register.connect(bbregister, "out_reg_file", outputnode, "out_reg_file")
    register.connect(bbregister, "min_cost_file", outputnode, "min_cost_file")
    register.connect(mean2anat_mask, "mask_file", outputnode, "mean2anat_mask")
    register.connect(reg, "composite_transform", outputnode, "anat2target_transform")
    register.connect(merge, "out", outputnode, "transforms")

    return register
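
# A hedged usage sketch (the factory name and file paths are assumptions,
# since the start of this snippet is truncated): populate the inputnode,
# then run the returned workflow.
#
#   register = create_reg_workflow()          # hypothetical factory name
#   register.inputs.inputnode.mean_image = 'meanbold.nii.gz'
#   register.inputs.inputnode.target_image = 'MNI152_T1_2mm.nii.gz'
#   register.base_dir = '/tmp/register_work'
#   register.run(plugin='MultiProc', plugin_args={'n_procs': 4})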
Example n. 37

# Perform the registration between subject T1 space and dwMRI space
bbregNode = Node(freesurfer.preprocess.BBRegister(), name='BBRegister')
bbregNode.inputs.init = "fsl"
bbregNode.inputs.contrast_type = "t2"
bbregNode.inputs.epi_mask = True
bbregNode.inputs.out_fsl_file = True
bbregNode.inputs.args = "--tol1d 1e-3"
#bbregNode.inputs.subject_id = reconallFolderName


# ### Surface2Vol

# Transform Left Hemisphere
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(), name='surf2vol_lh')
surf2volNode_lh.inputs.hemi = 'lh'
surf2volNode_lh.inputs.mkmask = True
#surf2volNode_lh.inputs.subject_id = reconallFolderName
surf2volNode_lh.inputs.vertexvol_file = 'test'

# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone('surf2vol_rh')
surf2volNode_rh.inputs.hemi = 'rh'

# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name='mergeHemis')
mergeHemisNode.inputs.operation = 'add'
mergeHemisNode.inputs.output_type = 'NIFTI_GZ'
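
# A hedged wiring sketch (the Workflow object and connections are assumptions,
# not part of the snippet): the two hemisphere masks would typically be summed
# by feeding one into 'in_file' and the other into 'operand_file', e.g.
#
#   wf = Workflow(name='surface_masks')
#   wf.connect(surf2volNode_lh, 'transformed_file', mergeHemisNode, 'in_file')
#   wf.connect(surf2volNode_rh, 'transformed_file', mergeHemisNode, 'operand_file')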

def group_onesample_openfmri(dataset_dir, model_id=None, task_id=None,
                             l1output_dir=None, out_dir=None, no_reversal=False):

    wk = Workflow(name='one_sample')
    wk.base_dir = os.path.abspath(work_dir)

    info = Node(util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir']),
                name='infosource')
    info.inputs.model_id = model_id
    info.inputs.task_id = task_id
    info.inputs.dataset_dir = dataset_dir

    num_copes = contrasts_num(model_id, task_id, dataset_dir)

    dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                          outfields=['copes', 'varcopes']), name='grabber')
    dg.inputs.template = os.path.join(l1output_dir,
                                      'model%03d/task%03d/*/%scopes/mni/%scope%02d.nii.gz')
    dg.inputs.template_args['copes'] = [['model_id', 'task_id', '', '', 'cope_id']]
    dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', 'var', 'var', 'cope_id']]
    dg.iterables = ('cope_id', num_copes)
    dg.inputs.sort_filelist = True

    wk.connect(info, 'model_id', dg, 'model_id')
    wk.connect(info, 'task_id', dg, 'task_id')

    model = Node(L2Model(), name='l2model')

    wk.connect(dg, ('copes', get_len), model, 'num_copes')

    mergecopes = Node(Merge(dimension='t'), name='merge_copes')
    wk.connect(dg, 'copes', mergecopes, 'in_files')

    mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
    wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')

    mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
    flame = Node(FLAMEO(), name='flameo')
    flame.inputs.mask_file = mask_file
    flame.inputs.run_mode = 'flame1'

    wk.connect(model, 'design_mat', flame, 'design_file')
    wk.connect(model, 'design_con', flame, 't_con_file')
    wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
    wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
    wk.connect(model, 'design_grp', flame, 'cov_split_file')

    smoothest = Node(SmoothEstimate(), name='smooth_estimate')
    wk.connect(flame, 'zstats', smoothest, 'zstat_file')
    smoothest.inputs.mask_file = mask_file

    cluster = Node(Cluster(), name='cluster')
    wk.connect(smoothest,'dlh', cluster, 'dlh')
    wk.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = 26
    cluster.inputs.threshold = 2.3
    cluster.inputs.pthreshold = 0.05
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_index_file = True
    cluster.inputs.out_localmax_txt_file = True

    wk.connect(flame, 'zstats', cluster, 'in_file')

    ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                   name='z2pval')
    wk.connect(flame, 'zstats', ztopval, 'in_file')

    sinker = Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.abspath(out_dir)
    sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                   ('_maths__', '_reversed_')]

    wk.connect(flame, 'zstats', sinker, 'stats')
    wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
    wk.connect(cluster, 'index_file', sinker, 'stats.@index')
    wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
    
    if not no_reversal:
        zstats_reverse = Node(BinaryMaths(), name='zstats_reverse')
        zstats_reverse.inputs.operation = 'mul'
        zstats_reverse.inputs.operand_value = -1
        wk.connect(flame, 'zstats', zstats_reverse, 'in_file')

        cluster2 = cluster.clone(name='cluster2')
        wk.connect(smoothest, 'dlh', cluster2, 'dlh')
        wk.connect(smoothest, 'volume', cluster2, 'volume')
        wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')

        ztopval2 = ztopval.clone(name='ztopval2')
        wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')

        wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
        wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
        wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
        wk.connect(cluster2, 'localmax_txt_file', sinker, 'stats.@neg_localmax')

    return wk
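
# A hedged usage sketch (the paths are placeholders, not from the source):
#
#   wf = group_onesample_openfmri('/data/ds000114', model_id=1, task_id=1,
#                                 l1output_dir='/out/l1', out_dir='/out/group')
#   wf.run(plugin='MultiProc')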
Example n. 39
                            radius_or_number_of_bins=[32, 32, 4],
                            sampling_percentage=[0.25, 0.25, 1],
                            sampling_strategy=['Regular', 'Regular', 'None'],
                            shrink_factors=[[8, 4, 2, 1]] * 3,
                            smoothing_sigmas=[[3, 2, 1, 0]] * 3,
                            transform_parameters=[(0.1,), (0.1,),
                                                  (0.1, 3.0, 0.0)],
                            use_histogram_matching=True,
                            write_composite_transform=True),
               name='antsreg')

###
# Input & Output Stream

# Infosource - a function-free node to iterate over the list of subject names
infosource = Node(IdentityInterface(fields=['subject_id']),
                  name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - to grab the data (alternative to DataGrabber)
anat_file = opj('sub-{subject_id}', 'ses-test', 'anat', 'sub-{subject_id}_ses-test_T1w.nii.gz')
templates = {'anat': anat_file}

selectfiles = Node(SelectFiles(templates,
                               base_directory='/data/ds000114'),
                   name="selectfiles")

# Datasink - creates output folder for important outputs
datasink = Node(DataSink(base_directory=experiment_dir,
                         container=output_dir),
                name="datasink")
Example n. 40
def create_surface_ols_workflow(name="surface_group",
                                subject_list=None,
                                exp_info=None):
    """Workflow to project ffx copes onto surface and run ols."""
    if subject_list is None:
        subject_list = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["l1_contrast",
                                        "copes",
                                        "reg_file",
                                        "subject_id"]),
                     "inputnode")

    hemisource = Node(IdentityInterface(["hemi"]), "hemisource")
    hemisource.iterables = ("hemi", ["lh", "rh"])

    # Sample the volume-encoded native data onto the fsaverage surface
    # manifold with projection + spherical transform
    surfsample = MapNode(fs.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        target_subject="fsaverage"),
        ["subject_id", "reg_file", "source_file"], "surfsample")

    # Remove subjects with completely empty images
    removeempty = Node(RemoveEmpty(), "removeempty")

    # Concatenate the subject files into a 4D image
    mergecope = Node(fs.Concatenate(), "mergecope")

    # Run the one-sample OLS model
    glmfit = Node(fs.GLMFit(one_sample=True,
                            surf=True,
                            cortex=True,
                            glm_dir="_glm_results",
                            subject_id="fsaverage"),
                  "glmfit")

    # Use the cached Monte-Carlo simulations for correction
    cluster = Node(Function(["y_file",
                             "glm_dir",
                             "sign",
                             "cluster_zthresh",
                             "p_thresh"],
                            ["glm_dir",
                             "thresholded_file"],
                            glm_corrections,
                            imports),
                   "cluster")
    cluster.inputs.cluster_zthresh = exp_info["cluster_zthresh"]
    cluster.inputs.p_thresh = exp_info["grf_pthresh"]
    cluster.inputs.sign = exp_info["surf_corr_sign"]

    # Return the outputs
    outputnode = Node(IdentityInterface(["glm_dir", "sig_file"]), "outputnode")

    # Define and connect the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, surfsample,
            [("copes", "source_file"),
             ("reg_file", "reg_file"),
             ("subject_id", "subject_id")]),
        (hemisource, surfsample,
            [("hemi", "hemi")]),
        (surfsample, removeempty,
            [("out_file", "in_files")]),
        (removeempty, mergecope,
            [("out_files", "in_files")]),
        (mergecope, glmfit,
            [("concatenated_file", "in_file")]),
        (hemisource, glmfit,
            [("hemi", "hemi")]),
        (mergecope, cluster,
            [("concatenated_file", "y_file")]),
        (glmfit, cluster,
            [("glm_dir", "glm_dir")]),
        (glmfit, outputnode,
            [("glm_dir", "glm_dir")]),
        (cluster, outputnode,
            [("thresholded_file", "sig_file")]),
        ])

    return group, inputnode, outputnode
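
# A hedged usage sketch (subject names and the contrast are placeholders):
# the factory returns the workflow together with its endpoints so the caller
# can wire or set them directly, e.g.
#
#   group, inputnode, outputnode = create_surface_ols_workflow(
#       subject_list=['s01', 's02'])
#   inputnode.inputs.l1_contrast = 'task-vs-baseline'
#   group.run()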
Example n. 41
def embed_metadata_from_dicoms(bids, item_dicoms, outname, outname_bids,
                               prov_file, scaninfo, tempdirs, with_prov,
                               min_meta):
    """
    Enhance sidecar information file with more information from DICOMs

    Parameters
    ----------
    bids
    item_dicoms
    outname
    outname_bids
    prov_file
    scaninfo
    tempdirs
    with_prov
    min_meta

    Returns
    -------

    """
    from nipype import Node, Function
    tmpdir = tempdirs(prefix='embedmeta')

    # We need to assure that paths are absolute if they are relative
    item_dicoms = list(map(op.abspath, item_dicoms))

    embedfunc = Node(Function(input_names=['dcmfiles', 'niftifile', 'infofile',
                                           'bids_info', 'force', 'min_meta'],
                              output_names=['outfile', 'meta'],
                              function=embed_nifti),
                     name='embedder')
    embedfunc.inputs.dcmfiles = item_dicoms
    embedfunc.inputs.niftifile = op.abspath(outname)
    embedfunc.inputs.infofile = op.abspath(scaninfo)
    embedfunc.inputs.min_meta = min_meta
    if bids:
        embedfunc.inputs.bids_info = load_json(op.abspath(outname_bids))
    else:
        embedfunc.inputs.bids_info = None
    embedfunc.inputs.force = True
    embedfunc.base_dir = tmpdir
    cwd = os.getcwd()
    lgr.debug("Embedding into %s based on dicoms[0]=%s for nifti %s",
              scaninfo, item_dicoms[0], outname)
    try:
        if op.lexists(scaninfo):
            # TODO: handle annexed file case
            if not op.islink(scaninfo):
                set_readonly(scaninfo, False)
        res = embedfunc.run()
        set_readonly(scaninfo)
        if with_prov:
            g = res.provenance.rdf()
            g.parse(prov_file,
                    format='turtle')
            g.serialize(prov_file, format='turtle')
            set_readonly(prov_file)
    except Exception as exc:
        lgr.error("Embedding failed: %s", str(exc))
    finally:
        os.chdir(cwd)
Example n. 42
def create_machine_learning_workflow(
    name="CreateEdgeProbabilityMap", resample=True, plugin_args=None
):
    """
    This function...

    :param name:
    :param resample:
    :param plugin_args:
    :return:
    """
    workflow = Workflow(name)
    input_spec = Node(
        IdentityInterface(
            [
                "rho",
                "phi",
                "theta",
                "posteriors",
                "t1_file",
                "acpc_transform",
                "gm_classifier_file",
                "wm_classifier_file",
            ]
        ),
        name="input_spec",
    )

    predict_edge_probability = Node(
        PredictEdgeProbability(), name="PredictEdgeProbability"
    )
    if plugin_args:
        predict_edge_probability.plugin_args = plugin_args
    workflow.connect(
        [
            (
                input_spec,
                predict_edge_probability,
                [
                    ("t1_file", "t1_file"),
                    ("gm_classifier_file", "gm_classifier_file"),
                    ("wm_classifier_file", "wm_classifier_file"),
                ],
            )
        ]
    )

    if resample:
        collect_features = Node(CollectFeatureFiles(), name="CollectFeatureFiles")
        collect_features.inputs.inverse_transform = True
        workflow.connect(
            [
                (
                    input_spec,
                    collect_features,
                    [
                        ("rho", "rho"),
                        ("phi", "phi"),
                        ("theta", "theta"),
                        ("posteriors", "posterior_files"),
                        ("t1_file", "reference_file"),
                        ("acpc_transform", "transform_file"),
                    ],
                )
            ]
        )

        workflow.connect(
            [
                (
                    collect_features,
                    predict_edge_probability,
                    [("feature_files", "additional_files")],
                )
            ]
        )
    else:
        print("workflow not yet created")
        # TODO: create workflow that does not resample the input images
        return

    output_spec = Node(
        IdentityInterface(["gm_probability_map", "wm_probability_map"]),
        name="output_spec",
    )
    workflow.connect(
        predict_edge_probability,
        "gm_edge_probability",
        output_spec,
        "gm_probability_map",
    )
    workflow.connect(
        predict_edge_probability,
        "wm_edge_probability",
        output_spec,
        "wm_probability_map",
    )

    return workflow
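
# A hedged usage sketch (file names are placeholders): the caller supplies
# the classifiers and feature images through input_spec before running.
#
#   wf = create_machine_learning_workflow(resample=True)
#   wf.inputs.input_spec.t1_file = 't1.nii.gz'
#   wf.inputs.input_spec.gm_classifier_file = 'gm_classifier.pkl'
#   wf.inputs.input_spec.wm_classifier_file = 'wm_classifier.pkl'
#   wf.run()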
Example n. 43
def group_multregress_openfmri(dataset_dir, model_id=None, task_id=None, l1output_dir=None, out_dir=None, 
                               no_reversal=False, plugin=None, plugin_args=None, flamemodel='flame1',
                               nonparametric=False, use_spm=False):

    meta_workflow = Workflow(name='mult_regress')
    meta_workflow.base_dir = work_dir
    for task in task_id:
        task_name = get_taskname(dataset_dir, task)
        cope_ids = l1_contrasts_num(model_id, task_name, dataset_dir)
        regressors_needed, contrasts, groups, subj_list = get_sub_vars(dataset_dir, task_name, model_id)
        for idx, contrast in enumerate(contrasts):
            wk = Workflow(name='model_%03d_task_%03d_contrast_%s' % (model_id, task, contrast[0][0]))

            info = Node(util.IdentityInterface(fields=['model_id', 'task_id', 'dataset_dir', 'subj_list']),
                        name='infosource')
            info.inputs.model_id = model_id
            info.inputs.task_id = task
            info.inputs.dataset_dir = dataset_dir
            
            dg = Node(DataGrabber(infields=['model_id', 'task_id', 'cope_id'],
                                  outfields=['copes', 'varcopes']), name='grabber')
            dg.inputs.template = os.path.join(l1output_dir,
                                              'model%03d/task%03d/%s/%scopes/%smni/%scope%02d.nii%s')
            if use_spm:
                dg.inputs.template_args['copes'] = [['model_id', 'task_id', subj_list, '', 'spm/',
                                                     '', 'cope_id', '']]
                dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', subj_list, 'var', 'spm/',
                                                        'var', 'cope_id', '.gz']]
            else:
                dg.inputs.template_args['copes'] = [['model_id', 'task_id', subj_list, '', '', '', 
                                                     'cope_id', '.gz']]
                dg.inputs.template_args['varcopes'] = [['model_id', 'task_id', subj_list, 'var', '',
                                                        'var', 'cope_id', '.gz']]
            dg.iterables = ('cope_id', cope_ids)
            dg.inputs.sort_filelist = False

            wk.connect(info, 'model_id', dg, 'model_id')
            wk.connect(info, 'task_id', dg, 'task_id')

            model = Node(MultipleRegressDesign(), name='l2model')
            model.inputs.groups = groups
            model.inputs.contrasts = contrasts[idx]
            model.inputs.regressors = regressors_needed[idx]
            
            mergecopes = Node(Merge(dimension='t'), name='merge_copes')
            wk.connect(dg, 'copes', mergecopes, 'in_files')
            
            if flamemodel != 'ols':
                mergevarcopes = Node(Merge(dimension='t'), name='merge_varcopes')
                wk.connect(dg, 'varcopes', mergevarcopes, 'in_files')
            
            mask_file = fsl.Info.standard_image('MNI152_T1_2mm_brain_mask.nii.gz')
            flame = Node(FLAMEO(), name='flameo')
            flame.inputs.mask_file = mask_file
            flame.inputs.run_mode = flamemodel
            #flame.inputs.infer_outliers = True

            wk.connect(model, 'design_mat', flame, 'design_file')
            wk.connect(model, 'design_con', flame, 't_con_file')
            wk.connect(mergecopes, 'merged_file', flame, 'cope_file')
            if flamemodel != 'ols':
                wk.connect(mergevarcopes, 'merged_file', flame, 'var_cope_file')
            wk.connect(model, 'design_grp', flame, 'cov_split_file')
            
            if nonparametric:
                palm = Node(Function(input_names=['cope_file', 'design_file', 'contrast_file', 
                                                  'group_file', 'mask_file', 'cluster_threshold'],
                                     output_names=['palm_outputs'],
                                     function=run_palm),
                            name='palm')
                palm.inputs.cluster_threshold = 3.09
                palm.inputs.mask_file = mask_file
                palm.plugin_args = {'sbatch_args': '-p om_all_nodes -N1 -c2 --mem=10G', 'overwrite': True}
                wk.connect(model, 'design_mat', palm, 'design_file')
                wk.connect(model, 'design_con', palm, 'contrast_file')
                wk.connect(mergecopes, 'merged_file', palm, 'cope_file')
                wk.connect(model, 'design_grp', palm, 'group_file')
                
            smoothest = Node(SmoothEstimate(), name='smooth_estimate')
            wk.connect(flame, 'zstats', smoothest, 'zstat_file')
            smoothest.inputs.mask_file = mask_file
        
            cluster = Node(Cluster(), name='cluster')
            wk.connect(smoothest, 'dlh', cluster, 'dlh')
            wk.connect(smoothest, 'volume', cluster, 'volume')
            cluster.inputs.connectivity = 26
            cluster.inputs.threshold = 2.3
            cluster.inputs.pthreshold = 0.05
            cluster.inputs.out_threshold_file = True
            cluster.inputs.out_index_file = True
            cluster.inputs.out_localmax_txt_file = True
            
            wk.connect(flame, 'zstats', cluster, 'in_file')
    
            ztopval = Node(ImageMaths(op_string='-ztop', suffix='_pval'),
                           name='z2pval')
            wk.connect(flame, 'zstats', ztopval,'in_file')
            
            sinker = Node(DataSink(), name='sinker')
            sinker.inputs.base_directory = os.path.join(out_dir, 'task%03d' % task, contrast[0][0])
            sinker.inputs.substitutions = [('_cope_id', 'contrast'),
                                           ('_maths_', '_reversed_')]
            
            wk.connect(flame, 'zstats', sinker, 'stats')
            wk.connect(cluster, 'threshold_file', sinker, 'stats.@thr')
            wk.connect(cluster, 'index_file', sinker, 'stats.@index')
            wk.connect(cluster, 'localmax_txt_file', sinker, 'stats.@localmax')
            if nonparametric:
                wk.connect(palm, 'palm_outputs', sinker, 'stats.palm')

            if not no_reversal:
                zstats_reverse = Node( BinaryMaths()  , name='zstats_reverse')
                zstats_reverse.inputs.operation = 'mul'
                zstats_reverse.inputs.operand_value = -1
                wk.connect(flame, 'zstats', zstats_reverse, 'in_file')
                
                cluster2 = cluster.clone(name='cluster2')
                wk.connect(smoothest, 'dlh', cluster2, 'dlh')
                wk.connect(smoothest, 'volume', cluster2, 'volume')
                wk.connect(zstats_reverse, 'out_file', cluster2, 'in_file')
                
                ztopval2 = ztopval.clone(name='ztopval2')
                wk.connect(zstats_reverse, 'out_file', ztopval2, 'in_file')
                
                wk.connect(zstats_reverse, 'out_file', sinker, 'stats.@neg')
                wk.connect(cluster2, 'threshold_file', sinker, 'stats.@neg_thr')
                wk.connect(cluster2, 'index_file', sinker, 'stats.@neg_index')
                wk.connect(cluster2, 'localmax_txt_file', sinker, 'stats.@neg_localmax')
            meta_workflow.add_nodes([wk])
    return meta_workflow
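
# Note: flamemodel is passed straight through to FLAMEO's run_mode, so the
# accepted values are FSL's 'fe', 'ols', 'flame1' and 'flame12'; with 'ols'
# the varcope merge above is skipped because OLS ignores lower-level variances.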
Example n. 44
    # Releasing Mr Loggins...
    dangerZone.setLevel('NOTSET')
    print('Done!')

    return SC_cap_row_filename, SC_dist_row_filename

import numpy as np
debugPath = '/Users/srothmei/Desktop/charite/toronto/Adalberto/debug/'

roi = 68
subid = 'Adalberto'
tracksPath = debugPath

wmBorder_file = debugPath + 'wmborder.npy'

wmborder = np.load(wmBorder_file)

affine_matrix_file = debugPath + 'affine_matrix.npy'

affine_matrix = np.load(affine_matrix_file)

from nipype import Node
from nipype.interfaces.io import DataFinder
tckFinder = Node(DataFinder(match_regex=r'.*\.npy', root_paths=tracksPath), name='tckFinder')

res = tckFinder.run()
track_files = res.outputs.out_paths

#
compute_connectivity_row(roi, subid, affine_matrix, wmborder, tracksPath, track_files)
Example n. 45
bbregNode = Node(freesurfer.preprocess.BBRegister(), name="BBRegister")
bbregNode.inputs.init = "fsl"
bbregNode.inputs.contrast_type = "t2"
bbregNode.inputs.epi_mask = True
bbregNode.inputs.out_fsl_file = True
bbregNode.inputs.args = "--tol1d 1e-3"
bbregNode.inputs.subject_id = reconallFolderName


# ### Surface2Vol

# Transform Left Hemisphere
lhWhiteFilename = "lh_white.nii.gz"
surf2volNode_lh = Node(freesurfer.utils.Surface2VolTransform(), name="surf2vol_lh")
surf2volNode_lh.inputs.hemi = "lh"
surf2volNode_lh.inputs.mkmask = True
surf2volNode_lh.inputs.subject_id = reconallFolderName

# Transform right hemisphere
surf2volNode_rh = surf2volNode_lh.clone("surf2vol_rh")
surf2volNode_rh.inputs.hemi = "rh"

# Merge the hemispheres
mergeHemisNode = Node(fsl.BinaryMaths(), name="mergeHemis")
mergeHemisNode.inputs.operation = "add"
mergeHemisNode.inputs.output_type = "NIFTI_GZ"


# ### Registration
Example n. 46
def convert(items, anonymizer=None, symlink=True, converter=None):
    prov_files = []
    tmpdir = mkdtemp()
    for item in items:
        if isinstance(item[1], (list, tuple)):
            outtypes = item[1]
        else:
            outtypes = [item[1]]
        prefix = item[0]
        print('Converting %s' % prefix)
        dirname = os.path.dirname(prefix + '.ext')
        print(dirname)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        for outtype in outtypes:
            print(outtype)
            if outtype == 'dicom':
                dicomdir = prefix + '_dicom'
                if os.path.exists(dicomdir):
                    shutil.rmtree(dicomdir)
                os.mkdir(dicomdir)
                for filename in item[2]:
                    outfile = os.path.join(dicomdir, os.path.split(filename)[1])
                    if not os.path.islink(outfile):
                        if symlink:
                            os.symlink(filename, outfile)
                        else:
                            os.link(filename, outfile)
            elif outtype in ['nii', 'nii.gz']:
                outname = prefix + '.' + outtype
                scaninfo = prefix + '_scaninfo.json'
                if not os.path.exists(outname):
                    from nipype import config
                    config.enable_provenance()
                    from nipype import Function, Node
                    from nipype.interfaces.base import isdefined
                    print(converter)
                    if converter == 'mri_convert':
                        from nipype.interfaces.freesurfer.preprocess import MRIConvert
                        convertnode = Node(MRIConvert(), name='convert')
                        convertnode.base_dir = tmpdir
                        if outtype == 'nii.gz':
                            convertnode.inputs.out_type = 'niigz'
                        convertnode.inputs.in_file = item[2][0]
                        convertnode.inputs.out_file = outname
                        #cmd = 'mri_convert %s %s' % (item[2][0], outname)
                        #print(cmd)
                        #os.system(cmd)
                        res = convertnode.run()
                    elif converter == 'dcm2nii':
                        from nipype.interfaces.dcm2nii import Dcm2nii
                        convertnode = Node(Dcm2nii(), name='convert')
                        convertnode.base_dir = tmpdir
                        convertnode.inputs.source_names = item[2]
                        convertnode.inputs.gzip_output = outtype == 'nii.gz'
                        convertnode.inputs.terminal_output = 'allatonce'
                        res = convertnode.run()
                        if isinstance(res.outputs.converted_files, list):
                            print("Cannot convert dicom files - series likely has multiple orientations: ", item[2])
                            continue
                        else:
                            shutil.copyfile(res.outputs.converted_files, outname)
                        if isdefined(res.outputs.bvecs):
                            outname_bvecs = prefix + '.bvecs'
                            outname_bvals = prefix + '.bvals'
                            shutil.copyfile(res.outputs.bvecs, outname_bvecs)
                            shutil.copyfile(res.outputs.bvals, outname_bvals)
                    prov_file = prefix + '_prov.ttl'
                    shutil.copyfile(os.path.join(convertnode.base_dir,
                                                 convertnode.name,
                                                 'provenance.ttl'),
                                    prov_file)
                    prov_files.append(prov_file)
                    embedfunc = Node(Function(input_names=['dcmfiles',
                                                           'niftifile',
                                                           'infofile',
                                                           'force'],
                                              output_names=['outfile',
                                                            'meta'],
                                              function=embed_nifti),
                                     name='embedder')
                    embedfunc.inputs.dcmfiles = item[2]
                    embedfunc.inputs.niftifile = outname
                    embedfunc.inputs.infofile = scaninfo
                    embedfunc.inputs.force = True
                    embedfunc.base_dir = tmpdir
                    res = embedfunc.run()
                    g = res.provenance.rdf()
                    g.parse(prov_file,
                            format='turtle')
                    g.serialize(prov_file, format='turtle')
                    #out_file, meta_dict = embed_nifti(item[2], outname, force=True)
                    os.chmod(outname, 0o440)
                    os.chmod(scaninfo, 0o440)
                    os.chmod(prov_file, 0o440)
    shutil.rmtree(tmpdir)
Example n. 47
def create_surfdist_workflow(subjects_dir,
                             subject_list,
                             sources,
                             target,
                             hemi,
                             atlas,
                             labs,
                             name):

  sd = Workflow(name=name)
    
  # Run a separate tree for each template, hemisphere and source structure
  infosource = Node(IdentityInterface(fields=['template','hemi','source']), name="infosource")
  infosource.iterables = [('template', target),('hemi', hemi),('source',sources)]

  # Get template files
  fsst = Node(FreeSurferSource(),name='FS_Source_template')
  fsst.inputs.subjects_dir = subjects_dir

  sd.connect(infosource,'template',fsst,'subject_id')
  sd.connect(infosource,'hemi',fsst,'hemi')

  # Generate folder name for output
  genfoldname = Node(Function(input_names=['hemi','source','target'],
                      output_names=['cname'], function=genfname),
                      name='genfoldname')
  sd.connect(infosource,'hemi',genfoldname,'hemi')
  sd.connect(infosource,'source',genfoldname,'source')
  sd.connect(infosource,'template',genfoldname,'target')

  # Get subjects
  fss = Node(FreeSurferSource(),name='FS_Source')
  fss.iterables = ('subject_id', subject_list)
  fss.inputs.subjects_dir = subjects_dir
  fss.inputs.subject_id = subject_list

  sd.connect(infosource,'hemi',fss,'hemi')

  # Trim labels
  tlab = Node(Function(input_names=['itemz','phrase'],
                        output_names=['item'], function=trimming),
                        name='tlab')
  tlab.inputs.phrase = labs
  sd.connect(fss,'label',tlab,'itemz')

  # Trim annotations
  tannot = Node(Function(input_names=['itemz','phrase'],
                        output_names=['item'], function=trimming),
                        name='tannot')
  tannot.inputs.phrase = atlas
  sd.connect(fss,'annot',tannot,'itemz')

  # Calculate distances for each hemi
  sdist = Node(Function(input_names=['surface','labels','annot','reg','origin','target'],
                        output_names=['distances'], function=calc_surfdist), 
                        name='sdist')
  sd.connect(infosource,'source',sdist,'origin')
  sd.connect(fss,'pial',sdist,'surface')
  sd.connect(tlab,'item',sdist,'labels')
  sd.connect(tannot,'item',sdist,'annot')
  sd.connect(fss,'sphere_reg',sdist,'reg')
  sd.connect(fsst,'sphere_reg',sdist,'target')
  
  # Gather data for each hemi from all subjects
  bucket = JoinNode(Function(input_names=['files','hemi','source','target'],output_names=['group_dist'], 
                         function=stack_files), joinsource = fss, joinfield = 'files', name='bucket')
  sd.connect(infosource,'source',bucket,'source')
  sd.connect(infosource,'template',bucket,'target')
  sd.connect(infosource,'hemi',bucket,'hemi')
  sd.connect(sdist,'distances',bucket,'files')

  # Sink the data
  datasink = Node(DataSink(), name='sinker')
  datasink.inputs.parameterization = False
  datasink.inputs.base_directory = os.path.abspath(args.sink)
  sd.connect(genfoldname,'cname',datasink,'container')
  sd.connect(bucket,'group_dist',datasink,'group_distances')

  return sd
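
# A hedged usage sketch (all arguments are placeholders, not from the source);
# note that the DataSink path comes from a module-level argparse namespace
# (args.sink), which must exist before the workflow is built.
#
#   wf = create_surfdist_workflow(subjects_dir='/data/subjects',
#                                 subject_list=['s01', 's02'],
#                                 sources=['S_central'], target=['fsaverage'],
#                                 hemi=['lh', 'rh'], atlas='aparc.a2009s',
#                                 labs='label', name='surfdist')
#   wf.run(plugin='MultiProc')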
Example n. 48
# reconallNode.inputs.T1_files = firstFile
# reconallNode.inputs.subjects_dir = subPath
reconallNode.inputs.subject_id = reconallFolderName
reconallNode.inputs.directive = 'all'
reconallNode.inputs.openmp = cpu_count()
# reconallNode.inputs.args = '-notal-check'

# OAR Workaround
# reconallNode.plugin_args = {'overwrite': True, 'oarsub_args': '-l nodes=1,walltime=16:00:00'}

# Convert the T1 mgz image to nifti format for later usage
# mriConverter = Node(freesurfer.preprocess.MRIConvert(), name = 'convertAparcAseg')
# mriConverter.inputs.out_type = 'niigz'
# mriConverter.inputs.out_orientation = 'RAS'
mriConverter = Node(Function(input_names=['in_file', 'out_file'],
                             output_names=['out_file'],
                             function=mri_convert_bm),
                    name='convertAparcAseg')

# Convert the Brainmask file
# brainmaskConv = Node(freesurfer.preprocess.MRIConvert(), name = 'convertBrainmask')
# brainmaskConv.inputs.out_type = 'niigz'
# brainmaskConv.inputs.out_orientation = 'RAS'
brainmaskConv = mriConverter.clone('convertBrainmask')


# ### Diffusion Data (dwMRI) preprocessing
# First extract the diffusion vectors and the pulse intensity (bvec and bval)
# Use dcm2nii for this task
dcm2niiNode = Node(Dcm2nii(), name='dcm2niiAndBvecs')
dcm2niiNode.inputs.gzip_output = True
dcm2niiNode.inputs.date_in_filename = False
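
# Besides the converted NIfTI volumes, Dcm2nii exposes the extracted gradient
# tables through its 'bvecs' and 'bvals' outputs, so a downstream node can
# consume them directly (dtifitNode here is a hypothetical consumer):
#
#   wf.connect(dcm2niiNode, 'bvecs', dtifitNode, 'bvecs')
#   wf.connect(dcm2niiNode, 'bvals', dtifitNode, 'bvals')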
Example n. 49
def create_workflow(files,
                    subject_id,
                    n_vol=0,
                    despike=True,
                    TR=None,
                    slice_times=None,
                    slice_thickness=None,
                    fieldmap_images=[],
                    norm_threshold=1,
                    num_components=6,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    FM_TEdiff=2.46,
                    FM_sigma=2,
                    FM_echo_spacing=.7,
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Skip starting volumes
    remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                         iterfield=['in_file'],
                         name="remove_volumes")
    remove_vol.inputs.in_file = files

    # Run AFNI's despike. This is always run, however, whether this is fed to
    # realign depends on the input configuration
    despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                       iterfield=['in_file'],
                       name='despike')
    #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='}

    wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

    # Run Nipy joint slice timing and realignment algorithm
    realign = Node(nipy.SpaceTimeRealigner(), name='realign')
    realign.inputs.tr = TR
    realign.inputs.slice_times = slice_times
    realign.inputs.slice_info = 2

    if despike:
        wf.connect(despiker, 'out_file', realign, 'in_file')
    else:
        wf.connect(remove_vol, 'roi_file', realign, 'in_file')

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, 'out_file', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    # Coregister the median to the surface
    register = Node(freesurfer.BBRegister(),
                    name='bbregister')
    register.inputs.subject_id = subject_id
    register.inputs.init = 'fsl'
    register.inputs.contrast_type = 't2'
    register.inputs.out_fsl_file = True
    register.inputs.epi_mask = True

    # Compute fieldmaps and unwarp using them
    if fieldmap_images:
        fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp')
        fieldmap.inputs.tediff = FM_TEdiff
        fieldmap.inputs.esp = FM_echo_spacing
        fieldmap.inputs.sigma = FM_sigma
        fieldmap.inputs.mag_file = fieldmap_images[0]
        fieldmap.inputs.dph_file = fieldmap_images[1]
        wf.connect(calc_median, 'median_file', fieldmap, 'exf_file')

        dewarper = MapNode(interface=fsl.FUGUE(), iterfield=['in_file'],
                           name='dewarper')
        wf.connect(tsnr, 'detrended_file', dewarper, 'in_file')
        wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file')
        wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file')
        wf.connect(fieldmap, 'exfdw', register, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', register, 'source_file')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    # Extract wm+csf and brain masks by eroding freesurfer labels, then
    # transform the masks into the space of the median image
    wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask')
    mask = wmcsf.clone('anatmask')
    wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                       interp='nearest'),
                          name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR']
    wmcsf.inputs.wm_ven_csf = True
    wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    wmcsf.inputs.binary_file = 'wmcsf.nii.gz'
    wmcsf.inputs.erode = int(np.ceil(slice_thickness))
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file')
    wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    mask.inputs.binary_file = 'mask.nii.gz'
    mask.inputs.dilate = int(np.ceil(slice_thickness)) + 1
    mask.inputs.erode = int(np.ceil(slice_thickness))
    mask.inputs.min = 0.5
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file')
    masktransform = wmcsftransform.clone("masktransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', masktransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', masktransform, 'source_file')
    wf.connect(register, 'out_reg_file', masktransform, 'reg_file')
    wf.connect(mask, 'binary_file', masktransform, 'target_file')

    # Compute Art outliers
    art = Node(interface=ArtifactDetect(use_differences=[True, False],
                                        use_norm=True,
                                        norm_threshold=norm_threshold,
                                        zintensity_threshold=3,
                                        parameter_source='NiPy',
                                        bound_by_brainmask=True,
                                        save_plot=False,
                                        mask_type='file'),
               name="art")
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', art, 'realigned_files')
    else:
        wf.connect(tsnr, 'detrended_file', art, 'realigned_files')
    wf.connect(realign, 'par_file',
               art, 'realignment_parameters')
    wf.connect(masktransform, 'transformed_file', art, 'mask_file')

    # Compute motion regressors
    motreg = Node(Function(input_names=['motion_params', 'order',
                                        'derivatives'],
                           output_names=['out_files'],
                           function=motion_regressors,
                           imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'par_file', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
                                               'outliers'],
                                  output_names=['out_files'],
                                  function=build_filter1,
                                  imports=imports),
                         name='makemotionbasedfilter')
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    # Filter the motion and art confounds
    filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtermotion')
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', filter1, 'in_file')
    else:
        wf.connect(tsnr, 'detrended_file', filter1, 'in_file')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file',
                                                  'num_components'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file')

    # Filter noise components
    filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtercompcorr')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(masktransform, 'transformed_file', filter2, 'mask')

    # Smoothing using surface and volume smoothing
    smooth = MapNode(freesurfer.Smooth(),
                     iterfield=['in_file'],
                     name='smooth')
    smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1)
    if surf_fwhm is None:
        surf_fwhm = 5 * slice_thickness
    smooth.inputs.surface_fwhm = surf_fwhm
    if vol_fwhm is None:
        vol_fwhm = 2 * slice_thickness
    smooth.inputs.vol_fwhm = vol_fwhm
    wf.connect(filter2, 'out_res', smooth, 'in_file')
    wf.connect(register, 'out_reg_file', smooth, 'reg_file')

    # Bandpass filter the data
    bandpass = MapNode(fsl.TemporalFilter(),
                       iterfield=['in_file'],
                       name='bandpassfilter')
    if highpass_freq < 0:
        bandpass.inputs.highpass_sigma = -1
    else:
        bandpass.inputs.highpass_sigma = 1. / (2 * TR * highpass_freq)
    if lowpass_freq < 0:
        bandpass.inputs.lowpass_sigma = -1
    else:
        bandpass.inputs.lowpass_sigma = 1. / (2 * TR * lowpass_freq)
    wf.connect(smooth, 'smoothed_file', bandpass, 'in_file')

    # Convert aparc to subject functional space
    aparctransform = wmcsftransform.clone("aparctransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', aparctransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', aparctransform, 'source_file')
    wf.connect(register, 'out_reg_file', aparctransform, 'reg_file')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg),
               aparctransform, 'target_file')

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True,
                                              default_color_table=True),
                          iterfield=['in_file'],
                          name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                                     list(range(49, 55)) + [58] +
                                     list(range(1001, 1036)) + list(range(2001, 2036)))

    wf.connect(aparctransform, 'transformed_file',
               sampleaparc, 'segmentation_file')
    wf.connect(bandpass, 'out_file', sampleaparc, 'in_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    #samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(bandpass, 'out_file', samplerlh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(bandpass, 'out_file', samplerrh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Compute registration between the subject's structural and MNI template
    # This is currently set to perform a very quick registration. However, the
    # registration can be made significantly more accurate for cortical
    # structures by increasing the number of iterations
    # All parameters are set using the example from:
    # https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)]
    # reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 +
    #                                    [[100, 50, 30]])
    reg.inputs.number_of_iterations = [[100, 100, 100]] * 3 + [[100, 20, 10]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = False
    reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 3 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 4
    reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]]*2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 4
    reg.inputs.use_histogram_matching = [False] * 3 + [True]
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.fixed_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    reg.inputs.num_threads = 4
    reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}

    # Convert T1.mgz to nifti for using with ANTS
    convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii')
    wf.connect(fssource, 'T1', convert, 'in_file')

    # Mask the T1.mgz file with the brain mask computed earlier
    maskT1 = Node(fsl.BinaryMaths(operation='mul'), name='maskT1')
    wf.connect(mask, 'binary_file', maskT1, 'operand_file')
    wf.connect(convert, 'out_file', maskT1, 'in_file')
    wf.connect(maskT1, 'out_file', reg, 'moving_image')

    # Convert the BBRegister transformation to ANTS ITK format
    convert2itk = MapNode(C3dAffineTool(),
                          iterfield=['transform_file', 'source_file'],
                          name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    wf.connect(register, 'out_fsl_file', convert2itk, 'transform_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', convert2itk, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', convert2itk, 'source_file')
    wf.connect(convert, 'out_file', convert2itk, 'reference_file')

    # Concatenate the affine and ants transforms into a list
    pickfirst = lambda x: x[0]
    merge = MapNode(Merge(2), iterfield=['in2'], name='mergexfm')
    wf.connect(convert2itk, 'itk_transform', merge, 'in2')
    wf.connect(reg, ('composite_transform', pickfirst), merge, 'in1')

    # Apply the combined transform to the time series file
    sample2mni = MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image', 'transforms'],
                         name='sample2mni')
    sample2mni.inputs.input_image_type = 3
    sample2mni.inputs.interpolation = 'BSpline'
    sample2mni.inputs.invert_transform_flags = [False, False]
    sample2mni.inputs.reference_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    sample2mni.inputs.terminal_output = 'file'
    wf.connect(bandpass, 'out_file', sample2mni, 'input_image')
    wf.connect(merge, 'out', sample2mni, 'transforms')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file',
                                           'indices'],
                              output_names=['out_file'],
                              function=extract_subrois,
                              imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                             list(range(49, 55)) + [58])
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm.nii.gz'))
    wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = [('_target_subject_', '')]
    datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike')
    wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
    wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr')
    wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean')
    wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', datasink, 'resting.reference')
    else:
        wf.connect(calc_median, 'median_file', datasink, 'resting.reference')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(mask, 'binary_file', datasink, 'resting.mask')
    wf.connect(masktransform, 'transformed_file',
               datasink, 'resting.mask.@transformed_file')
    wf.connect(register, 'out_reg_file', datasink, 'resting.registration.bbreg')
    wf.connect(reg, ('composite_transform', pickfirst),
               datasink, 'resting.registration.ants')
    wf.connect(register, 'min_cost_file',
               datasink, 'resting.qa.bbreg.@mincost')
    wf.connect(smooth, 'smoothed_file', datasink, 'resting.timeseries.fullpass')
    wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed')
    wf.connect(sample2mni, 'output_image', datasink, 'resting.timeseries.mni')
    wf.connect(createfilter1, 'out_files',
               datasink, 'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files',
               datasink, 'resting.regress.@compcorr')
    wf.connect(sampleaparc, 'summary_file',
               datasink, 'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file',
               datasink, 'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file',
               datasink, 'resting.parcellations.grayo.@subcortical')
    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = [('_target_subject_', '')]
    datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file',
               datasink2, 'resting.parcellations.grayo.@surface')
    return wf
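
# A hedged usage sketch (file names and timing values are placeholders):
#
#   wf = create_workflow(files=['rest_run1.nii.gz', 'rest_run2.nii.gz'],
#                        subject_id='sub01', n_vol=4, TR=2.0,
#                        slice_times=[0.0, 1.0, 0.5, 1.5],
#                        slice_thickness=3.0,
#                        sink_directory='/output/sub01')
#   wf.base_dir = '/scratch/work'
#   wf.run(plugin='MultiProc')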
Example n. 50
import os
from glob import glob

from nipype import Node, Function, Workflow, IdentityInterface
from nipype.interfaces.freesurfer import ReconAll
from nipype.interfaces.io import DataGrabber

#curr_dir_age = 'cmind_age00_raw'
#data_dir = '/home/data/madlab/data/mri/cmind/raw_data'

#sids = os.listdir('%s/%s' % (data_dir, curr_dir_age))
#sids = sids [:-1] 	#REMOVES THE .tar file
sids = ['783125', '783126', '783127', '783128', '783129', '783131', '783132', '783133']

info = dict(T1=[['subject_id']])

infosource = Node(IdentityInterface(fields=['subject_id']), name='infosource')
infosource.iterables = ('subject_id', sids)

# Create a datasource node to get the T1 file
datasource = Node(DataGrabber(infields=['subject_id'], outfields=list(info.keys())), name='datasource')
datasource.inputs.template = '%s/%s'
datasource.inputs.base_directory = os.path.abspath('/home/data/madlab/data/mri/seqtrd/')
datasource.inputs.field_template = dict(T1='%s/anatomy/T1_*.nii.gz')
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True

reconall_node = Node(ReconAll(), name='reconall_node')
reconall_node.inputs.openmp = 2
reconall_node.inputs.subjects_dir = os.environ['SUBJECTS_DIR']
reconall_node.inputs.terminal_output = 'allatonce'
reconall_node.plugin_args = {'bsub_args': '-q PQ_madlab -n 2', 'overwrite': True}
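
# A hedged wiring sketch (the workflow name and plugin choice are assumptions;
# the 'bsub_args' above suggest an LSF cluster): connect the iterating
# infosource to the grabber and to ReconAll before running, e.g.
#
#   wf = Workflow(name='reconall_wf')
#   wf.connect(infosource, 'subject_id', datasource, 'subject_id')
#   wf.connect(infosource, 'subject_id', reconall_node, 'subject_id')
#   wf.connect(datasource, 'T1', reconall_node, 'T1_files')
#   wf.run(plugin='LSF')
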
def create_workflow(files,
                    anat_file,
                    subject_id,
                    TR,
                    num_slices,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR/float(num_slices)
    slice_timing.inputs.slice_order = (list(range(1, num_slices + 1, 2)) +
                                       list(range(2, num_slices + 1, 2)))
    slice_timing.inputs.ref_slice = int(num_slices/2)

    """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
    body registration of the functional data to the structural data.
    """

    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estimate'
    coregister.inputs.target = anat_file

    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'

    segment = Node(interface=spm.Segment(), name="segment")
    segment.inputs.save_bias_corrected = True
    segment.inputs.data = anat_file

    """Uncomment the following line for faster execution
    """

    #segment.inputs.gaussians_per_class = [1, 1, 1, 4]

    """Warp functional and structural data to SPM's T1 template using
    :class:`nipype.interfaces.spm.Normalize`.  The tutorial data set
    includes the template image, T1.nii.
    """

    normalize_func = Node(interface=spm.Normalize(), name="normalize_func")
    normalize_func.inputs.jobtype = "write"
    normalize_func.inputs.write_voxel_sizes = [2., 2., 2.]

    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([(name_unique, realign, [('out_file', 'in_files')]),
                (realign, coregister, [('mean_image', 'source')]),
                (segment, normalize_func, [('transformation_mat', 'parameter_file')]),
                (realign, slice_timing, [('realigned_files', 'in_files')]),
                (slice_timing, normalize_func, [('timecorrected_files', 'apply_to_files')]),
                (normalize_func, smooth, [('normalized_files', 'in_files')]),
                (realign, art, [('realignment_parameters', 'realignment_parameters')]),
                (smooth, art, [('smoothed_files', 'realigned_files')]),
                ])

    def selectN(files, N=1):
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(filename_to_list(files)[:N])

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(normalize_func, ('normalized_files', selectN, 1), mask, 'in_file')
    # get segmentation in normalized functional space

    segment.inputs.wm_output_type = [False, False, True]
    segment.inputs.csf_output_type = [False, False, True]
    segment.inputs.gm_output_type = [False, False, True]

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    merge = Node(Merge(3), name='merge')
    wf.connect(segment, 'native_wm_image', merge, 'in1')
    wf.connect(segment, 'native_csf_image', merge, 'in2')
    wf.connect(segment, 'native_gm_image', merge, 'in3')

    normalize_segs = Node(interface=spm.Normalize(), name="normalize_segs")
    normalize_segs.inputs.jobtype = "write"
    normalize_segs.inputs.write_voxel_sizes = [2., 2., 2.]

    wf.connect(merge, 'out', normalize_segs, 'apply_to_files')
    wf.connect(segment, 'transformation_mat', normalize_segs, 'parameter_file')

    # binarize and erode
    bin_and_erode = MapNode(fsl.ImageMaths(),
                            iterfield=['in_file'],
                            name='bin_and_erode')
    bin_and_erode.inputs.op_string = '-thr 0.99 -bin -ero'

    wf.connect(normalize_segs, 'normalized_files',
               bin_and_erode, 'in_file')

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(input_names=['motion_params', 'order',
                                        'derivatives'],
                           output_names=['out_files'],
                           function=motion_regressors,
                           imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
                                               'outliers', 'detrend_poly'],
                                  output_names=['out_files'],
                                  function=build_filter1,
                                  imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    # Filter the motion and art confounds and detrend
    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(normalize_func, 'normalized_files', filter1, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    #wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file',
                                                  'num_components',
                                                  'extra_regressors'],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(bin_and_erode, ('out_file', selectN, 2), createfilter2, 'mask_file')

    # Filter noise components from unsmoothed data
    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(normalize_func, 'normalized_files', filter2, 'in_file')
    wf.connect(normalize_func, ('normalized_files', rename, '_unsmooth_cleaned'),
               filter2, 'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    # Filter noise components from smoothed data
    filter3 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_smooth')
    wf.connect(smooth, ('smoothed_files', rename, '_cleaned'),
               filter3, 'out_res_name')
    wf.connect(smooth, 'smoothed_files', filter3, 'in_file')
    wf.connect(createfilter2, 'out_files', filter3, 'design')
    wf.connect(mask, 'mask_file', filter3, 'mask')

    # Bandpass filter the data (a sketch of the assumed bandpass_filter
    # helper appears after this function)
    bandpass1 = Node(Function(input_names=['files', 'lowpass_freq',
                                           'highpass_freq', 'fs'],
                              output_names=['out_files'],
                              function=bandpass_filter,
                              imports=imports),
                     name='bandpass_unsmooth')
    bandpass1.inputs.fs = 1. / TR
    bandpass1.inputs.highpass_freq = highpass_freq
    bandpass1.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass1, 'files')

    bandpass2 = bandpass1.clone(name='bandpass_smooth')
    wf.connect(filter3, 'out_res', bandpass2, 'files')

    bandpass = Node(Function(input_names=['in1', 'in2'],
                             output_names=['out_file'],
                             function=merge_files,
                             imports=imports),
                    name='bandpass_merge')
    wf.connect(bandpass1, 'out_files', bandpass, 'in1')
    wf.connect(bandpass2, 'out_files', bandpass, 'in2')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    #datasink.inputs.substitutions = [('_target_subject_', '')]
    #datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink, 'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(smooth, 'smoothed_files', datasink, 'resting.timeseries.fullpass')
    wf.connect(bin_and_erode, 'out_file', datasink, 'resting.mask_files')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(filter3, 'out_f', datasink, 'resting.qa.compmaps.@sF')
    wf.connect(filter3, 'out_pf', datasink, 'resting.qa.compmaps.@sp')
    wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed')
    wf.connect(createfilter1, 'out_files',
               datasink, 'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files',
               datasink, 'resting.regress.@compcorr')
    return wf
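# The resting-state examples import helper functions such as bandpass_filter
# and rename through the Function node's `imports` mechanism instead of
# defining them inline, so their bodies are not part of this listing. Below is
# one plausible sketch of bandpass_filter consistent with how it is called
# (files, lowpass_freq, highpass_freq, fs -> out_files); the FFT frequency-mask
# approach and all internals are assumptions, not the source implementation.
def bandpass_filter(files, lowpass_freq, highpass_freq, fs):
    """Bandpass-filter 4D NIfTI files with an FFT frequency mask (sketch)."""
    import os
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import (filename_to_list, list_to_filename,
                                        split_filename)
    out_files = []
    for filename in filename_to_list(files):
        _, name, ext = split_filename(filename)
        out_file = os.path.join(os.getcwd(), name + '_bp' + ext)
        img = nb.load(filename)
        timepoints = img.shape[-1]
        F = np.zeros((timepoints,))
        # A non-positive cutoff disables that side of the filter
        lowidx = timepoints // 2 + 1
        if lowpass_freq > 0:
            lowidx = int(np.round(lowpass_freq / fs * timepoints))
        highidx = 0
        if highpass_freq > 0:
            highidx = int(np.round(highpass_freq / fs * timepoints))
        F[highidx:lowidx] = 1
        F = ((F + F[::-1]) > 0).astype(int)  # mirror for negative frequencies
        data = img.get_fdata()
        if np.all(F == 1):
            filtered = data
        else:
            filtered = np.real(np.fft.ifftn(np.fft.fftn(data) * F))
        nb.Nifti1Image(filtered, img.affine, img.header).to_filename(out_file)
        out_files.append(out_file)
    return list_to_filename(out_files)

# A similarly assumed sketch of the `rename` helper used as a connect-function
# above (appends a suffix to each filename and returns the new basenames):
def rename(in_files, suffix=None):
    from nipype.utils.filemanip import (filename_to_list, split_filename,
                                        list_to_filename)
    out_files = []
    for filename in filename_to_list(in_files):
        _, name, ext = split_filename(filename)
        out_files.append(name + suffix + ext)
    return list_to_filename(out_files)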
Esempio n. 52
0
# ### MRTrix specific preprocessing

from nipype import Node, Function, Workflow
from nipype.interfaces import mrtrix

# First convert the FSL-style bval/bvec input into the MRTrix encoding format
fsl2mrtrixNode = Node(mrtrix.FSL2MRTrix(), name='fsl_2_mrtrix')

# Diffusion tensor images
dwi2tensorNode = Node(mrtrix.DWI2Tensor(), name='dwi_2_tensor')

# Fractional anisotropy (FA) map
tensor2faNode = Node(mrtrix.Tensor2FractionalAnisotropy(), name='tensor_2_FA')

# Remove noisy background by multiplying the FA image with the binary brainmask
# (multiplyMRTrix is assumed to be defined elsewhere in the source script)
mrmultNode = Node(Function(input_names=['in1', 'in2', 'out_file'],
                           output_names=['out_file'],
                           function=multiplyMRTrix),
                  name='mrmult')

# Eigenvector (EV) map
tensor2vectorNode = Node(mrtrix.Tensor2Vector(), name='tensor_2_vector')

# Scale the EV map by the FA image
scaleEvNode = mrmultNode.clone('scale_ev')

# Mask of single-fibre voxels
# (number_of_passes is assumed to be set elsewhere in the source script)
erodeNode = Node(mrtrix.Erode(), name='erode_wmmask')
erodeNode.inputs.number_of_passes = number_of_passes

cleanFaNode = mrmultNode.clone('multiplyFA_Mask')

thresholdFANode = Node(mrtrix.Threshold(), name='threshold_FA')
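# The fragment above declares the MRTrix nodes but no connections. A minimal
# sketch of how they might be wired (port names follow nipype's mrtrix
# interfaces; the binary brain-mask input to mrmult is hypothetical and would
# come from a node not shown in this fragment):
wf_dti = Workflow(name='mrtrix_preproc')
wf_dti.connect([
    (fsl2mrtrixNode, dwi2tensorNode, [('encoding_file', 'encoding_file')]),
    (dwi2tensorNode, tensor2faNode, [('tensor', 'in_file')]),
    (tensor2faNode, mrmultNode, [('FA', 'in1')]),          # in2: brain mask
    (dwi2tensorNode, tensor2vectorNode, [('tensor', 'in_file')]),
    (tensor2vectorNode, scaleEvNode, [('vector', 'in1')]),
    (mrmultNode, scaleEvNode, [('out_file', 'in2')]),      # scale EV by FA
    (mrmultNode, cleanFaNode, [('out_file', 'in1')]),
    (erodeNode, cleanFaNode, [('out_file', 'in2')]),       # eroded WM mask
    (cleanFaNode, thresholdFANode, [('out_file', 'in_file')]),
])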
def create_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.anatomical_image : anatomical image to coregister to
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space
    """

    register = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=['source_files',
                                                         'mean_image',
                                                         'subject_id',
                                                         'subjects_dir',
                                                         'target_image']),
                     name='inputspec')

    outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
                                                          'out_reg_file',
                                                          'anat2target_transform',
                                                          'transforms',
                                                          'transformed_mean',
                                                          'segmentation_files',
                                                          'anat2target',
                                                          'aparc',
                                                          'min_cost_file'
                                                          ]),
                      name='outputspec')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.run_without_submitting = True
    register.connect(inputnode, 'subject_id', fssource, 'subject_id')
    register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')

    convert = Node(freesurfer.MRIConvert(out_type='nii'),
                   name="convert")
    register.connect(fssource, 'T1', convert, 'in_file')

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(),
                      name='bbregister')
    bbregister.inputs.init = 'fsl'
    bbregister.inputs.contrast_type = 't2'
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
    register.connect(inputnode, 'mean_image', bbregister, 'source_file')
    register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')

    """
    Estimate the tissue classes from the anatomical image. But use aparc+aseg's brain mask
    """

    # renamed to avoid being shadowed by the 'binarize' MapNode below
    binarize_aparc = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1),
                          name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg),
                     binarize_aparc, "in_file")
    stripper = Node(fsl.ApplyMask(), name='stripper')
    register.connect(binarize_aparc, "binary_file", stripper, "mask_file")
    register.connect(convert, 'out_file', stripper, 'in_file')

    fast = Node(fsl.FAST(), name='fast')
    register.connect(stripper, 'out_file', fast, 'in_files')

    """
    Binarize the segmentation
    """

    binarize = MapNode(fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'),
                       iterfield=['in_file'],
                       name='binarize')
    register.connect(fast, 'partial_volume_files', binarize, 'in_file')

    """
    Apply inverse transform to take segmentations to functional space
    """

    applyxfm = MapNode(freesurfer.ApplyVolTransform(inverse=True,
                                                    interp='nearest'),
                       iterfield=['target_file'],
                       name='inverse_transform')
    register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file')
    register.connect(binarize, 'out_file', applyxfm, 'target_file')
    register.connect(inputnode, 'mean_image', applyxfm, 'source_file')

    """
    Apply inverse transform to aparc file
    """

    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                 interp='nearest'),
                    name='aparc_inverse_transform')
    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
    register.connect(fssource, ('aparc_aseg', get_aparc_aseg),
                     aparcxfm, 'target_file')
    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')

    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = Node(C3dAffineTool(), name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')

    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[100, 30, 20]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.float = True
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.num_threads = 4
    reg.plugin_args = {'sbatch_args': '-c%d' % 4}
    register.connect(stripper, 'out_file', reg, 'moving_image')
    register.connect(inputnode, 'target_image', reg, 'fixed_image')

    """
    Concatenate the affine and ants transforms into a list
    """

    merge = Node(Merge(2), name='mergexfm')
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, 'composite_transform', merge, 'in1')

    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = Node(ants.ApplyTransforms(), name='warpmean')
    warpmean.inputs.input_image_type = 3
    warpmean.inputs.interpolation = 'Linear'
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.inputs.terminal_output = 'file'
    warpmean.inputs.args = '--float'
    warpmean.inputs.num_threads = 4
    warpmean.plugin_args = {'sbatch_args': '-c%d' % 4}

    register.connect(inputnode, 'target_image', warpmean, 'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')

    """
    Assign all the output files
    """

    register.connect(reg, 'warped_image', outputnode, 'anat2target')
    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
    register.connect(applyxfm, 'transformed_file',
                     outputnode, 'segmentation_files')
    register.connect(aparcxfm, 'transformed_file',
                     outputnode, 'aparc')
    register.connect(bbregister, 'out_fsl_file',
                     outputnode, 'func2anat_transform')
    register.connect(bbregister, 'out_reg_file',
                     outputnode, 'out_reg_file')
    register.connect(reg, 'composite_transform',
                     outputnode, 'anat2target_transform')
    register.connect(merge, 'out', outputnode, 'transforms')
    register.connect(bbregister, 'min_cost_file',
                     outputnode, 'min_cost_file')

    return register
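# A hedged usage sketch for the registration factory above (subject id, paths,
# and target file are hypothetical):
reg_wf = create_reg_workflow(name='registration')
reg_wf.inputs.inputspec.subject_id = 'sub001'
reg_wf.inputs.inputspec.subjects_dir = os.environ['SUBJECTS_DIR']
reg_wf.inputs.inputspec.mean_image = 'mean_func.nii.gz'
reg_wf.inputs.inputspec.target_image = 'MNI152_T1_2mm_brain.nii.gz'
reg_wf.base_dir = os.getcwd()
reg_wf.run()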
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(nipy.SpaceTimeRealigner(), name="spacetime_realign")
    realign.inputs.slice_times = slice_times
    realign.inputs.tr = TR
    realign.inputs.slice_info = 2
    realign.plugin_args = {'sbatch_args': '-c%d' % 4}

    # Compute TSNR on realigned data regressing polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, "out_file", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration, 'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file

    """Quantify TSNR in each freesurfer ROI
    """

    get_roi_tsnr = MapNode(fs.SegStats(default_color_table=True),
                           iterfield=['in_file'], name='get_aparc_tsnr')
    get_roi_tsnr.inputs.avgwf_txt_file = True
    wf.connect(tsnr, 'tsnr_file', get_roi_tsnr, 'in_file')
    wf.connect(registration, 'outputspec.aparc', get_roi_tsnr, 'segmentation_file')

    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'NiPy'

    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([(name_unique, realign, [('out_file', 'in_file')]),
                (realign, art, [('out_file', 'realigned_files')]),
                (realign, art, [('par_file', 'realignment_parameters')]),
                ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(np.array(filename_to_list(files))[idx].tolist())

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')
    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(input_names=['motion_params', 'order',
                                        'derivatives'],
                           output_names=['out_files'],
                           function=motion_regressors,
                           imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'par_file', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(input_names=['motion_params', 'comp_norm',
                                               'outliers', 'detrend_poly'],
                                  output_names=['out_files'],
                                  function=build_filter1,
                                  imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii.gz',
                              out_pf_name='pF_mcart.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(realign, 'out_file', filter1, 'in_file')
    wf.connect(realign, ('out_file', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(ACompCor(),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.components_file = 'noise_components.txt'
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration, ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii.gz',
                              out_pf_name='pF.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'),
               filter2, 'out_res_name')
    wf.connect(createfilter2, 'components_file', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(input_names=['files', 'lowpass_freq',
                                          'highpass_freq', 'fs'],
                             output_names=['out_files'],
                             function=bandpass_filter,
                             imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')

    """Smooth the functional data using
    :class:`nipype.interfaces.fsl.IsotropicSmooth`.
    """

    smooth = MapNode(interface=fsl.IsotropicSmooth(), name="smooth", iterfield=["in_file"])
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_file')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'out_file', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')

    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(), iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 2
    warpall.plugin_args = {'sbatch_args': '-c%d' % 2}

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(freesurfer.SegStats(default_color_table=True),
                          iterfield=['in_file', 'summary_file',
                                     'avgwf_txt_file'],
                          name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                                     list(range(49, 55)) + [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc',
               sampleaparc, 'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        import os
        out_names = []
        for filename in files:
            path, name, _ = split_filename(filename)
            out_names.append(os.path.join(path, name + suffix))
        return list_to_filename(out_names)

    wf.connect(collector, ('out', get_names, '_avgwf.txt'),
               sampleaparc, 'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'),
               sampleaparc, 'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(input_names=['timeseries_file', 'label_file',
                                           'indices'],
                              output_names=['out_file'],
                              function=extract_subrois,
                              imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
        list(range(49, 55)) + [58]
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', ''),
                     ]
    substitutions += [("_smooth%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_ts_masker%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_getsubcortts%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_combiner%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_filtermotion%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_filter_noise_nosmooth%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_makecompcorfilter%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_get_aparc_tsnr%d/" % i, "run%d_" % (i + 1)) for i in range(11)[::-1]]

    substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"),
                      ("T1_out_brain_pve_1_maths_warped", "compcor_gm"),
                      ("T1_out_brain_pve_2_maths_warped", "compcor_wm"),
                      ("output_warped_image_maths", "target_brain_mask"),
                      ("median_brain_mask", "native_brain_mask"),
                      ("corr_", "")]

    regex_subs = [('_combiner.*/sar', '/smooth/'),
                  ('_combiner.*/ar', '/unsmooth/'),
                  ('_aparc_ts.*/sar', '/smooth/'),
                  ('_aparc_ts.*/ar', '/unsmooth/'),
                  ('_getsubcortts.*/sar', '/smooth/'),
                  ('_getsubcortts.*/ar', '/unsmooth/'),
                  ('series/sar', 'series/smooth/'),
                  ('series/ar', 'series/unsmooth/'),
                  ('_inverse_transform./', ''),
                  ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink, 'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink, 'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(registration, 'outputspec.min_cost_file', datasink, 'resting.qa.mincost')
    wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr.@map')
    wf.connect([(get_roi_tsnr, datasink, [('avgwf_txt_file', 'resting.qa.tsnr'),
                                          ('summary_file', 'resting.qa.tsnr.@summary')])])

    wf.connect(bandpass, 'out_files', datasink, 'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files',
               datasink, 'resting.regress.@regressors')
    wf.connect(createfilter2, 'components_file',
               datasink, 'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file',
               datasink, 'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file',
               datasink, 'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file',
               datasink, 'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file',
               datasink2, 'resting.parcellations.grayo.@surface')
    return wf
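# A hedged invocation sketch for create_workflow above (file names, TR, slice
# timing, and smoothing parameters are hypothetical):
wf = create_workflow(files=['rest_run01.nii.gz', 'rest_run02.nii.gz'],
                     target_file='MNI152_T1_2mm_brain.nii.gz',
                     subject_id='sub001',
                     TR=2.0,
                     slice_times=[0.0, 1.0, 0.5, 1.5],
                     vol_fwhm=6.0,
                     surf_fwhm=10.0,
                     lowpass_freq=0.1,
                     highpass_freq=0.01,
                     subjects_dir=os.environ['SUBJECTS_DIR'],
                     sink_directory=os.path.abspath('output'),
                     target_subject=['fsaverage4'])
wf.base_dir = os.getcwd()
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})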
Esempio n. 55
0
import os
import numpy as np
from nipype import Function
from nipype import Node
from nipype import Workflow
from nipype import IdentityInterface

ds="/storage/gablab001/data/genus/GIT/genus/fs_cog/pred_diag/data_sets"
data_sets = [os.path.join(ds, x) for x in os.listdir(ds) if ".csv" in x]
response_var = os.path.join(ds, "response.txt")

wf = Workflow(name="classify_disease")
wf.base_dir = "/om/scratch/Sat/ysa"

Iternode = Node(IdentityInterface(fields=['data', 'classifier']), name="Iternode")
Iternode.iterables = [
    ('data', data_sets),
    ('classifier', ['et', 'lg'])
]

def run(data, classifier, response):
    import numpy as np
    import pandas as pd
    from custom import Mods
    from custom import utils
    
    y = np.genfromtxt(response)
    X = pd.read_csv(data)
    data_mod = data.split('/')[-1].replace('.csv', '')    

    if classifier == 'et':