Example #1
File: rt.py Project: TRO-HIT/PyCURT
    def workflow(self):

        self.datasource()
        datasource = self.data_source
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id
        regex = self.regex
        roi_selection = self.roi_selection
        if datasource is not None:

            workflow = nipype.Workflow('rtstruct_extraction_workflow', base_dir=nipype_cache)
        
            datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink")
            substitutions = [('subid', sub_id)]
            substitutions += [('results/', '{}/'.format(self.workflow_name))]
    
            ss_convert = nipype.MapNode(interface=RTStructureCoverter(),
                                        iterfield=['reference_ct', 'input_ss'],
                                        name='ss_convert')
            mha_convert = nipype.MapNode(interface=MHA2NIIConverter(),
                                         iterfield=['input_folder'],
                                         name='mha_convert')
            
            if roi_selection:
                select = nipype.MapNode(interface=CheckRTStructures(),
                                        iterfield=['rois', 'dose_file'],
                                        name='select_gtv')
                workflow.connect(mha_convert, 'out_files', select, 'rois')
                workflow.connect(datasource, 'rt_dose', select, 'dose_file')
                workflow.connect(select, 'checked_roi', datasink,
                                 'results.subid.@masks')
            else:
                workflow.connect(mha_convert, 'out_files', datasink,
                                 'results.subid.@masks')

            for i, session in enumerate(self.rt['session']):
                substitutions += [('_select_gtv{}/'.format(i), session + '/')]
                substitutions += [('_voxelizer{}/'.format(i), session + '/')]
                substitutions += [('_mha_convert{}/'.format(i), session + '/')]

            datasink.inputs.substitutions = substitutions
        
            workflow.connect(datasource, 'rtct_nifti', ss_convert, 'reference_ct')
            workflow.connect(datasource, 'rts_dcm', ss_convert, 'input_ss')
            workflow.connect(ss_convert, 'out_structures', mha_convert, 'input_folder')
    
            workflow = self.datasink(workflow, datasink)
        else:
            workflow = nipype.Workflow('rtstruct_extraction_workflow', base_dir=nipype_cache)

        return workflow
Example #2
    def workflow(self):

        images = self.images
        rois = self.rois
        datasource = self.data_source
        dict_sequences = self.dict_sequences
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id

        toextract = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
        workflow = nipype.Workflow('features_extraction_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]

        for key in toextract:
            session = toextract[key]
            if session['scans'] is not None:
                scans = session['scans']
                reg_scans = [x for x in scans if x.endswith('_reg')]
                segmented_masks = [x for x in scans if x in ['GTVPredicted',
                                                             'TumorPredicted',
                                                             'GTVPredicted-2modalities']]
                add_scans = [x for x in scans if x in images]
                add_masks = [x for x in scans if x in rois]
                
                for image in reg_scans:
                    for roi in segmented_masks:
                        image_name = '{}_{}_reg'.format(key, image.split('_')[0])
                        roi_name = '{}_{}'.format(key, roi.split('.nii.gz')[0])
                        features = nipype.Node(
                            interface=FeatureExtraction(),
                            name='features_extraction_{}{}'.format(image_name, roi_name))
                        features.inputs.parameter_file = '/home/fsforazz/git/core/resources/Params_MR.yaml'
                        workflow.connect(datasource, image_name, features, 'input_image')
                        workflow.connect(datasource, roi_name, features, 'rois')
                        workflow.connect(features, 'feature_files', datasink,
                                         'results.subid.{0}.@csv_file_{1}{2}'.format(
                                             key, image_name, roi_name))
                for image in add_scans:
                    for roi in add_masks:
                        image_name = '{}_{}'.format(key, image)
                        roi_name = '{}_{}'.format(key, roi.split('.nii.gz')[0])
                        features = nipype.Node(
                            interface=FeatureExtraction(),
                            name='features_extraction_{}{}'.format(image_name, roi_name))
                        features.inputs.parameter_file = '/home/fsforazz/git/core/resources/Params_MR.yaml'
                        workflow.connect(datasource, image_name, features, 'input_image')
                        workflow.connect(datasource, roi_name, features, 'rois')
                        workflow.connect(features, 'feature_files', datasink,
                                         'results.subid.{0}.@csv_file_{1}{2}'.format(
                                             key, image_name, roi_name))

        datasink.inputs.substitutions = substitutions

        return workflow
Example #3
def run_mean_correl():

    main_workflow = pe.Workflow(name=mean_spectral_permut_analysis_name)
    main_workflow.base_dir = main_path

    #### infosource

    infosource = create_infosource()

    #### Data source
    #datasource = create_datasource_rada_by_reg_memory_signif_conf()
    datasource = create_datasource_correl()

    main_workflow.connect(infosource, 'freq_band_name', datasource,
                          'freq_band_name')

    #### prepare_mean_correl
    prepare_mean_correl = pe.Node(interface=PrepareMeanCorrel(),
                                  name='prepare_mean_correl')

    #prepare_mean_correl.inputs.gm_mask_coords_file = ref_coords_file
    prepare_mean_correl.inputs.gm_mask_labels_file = ref_labels_file

    main_workflow.connect(datasource, ('Z_cor_mat_files', force_list),
                          prepare_mean_correl, 'cor_mat_files')
    main_workflow.connect(datasource, ('labels_files', force_list),
                          prepare_mean_correl, 'labels_files')
    #main_workflow.connect(datasource, ('coords_files',force_list),prepare_mean_correl,'coords_files')

    ### shuffle matrix
    shuffle_matrix = pe.Node(interface=ShuffleMatrix(), name='shuffle_matrix')

    main_workflow.connect(prepare_mean_correl, 'avg_cor_mat_matrix_file',
                          shuffle_matrix, 'original_matrix_file')
    main_workflow.connect(infosource, 'permut', shuffle_matrix, 'seed')

    ################################################ modular decomposition on norm_coclass ############################################

    if 'rada' in mean_spectral_permut_analysis_name.split('_'):

        graph_den_pipe = create_pipeline_conmat_to_graph_density(
            pipeline_name="graph_den_pipe",
            main_path=main_path,
            multi=False,
            con_den=mean_con_den,
            mod=True,
            plot=False,
            optim_seq=mean_radatools_optim)
        #graph_den_pipe = create_pipeline_conmat_to_graph_density("graph_den_pipe",main_path,multi = False, con_den = con_den)

        main_workflow.connect(shuffle_matrix, 'shuffled_matrix_file',
                              graph_den_pipe, 'inputnode.conmat_file')

        graph_den_pipe.inputs.inputnode.labels_file = ref_labels_file
        graph_den_pipe.inputs.inputnode.coords_file = ref_coords_file

    return main_workflow
Example #4
    def workflow(self):

        #         self.datasource()

        datasource = self.data_source
        dict_sequences = self.dict_sequences
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id

        tobet = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
        workflow = nipype.Workflow('brain_extraction_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]
        substitutions += [('_preproc_corrected.', '_preproc.')]
        datasink.inputs.substitutions = substitutions

        for key in tobet:
            files = []
            #             if tobet[key]['ref'] is not None:
            #                 files.append(tobet[key]['ref'])
            if tobet[key]['scans'] is not None:
                files = files + tobet[key]['scans']
            for el in files:
                el = el.strip(self.extention)
                node_name = '{0}_{1}'.format(key, el)
                bet = nipype.Node(interface=HDBet(),
                                  name='{}_bet'.format(node_name),
                                  serial=True)
                bet.inputs.save_mask = 1
                bet.inputs.out_file = '{}_preproc'.format(el)
                reorient = nipype.Node(interface=Reorient2Std(),
                                       name='{}_reorient'.format(node_name))
                if el in TON4:
                    n4 = nipype.Node(interface=N4BiasFieldCorrection(),
                                     name='{}_n4'.format(node_name))
                    workflow.connect(bet, 'out_file', n4, 'input_image')
                    workflow.connect(bet, 'out_mask', n4, 'mask_image')
                    workflow.connect(
                        n4, 'output_image', datasink,
                        'results.subid.{0}.@{1}_preproc'.format(key, el))
                else:
                    workflow.connect(
                        bet, 'out_file', datasink,
                        'results.subid.{0}.@{1}_preproc'.format(key, el))
                workflow.connect(
                    bet, 'out_mask', datasink,
                    'results.subid.{0}.@{1}_preproc_mask'.format(key, el))
                workflow.connect(reorient, 'out_file', bet, 'input_file')
                workflow.connect(datasource, node_name, reorient, 'in_file')

        return workflow
Example #5
def fMRI2QC(qcname, tag="", SinkDir=".", QCDIR="QC", indiv_atlas=False):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.plot.image as plot
    import PUMI.utils.globals as globals

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func', 'atlas', 'confounds']),
        name='inputspec')
    inputspec.inputs.atlas = globals._FSLDIR_ + '/data/atlases/HarvardOxford/HarvardOxford-cort-maxprob-thr25-3mm.nii.gz'

    if indiv_atlas:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds', 'atlaslabels'],
                              name="qc_fmri")
    else:
        plotfmri = pe.MapNode(interface=Function(
            input_names=['func', 'atlaslabels', 'confounds', 'output_file'],
            output_names=['plotfile'],
            function=plot.plot_fmri_qc),
                              iterfield=['func', 'confounds'],
                              name="qc_fmri")

    plotfmri.inputs.output_file = "qc_fmri.png"
    # default atlas works only for standardized, 3mm-resoultion data

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [("(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'func', plotfmri, 'func')
    analysisflow.connect(inputspec, 'atlas', plotfmri, 'atlaslabels')
    analysisflow.connect(inputspec, 'confounds', plotfmri, 'confounds')

    analysisflow.connect(plotfmri, 'plotfile', ds_qc, qcname)

    return analysisflow
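
A minimal usage sketch (hypothetical wiring; file names are placeholders, and PUMI's globals such as _SinkDir_, _QCDir_ and _FSLDIR_ are assumed to be initialized):

# Hypothetical usage: QC-plot one preprocessed functional run against the
# default atlas. Inputs are lists because qc_fmri is a MapNode.
qc_wf = fMRI2QC("carpet_plot", tag="regressed")
qc_wf.inputs.inputspec.func = ['func_preprocessed.nii.gz']  # placeholder
qc_wf.inputs.inputspec.confounds = ['confounds.tsv']  # placeholder
qc_wf.run()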
Example #6
def create_anat_noise_roi_workflow(SinkTag="func_preproc",
                                   wf_name="create_noise_roi"):
    """
    Creates an anatomical noise ROI for use with compcor

    inputs are awaited from the (BBR-based) func2anat registration
    and are already transformed to functional space

    Tamas Spisak
    2018


    """
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.globals as globals

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['wm_mask', 'ventricle_mask']),
        name='inputspec')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['noise_roi']),
                         name='outputspec')

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    # erode WM mask in functional space
    erode_mask = pe.MapNode(fsl.ErodeImage(),
                            iterfield=['in_file'],
                            name="erode_wm_mask")
    wf.connect(inputspec, 'wm_mask', erode_mask, 'in_file')

    # add ventricle and eroded WM masks
    add_masks = pe.MapNode(fsl.ImageMaths(op_string=' -add'),
                           iterfield=['in_file', 'in_file2'],
                           name="addimgs")

    wf.connect(inputspec, 'ventricle_mask', add_masks, 'in_file')
    wf.connect(erode_mask, 'out_file', add_masks, 'in_file2')

    wf.connect(add_masks, 'out_file', outputspec, 'noise_roi')

    return wf
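
A minimal usage sketch (hypothetical; the mask paths are placeholders and the masks must already be in functional space):

# Hypothetical usage: build and run the noise-ROI workflow on one subject.
# Inputs are lists because the inner nodes are MapNodes.
noise_wf = create_anat_noise_roi_workflow()
noise_wf.inputs.inputspec.wm_mask = ['wm_mask_func_space.nii.gz']  # placeholder
noise_wf.inputs.inputspec.ventricle_mask = ['vent_mask_func_space.nii.gz']  # placeholder
noise_wf.run()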
Example #7
def brain_extraction(sub_id,
                     datasource,
                     sessions,
                     RESULT_DIR,
                     NIPYPE_CACHE,
                     reference,
                     t10=True):

    bet = nipype.MapNode(interface=HDBet(),
                         iterfield=['input_file'],
                         name='bet')
    bet.inputs.save_mask = 1
    bet.inputs.out_file = 'T1_preproc'

    if t10:
        bet_t10 = nipype.Node(interface=HDBet(), name='t1_0_bet')
        bet_t10.inputs.save_mask = 1
        bet_t10.inputs.out_file = 'T1_0_bet'

    datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                           "datasink")

    substitutions = [('subid', sub_id)]
    for i, session in enumerate(sessions):

        substitutions += [('_bet{}/'.format(i), session + '/')]

    datasink.inputs.substitutions = substitutions
    # Create Workflow
    workflow = nipype.Workflow('brain_extraction_workflow',
                               base_dir=NIPYPE_CACHE)

    workflow.connect(datasource, 't1', bet, 'input_file')
    if t10:
        workflow.connect(datasource, 't1_0', bet_t10, 'input_file')
        workflow.connect(bet_t10, 'out_file', datasink,
                         'results.subid.T10.@T1_ref_bet')

    workflow.connect(bet, 'out_file', datasink, 'results.subid.@T1_preproc')
    workflow.connect(bet, 'out_mask', datasink, 'results.subid.@T1_mask')

    workflow = datasink_base(datasink,
                             datasource,
                             workflow,
                             sessions,
                             reference,
                             t10=t10)

    return workflow
Example #8
def regTimeseriesQC(qcname, tag="", SinkDir=".", QCDIR="QC"):
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    from nipype.interfaces.utility import Function
    import PUMI.plot.timeseries as plot
    import PUMI.utils.globals as globals

    QCDir = os.path.abspath(globals._SinkDir_ + "/" + globals._QCDir_)
    if not os.path.exists(QCDir):
        os.makedirs(QCDir)

    if tag:
        tag = "_" + tag

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'modules', 'atlas']),
        name='inputspec')
    inputspec.inputs.atlas = None

    plotregts = pe.MapNode(interface=Function(
        input_names=['timeseries', 'modules', 'output_file', 'atlas'],
        output_names=['plotfile'],
        function=plot.plot_carpet_ts),
                           iterfield=['timeseries'],
                           name="qc_timeseries")
    plotregts.inputs.output_file = "qc_timeseries.png"

    # Save outputs which are important
    ds_qc = pe.Node(interface=io.DataSink(), name='ds_qc')
    ds_qc.inputs.base_directory = QCDir
    ds_qc.inputs.regexp_substitutions = [("(\/)[^\/]*$", tag + ".png")]

    # Create a workflow
    analysisflow = nipype.Workflow(name=qcname + tag + '_qc')

    analysisflow.connect(inputspec, 'timeseries', plotregts, 'timeseries')
    analysisflow.connect(inputspec, 'atlas', plotregts, 'atlas')
    analysisflow.connect(inputspec, 'modules', plotregts, 'modules')
    analysisflow.connect(plotregts, 'plotfile', ds_qc, qcname)

    return analysisflow
Example #9
    def workflow(self):

        datasource = self.data_source
        dict_sequences = self.dict_sequences
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id

        toseg = {**dict_sequences['OT']}
        workflow = nipype.Workflow('lung_segmentation_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]
        substitutions += [('_preproc_corrected.', '_preproc.')]
        datasink.inputs.substitutions = substitutions

        for key in toseg:
            files = []
            #             if toseg[key]['ref'] is not None:
            #                 files.append(toseg[key]['ref'])
            if toseg[key]['scans'] is not None:
                files = files + toseg[key]['scans']
            for el in files:
                el = el.strip(self.extention)
                node_name = '{0}_{1}'.format(key, el)
                preproc = nipype.Node(interface=LungSegmentationPreproc(),
                                      name='{}_ls_preproc'.format(node_name))
                preproc.inputs.new_spacing = self.new_spacing
                lung_seg = nipype.Node(interface=LungSegmentationInference(),
                                       name='{}_ls'.format(node_name))
                lung_seg.inputs.weights = self.network_weights

                workflow.connect(datasource, node_name, preproc, 'in_file')
                workflow.connect(preproc, 'tensor', lung_seg, 'tensor')
                workflow.connect(preproc, 'image_info', lung_seg, 'image_info')
                workflow.connect(
                    lung_seg, 'segmented_lungs', datasink,
                    'results.subid.{0}.@{1}_segmented_lungs'.format(key, el))

        return workflow
Example #10
                                                   reference=[['sub_id', 'ref_tp', '']])
            datasource.inputs.raise_on_empty = False
            datasource.inputs.contrasts = contrast
            datasource.inputs.sub_id = sub
            datasource.inputs.sessions = sessions
            datasource.inputs.ref_tp = ref_tp

            reg = nipype.MapNode(interface=AntsRegSyn(), iterfield=['input_file'], name='ants_reg')
            reg.inputs.transformation = 'r'
            reg.inputs.num_dimensions = 3
            reg.inputs.num_threads = 4

            datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink")
            substitutions = [('contrast', contrast), ('sub', sub)]
            for i, session in enumerate(sessions):
                substitutions += [('_ants_reg{}/'.format(i), session + '/')]
            datasink.inputs.substitutions = substitutions

            workflow = nipype.Workflow('registration_workflow', base_dir=cache_dir)
            workflow.connect(datasource, 'reference', reg, 'ref_file')
            workflow.connect(datasource, 'to_reg', reg, 'input_file')
            workflow.connect(reg, 'reg_file', datasink, 'registration.contrast.sub.@reg_image')
            workflow.connect(reg, 'regmat', datasink, 'registration.contrast.sub.@affine_mat')
            workflow.connect(datasource, 'reference', datasink,
                             'registration.contrast.sub.@reference')

#             workflow.run()
            workflow.run('MultiProc', plugin_args={'n_procs': 4})

print('Done!')
Example #11
    def workflow(self):

        datasource = self.data_source
        dict_sequences = self.dict_sequences
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id

        toreg = {**dict_sequences['MR-RT'], **dict_sequences['OT']}
        workflow = nipype.Workflow('registration_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]

        mr_rt_ref = None
        rtct = None

        if dict_sequences['MR-RT'] and self.normilize_mr_rt:
            ref_session = list(dict_sequences['MR-RT'].keys())[0]
            ref_scans = dict_sequences['MR-RT'][ref_session]['scans']
            for pr in POSSIBLE_REF:
                for scan in ref_scans:
                    if pr in scan.split('_')[0]:
                        mr_rt_ref = '{0}_{1}_preproc'.format(
                            ref_session,
                            scan.split('_')[0])
                        mr_rt_ref_name = '{}_preproc'.format(
                            scan.split('_')[0])
                        break
                else:
                    continue
                break
        if dict_sequences['RT'] and self.normilize_rtct:
            rt_session = list(dict_sequences['RT'].keys())[0]
            ct_name = dict_sequences['RT'][rt_session]['rtct']
            if ct_name is not None and mr_rt_ref is not None:
                rtct = '{0}_rtct'.format(rt_session)
                reg_mr2ct = nipype.Node(interface=AntsRegSyn(),
                                        name='{}_lin_reg'.format(rt_session))
                reg_mr2ct.inputs.transformation = 'r'
                reg_mr2ct.inputs.num_dimensions = 3
                reg_mr2ct.inputs.num_threads = 4
                reg_mr2ct.inputs.out_prefix = '{}_reg2RTCT'.format(
                    mr_rt_ref_name)
                reg_mr2ct.inputs.interpolation = 'BSpline'
                workflow.connect(datasource, mr_rt_ref, reg_mr2ct,
                                 'input_file')
                workflow.connect(datasource, rtct, reg_mr2ct, 'ref_file')
                workflow.connect(
                    reg_mr2ct, 'regmat', datasink,
                    'results.subid.{0}.@{1}_reg2RTCT_mat'.format(
                        ref_session, mr_rt_ref_name))
                workflow.connect(
                    reg_mr2ct, 'reg_file', datasink,
                    'results.subid.{0}.@{1}_reg2RTCT'.format(
                        ref_session, mr_rt_ref_name))
                substitutions += [
                    ('{}_reg2RTCTWarped.nii.gz'.format(mr_rt_ref_name),
                     '{}_reg2RTCT.nii.gz'.format(mr_rt_ref_name))
                ]
                substitutions += [
                    ('{}_reg2RTCT0GenericAffine.mat'.format(mr_rt_ref_name),
                     '{}_reg2RTCT_linear_mat.mat'.format(mr_rt_ref_name))
                ]

        for key in toreg:
            session = toreg[key]
            if session['scans'] is not None:
                scans = session['scans']
                scans = [x for x in scans if 'mask' not in x]
                ref = None
                for pr in POSSIBLE_REF:
                    for scan in scans:
                        if pr in scan:
                            ref = '{0}_{1}_preproc'.format(
                                key,
                                scan.split('_')[0])
                            scans.remove('{}_preproc'.format(
                                scan.split('_')[0]))
                            ref_name = scan.split('_')[0]
                            workflow.connect(
                                datasource, ref, datasink,
                                'results.subid.{0}.@{1}_reg'.format(
                                    key, ref_name))
                            substitutions += [
                                ('{}_preproc'.format(scan.split('_')[0]),
                                 '{}_reg'.format(scan.split('_')[0]))
                            ]
                            break
                    else:
                        continue
                    break
                if ref is not None:
                    if mr_rt_ref is not None and key != ref_session:
                        reg_mr_rt = nipype.Node(interface=AntsRegSyn(),
                                                name='{}_def_reg'.format(key))
                        reg_mr_rt.inputs.transformation = 's'
                        reg_mr_rt.inputs.num_dimensions = 3
                        reg_mr_rt.inputs.num_threads = 6
                        reg_mr_rt.inputs.out_prefix = '{}_reg2MR_RT'.format(
                            ref_name)
                        workflow.connect(datasource, ref, reg_mr_rt,
                                         'input_file')
                        workflow.connect(datasource, mr_rt_ref, reg_mr_rt,
                                         'ref_file')
                        workflow.connect(
                            reg_mr_rt, 'regmat', datasink,
                            'results.subid.{0}.@{1}_reg2MR_RT_linear_mat'.
                            format(key, ref_name))
                        workflow.connect(
                            reg_mr_rt, 'reg_file', datasink,
                            'results.subid.{0}.@{1}_reg2MR_RT'.format(
                                key, ref_name))
                        workflow.connect(
                            reg_mr_rt, 'warp_file', datasink,
                            'results.subid.{0}.@{1}_reg2MR_RT_warp'.format(
                                key, ref_name))
                        substitutions += [
                            ('{}_reg2MR_RT0GenericAffine.mat'.format(ref_name),
                             '{}_reg2MR_RT_linear_mat.mat'.format(ref_name))
                        ]
                        substitutions += [
                            ('{}_reg2MR_RT1Warp.nii.gz'.format(ref_name),
                             '{}_reg2MR_RT_warp.nii.gz'.format(ref_name))
                        ]
                        substitutions += [
                            ('{}_reg2MR_RTWarped.nii.gz'.format(ref_name),
                             '{}_reg2MR_RT.nii.gz'.format(ref_name))
                        ]
                    if rtct is not None and key != ref_session:
                        apply_ts_rt_ref = nipype.Node(
                            interface=ApplyTransforms(),
                            name='{}_norm2RT'.format(ref_name))
                        apply_ts_rt_ref.inputs.output_image = (
                            '{}_reg2RTCT.nii.gz'.format(ref_name))
                        workflow.connect(datasource, ref, apply_ts_rt_ref,
                                         'input_image')
                        workflow.connect(datasource, rtct, apply_ts_rt_ref,
                                         'reference_image')
                        workflow.connect(
                            apply_ts_rt_ref, 'output_image', datasink,
                            'results.subid.{0}.@{1}_reg2RTCT'.format(
                                key, ref_name))
                        merge_rt_ref = nipype.Node(
                            interface=Merge(4),
                            name='{}_merge_rt'.format(ref_name))
                        merge_rt_ref.inputs.ravel_inputs = True
                        workflow.connect(reg_mr2ct, 'regmat', merge_rt_ref,
                                         'in1')
                        workflow.connect(reg_mr_rt, 'regmat', merge_rt_ref,
                                         'in3')
                        workflow.connect(reg_mr_rt, 'warp_file', merge_rt_ref,
                                         'in2')
                        workflow.connect(merge_rt_ref, 'out', apply_ts_rt_ref,
                                         'transforms')

                    for el in scans:
                        el = el.strip(self.extention)
                        el_name = el.split('_')[0]
                        node_name = '{0}_{1}'.format(key, el)
                        reg = nipype.Node(interface=AntsRegSyn(),
                                          name='{}_lin_reg'.format(node_name))
                        reg.inputs.transformation = 'r'
                        reg.inputs.num_dimensions = 3
                        reg.inputs.num_threads = 4
                        reg.inputs.interpolation = 'BSpline'
                        reg.inputs.out_prefix = '{}_reg'.format(el_name)
                        workflow.connect(datasource, node_name, reg,
                                         'input_file')
                        workflow.connect(datasource, ref, reg, 'ref_file')
                        workflow.connect(
                            reg, 'reg_file', datasink,
                            'results.subid.{0}.@{1}_reg'.format(key, el_name))
                        workflow.connect(
                            reg, 'regmat', datasink,
                            'results.subid.{0}.@{1}_regmat'.format(
                                key, el_name))
                        substitutions += [
                            ('{}_regWarped.nii.gz'.format(el_name),
                             '{}_reg.nii.gz'.format(el_name))
                        ]
                        substitutions += [
                            ('{}_reg0GenericAffine.mat'.format(el_name),
                             '{}_linear_regmat.mat'.format(el_name))
                        ]
                        if mr_rt_ref is not None and key != ref_session:
                            merge = nipype.Node(
                                interface=Merge(3),
                                name='{}_merge_MR_RT'.format(node_name))
                            merge.inputs.ravel_inputs = True
                            workflow.connect(reg, 'regmat', merge, 'in3')
                            workflow.connect(reg_mr_rt, 'regmat', merge, 'in2')
                            workflow.connect(reg_mr_rt, 'warp_file', merge,
                                             'in1')
                            apply_ts = nipype.Node(
                                interface=ApplyTransforms(),
                                name='{}_norm2MR_RT'.format(node_name))
                            apply_ts.inputs.output_image = '{}_reg2MR_RT.nii.gz'.format(
                                el_name)
                            workflow.connect(merge, 'out', apply_ts,
                                             'transforms')
                            workflow.connect(datasource, node_name, apply_ts,
                                             'input_image')
                            workflow.connect(datasource, mr_rt_ref, apply_ts,
                                             'reference_image')
                            workflow.connect(
                                apply_ts, 'output_image', datasink,
                                'results.subid.{0}.@{1}_reg2MR_RT'.format(
                                    key, el_name))
                        if rtct is not None:
                            apply_ts_rt = nipype.Node(
                                interface=ApplyTransforms(),
                                name='{}_norm2RT'.format(node_name))
                            apply_ts_rt.inputs.output_image = '{}_reg2RTCT.nii.gz'.format(
                                el_name)
                            workflow.connect(datasource, node_name,
                                             apply_ts_rt, 'input_image')
                            workflow.connect(datasource, rtct, apply_ts_rt,
                                             'reference_image')
                            workflow.connect(
                                apply_ts_rt, 'output_image', datasink,
                                'results.subid.{0}.@{1}_reg2RTCT'.format(
                                    key, el_name))
                            if key != ref_session:
                                merge_rt = nipype.Node(
                                    interface=Merge(4),
                                    name='{}_merge_rt'.format(node_name))
                                merge_rt.inputs.ravel_inputs = True
                                workflow.connect(reg_mr2ct, 'regmat', merge_rt,
                                                 'in1')
                                workflow.connect(reg, 'regmat', merge_rt,
                                                 'in4')
                                workflow.connect(reg_mr_rt, 'regmat', merge_rt,
                                                 'in3')
                                workflow.connect(reg_mr_rt, 'warp_file',
                                                 merge_rt, 'in2')
                                workflow.connect(merge_rt, 'out', apply_ts_rt,
                                                 'transforms')
                            else:
                                merge_rt = nipype.Node(
                                    interface=Merge(2),
                                    name='{}_merge_rt'.format(node_name))
                                merge_rt.inputs.ravel_inputs = True
                                workflow.connect(reg_mr2ct, 'regmat', merge_rt,
                                                 'in1')
                                workflow.connect(reg, 'regmat', merge_rt,
                                                 'in2')
                                workflow.connect(merge_rt, 'out', apply_ts_rt,
                                                 'transforms')

        datasink.inputs.substitutions = substitutions

        return workflow
Example #12
_ATLAS_FILE = _MISTDIR_ + '/Parcellations/MIST_122.nii.gz'
# a list of labels, where index+1 corresponds to the label in the labelmap
_ATLAS_LABELS = tsext.mist_labels(mist_directory=_MISTDIR_, resolution="122")
# a list of labels, where index i corresponds to the module of the i+1th region, this is optional
_ATLAS_MODULES = tsext.mist_modules(mist_directory=_MISTDIR_, resolution="122")
##############################
##############################
#_regtype_ = globals._RegType_.FSL
globals._regType_ = globals._RegType_.ANTS
##############################

print("Starting RPN-signature...")
print("Memory usage limit: " + str(opts.mem_gb) + "GB")
print("Number of CPUs used: " + str(opts.nthreads))

totalWorkflow = nipype.Workflow('RPN')
if opts.debug:
    totalWorkflow.base_dir = globals._SinkDir_
else:
    totalWorkflow.base_dir = opts.tempdir  # preferably a fast temporary mount (working dir by default)

########################
# parse command line args
bids_dir = opts.bids_dir

# create BIDS data grabber
datagrab = pe.Node(io.BIDSDataGrabber(), name='data_grabber')
datagrab.inputs.base_dir = bids_dir

# BIDS filtering
if opts.task_id and opts.echo_idx:
Example #13
def addimgs_workflow(numimgs=2,
                     SinkDir=".",
                     SinkTag="func_preproc",
                     WorkingDirectory="."):
    """


               `source: -`


               Add any number of images whic are in the same space. The input files must be NIFTI files.

               Workflow inputs:
                   :param any number of .nii(.gz) files.
                   :param SinkDir:
                   :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow.

               Workflow outputs:


                   :return: addimgs_workflow - workflow




               Balint Kincses
               [email protected]
               2018


     """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import PUMI.utils.utils_convert as utils_convert
    from nipype.interfaces.utility import Function
    import nipype.interfaces.fsl as fsl


    SinkDir = os.path.abspath(SinkDir + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    inputs = []
    for i in range(1, numimgs + 1):
        inputs.append("par" + str(i))


    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=inputs),
                        name='inputspec')
    # Add images with FSL. fsl.ImageMaths only accepts two inputs
    # ('in_file' and 'in_file2'), so for an arbitrary number of images
    # use fsl.MultiImageMaths: par1 is the base image and the remaining
    # par* inputs are merged into its operand list.
    merge_operands = pe.Node(utility.Merge(numimgs - 1),
                             name='merge_operands')
    add_masks = pe.Node(fsl.MultiImageMaths(op_string=' -add %s' * (numimgs - 1)),
                        name="addimgs")

    outputspec = pe.Node(utility.IdentityInterface(fields=['added_imgs']),
                         name='outputspec')
    # Create workflow
    analysisflow = nipype.Workflow('addimgsWorkflow')
    analysisflow.base_dir = '.'
    # connect
    analysisflow.connect(inputspec, 'par1', add_masks, 'in_file')
    for i in range(2, numimgs + 1):
        analysisflow.connect(inputspec, 'par' + str(i),
                             merge_operands, 'in' + str(i - 1))
    analysisflow.connect(merge_operands, 'out', add_masks, 'operand_files')
    analysisflow.connect(add_masks, 'out_file', outputspec, 'added_imgs')


    return analysisflow
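
A minimal usage sketch (hypothetical; file names are placeholders):

# Hypothetical usage: sum three co-registered NIfTI images.
wf = addimgs_workflow(numimgs=3)
wf.inputs.inputspec.par1 = 'img1.nii.gz'  # placeholder
wf.inputs.inputspec.par2 = 'img2.nii.gz'  # placeholder
wf.inputs.inputspec.par3 = 'img3.nii.gz'  # placeholder
wf.run()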
Example #14
def compcor_workflow(SinkTag="func_preproc", wf_name="compcor"):
    """


               `source: -`


               Component based noise reduction method (Behzadi et al.,2007): Regressing out principal components from noise ROIs.
               Here the aCompCor is used.

               Workflow inputs:
                   :param func_aligned: The reoriented and realigned functional image.
                   :param mask_files: Mask files which determine ROI(s). The default mask is the
                   :param components_file
                   :param num_componenets:
                   :param pre_filter: Detrend time series prior to component extraction.
                   :param TR
                   :param SinkDir:
                   :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow.

               Workflow outputs:




                   :return: slt_workflow - workflow




               Balint Kincses
               [email protected]
               2018


     """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.algorithms.confounds as cnf
    import PUMI.func_preproc.info.info_get as info_get
    import PUMI.utils.utils_convert as utils_convert
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=['func_aligned', 'mask_file']),
        name='inputspec')

    myqc = qc.vol2png("compcor_noiseroi")

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_nii')
    ds_nii.inputs.base_directory = SinkDir
    ds_nii.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    # standardize timeseries prior to compcor. added by tspisak
    scale = pe.MapNode(interface=utility.Function(input_names=['in_file'],
                                                  output_names=['scaled_file'],
                                                  function=scale_vol),
                       iterfield=['in_file'],
                       name='scale_func')

    # Calculate compcor files
    compcor = pe.MapNode(
        interface=cnf.ACompCor(pre_filter='polynomial',
                               header_prefix="",
                               num_components=5),
        iterfield=['realigned_file', 'repetition_time', 'mask_files'],
        name='compcor')

    # Custom interface wrapping function Float2Str
    func_str2float = pe.MapNode(interface=utils_convert.Str2Float,
                                iterfield=['str'],
                                name='func_str2float')
    # Drop first line of the Acompcor function output
    drop_firstline = pe.MapNode(interface=utils_convert.DropFirstLine,
                                iterfield=['txt'],
                                name='drop_firstline')
    # Custom interface wrapping function TR
    TRvalue = pe.MapNode(interface=info_get.TR,
                         iterfield=['in_file'],
                         name='TRvalue')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['components_file']),
                         name='outputspec')

    # save data out with Datasink
    ds_text = pe.Node(interface=io.DataSink(), name='ds_txt')
    ds_text.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".txt")]
    ds_text.inputs.base_directory = SinkDir

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func_aligned', scale, 'in_file')
    analysisflow.connect(scale, 'scaled_file', compcor, 'realigned_file')
    analysisflow.connect(inputspec, 'func_aligned', TRvalue, 'in_file')
    analysisflow.connect(TRvalue, 'TR', func_str2float, 'str')
    analysisflow.connect(func_str2float, 'float', compcor, 'repetition_time')
    #analysisflow.connect(TRvalue, 'TR', compcor, 'repetition_time')
    analysisflow.connect(inputspec, 'mask_file', compcor, 'mask_files')
    analysisflow.connect(compcor, 'components_file', drop_firstline, 'txt')
    analysisflow.connect(drop_firstline, 'droppedtxtfloat', outputspec,
                         'components_file')
    analysisflow.connect(compcor, 'components_file', ds_text, 'compcor_noise')

    analysisflow.connect(inputspec, 'func_aligned', myqc, 'inputspec.bg_image')
    analysisflow.connect(inputspec, 'mask_file', myqc,
                         'inputspec.overlay_image')

    analysisflow.connect(inputspec, 'mask_file', ds_nii, 'compcor_noise_mask')

    return analysisflow
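
A minimal usage sketch (hypothetical; file names are placeholders and PUMI's globals are assumed to be initialized):

# Hypothetical usage: extract aCompCor regressors from one aligned run.
# Inputs are lists because the inner nodes are MapNodes.
cc = compcor_workflow(wf_name="compcor")
cc.inputs.inputspec.func_aligned = ['func_aligned.nii.gz']  # placeholder
cc.inputs.inputspec.mask_file = ['noise_roi.nii.gz']  # placeholder
cc.run()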
Example #15
def onevol_workflow(SinkTag="anat_preproc", wf_name="get_example_vol"):
    '''
    This function receives the raw functional image and returns a single reference volume for registration purposes.
    MORE: It also returns information from the header file.
        Workflow inputs:
            :param func: Functional image.
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

        Workflow outputs:

            :return: onevol_workflow - workflow

        Balint Kincses
        [email protected]
        2018

    '''

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import PUMI.func_preproc.info.info_get as info_get
    import nipype.interfaces.io as io
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=['func']),
                        name='inputspec')
    #inputspec.inputs.func = "/home/balint/Dokumentumok/phd/essen/PAINTER/probe/s002/func_data.nii.gz"

    # Get dimension infos
    idx = pe.MapNode(interface=info_get.tMinMax,
                     iterfield=['in_files'],
                     name='idx')

    # Get the last volume of the func image
    fslroi = pe.MapNode(fsl.ExtractROI(),
                        iterfield=['in_file', 't_min'],
                        name='fslroi')
    fslroi.inputs.t_size = 1

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['func1vol']),
                         name='outputspec')

    # Generic datasink module to store structured outputs
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'func', idx, 'in_files')
    analysisflow.connect(inputspec, 'func', fslroi, 'in_file')
    analysisflow.connect(idx, 'refvolidx', fslroi, 't_min')
    analysisflow.connect(fslroi, 'roi_file', ds, 'funclastvol')
    analysisflow.connect(fslroi, 'roi_file', outputspec, 'func1vol')

    return analysisflow
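
A minimal usage sketch (hypothetical; the path is a placeholder and PUMI's globals are assumed to be initialized):

# Hypothetical usage: grab the reference volume of a raw 4D EPI run.
# The input is a list because idx and fslroi are MapNodes.
ov = onevol_workflow()
ov.inputs.inputspec.func = ['func_data.nii.gz']  # placeholder
ov.run()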
Example #16
_refvolplace_ = globals._RefVolPos_.first

# specify atlas for network construction:
# name of labelmap nii (or list of probmaps)
_ATLAS_FILE = _MISTDIR_ + '/Parcellations/MIST_122.nii.gz'
# a list of labels, where index+1 corresponds to the label in the labelmap
_ATLAS_LABELS = tsext.mist_labels(mist_directory=_MISTDIR_, resolution="122")
# a list of labels, where index i corresponds to the module of the i+1th region, this is optional
_ATLAS_MODULES = tsext.mist_modules(mist_directory=_MISTDIR_, resolution="122")
##############################
##############################
#_regtype_ = globals._RegType_.FSL
globals._regType_ = globals._RegType_.ANTS
##############################

totalWorkflow = nipype.Workflow('pumi')
totalWorkflow.base_dir = '.'

# create data grabber
datagrab = pe.Node(io.DataGrabber(outfields=['func', 'struct']),
                   name='data_grabber')

datagrab.inputs.base_directory = os.getcwd()  # do we need this?
datagrab.inputs.template = "*"  # do we need this?
datagrab.inputs.field_template = dict(
    func=sys.argv[2],
    struct=sys.argv[1])  # specified by command line arguments
datagrab.inputs.sort_filelist = True

# sink: file - idx relationship!!
pop_id = pe.Node(interface=utils_convert.List2TxtFile, name='pop_id')
Example #17
def fast_workflow(SinkTag="anat_preproc", wf_name="tissue_segmentation"):
    """
    Borrowed from the PUMI project: https://github.com/spisakt/PUMI
    Balint Kincses
    [email protected]
    2019
     Modified version of CPAC.seg_preproc.seg_preproc

     `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/seg_preproc/seg_preproc.html`


        Do the segmentation of a brain extracted T1w image.


        Workflow inputs:
            :param brain: The brain extracted image, the output of the better_workflow.
            :param init_transform: The standard to anat linear transformation matrix (which is calculated in the Anat2MNI.py script). Beware of the resolution of the reference (standard) image, the default value is 2mm.
            :param priorprob: A list of tissue probability maps in the prior (=reference=standard) space. By default it must be 3 element(in T1w images the CSF, GM, WM order is valid)
            :param SinkDir:
            :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found.

        Workflow outputs:




            :return: fast_workflow - workflow




        Balint Kincses
        [email protected]
        2018


        """

    #This is a Nipype generator. Warning, here be dragons.
    #!/usr/bin/env python
    import sys
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    #import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    #Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['brain', 'stand2anat_xfm', 'priorprob']),
                        name='inputspec')
    # inputspec.inputs.stand2anat_xfm='/home/analyser/Documents/PAINTER/probewith2subj/preprocess_solvetodos/anat2mni_fsl/inv_linear_reg0_xfm/mapflow/_inv_linear_reg0_xfm0/anat_brain_flirt_inv.mat'

    #TODO_ready set standard mask to 2mm

    inputspec.inputs.priorprob = [
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_csf.hdr',
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_gray.hdr',
        globals._FSLDIR_ + '/data/standard/tissuepriors/avg152T1_white.hdr'
    ]

    # TODO_ready: use prior probability maps
    # Wraps command **fast**
    fast = pe.MapNode(interface=fsl.FAST(),
                      iterfield=['in_files', 'init_transform'],
                      name='fast')
    fast.inputs.img_type = 1
    fast.inputs.segments = True
    fast.inputs.probability_maps = True
    fast.inputs.out_basename = 'fast_'

    #myqc = qc.vol2png("tissue_segmentation", overlay=False)
    #myqc.inputs.slicer.colour_map = globals._FSLDIR_ + '/etc/luts/renderjet.lut'

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'probmap_csf', 'probmap_gm', 'probmap_wm', 'mixeltype', 'parvol_csf',
        'parvol_gm', 'parvol_wm', 'partial_volume_map'
    ]),
                         name='outputspec')

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]

    def pickindex(vec, i):
        #print "************************************************************************************************************************************************"
        #print vec
        #print i
        return [x[i] for x in vec]

    #Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'brain', fast, 'in_files')
    analysisflow.connect(inputspec, 'stand2anat_xfm', fast, 'init_transform')
    analysisflow.connect(inputspec, 'priorprob', fast, 'other_priors')
    # analysisflow.connect(inputspec, 'stand_csf' ,fast,('other_priors', pickindex, 0))
    # analysisflow.connect(inputspec, 'stand_gm' ,fast,('other_priors', pickindex, 1))
    # analysisflow.connect(inputspec, 'stand_wm' ,fast,('other_priors', pickindex, 2))

    #nalysisflow.connect(fast, 'probability_maps', outputspec, 'probability_maps')
    analysisflow.connect(fast, ('probability_maps', pickindex, 0), outputspec,
                         'probmap_csf')
    analysisflow.connect(fast, ('probability_maps', pickindex, 1), outputspec,
                         'probmap_gm')
    analysisflow.connect(fast, ('probability_maps', pickindex, 2), outputspec,
                         'probmap_wm')
    analysisflow.connect(fast, 'mixeltype', outputspec, 'mixeltype')
    #analysisflow.connect(fast, 'partial_volume_files', outputspec, 'partial_volume_files')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 0),
                         outputspec, 'parvol_csf')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 1),
                         outputspec, 'parvol_gm')
    analysisflow.connect(fast, ('partial_volume_files', pickindex, 2),
                         outputspec, 'parvol_wm')
    analysisflow.connect(fast, 'partial_volume_map', outputspec,
                         'partial_volume_map')
    analysisflow.connect(fast, ('probability_maps', pickindex, 0), ds,
                         'fast_csf')
    analysisflow.connect(fast, ('probability_maps', pickindex, 1), ds,
                         'fast_gm')
    analysisflow.connect(fast, ('probability_maps', pickindex, 2), ds,
                         'fast_wm')
    #analysisflow.connect(fast, 'partial_volume_map', myqc, 'inputspec.bg_image')

    return analysisflow
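
A minimal usage sketch (hypothetical; file names are placeholders and PUMI's globals, including _FSLDIR_, are assumed to be initialized):

# Hypothetical usage: tissue-segment a brain-extracted T1w image using a
# standard-to-anat affine from a previous step. Inputs are lists because
# fast is a MapNode.
seg = fast_workflow()
seg.inputs.inputspec.brain = ['anat_brain.nii.gz']  # placeholder
seg.inputs.inputspec.stand2anat_xfm = ['stand2anat.mat']  # placeholder
seg.run()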
Example #18
datagrab.inputs.template = "*/*"  # do we need this?
datagrab.inputs.field_template = dict(func=sys.argv[1],
                                      phase=sys.argv[2],
                                      magnitude=sys.argv[3])
datagrab.inputs.sort_filelist = True

reorient_func = pe.MapNode(fsl.utils.Reorient2Std(),
                           iterfield=['in_file'],
                           name="reorient_func")

myfm = fm.fieldmapper(TE1=4.9,
                      TE2=7.3,
                      dwell_time=0.00035,
                      unwarp_direction="y-")

totalWorkflow = nipype.Workflow('fm_probe')
totalWorkflow.base_dir = '.'

totalWorkflow.connect([
    (datagrab, reorient_func, [('func', 'in_file')]),
    (reorient_func, myfm, [('out_file', 'inputspec.in_file')]),
    (datagrab, myfm, [('phase', 'inputspec.phase')]),
    (datagrab, myfm, [('magnitude', 'inputspec.magnitude')]),
])

totalWorkflow.write_graph('graph-orig.dot', graph2use='orig', simple_form=True)
totalWorkflow.write_graph('graph-exec-detailed.dot',
                          graph2use='exec',
                          simple_form=False)
totalWorkflow.write_graph('graph.dot', graph2use='colored')
totalWorkflow.run()
Example #19
def extract_timeseries_nativespace(SinkTag="connectivity",
                                   wf_name="extract_timeseries_nativespace",
                                   global_signal=True):
    # this workflow transforms atlas back to native space and uses TsExtractor

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.io as io
    import nipype.interfaces.utility as utility
    import PUMI.func_preproc.func2standard as transform
    import PUMI.utils.globals as globals
    import PUMI.utils.QC as qc

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'atlas',
            'labels',
            'modules',
            'anat',  # only obligatory if stdreg==globals._RegType_.ANTS
            'inv_linear_reg_mtrx',
            'inv_nonlinear_reg_mtrx',
            'func',
            'gm_mask',
            'confounds',
            'confound_names'
        ]),
        name="inputspec")

    # transform atlas back to native EPI spaces!
    atlas2native = transform.atlas2func(stdreg=globals._regType_)
    wf.connect(inputspec, 'atlas', atlas2native, 'inputspec.atlas')
    wf.connect(inputspec, 'anat', atlas2native, 'inputspec.anat')
    wf.connect(inputspec, 'inv_linear_reg_mtrx', atlas2native,
               'inputspec.inv_linear_reg_mtrx')
    wf.connect(inputspec, 'inv_nonlinear_reg_mtrx', atlas2native,
               'inputspec.inv_nonlinear_reg_mtrx')
    wf.connect(inputspec, 'func', atlas2native, 'inputspec.func')
    wf.connect(inputspec, 'gm_mask', atlas2native, 'inputspec.example_func')
    wf.connect(inputspec, 'confounds', atlas2native, 'inputspec.confounds')
    wf.connect(inputspec, 'confound_names', atlas2native,
               'inputspec.confound_names')

    # extract timeseries
    extract_timeseries = pe.MapNode(interface=utility.Function(
        input_names=['labels', 'labelmap', 'func', 'mask', 'global_signal'],
        output_names=['out_file', 'labels', 'out_gm_label'],
        function=TsExtractor),
                                    iterfield=['labelmap', 'func', 'mask'],
                                    name='extract_timeseries')
    extract_timeseries.inputs.global_signal = global_signal
    wf.connect(atlas2native, 'outputspec.atlas2func', extract_timeseries,
               'labelmap')
    wf.connect(inputspec, 'labels', extract_timeseries, 'labels')
    wf.connect(inputspec, 'gm_mask', extract_timeseries, 'mask')
    wf.connect(inputspec, 'func', extract_timeseries, 'func')

    # Save outputs which are important
    ds_regts = pe.Node(interface=io.DataSink(), name='ds_regts')
    ds_regts.inputs.base_directory = globals._SinkDir_
    ds_regts.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]
    wf.connect(extract_timeseries, 'out_file', ds_regts, 'regional_timeseries')

    # QC
    timeseries_qc = qc.regTimeseriesQC("regional_timeseries", tag=wf_name)
    wf.connect(inputspec, 'modules', timeseries_qc, 'inputspec.modules')
    wf.connect(inputspec, 'atlas', timeseries_qc, 'inputspec.atlas')
    wf.connect(extract_timeseries, 'out_file', timeseries_qc,
               'inputspec.timeseries')

    # Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['timeseries', 'out_gm_label']),
        name='outputspec')
    wf.connect(extract_timeseries, 'out_file', outputspec, 'timeseries')
    wf.connect(extract_timeseries, 'out_gm_label', outputspec, 'out_gm_label')

    return wf
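# Hypothetical usage sketch (node and file names are illustrative, not from
# PUMI): the workflow is normally wired into a larger pipeline via wf.connect,
# but its inputspec fields can also be set directly for a quick probe.
ts_wf = extract_timeseries_nativespace(wf_name="ts_extract", global_signal=False)
ts_wf.inputs.inputspec.labels = "atlas_labels.tsv"      # assumed label file
ts_wf.inputs.inputspec.modules = "atlas_modules.tsv"    # assumed module file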
Example No. 20
def PickAtlas(SinkTag="connectivity", wf_name="pick_atlas", reorder=True):
    # reorder if 'modules' is given (as for MIST atlases); if no module
    # information is available, pass a text file with a constant value
    # repeated once per region
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.afni as afni
    import PUMI.utils.globals as globals
    import nipype.interfaces.io as io

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)
    wf = nipype.Workflow(wf_name)

    inputspec = pe.Node(
        utility.IdentityInterface(fields=['labelmap', 'modules', 'labels']),
        name="inputspec")

    # create atlas matching the standard space used
    resample_atlas = pe.Node(
        interface=afni.Resample(outputtype='NIFTI_GZ',
                                master=globals._FSLDIR_ + globals._brainref),
        name='resample_atlas')  # default interpolation is nearest neighbour

    # Save outputs which are important
    ds_newlabels = pe.Node(interface=io.DataSink(), name='ds_newlabels')
    ds_newlabels.inputs.base_directory = globals._SinkDir_
    ds_newlabels.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".tsv")]

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(
        fields=['relabeled_atlas', 'reordered_labels', 'reordered_modules']),
                         name='outputspec')

    if reorder:
        relabel_atls = pe.Node(interface=utility.Function(
            input_names=['atlas_file', 'modules', 'labels'],
            output_names=[
                'relabelled_atlas_file', 'reordered_modules',
                'reordered_labels', 'newlabels_file'
            ],
            function=relabel_atlas),
                               name='relabel_atlas')
        wf.connect(inputspec, 'labelmap', relabel_atls, 'atlas_file')
        wf.connect(inputspec, 'modules', relabel_atls, 'modules')
        wf.connect(inputspec, 'labels', relabel_atls, 'labels')

        wf.connect(relabel_atls, 'relabelled_atlas_file', resample_atlas,
                   'in_file')

        wf.connect(relabel_atls, 'reordered_labels', ds_newlabels,
                   'reordered_labels')
        #wf.connect(relabel_atls, 'reordered_modules', ds_newlabels, 'reordered_modules')

        wf.connect(relabel_atls, 'reordered_labels', outputspec,
                   'reordered_labels')
        wf.connect(relabel_atls, 'reordered_modules', outputspec,
                   'reordered_modules')

    else:
        wf.connect(inputspec, 'labelmap', resample_atlas, 'in_file')
        wf.connect(inputspec, 'labels', ds_newlabels, 'atlas_labels')
        # wf.connect(relabel_atls, 'reordered_modules', ds_newlabels, 'reordered_modules')
        wf.connect(inputspec, 'labels', outputspec, 'reordered_labels')
        wf.connect(inputspec, 'modules', outputspec, 'reordered_modules')

    # Save outputs which are important
    ds_nii = pe.Node(interface=io.DataSink(), name='ds_relabeled_atlas')
    ds_nii.inputs.base_directory = globals._SinkDir_
    ds_nii.inputs.regexp_substitutions = [("(\/)[^\/]*$", ".nii.gz")]
    wf.connect(resample_atlas, 'out_file', ds_nii, 'atlas')

    wf.connect(resample_atlas, 'out_file', outputspec, 'relabeled_atlas')

    return wf
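# Hypothetical usage sketch (file names are illustrative): pick a labelled
# atlas and reorder its regions by module membership, as for MIST atlases.
atlas_wf = PickAtlas(wf_name="pick_atlas", reorder=True)
atlas_wf.inputs.inputspec.labelmap = "atlas.nii.gz"       # assumed paths
atlas_wf.inputs.inputspec.labels = "atlas_labels.tsv"
atlas_wf.inputs.inputspec.modules = "atlas_modules.tsv"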
Example No. 21
def slt_workflow(slicetiming_txt="alt+z", SinkTag="func_preproc", wf_name="slicetiming_correction"):

    """
    Modified version of porcupine generated slicetiming code:

    `source: -`


    Creates a slice time corrected functional image.

    Workflow inputs:
        :param func: The reoriented functional file.
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) could be found in a subdirectory directory specific for this workflow.

    Workflow outputs:




        :return: slt_workflow - workflow




    Balint Kincses
    [email protected]
    2018


    """



    # This is a Nipype generator. Warning, here be dragons.
    # !/usr/bin/env python

    import sys
    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.io as io
    import PUMI.func_preproc.info.info_get as info_get
    import PUMI.utils.utils_convert as utils_convert
    import nipype.interfaces.afni as afni
    import PUMI.utils.globals as globals

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(fields=['func',
                                                          'slicetiming_txt']),
                                    name='inputspec')
    # 'func' is supplied by the caller (set or connect into inputspec.func)
    inputspec.inputs.slicetiming_txt = slicetiming_txt

    # Custom interface wrapping function TR
    #NodeHash_6000004b9860 = pe.MapNode(interface=info_get.TR, name='NodeName_6000004b9860', iterfield=['in_file'])
    TRvalue = pe.Node(interface=info_get.TR,
                      name='TRvalue')

    # Custom interface wrapping function Str2Float
    func_str2float = pe.Node(interface=utils_convert.Str2Float,
                                name='func_str2float')

    # Custom interface wrapping function Float2Str
    func_str2float_2 = pe.Node(interface=utils_convert.Float2Str,
                               name='func_str2float_2')

    # Wraps command **3dTshift**
    sltcor = pe.Node(interface=afni.TShift(),
                     name='sltcor')
    sltcor.inputs.rltplus = True
    sltcor.inputs.outputtype = "NIFTI_GZ"
    #sltcor.inputs.terminal_output = 'allatonce'

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=['slicetimed', 'TR']),
                                    name='outputspec')

    #todo: qc timeseries

    # Custom interface wrapping function JoinVal2Dict
    #func_joinval2dict = pe.Node(interface=utils_convert.JoinVal2Dict,
    #                            name='func_joinval2dict')

    # Generic datasink module to store structured outputs
    ds = pe.Node(interface=io.DataSink(),
                 name='ds')
    ds.inputs.base_directory = SinkDir
    #ds.inputs.regexp_substitutions = [("func_slicetimed/_NodeName_.{13}", "")]

    # Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(wf_name)
    analysisflow.connect(inputspec, 'slicetiming_txt', sltcor, 'tpattern')
    analysisflow.connect(func_str2float, 'float', outputspec, 'TR')
    analysisflow.connect(inputspec, 'func', sltcor, 'in_file')
    analysisflow.connect(inputspec, 'func', TRvalue, 'in_file')
    analysisflow.connect(func_str2float_2, 'str', sltcor, 'tr')
    analysisflow.connect(TRvalue, 'TR', func_str2float_2, 'float')
    #analysisflow.connect(ds, 'out_file', func_joinval2dict, 'keys')
    #analysisflow.connect(func_str2float, 'float', func_joinval2dict, 'vals')
    analysisflow.connect(TRvalue, 'TR', func_str2float, 'str')
    analysisflow.connect(sltcor, 'out_file', ds, 'slicetimed')
    analysisflow.connect(sltcor, 'out_file', outputspec, 'slicetimed')

    return analysisflow
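# Hypothetical usage sketch: 'func' is supplied by the caller, either by
# connecting into inputspec from another node or, as here, set directly;
# the input file name is illustrative.
slt = slt_workflow(slicetiming_txt="alt+z")
slt.inputs.inputspec.func = "func_reoriented.nii.gz"  # assumed input file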
Example No. 22
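            # (Fragment: 'datasource', 'rf_1', 'rf_ref', the HDBet interface
            # and the first 'bet_1' node are defined earlier in the full
            # script.)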
            bet_1.inputs.out_file = '{}_bet'.format(contrast)
            bet_ref = nipype.Node(interface=HDBet(), name='bet_ref')
            bet_ref.inputs.save_mask = 1
            bet_ref.inputs.out_file = '{}_bet'.format(contrast)

            datasink = nipype.Node(nipype.DataSink(base_directory=RESULT_DIR),
                                   "datasink")
            substitutions = [('contrast', contrast),
                             ('sub', sub.split('/')[-1]),
                             ('session', ref_tp + '_reference_tp')]
            for i, session in enumerate(sessions):
                substitutions += [('_bet_1{}/'.format(i), session + '/')]
            datasink.inputs.substitutions = substitutions

            workflow = nipype.Workflow('temporal_analysis_preproc_workflow',
                                       base_dir=os.path.join(
                                           CACHE_DIR,
                                           sub_name + '_' + contrast))
            workflow.connect(datasource, 'reference', rf_ref, 'in_file')
            workflow.connect(datasource, 'to_reg', rf_1, 'in_file')
            workflow.connect(rf_1, 'out_roi', bet_1, 'input_file')
            workflow.connect(rf_ref, 'out_roi', bet_ref, 'input_file')
            workflow.connect(bet_1, 'out_file', datasink,
                             'preprocessing.contrast.sub.@bet_file')
            workflow.connect(bet_1, 'out_mask', datasink,
                             'preprocessing.contrast.sub.@bet_mask')
            workflow.connect(
                bet_ref, 'out_file', datasink,
                'preprocessing.contrast.sub.session.@bet_ref_file')
            workflow.connect(
                bet_ref, 'out_mask', datasink,
                'preprocessing.contrast.sub.session.@bet_ref_mask')
Example No. 23
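# (Fragment: this continues a series of transform.func2mni(...) calls whose
# first call is truncated; 'datagrab', 'pop_id', 'ds_id', 'reorient_struct',
# 'reorient_func', 'myanatproc' and 'mybbr' are defined earlier in the full
# script.)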
                                wf_name="func2mni_1")
myfunc2mni_cc = transform.func2mni(stdreg=_regtype_,
                                   carpet_plot="2_cc",
                                   wf_name="func2mni_2_cc")
myfunc2mni_cc_bpf = transform.func2mni(stdreg=_regtype_,
                                       carpet_plot="3_cc_bpf",
                                       wf_name="func2mni_3_cc_bpf")
myfunc2mni_cc_bpf_cens = transform.func2mni(stdreg=_regtype_,
                                            carpet_plot="4_cc_bpf_cens",
                                            wf_name="func2mni_4_cc_bpf_cens")
myfunc2mni_cc_bpf_cens_mac = transform.func2mni(
    stdreg=_regtype_,
    carpet_plot="5_cc_bpf_cens_mac",
    wf_name="func2mni_5_cc_bpf_cens_mac")

totalWorkflow = nipype.Workflow('preprocess_all')
totalWorkflow.base_dir = '.'

# anatomical part and func2anat
totalWorkflow.connect([
    (datagrab, pop_id, [('func', 'in_list')]),
    (pop_id, ds_id, [('txt_file', 'subjects')]),
    (datagrab, reorient_struct, [('struct', 'in_file')]),
    (reorient_struct, myanatproc, [('out_file', 'inputspec.anat')]),
    (reorient_struct, mybbr, [('out_file', 'inputspec.skull')]),
    (datagrab, reorient_func, [('func', 'in_file')]),
    (reorient_func, mybbr, [('out_file', 'inputspec.func')]),
    (myanatproc, mybbr,
     [('outputspec.probmap_wm', 'inputspec.anat_wm_segmentation'),
      ('outputspec.probmap_csf', 'inputspec.anat_csf_segmentation'),
      ('outputspec.probmap_gm', 'inputspec.anat_gm_segmentation'),
Example No. 24
def AnatProc(stdreg, SinkTag="anat_preproc", wf_name="anatproc"):
    """
    stdreg: either globals._RegType_.ANTS or globals._RegType_.FSL (no default value, to make sure the user has to decide explicitly)


        Performs processing of anatomical images:
        - brain extraction
        - tissue type segmentation
        - spatial standardization (with either FSL or ANTS)

        Images should already be "reoriented", e.g. with FSL fslreorient2std (see scripts/ex_pipeline.py)

        Workflow inputs:
            :param anat: The anatomical image file.
            :param SinkDir: where to write important outputs
            :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

        Workflow outputs:
            :param brain: brain extracted image in subject space
            :param brain_mask: brain mask in subject space
            :param skull: full head image in subject space
            :param probmap_gm: gray matter probability map
            :param probmap_wm: white matter probability map
            :param probmap_csf: CSF probability map
            :param parvol_gm: gray matter partial volume map
            :param parvol_wm: white matter partial volume map
            :param parvol_csf: CSF partial volume map
            :param partvol_map: hard segmented tissue map
            :param anat2mni_warpfield: spatial standardization warping field
            :param std_brain: spatially standardised brain extracted image
            :param stdregtype: type of standard registration: FSL=1, ANTS=2



            :return: anatproc_workflow


        Tamas Spisak
        [email protected]
        2018

        """
    import os

    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.afni as afni
    import nipype.interfaces.ants as ants
    import PUMI.utils.globals as globals
    # the 'bet', 'fast' and 'anat2mni' PUMI submodules are assumed to be
    # imported at module level in the original file

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    # Basic interface class generates identity mappings
    inputspec = pe.Node(utility.IdentityInterface(
        fields=['anat', 'bet_vertical_gradient', 'bet_fract_int_thr']),
                        name='inputspec')

    inputspec.inputs.bet_fract_int_thr = globals._fsl_bet_fract_int_thr_anat_
    inputspec.inputs.bet_vertical_gradient = globals._fsl_bet_vertical_gradient_

    # build the actual pipeline
    mybet = bet.bet_workflow()
    myfast = fast.fast_workflow(
        priormap=True
    )  # this uses no prior map right now ToDo: make it settable

    if stdreg == globals._RegType_.FSL:
        myanat2mni = anat2mni.anat2mni_fsl_workflow()
    else:  # ANTS
        myanat2mni = anat2mni.anat2mni_ants_workflow_harcoded(
        )  # currently hardcoded
        # TODO: set the fsl linear reg matrix here; the anat2mni_ants_workflow_harcoded output already contains it

    #resample 2mm-std ventricle to the actual standard space
    resample_std_ventricle = pe.Node(
        interface=afni.Resample(
            outputtype='NIFTI_GZ',
            in_file=globals._FSLDIR_ +
            "/data/standard/MNI152_T1_2mm_VentricleMask.nii.gz"),
        name='resample_std_ventricle'
    )  #default interpolation is nearest neighbour

    #transform std ventricle mask to anat space, applying the inverse warping field
    if (stdreg == globals._RegType_.FSL):
        unwarp_ventricle = pe.MapNode(interface=fsl.ApplyWarp(),
                                      iterfield=['ref_file', 'field_file'],
                                      name='unwarp_ventricle')
    else:  # ANTS
        unwarp_ventricle = pe.MapNode(
            interface=ants.ApplyTransforms(),
            iterfield=['reference_image', 'transforms'],
            name='unwarp_ventricle')

    # mask csf segmentation with anat-space ventricle mask
    ventricle_mask = pe.MapNode(fsl.ImageMaths(op_string=' -mas'),
                                iterfield=['in_file', 'in_file2'],
                                name="ventricle_mask")

    # Basic interface class generates identity mappings
    outputspec = pe.Node(utility.IdentityInterface(fields=[
        'brain', 'brain_mask', 'skull', 'probmap_gm', 'probmap_wm',
        'probmap_csf', 'probmap_ventricle', 'parvol_gm', 'parvol_wm',
        'parvol_csf', 'partvol_map', 'anat2mni_warpfield',
        'mni2anat_warpfield', 'std_brain', 'stdregtype', 'std_template'
    ]),
                         name='outputspec')

    outputspec.inputs.stdregtype = stdreg
    # return regtype as well

    # pickindex = lambda x, i: x[i]
    def pickindex(vec, i):
        return [x[i] for x in vec]

    totalWorkflow = nipype.Workflow(wf_name)
    totalWorkflow.connect([
        (inputspec, mybet, [('anat', 'inputspec.in_file'),
                            ('bet_fract_int_thr', 'inputspec.fract_int_thr'),
                            ('bet_vertical_gradient',
                             'inputspec.vertical_gradient')]),
        (mybet, myfast, [('outputspec.brain', 'inputspec.brain')]),
        (myanat2mni, myfast, [
            ('outputspec.invlinear_xfm', 'inputspec.stand2anat_xfm')
        ]),  # this uses no prior right now ToDo: make settable
        (mybet, myanat2mni, [('outputspec.brain', 'inputspec.brain')]),
        (inputspec, myanat2mni, [('anat', 'inputspec.skull')]),
        (mybet, outputspec, [('outputspec.brain', 'brain'),
                             ('outputspec.brain_mask', 'brain_mask')]),
        (inputspec, outputspec, [('anat', 'skull')]),
        (myanat2mni, resample_std_ventricle, [('outputspec.std_template',
                                               'master')]),
        (myfast, ventricle_mask, [('outputspec.probmap_csf', 'in_file')]),
        (ventricle_mask, outputspec, [('out_file', 'probmap_ventricle')]),
        (myfast, outputspec, [('outputspec.partial_volume_map', 'partvol_map'),
                              ('outputspec.probmap_csf', 'probmap_csf'),
                              ('outputspec.probmap_gm', 'probmap_gm'),
                              ('outputspec.probmap_wm', 'probmap_wm'),
                              ('outputspec.parvol_csf', 'parvol_csf'),
                              ('outputspec.parvol_gm', 'parvol_gm'),
                              ('outputspec.parvol_wm', 'parvol_wm')]),
        (myanat2mni, outputspec,
         [('outputspec.nonlinear_xfm', 'anat2mni_warpfield'),
          ('outputspec.output_brain', 'std_brain'),
          ('outputspec.std_template', 'std_template')]),
    ])

    if stdreg == globals._RegType_.FSL:
        totalWorkflow.connect(resample_std_ventricle, 'out_file',
                              unwarp_ventricle, 'in_file')
        totalWorkflow.connect(inputspec, 'anat', unwarp_ventricle, 'ref_file')
        totalWorkflow.connect(myanat2mni, 'outputspec.invnonlinear_xfm',
                              unwarp_ventricle, 'field_file')
        totalWorkflow.connect(myanat2mni, 'outputspec.invnonlinear_xfm',
                              outputspec, 'mni2anat_warpfield')
        totalWorkflow.connect(unwarp_ventricle, 'out_file', ventricle_mask,
                              'in_file2')
    else:  #ANTs
        totalWorkflow.connect(resample_std_ventricle, 'out_file',
                              unwarp_ventricle, 'input_image')
        totalWorkflow.connect(inputspec, 'anat', unwarp_ventricle,
                              'reference_image')
        totalWorkflow.connect(myanat2mni, 'outputspec.invnonlinear_xfm',
                              unwarp_ventricle, 'transforms')
        totalWorkflow.connect(myanat2mni, 'outputspec.invnonlinear_xfm',
                              outputspec, 'mni2anat_warpfield')
        totalWorkflow.connect(unwarp_ventricle, 'output_image', ventricle_mask,
                              'in_file2')

    return totalWorkflow
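# Hypothetical usage sketch: stdreg has no default and must be chosen
# explicitly; the input file name below is illustrative.
import PUMI.utils.globals as globals
anatproc_wf = AnatProc(stdreg=globals._RegType_.FSL, wf_name="anatproc")
anatproc_wf.inputs.inputspec.anat = "anat_reoriented.nii.gz"  # assumed file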
Example No. 25
def bet_workflow(Robust=True,
                 fmri=False,
                 SinkTag="anat_preproc",
                 wf_name="brain_extraction"):
    """
    Modified version of CPAC.anat_preproc.anat_preproc:

    `source: https://fcp-indi.github.io/docs/developer/_modules/CPAC/anat_preproc/anat_preproc.html`


    Creates a brain extracted image and its mask from a T1w anatomical image.

    Workflow inputs:
        :param anat: The reoriented anatomical file.
        :param SinkDir:
        :param SinkTag: The output directory in which the returned images (see workflow outputs) can be found.

    Workflow outputs:
        :return: bet_workflow - workflow

    Balint Kincses
    [email protected]
    2018
    """

    import os
    import nipype
    import nipype.pipeline as pe
    import nipype.interfaces.utility as utility
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as io
    import PUMI.utils.QC as qc
    import PUMI.utils.globals as globals
    import PUMI.func_preproc.Onevol as onevol

    SinkDir = os.path.abspath(globals._SinkDir_ + "/" + SinkTag)
    if not os.path.exists(SinkDir):
        os.makedirs(SinkDir)

    #Basic interface class generates identity mappings
    inputspec = pe.Node(
        utility.IdentityInterface(fields=[
            'in_file',
            'opt_R',
            'fract_int_thr',  # optional
            'vertical_gradient'
        ]),  # optional
        name='inputspec')
    inputspec.inputs.opt_R = Robust
    if fmri:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_func_
    else:
        inputspec.inputs.fract_int_thr = globals._fsl_bet_fract_int_thr_anat_

    inputspec.inputs.vertical_gradient = globals._fsl_bet_vertical_gradient_

    #Wraps command **bet**
    bet = pe.MapNode(interface=fsl.BET(), iterfield=['in_file'], name='bet')
    bet.inputs.mask = True
    # bet.inputs.robust=Robust
    if fmri:
        bet.inputs.functional = True
        myonevol = onevol.onevol_workflow(wf_name="onevol")
        applymask = pe.MapNode(fsl.ApplyMask(),
                               iterfield=['in_file', 'mask_file'],
                               name="apply_mask")

    myqc = qc.vol2png(wf_name, overlay=True)

    #Basic interface class generates identity mappings
    outputspec = pe.Node(
        utility.IdentityInterface(fields=['brain', 'brain_mask']),
        name='outputspec')

    # Save outputs which are important
    ds = pe.Node(interface=io.DataSink(), name='ds')
    ds.inputs.base_directory = SinkDir
    ds.inputs.regexp_substitutions = [(r"(\/)[^\/]*$", ".nii.gz")]

    #Create a workflow to connect all those nodes
    analysisflow = nipype.Workflow(
        wf_name)  # The name here determine the folder of the workspace
    analysisflow.base_dir = '.'
    analysisflow.connect(inputspec, 'in_file', bet, 'in_file')
    analysisflow.connect(inputspec, 'opt_R', bet, 'robust')
    analysisflow.connect(inputspec, 'fract_int_thr', bet, 'frac')
    analysisflow.connect(inputspec, 'vertical_gradient', bet,
                         'vertical_gradient')
    analysisflow.connect(bet, 'mask_file', outputspec, 'brain_mask')
    if fmri:

        analysisflow.connect(bet, 'mask_file', myonevol, 'inputspec.func')
        analysisflow.connect(myonevol, 'outputspec.func1vol', applymask,
                             'mask_file')
        analysisflow.connect(inputspec, 'in_file', applymask, 'in_file')
        analysisflow.connect(applymask, 'out_file', outputspec, 'brain')
    else:
        analysisflow.connect(bet, 'out_file', outputspec, 'brain')
    analysisflow.connect(bet, 'out_file', ds, 'bet_brain')
    analysisflow.connect(bet, 'mask_file', ds, 'brain_mask')

    analysisflow.connect(inputspec, 'in_file', myqc, 'inputspec.bg_image')
    analysisflow.connect(bet, 'out_file', myqc, 'inputspec.overlay_image')

    return analysisflow
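# Hypothetical usage sketch: anatomical brain extraction with robust BET;
# the input file name is illustrative.
bet_wf = bet_workflow(Robust=True, wf_name="brain_extraction")
bet_wf.inputs.inputspec.in_file = "anat_reoriented.nii.gz"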
Example No. 26
    def workflow(self):

        #         self.datasource()
        datasource = self.data_source
        dict_sequences = self.dict_sequences
        nipype_cache = self.nipype_cache
        result_dir = self.result_dir
        sub_id = self.sub_id
        regex = self.regex
        roi_selection = self.roi_selection

        workflow = nipype.Workflow('rtstruct_extraction_workflow',
                                   base_dir=nipype_cache)
        datasink = nipype.Node(nipype.DataSink(base_directory=result_dir),
                               "datasink")
        substitutions = [('subid', sub_id)]
        substitutions += [('results/', '{}/'.format(self.workflow_name))]
        substitutions += [('_mha_convert/', '/')]

        rt_sessions = dict_sequences['RT']
        for key in rt_sessions:
            rt_files = rt_sessions[key]
            if rt_files['phy_dose'] is not None:
                dose_name = '{0}_phy_dose'.format(key)
            elif rt_files['rbe_dose'] is not None:
                dose_name = '{0}_rbe_dose'.format(key)
            elif rt_files['ot_dose'] is not None:
                dose_name = '{0}_ot_dose'.format(key)
            else:
                roi_selection = False

            if rt_files['rtct'] is not None and rt_files[
                    'rtstruct'] is not None:
                ss_convert = nipype.Node(interface=RTStructureCoverter(),
                                         name='ss_convert')
                mha_convert = nipype.Node(interface=MHA2NIIConverter(),
                                          name='mha_convert')

                if roi_selection:
                    select = nipype.Node(interface=CheckRTStructures(),
                                         name='select_gtv')
                    workflow.connect(mha_convert, 'out_files', select, 'rois')
                    workflow.connect(datasource, dose_name, select,
                                     'dose_file')
                    workflow.connect(select, 'checked_roi', datasink,
                                     'results.subid.{}.@masks'.format(key))
                else:
                    workflow.connect(mha_convert, 'out_files', datasink,
                                     'results.subid.{}.@masks'.format(key))

                datasink.inputs.substitutions = substitutions

                workflow.connect(datasource, '{0}_rtct'.format(key),
                                 ss_convert, 'reference_ct')
                workflow.connect(datasource, '{0}_rtstruct'.format(key),
                                 ss_convert, 'input_ss')
                workflow.connect(ss_convert, 'out_structures', mha_convert,
                                 'input_folder')
            else:
                print('No RTCT or RTSTRUCT found for session '
                      '{}; skipping.'.format(key))


        return workflow
Example No. 27
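# (Fragment: this continues a 'features' MapNode definition; its interface,
# along with 'datasource', 'voxelizer', 'sub_list', 'result_dir' and
# 'cache_dir', is defined earlier in the full script.)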
                          iterfield=['in_file', 'mask'],
                          name='features_extraction')
features.inputs.first_order = True
features.inputs.cooccurence = True
features.inputs.run_length = True
features.inputs.int_vol_hist = True
features.inputs.local_intensity = True
features.inputs.volume = True
features.inputs.id = True
# features.inputs.ngld = True
features.inputs.ngtd = True
features.inputs.use_header = True

datasink = nipype.Node(nipype.DataSink(base_directory=result_dir), "datasink")
substitutions = []
for i, sub in enumerate(sub_list):
    substitutions += [('_features_extraction{}/'.format(i), sub + '/')]
datasink.inputs.substitutions = substitutions

workflow = nipype.Workflow('features_extraction_workflow', base_dir=cache_dir)
workflow.connect(datasource, 'ct', voxelizer, 'reference')
workflow.connect(datasource, 'rtstruct', voxelizer, 'struct_file')
workflow.connect(datasource, 'ct', features, 'in_file')
workflow.connect(voxelizer, 'out_files', features, 'mask')
workflow.connect(features, 'out_file', datasink,
                 'features_extraction.@csv_file')

workflow.run()
# workflow.run('MultiProc', plugin_args={'n_procs': 4})
print('Done!')
Example No. 28
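        # (Fragment: 'datasink', 'datasource', 'rs_ref', 'fast_1', 'fast_ref',
        # 'reg', 'split_1', 'merge_1' and 'apply_ts' are defined earlier in
        # the full script.)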
        substitutions = [('contrast', contrast), ('sub', sub.split('/')[-1]),
                         ('session', ref_tp + '_reference_tp'),
                         ('seg_0.', 'CSF.'), ('seg_0_trans', 'CSF_mapped'),
                         ('seg_1', 'GM'), ('seg_2', 'WM'),
                         ('antsreg0GenericAffine.mat', 'Affine_mat.mat'),
                         ('antsreg1Warp', 'Warp_field'),
                         ('antsreg1InverseWarp', 'Inverse_warp_field'),
                         ('antsregWarped', '{}_bet_mapped'.format(contrast))]
        for i, session in enumerate(sessions):
            substitutions += [('_fast_1{}/'.format(i), session + '/')]
            substitutions += [('_ants_reg{}/'.format(i), session + '/')]
            substitutions += [('_apply_ts{}/'.format(i), session + '/')]
        datasink.inputs.substitutions = substitutions

        workflow = nipype.Workflow('seg_reg_workflow',
                                   base_dir=os.path.join(
                                       CACHE_DIR, sub_name + '_' + contrast))
        workflow.connect(datasource, 'reference', rs_ref, 'in_file')
        workflow.connect(datasource, 'to_reg', fast_1, 'in_files')
        workflow.connect(datasource, 'to_reg', reg, 'input_file')
        workflow.connect(rs_ref, 'out_file', fast_ref, 'in_files')
        workflow.connect(rs_ref, 'out_file', reg, 'ref_file')
        workflow.connect(fast_1, 'tissue_class_files', datasink,
                         'seg_reg_preprocessing.contrast.sub.@fast_file')
        workflow.connect(
            fast_ref, 'tissue_class_files', datasink,
            'seg_reg_preprocessing.contrast.sub.reference_tp.@fast_ref_file')
        workflow.connect(fast_1, 'tissue_class_files', split_1, 'inlist')
        workflow.connect(reg, 'warp_file', merge_1, 'in1')
        workflow.connect(reg, 'regmat', merge_1, 'in2')
        workflow.connect(merge_1, 'out', apply_ts, 'transforms')
Example No. 29
#Generic datasink module to store structured outputs
NodeHash_6000010a5b80 = pe.Node(interface=io.DataSink(), name='NodeName_6000010a5b80')
NodeHash_6000010a5b80.inputs.base_directory = SinkDir
NodeHash_6000010a5b80.inputs.regexp_substitutions = [("func_fieldmapcorr/_NodeName_.{13}", "")]

#Generic datasink module to store structured outputs
NodeHash_608001eb9bc0 = pe.Node(interface=io.DataSink(), name='NodeName_608001eb9bc0')
NodeHash_608001eb9bc0.inputs.base_directory = SinkDir
NodeHash_608001eb9bc0.inputs.regexp_substitutions = [("_NodeName_.{13}", "")]

#Very simple frontend for storing values into a JSON file.
NodeHash_6000024a5820 = pe.Node(interface=io.JSONFileSink(), name='NodeName_6000024a5820')
NodeHash_6000024a5820.inputs.out_file = OutJSON

#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow('MyWorkflow')
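# Note (added): the order of the connect calls below is arbitrary; nipype
# assembles the execution graph from the edges, not from statement order.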
analysisflow.connect(NodeHash_608001eb9bc0, 'out_file', NodeHash_6000024a5820, 'fieldmap')
analysisflow.connect(NodeHash_6000018b2600, 'out_fieldmap', NodeHash_608001eb9bc0, 'fieldmap')
analysisflow.connect(NodeHash_6000010a5b80, 'out_file', NodeHash_6000024a5820, 'func_fieldmapcorr')
analysisflow.connect(NodeHash_600001eab220, 'abs', NodeHash_6000018b2600, 'delta_TE')
analysisflow.connect(NodeHash_60c0018a4860, 'dif', NodeHash_600001eab220, 'x')
analysisflow.connect(NodeHash_604000eb5d20, 'unwarp_direction', NodeHash_60c0018a5a60, 'unwarp_direction')
analysisflow.connect(NodeHash_60c0018a5a60, 'unwarped_file', NodeHash_6000010a5b80, 'func_fieldmapcorr')
analysisflow.connect(NodeHash_6000018b2600, 'out_fieldmap', NodeHash_60c0018a5a60, 'fmap_in_file')
analysisflow.connect(NodeHash_60c0018a6e40, 'out_file', NodeHash_60c0018a5a60, 'mask_file')
analysisflow.connect(NodeHash_604000cba700, 'mask_file', NodeHash_60c0018a6e40, 'in_file')
analysisflow.connect(NodeHash_604000eb5d20, 'dwell_time', NodeHash_60c0018a5a60, 'dwell_time')
analysisflow.connect(NodeHash_604000eb5d20, 'func', NodeHash_60c0018a5a60, 'in_file')
analysisflow.connect(NodeHash_604000cba700, 'out_file', NodeHash_600001ab26c0, 'in_file')
analysisflow.connect(NodeHash_604000eb5d20, 'TE2', NodeHash_60c0018a4860, 'b')
analysisflow.connect(NodeHash_604000eb5d20, 'TE1', NodeHash_60c0018a4860, 'a')
Example No. 30
dc = nipype.MapNode(interface=DicomCheck(), iterfield=['dicom_dir'], name='dc')
dc.inputs.working_dir = result_dir

converter = nipype.MapNode(interface=Dcm2niix(),
                           iterfield=['source_dir', 'out_filename', 'output_dir'],
                           name='converter')
converter.inputs.compress = 'y'
converter.inputs.philips_float = False
converter.inputs.merge_imgs = True

check = nipype.MapNode(interface=ConversionCheck(),
                       iterfield=['in_file', 'file_name'],
                       name='check_conversion')

workflow = nipype.Workflow('data_preparation_workflow', base_dir=cache_dir)
workflow.connect(inputnode, 'contrasts', datasource, 'contrasts')
workflow.connect(datasource, 'directory', dc, 'dicom_dir')
workflow.connect(inputnode_rt, 'rt_files', datasource_rt, 'rt_files')
workflow.connect(datasource_rt, 'directory', dc_rt, 'dicom_dir')
workflow.connect(dc, 'outdir', converter, 'source_dir')
workflow.connect(dc, 'scan_name', converter, 'out_filename')
workflow.connect(dc, 'base_dir', converter, 'output_dir')
workflow.connect(dc, 'scan_name', check, 'file_name')
workflow.connect(converter, 'converted_files', check, 'in_file')

# workflow.run()
workflow.run('MultiProc', plugin_args={'n_procs': 8})
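# Note (added): the MultiProc plugin runs independent nodes in parallel;
# 'n_procs' caps the number of worker processes.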

print('Done!')