Example #1
 def tbss_pipeline(self, **kwargs):  # @UnusedVariable
     pipeline = self.create_pipeline(
         name='tbss',
         inputs=[DatasetSpec('fa', nifti_gz_format)],
         outputs=[
             DatasetSpec('tbss_mean_fa', nifti_gz_format),
             DatasetSpec('tbss_proj_fa',
                         nifti_gz_format,
                         frequency='per_project'),
             DatasetSpec('tbss_skeleton',
                         nifti_gz_format,
                         frequency='per_project'),
             DatasetSpec('tbss_skeleton_mask',
                         nifti_gz_format,
                         frequency='per_project')
         ],
         version=1,
         citations=[tbss_cite, fsl_cite],
         **kwargs)
     # Create TBSS workflow
     tbss = create_tbss_all(name='tbss')
     # Connect inputs
     pipeline.connect_input('fa', tbss, 'inputnode.fa_list')
     # Connect outputs
     pipeline.connect_output('tbss_mean_fa', tbss, 'outputnode.meanfa_file')
     pipeline.connect_output('tbss_proj_fa', tbss,
                             'outputnode.projectedfa_file')
     pipeline.connect_output('tbss_skeleton', tbss,
                             'outputnode.skeleton_file')
     pipeline.connect_output('tbss_skeleton_mask', tbss,
                             'outputnode.skeleton_mask')
     # Check inputs/output are connected
     return pipeline
Example #2
    def intrascan_alignment_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='MCFLIRT_pipeline',
            inputs=[DatasetSpec('preproc', nifti_gz_format)],
            outputs=[
                DatasetSpec('moco', nifti_gz_format),
                DatasetSpec('align_mats', directory_format),
                DatasetSpec('moco_par', par_format)
            ],
            desc=("Intra-epi volumes alignment."),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        mcflirt = pipeline.create_node(fsl.MCFLIRT(),
                                       name='mcflirt',
                                       requirements=[fsl509_req])
        mcflirt.inputs.ref_vol = 0
        mcflirt.inputs.save_mats = True
        mcflirt.inputs.save_plots = True
        mcflirt.inputs.output_type = 'NIFTI_GZ'
        mcflirt.inputs.out_file = 'moco.nii.gz'
        pipeline.connect_input('preproc', mcflirt, 'in_file')
        pipeline.connect_output('moco', mcflirt, 'out_file')
        pipeline.connect_output('moco_par', mcflirt, 'par_file')

        merge = pipeline.create_node(MergeListMotionMat(), name='merge')
        pipeline.connect(mcflirt, 'mat_file', merge, 'file_list')
        pipeline.connect_output('align_mats', merge, 'out_dir')

        return pipeline
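
The MCFLIRT step above maps directly onto plain Nipype. A minimal standalone
sketch, assuming Nipype and FSL are installed; the paths are hypothetical and,
since Nipype validates that input files exist, must point at real data:

    from nipype.interfaces import fsl

    # Motion-correct a 4D EPI series to its first volume, saving the
    # per-volume transform matrices and motion parameters.
    mcflirt = fsl.MCFLIRT()
    mcflirt.inputs.in_file = 'preproc.nii.gz'  # hypothetical path
    mcflirt.inputs.ref_vol = 0
    mcflirt.inputs.save_mats = True
    mcflirt.inputs.save_plots = True
    mcflirt.inputs.output_type = 'NIFTI_GZ'
    mcflirt.inputs.out_file = 'moco.nii.gz'
    print(mcflirt.cmdline)   # inspect the generated FSL command
    # mcflirt.run()          # execute (requires FSL on PATH)
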
Example #3
    def cet_T1(self, **kwargs):
        pipeline = self.create_pipeline(
            name='CET_T1',
            inputs=[
                DatasetSpec('betted_T1', nifti_gz_format),
                DatasetSpec(self._lookup_l_tfm_to_name('MNI'),
                            text_matrix_format),
                DatasetSpec(self._lookup_nl_tfm_inv_name('MNI'),
                            nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('cetted_T1_mask', nifti_gz_format),
                DatasetSpec('cetted_T1', nifti_gz_format)
            ],
            desc=("Construct cerebellum mask using SUIT template"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        # Initially use MNI space to warp SUIT into T1 and threshold to mask
        merge_trans = pipeline.create_node(utils.Merge(2),
                                           name='merge_transforms')
        pipeline.connect_input(self._lookup_nl_tfm_inv_name('MNI'),
                               merge_trans, 'in2')
        pipeline.connect_input(self._lookup_l_tfm_to_name('MNI'), merge_trans,
                               'in1')
Example #4
    def fix_regression_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='signal_regression',
            inputs=[
                DatasetSpec('fix_dir', directory_format),
                DatasetSpec('labelled_components', text_format)
            ],
            outputs=[DatasetSpec('cleaned_file', nifti_gz_format)],
            desc=("Regression of the noisy components from the rsfMRI data "
                  "using a python implementation equivalent to that in FIX."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        signal_reg = pipeline.create_node(SignalRegression(),
                                          name="signal_reg",
                                          wall_time=30,
                                          requirements=[fsl509_req, fix_req])
        pipeline.connect_input("fix_dir", signal_reg, "fix_dir")
        pipeline.connect_input("labelled_components", signal_reg,
                               "labelled_components")
        signal_reg.inputs.motion_regression = self.parameter('motion_reg')
        signal_reg.inputs.highpass = self.parameter('highpass')

        pipeline.connect_output('cleaned_file', signal_reg, 'output')

        return pipeline
Example #5
 def fa_pipeline(self, **kwargs):  # @UnusedVariable
     """
     Fits the apparrent diffusion tensor (DT) to each voxel of the image
     """
     pipeline = self.create_pipeline(
         name='fa',
         inputs=[
             DatasetSpec('tensor', nifti_gz_format),
             DatasetSpec('brain_mask', nifti_gz_format)
         ],
         outputs=[
             DatasetSpec('fa', nifti_gz_format),
             DatasetSpec('adc', nifti_gz_format)
         ],
         desc=("Calculates the FA and ADC from a tensor image"),
         version=1,
         citations=[],
         **kwargs)
     # Create tensor fit node
     metrics = pipeline.create_node(TensorMetrics(),
                                    name='metrics',
                                    requirements=[mrtrix3_req])
     metrics.inputs.out_fa = 'fa.nii.gz'
     metrics.inputs.out_adc = 'adc.nii.gz'
     # Connect to inputs
     pipeline.connect_input('tensor', metrics, 'in_file')
     pipeline.connect_input('brain_mask', metrics, 'in_mask')
     # Connect to outputs
     pipeline.connect_output('fa', metrics, 'out_fa')
     pipeline.connect_output('adc', metrics, 'out_adc')
     # Check inputs/output are connected
     return pipeline
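
The TensorMetrics node wraps MRtrix3's tensor2metric command. A minimal
standalone sketch, assuming Nipype and MRtrix3 are installed; paths are
hypothetical and must point at existing files:

    from nipype.interfaces import mrtrix3

    # Compute FA and ADC maps from a fitted tensor image within a
    # brain mask, mirroring the 'metrics' node above.
    metrics = mrtrix3.TensorMetrics()
    metrics.inputs.in_file = 'dti.nii.gz'          # hypothetical tensor image
    metrics.inputs.in_mask = 'brain_mask.nii.gz'   # hypothetical mask
    metrics.inputs.out_fa = 'fa.nii.gz'
    metrics.inputs.out_adc = 'adc.nii.gz'
    print(metrics.cmdline)   # inspect the tensor2metric command
    # metrics.run()          # requires MRtrix3
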
Example #6
 def average_response_pipeline(self, **kwargs):
     """
     Averages the estimated response function over all subjects in the
     project
     """
     pipeline = self.create_pipeline(
         name='average_response',
         inputs=[DatasetSpec('wm_response', text_format)],
         outputs=[
             DatasetSpec('avg_response',
                         text_format,
                         frequency='per_project')
         ],
         desc=("Averages the fibre response function over the project"),
         version=1,
         citations=[mrtrix_cite],
         **kwargs)
     join_subjects = pipeline.create_join_subjects_node(
         IdentityInterface(['responses']),
         name='join_subjects',
         joinfield=['responses'])
     join_visits = pipeline.create_join_visits_node(Chain(['responses']),
                                                    name='join_visits',
                                                    joinfield=['responses'])
     avg_response = pipeline.create_node(AverageResponse(),
                                         name='avg_response')
     # Connect inputs
     pipeline.connect_input('wm_response', join_subjects, 'responses')
     # Connect inter-nodes
     pipeline.connect(join_subjects, 'responses', join_visits, 'responses')
     pipeline.connect(join_visits, 'responses', avg_response, 'in_files')
     # Connect outputs
     pipeline.connect_output('avg_response', avg_response, 'out_file')
     # Check inputs/output are connected
     return pipeline
Example #7
    def freesurfer_pipeline(self, **kwargs):
        """
        Segments grey matter, white matter and CSF from T1 images using
        SPM "NewSegment" function.

        NB: Default values come from the W2MHS toolbox
        """
        pipeline = self.create_pipeline(
            name='segmentation',
            inputs=[DatasetSpec('primary', nifti_gz_format)],
            outputs=[DatasetSpec('fs_recon_all',
                                 freesurfer_recon_all_format)],
            desc="Segment white/grey matter and csf",
            version=1,
            citations=copy(freesurfer_cites),
            **kwargs)
        # FS ReconAll node
        recon_all = pipeline.create_node(
            interface=ReconAll(), name='recon_all',
            requirements=[freesurfer_req], wall_time=2000)
        recon_all.inputs.directive = 'all'
        recon_all.inputs.openmp = self.runner.num_processes
        # Wrapper around os.path.join
        join = pipeline.create_node(interface=JoinPath(), name='join')
        pipeline.connect(recon_all, 'subjects_dir', join, 'dirname')
        pipeline.connect(recon_all, 'subject_id', join, 'filename')
        # Connect inputs/outputs
        pipeline.connect_input('primary', recon_all, 'T1_files')
        pipeline.connect_output('fs_recon_all', join, 'path')
        return pipeline
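
Outside the framework, the same step is a single Nipype ReconAll node. A
minimal sketch, assuming Nipype and FreeSurfer are installed; the subject ID
and paths are hypothetical (the subjects directory must already exist):

    from nipype.interfaces.freesurfer import ReconAll

    # Full cortical reconstruction for one subject, as configured above.
    recon_all = ReconAll()
    recon_all.inputs.directive = 'all'
    recon_all.inputs.T1_files = ['sub01_T1w.nii.gz']  # hypothetical input
    recon_all.inputs.subject_id = 'sub01'
    recon_all.inputs.subjects_dir = './subjects'      # hypothetical; must exist
    recon_all.inputs.openmp = 4                       # parallelise over 4 cores
    # recon_all.run()  # long-running; requires FreeSurfer
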
Example #8
 def tensor_pipeline(self, **kwargs):  # @UnusedVariable
     """
     Fits the apparent diffusion tensor (DT) to each voxel of the image
     """
     pipeline = self.create_pipeline(
         name='tensor',
         inputs=[
             DatasetSpec('bias_correct', nifti_gz_format),
             DatasetSpec('grad_dirs', fsl_bvecs_format),
             DatasetSpec('bvalues', fsl_bvals_format),
             DatasetSpec('brain_mask', nifti_gz_format)
         ],
         outputs=[DatasetSpec('tensor', nifti_gz_format)],
         desc=("Estimates the apparent diffusion tensor in each "
               "voxel"),
         version=1,
         citations=[],
         **kwargs)
     # Create tensor fit node
     dwi2tensor = pipeline.create_node(FitTensor(), name='dwi2tensor')
     dwi2tensor.inputs.out_file = 'dti.nii.gz'
     # Gradient merge node
     fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
     # Connect nodes
     pipeline.connect(fsl_grads, 'out', dwi2tensor, 'grad_fsl')
     # Connect to inputs
     pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
     pipeline.connect_input('bvalues', fsl_grads, 'in2')
     pipeline.connect_input('bias_correct', dwi2tensor, 'in_file')
     pipeline.connect_input('brain_mask', dwi2tensor, 'in_mask')
     # Connect to outputs
     pipeline.connect_output('tensor', dwi2tensor, 'out_file')
     # Check inputs/output are connected
     return pipeline
Example #9
    def segmentation_pipeline(self, img_type=2, **kwargs):
        pipeline = self.create_pipeline(
            name='FAST_segmentation',
            inputs=[DatasetSpec('brain', nifti_gz_format)],
            outputs=[DatasetSpec('wm_seg', nifti_gz_format)],
            desc="White matter segmentation of the reference image",
            version=1,
            citations=[fsl_cite],
            **kwargs)

        fast = pipeline.create_node(fsl.FAST(),
                                    name='fast',
                                    requirements=[fsl509_req])
        fast.inputs.img_type = img_type
        fast.inputs.segments = True
        fast.inputs.out_basename = 'Reference_segmentation'
        pipeline.connect_input('brain', fast, 'in_files')
        split = pipeline.create_node(Split(), name='split')
        split.inputs.splits = [1, 1, 1]
        split.inputs.squeeze = True
        pipeline.connect(fast, 'tissue_class_files', split, 'inlist')
        if img_type == 1:
            pipeline.connect_output('wm_seg', split, 'out3')
        elif img_type == 2:
            pipeline.connect_output('wm_seg', split, 'out2')
        else:
            raise ArcanaUsageError(
                "'img_type' parameter can either be 1 or 2 (not {})".format(
                    img_type))

        return pipeline
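
The out2/out3 split above reflects FAST's tissue-class ordering, which depends
on img_type: the white-matter class sits at a different index in
tissue_class_files for T1- and T2-weighted inputs. A minimal standalone FAST
sketch, assuming Nipype and FSL are installed; the path is hypothetical:

    from nipype.interfaces import fsl

    # Three-class tissue segmentation of a brain-extracted image.
    fast = fsl.FAST()
    fast.inputs.in_files = 'brain.nii.gz'  # hypothetical brain image
    fast.inputs.img_type = 1               # 1 = T1-weighted, 2 = T2-weighted
    fast.inputs.segments = True            # one binary mask per tissue class
    fast.inputs.out_basename = 'Reference_segmentation'
    print(fast.cmdline)
    # res = fast.run()  # requires FSL; res.outputs.tissue_class_files then
    #                   # holds one mask per class
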
Example #10
    def ApplyTransform_pipeline(self, **kwargs):
        pipeline = self.create_pipeline(
            name='applytransform',
            inputs=[DatasetSpec('pet_volumes', nifti_gz_format),
                    DatasetSpec('warp_file', nifti_gz_format),
                    DatasetSpec('affine_mat', text_matrix_format)],
            outputs=[DatasetSpec('registered_volumes', nifti_gz_format)],
            desc=('Apply transformation to the 4D PET time series'),
            version=1,
            citations=[],
            **kwargs)

        merge_trans = pipeline.create_node(Merge(2), name='merge_transforms')
        pipeline.connect_input('warp_file', merge_trans, 'in1')
        pipeline.connect_input('affine_mat', merge_trans, 'in2')

        apply_trans = pipeline.create_node(
            ApplyTransforms(), name='ApplyTransform')
        apply_trans.inputs.reference_image = self.parameter(
            'trans_template')
        apply_trans.inputs.interpolation = 'Linear'
        apply_trans.inputs.input_image_type = 3
        pipeline.connect(merge_trans, 'out', apply_trans, 'transforms')
        pipeline.connect_input('pet_volumes', apply_trans, 'input_image')

        pipeline.connect_output('registered_volumes', apply_trans,
                                'output_image')
        return pipeline
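
The ApplyTransforms node wraps ANTs' antsApplyTransforms, which applies the
last transform in the list first. A minimal standalone sketch, assuming Nipype
and ANTs are installed; paths are hypothetical:

    from nipype.interfaces.ants import ApplyTransforms

    # Resample a 4D PET series (input_image_type=3 marks a time series)
    # into template space with a warp followed by an affine.
    at = ApplyTransforms()
    at.inputs.input_image = 'pet_volumes.nii.gz'   # hypothetical
    at.inputs.reference_image = 'template.nii.gz'  # hypothetical
    at.inputs.transforms = ['warp.nii.gz', 'affine.mat']
    at.inputs.interpolation = 'Linear'
    at.inputs.input_image_type = 3
    print(at.cmdline)
    # at.run()  # requires ANTs
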
Example #11
    def _ants_linear_coreg_pipeline(self, name, to_reg, ref, reg, matrix,
                                    **kwargs):

        pipeline = self.create_pipeline(
            name=name,
            inputs=[
                DatasetSpec(to_reg, nifti_gz_format),
                DatasetSpec(ref, nifti_gz_format)
            ],
            outputs=[
                DatasetSpec(reg, nifti_gz_format),
                DatasetSpec(matrix, text_matrix_format)
            ],
            desc="Registers a MR scan against a reference image using ANTs",
            version=1,
            citations=[],
            **kwargs)

        ants_linear = pipeline.create_node(AntsRegSyn(num_dimensions=3,
                                                      transformation='r',
                                                      out_prefix='reg2hires'),
                                           name='ANTs_linear_Reg',
                                           wall_time=10,
                                           requirements=[ants2_req])
        pipeline.connect_input(ref, ants_linear, 'ref_file')
        pipeline.connect_input(to_reg, ants_linear, 'input_file')

        pipeline.connect_output(reg, ants_linear, 'reg_file')
        pipeline.connect_output(matrix, ants_linear, 'regmat')

        return pipeline
Example #12
 def _fsl_bet_brain_extraction_pipeline(self, in_file, **kwargs):
     """
     Generates a whole brain mask using FSL's BET command.
     """
     pipeline = self.create_pipeline(
         name='brain_extraction',
         inputs=[DatasetSpec(in_file, nifti_gz_format)],
         outputs=[
             DatasetSpec('brain', nifti_gz_format),
             DatasetSpec('brain_mask', nifti_gz_format)
         ],
         desc="Generate brain mask from mr_scan",
         version=1,
         citations=[fsl_cite, bet_cite, bet2_cite],
         **kwargs)
     # Create mask node
     bet = pipeline.create_node(interface=fsl.BET(),
                                name="bet",
                                requirements=[fsl509_req])
     bet.inputs.mask = True
     bet.inputs.output_type = 'NIFTI_GZ'
     if self.parameter('bet_robust'):
         bet.inputs.robust = True
     if self.parameter('bet_reduce_bias'):
         bet.inputs.reduce_bias = True
     bet.inputs.frac = self.parameter('bet_f_threshold')
     bet.inputs.vertical_gradient = self.parameter('bet_g_threshold')
     # Connect inputs/outputs
     pipeline.connect_input(in_file, bet, 'in_file')
     pipeline.connect_output('brain', bet, 'out_file')
     pipeline.connect_output('brain_mask', bet, 'mask_file')
     return pipeline
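
The BET node corresponds one-to-one with Nipype's fsl.BET interface. A minimal
standalone sketch, assuming Nipype and FSL are installed; the path and
threshold values are hypothetical:

    from nipype.interfaces import fsl

    # Brain extraction with an explicit mask output, as configured above.
    bet = fsl.BET()
    bet.inputs.in_file = 'T1.nii.gz'       # hypothetical input
    bet.inputs.mask = True                 # also write the binary brain mask
    bet.inputs.output_type = 'NIFTI_GZ'
    bet.inputs.robust = True               # robust brain-centre estimation
    bet.inputs.frac = 0.5                  # fractional intensity threshold
    bet.inputs.vertical_gradient = 0.0     # vertical gradient of the threshold
    print(bet.cmdline)
    # bet.run()  # requires FSL
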
Example #13
 def _qform_transform_factory(self, name, to_reg, ref, qformed, qformed_mat,
                              **kwargs):
     pipeline = self.create_pipeline(
         name=name,
         inputs=[
             DatasetSpec(to_reg, nifti_gz_format),
             DatasetSpec(ref, nifti_gz_format)
         ],
         outputs=[
             DatasetSpec(qformed, nifti_gz_format),
             DatasetSpec(qformed_mat, text_matrix_format)
         ],
         desc="Registers a MR scan against a reference image",
         version=1,
         citations=[fsl_cite],
         **kwargs)
     flirt = pipeline.create_node(interface=FLIRT(),
                                  name='flirt',
                                  requirements=[fsl5_req],
                                  wall_time=5)
     flirt.inputs.uses_qform = True
     flirt.inputs.apply_xfm = True
     # Connect inputs
     pipeline.connect_input(to_reg, flirt, 'in_file')
     pipeline.connect_input(ref, flirt, 'reference')
     # Connect outputs
     pipeline.connect_output(qformed, flirt, 'out_file')
     pipeline.connect_output(qformed_mat, flirt, 'out_matrix_file')
     return pipeline
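
Here FLIRT is used not to optimise a registration but to apply the transform
implied by the images' qform headers. A minimal standalone sketch, assuming
Nipype and FSL are installed; paths are hypothetical:

    from nipype.interfaces.fsl import FLIRT

    # Resample 'in_file' onto 'reference' using the qform transform
    # rather than running a registration search.
    flirt = FLIRT()
    flirt.inputs.in_file = 'scan.nii.gz'    # hypothetical
    flirt.inputs.reference = 'ref.nii.gz'   # hypothetical
    flirt.inputs.uses_qform = True          # initialise from qform headers
    flirt.inputs.apply_xfm = True           # apply, don't optimise
    print(flirt.cmdline)
    # flirt.run()  # requires FSL
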
Example #14
    def group_melodic_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='group_melodic',
            inputs=[
                DatasetSpec('smoothed_ts', nifti_gz_format),
                FieldSpec('tr', float)
            ],
            outputs=[DatasetSpec('group_melodic', directory_format)],
            desc=("Group ICA"),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        gica = pipeline.create_join_subjects_node(MELODIC(),
                                                  joinfield=['in_files'],
                                                  name='gica',
                                                  requirements=[fsl510_req],
                                                  wall_time=7200)
        gica.inputs.no_bet = True
        gica.inputs.bg_threshold = self.parameter('brain_thresh_percent')
        gica.inputs.bg_image = self.parameter('MNI_template')
        gica.inputs.dim = self.parameter('group_ica_components')
        gica.inputs.report = True
        gica.inputs.out_stats = True
        gica.inputs.mm_thresh = 0.5
        gica.inputs.sep_vn = True
        gica.inputs.mask = self.parameter('MNI_template_mask')
        gica.inputs.out_dir = 'group_melodic.ica'
        pipeline.connect_input('smoothed_ts', gica, 'in_files')
        pipeline.connect_input('tr', gica, 'tr_sec')

        pipeline.connect_output('group_melodic', gica, 'out_dir')

        return pipeline
Example #15
class StaticPETStudy(PETStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('pet_image', nifti_gz_format),
        DatasetSpec('base_mask', nifti_gz_format),
        DatasetSpec('SUVR_image', nifti_gz_format, 'suvr_pipeline')
    ]

    def suvr_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='SUVR',
            inputs=[
                DatasetSpec('registered_volume', nifti_gz_format),
                DatasetSpec('base_mask', nifti_gz_format)
            ],
            outputs=[DatasetSpec('SUVR_image', nifti_gz_format)],
            desc=('Calculate SUVR image'),
            version=1,
            citations=[],
            **kwargs)

        suvr = pipeline.create_node(SUVRCalculation(), name='SUVR')
        pipeline.connect_input('registered_volume', suvr, 'volume')
        pipeline.connect_input('base_mask', suvr, 'base_mask')
        pipeline.connect_output('SUVR_image', suvr, 'SUVR_file')
        return pipeline

    def _ica_inputs(self):
        pass
Example #16
    def fix_classification_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='fix_classification',
            inputs=[
                DatasetSpec('train_data',
                            rfile_format,
                            frequency='per_project'),
                DatasetSpec('fix_dir', directory_format)
            ],
            outputs=[DatasetSpec('labelled_components', text_format)],
            desc=("Automatic classification of noisy components from the "
                  "rsfMRI data using fsl FIX."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        fix = pipeline.create_node(FSLFIX(),
                                   name="fix",
                                   wall_time=30,
                                   requirements=[fsl509_req, fix_req])
        pipeline.connect_input("fix_dir", fix, "feat_dir")
        pipeline.connect_input("train_data", fix, "train_data")
        fix.inputs.component_threshold = self.parameter('component_threshold')
        fix.inputs.motion_reg = self.parameter('motion_reg')
        fix.inputs.classification = True

        pipeline.connect_output('labelled_components', fix, 'label_file')

        return pipeline
Example #17
    def motion_mat_pipeline(self, **kwargs):
        if not self.spec('coreg_matrix').derivable:
            logger.info("Cannot derive 'coreg_matrix' for {} required for "
                        "motion matrix calculation, assuming that it "
                        "is the reference study".format(self))
            inputs = [DatasetSpec('primary', dicom_format)]
            ref = True
        else:
            inputs = [
                DatasetSpec('coreg_matrix', text_matrix_format),
                DatasetSpec('qform_mat', text_matrix_format)
            ]
            if 'align_mats' in self.data_spec_names():
                inputs.append(DatasetSpec('align_mats', directory_format))
            ref = False
        pipeline = self.create_pipeline(
            name='motion_mat_calculation',
            inputs=inputs,
            outputs=[DatasetSpec('motion_mats', motion_mats_format)],
            desc=("Motion matrices calculation"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mm = pipeline.create_node(MotionMatCalculation(), name='motion_mats')
        if ref:
            mm.inputs.reference = True
            pipeline.connect_input('primary', mm, 'dummy_input')
        else:
            pipeline.connect_input('coreg_matrix', mm, 'reg_mat')
            pipeline.connect_input('qform_mat', mm, 'qform_mat')
            if 'align_mats' in self.data_spec_names():
                pipeline.connect_input('align_mats', mm, 'align_mats')
        pipeline.connect_output('motion_mats', mm, 'motion_mats')
        return pipeline
Example #18
 def concatenate_pipeline(self, **kwargs):  # @UnusedVariable
     """
     Concatenates two dMRI datasets (with different b-values) along the
     DW encoding (4th) axis
     """
     pipeline = self.create_pipeline(
         name='concatenation',
         inputs=[
             DatasetSpec('low_b_dw_scan', mrtrix_format),
             DatasetSpec('high_b_dw_scan', mrtrix_format)
         ],
         outputs=[DatasetSpec('dwi_scan', mrtrix_format)],
         desc=("Concatenate low and high b-value dMRI datasets for NODDI "
               "processing"),
         version=1,
         citations=[mrtrix_cite],
         **kwargs)
     # Create concatenation node
     mrcat = pipeline.create_node(MRCat(),
                                  name='mrcat',
                                  requirements=[mrtrix3_req])
     mrcat.inputs.quiet = True
     # Connect inputs
     pipeline.connect_input('low_b_dw_scan', mrcat, 'first_scan')
     pipeline.connect_input('high_b_dw_scan', mrcat, 'second_scan')
     # Connect outputs
     pipeline.connect_output('dwi_scan', mrcat, 'out_file')
     # Check inputs/outputs are connected
     return pipeline
Example #19
 def t1_brain_extraction_pipeline(self, **kwargs):
     """
     Masks the T1 image using the coregistered T2 brain mask, since the
     brain mask derived from T2 is usually more reliable (BET is used in
     either case)
     """
     pipeline = self.create_pipeline(
         name='t1_brain_extraction_pipeline',
         inputs=[
             DatasetSpec('t1', nifti_gz_format),
             DatasetSpec('brain_mask', nifti_gz_format)
         ],
         outputs=[DatasetSpec('t1_brain', nifti_gz_format)],
         version=1,
         desc="Mask T1 with T2 brain mask",
         citations=[fsl_cite],
         **kwargs)
     # Create apply mask node
     apply_mask = pipeline.create_node(ApplyMask(),
                                       name='apply_mask',
                                       requirements=[fsl5_req])
     apply_mask.inputs.output_type = 'NIFTI_GZ'
     # Connect inputs
     pipeline.connect_input('t1', apply_mask, 'in_file')
     pipeline.connect_input('brain_mask', apply_mask, 'mask_file')
     # Connect outputs
     pipeline.connect_output('t1_brain', apply_mask, 'out_file')
     # Check and return
     return pipeline
Example #20
    def single_subject_melodic_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='MelodicL1',
            inputs=[
                DatasetSpec('filtered_data', nifti_gz_format),
                FieldSpec('tr', float),
                DatasetSpec('brain_mask', nifti_gz_format)
            ],
            outputs=[DatasetSpec('melodic_ica', directory_format)],
            desc=("Single subject ICA analysis using FSL MELODIC."),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        mel = pipeline.create_node(MELODIC(),
                                   name='melodic_L1',
                                   wall_time=15,
                                   requirements=[fsl5_req])
        mel.inputs.no_bet = True
        pipeline.connect_input('brain_mask', mel, 'mask')
        mel.inputs.bg_threshold = self.parameter('brain_thresh_percent')
        mel.inputs.report = True
        mel.inputs.out_stats = True
        mel.inputs.mm_thresh = 0.5
        mel.inputs.out_dir = 'melodic_ica'
        pipeline.connect_input('tr', mel, 'tr_sec')
        pipeline.connect_input('filtered_data', mel, 'in_files')

        pipeline.connect_output('melodic_ica', mel, 'out_dir')

        return pipeline
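
The melodic_L1 node maps onto Nipype's fsl.MELODIC interface. A minimal
standalone sketch, assuming Nipype and FSL are installed; the paths, the
background threshold and the TR are hypothetical:

    from nipype.interfaces import fsl

    # Single-subject ICA with report and stats output, as configured above.
    mel = fsl.MELODIC()
    mel.inputs.in_files = ['filtered_func_data.nii.gz']  # hypothetical
    mel.inputs.mask = 'brain_mask.nii.gz'                # hypothetical
    mel.inputs.no_bet = True          # mask supplied, skip internal BET
    mel.inputs.bg_threshold = 10.0    # brain/non-brain threshold (percent)
    mel.inputs.tr_sec = 2.0           # repetition time in seconds
    mel.inputs.report = True
    mel.inputs.out_stats = True
    mel.inputs.mm_thresh = 0.5        # mixture-model threshold
    mel.inputs.out_dir = 'melodic_ica'
    print(mel.cmdline)
    # mel.run()  # requires FSL
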
Example #21
    def bet_T1(self, **kwargs):

        pipeline = self.create_pipeline(
            name='BET_T1',
            inputs=[DatasetSpec('t1', nifti_gz_format)],
            outputs=[
                DatasetSpec('betted_T1', nifti_gz_format),
                DatasetSpec('betted_T1_mask', nifti_gz_format)
            ],
            desc=("python implementation of BET"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        bias = pipeline.create_node(interface=ants.N4BiasFieldCorrection(),
                                    name='n4_bias_correction',
                                    requirements=[ants19_req],
                                    wall_time=60,
                                    memory=12000)
        pipeline.connect_input('t1', bias, 'input_image')

        bet = pipeline.create_node(fsl.BET(frac=0.15, reduce_bias=True),
                                   name='bet',
                                   requirements=[fsl5_req],
                                   memory=8000,
                                   wall_time=45)

        pipeline.connect(bias, 'output_image', bet, 'in_file')
        pipeline.connect_output('betted_T1', bet, 'out_file')
        pipeline.connect_output('betted_T1_mask', bet, 'mask_file')

        return pipeline
Example #22
    def qsm_de_pipeline(self, **kwargs):  # @UnusedVariable @IgnorePep8
        """
        Process dual echo data for QSM (TE=[7.38, 22.14])

        NB: Default values come from the STI-Suite
        """
        pipeline = self.create_pipeline(
            name='qsmrecon',
            inputs=[DatasetSpec('coils', directory_format)],
            # TODO should this be primary?
            outputs=[
                DatasetSpec('qsm', nifti_gz_format),
                DatasetSpec('tissue_phase', nifti_gz_format),
                DatasetSpec('tissue_mask', nifti_gz_format),
                DatasetSpec('qsm_mask', nifti_gz_format)
            ],
            desc="Resolve QSM from t2star coils",
            citations=[sti_cites, fsl_cite, matlab_cite],
            version=1,
            **kwargs)

        # Prepare and reformat SWI_COILS
        prepare = pipeline.create_node(interface=Prepare(),
                                       name='prepare',
                                       requirements=[matlab2015_req],
                                       wall_time=30,
                                       memory=16000)

        # Brain Mask
        mask = pipeline.create_node(interface=fsl.BET(),
                                    name='bet',
                                    requirements=[fsl5_req],
                                    wall_time=30,
                                    memory=8000)
        mask.inputs.reduce_bias = True
        mask.inputs.output_type = 'NIFTI_GZ'
        mask.inputs.frac = 0.3
        mask.inputs.mask = True

        # Phase and QSM for dual echo
        qsmrecon = pipeline.create_node(interface=STI_DE(),
                                        name='qsmrecon',
                                        requirements=[matlab2015_req],
                                        wall_time=600,
                                        memory=24000)

        # Connect inputs/outputs
        pipeline.connect_input('coils', prepare, 'in_dir')
        pipeline.connect_output('qsm_mask', mask, 'mask_file')
        pipeline.connect_output('qsm', qsmrecon, 'qsm')
        pipeline.connect_output('tissue_phase', qsmrecon, 'tissue_phase')
        pipeline.connect_output('tissue_mask', qsmrecon, 'tissue_mask')

        pipeline.connect(prepare, 'out_file', mask, 'in_file')
        pipeline.connect(mask, 'mask_file', qsmrecon, 'mask_file')
        pipeline.connect(prepare, 'out_dir', qsmrecon, 'in_dir')

        return pipeline
Example #23
    def rsfMRI_filtering_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='rsfMRI_filtering',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('coreg_ref_brain', nifti_gz_format),
                FieldSpec('tr', float)
            ],
            outputs=[
                DatasetSpec('filtered_data', nifti_gz_format),
                DatasetSpec('mc_par', par_format)
            ],
            desc=("Spatial and temporal rsfMRI filtering"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        afni_mc = pipeline.create_node(Volreg(),
                                       name='AFNI_MC',
                                       wall_time=5,
                                       requirements=[afni_req])
        afni_mc.inputs.zpad = 1
        afni_mc.inputs.out_file = 'rsfmri_mc.nii.gz'
        afni_mc.inputs.oned_file = 'prefiltered_func_data_mcf.par'
        pipeline.connect_input('preproc', afni_mc, 'in_file')

        filt = pipeline.create_node(Tproject(),
                                    name='Tproject',
                                    wall_time=5,
                                    requirements=[afni_req])
        filt.inputs.stopband = (0, 0.01)
        filt.inputs.polort = 3
        filt.inputs.blur = 3
        filt.inputs.out_file = 'filtered_func_data.nii.gz'
        pipeline.connect_input('tr', filt, 'delta_t')
        pipeline.connect(afni_mc, 'out_file', filt, 'in_file')
        pipeline.connect_input('brain_mask', filt, 'mask')

        meanfunc = pipeline.create_node(ImageMaths(op_string='-Tmean',
                                                   suffix='_mean'),
                                        name='meanfunc',
                                        wall_time=5,
                                        requirements=[fsl5_req])
        pipeline.connect(afni_mc, 'out_file', meanfunc, 'in_file')

        add_mean = pipeline.create_node(ImageMaths(op_string='-add'),
                                        name='add_mean',
                                        wall_time=5,
                                        requirements=[fsl5_req])
        pipeline.connect(filt, 'out_file', add_mean, 'in_file')
        pipeline.connect(meanfunc, 'out_file', add_mean, 'in_file2')

        pipeline.connect_output('filtered_data', add_mean, 'out_file')
        pipeline.connect_output('mc_par', afni_mc, 'oned_file')

        return pipeline
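
The motion-correction and filtering steps use AFNI's 3dvolreg and 3dTproject
(the Tproject interface above appears to be a local wrapper; stock Nipype's
afni.TProject exposes a TR input rather than delta_t). A minimal standalone
sketch, assuming Nipype and AFNI are installed; paths are hypothetical and
must point at existing files:

    from nipype.interfaces import afni

    # Rigid-body motion correction, writing the motion parameters.
    volreg = afni.Volreg()
    volreg.inputs.in_file = 'preproc.nii.gz'  # hypothetical
    volreg.inputs.zpad = 1
    volreg.inputs.out_file = 'rsfmri_mc.nii.gz'
    volreg.inputs.oned_file = 'prefiltered_func_data_mcf.par'
    print(volreg.cmdline)

    # Polynomial detrending, high-pass filtering and spatial blurring.
    tproject = afni.TProject()
    tproject.inputs.in_file = 'rsfmri_mc.nii.gz'
    tproject.inputs.stopband = (0.0, 0.01)  # remove fluctuations below 0.01 Hz
    tproject.inputs.polort = 3
    tproject.inputs.blur = 3.0
    tproject.inputs.mask = 'brain_mask.nii.gz'  # hypothetical
    tproject.inputs.out_file = 'filtered_func_data.nii.gz'
    print(tproject.cmdline)
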
Example #24
    def _fugue_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='preproc_pipeline',
            inputs=[
                DatasetSpec('primary', nifti_gz_format),
                DatasetSpec('field_map_mag', nifti_gz_format),
                DatasetSpec('field_map_phase', nifti_gz_format),
                FieldSpec('field_map_delta_te', float)
            ],
            outputs=[DatasetSpec('preproc', nifti_gz_format)],
            desc=("Fugue distortion correction pipeline"),
            version=1,
            citations=[fsl_cite],
            **kwargs)

        reorient_epi_in = pipeline.create_node(fsl.utils.Reorient2Std(),
                                               name='reorient_epi_in',
                                               requirements=[fsl509_req])
        pipeline.connect_input('primary', reorient_epi_in, 'in_file')
        fm_mag_reorient = pipeline.create_node(fsl.utils.Reorient2Std(),
                                               name='reorient_fm_mag',
                                               requirements=[fsl509_req])
        pipeline.connect_input('field_map_mag', fm_mag_reorient, 'in_file')
        fm_phase_reorient = pipeline.create_node(fsl.utils.Reorient2Std(),
                                                 name='reorient_fm_phase',
                                                 requirements=[fsl509_req])
        pipeline.connect_input('field_map_phase', fm_phase_reorient, 'in_file')
        bet = pipeline.create_node(BET(),
                                   name="bet",
                                   wall_time=5,
                                   requirements=[fsl509_req])
        bet.inputs.robust = True
        pipeline.connect(fm_mag_reorient, 'out_file', bet, 'in_file')
        create_fmap = pipeline.create_node(PrepareFieldmap(),
                                           name="prepfmap",
                                           wall_time=5,
                                           requirements=[fsl509_req])
        #         create_fmap.inputs.delta_TE = 2.46
        pipeline.connect_input('field_map_delta_te', create_fmap, 'delta_TE')
        pipeline.connect(bet, "out_file", create_fmap, "in_magnitude")
        pipeline.connect(fm_phase_reorient, 'out_file', create_fmap,
                         'in_phase')

        fugue = pipeline.create_node(FUGUE(),
                                     name='fugue',
                                     wall_time=5,
                                     requirements=[fsl509_req])
        fugue.inputs.unwarp_direction = 'x'
        fugue.inputs.dwell_time = self.parameter('fugue_echo_spacing')
        fugue.inputs.unwarped_file = 'example_func.nii.gz'
        pipeline.connect(create_fmap, 'out_fieldmap', fugue, 'fmap_in_file')
        pipeline.connect(reorient_epi_in, 'out_file', fugue, 'in_file')
        pipeline.connect_output('preproc', fugue, 'unwarped_file')
        return pipeline
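
The distortion-correction core of this pipeline is FSL's fsl_prepare_fieldmap
followed by FUGUE. A minimal standalone sketch, assuming Nipype and FSL are
installed; the paths, delta-TE and dwell time are hypothetical:

    from nipype.interfaces import fsl

    # Build a rad/s fieldmap from magnitude and phase images.
    prep = fsl.PrepareFieldmap()
    prep.inputs.in_magnitude = 'fm_mag_brain.nii.gz'  # hypothetical (betted)
    prep.inputs.in_phase = 'fm_phase.nii.gz'          # hypothetical
    prep.inputs.delta_TE = 2.46                       # echo-time difference (ms)
    print(prep.cmdline)

    # Unwarp the EPI along the phase-encode direction.
    fugue = fsl.FUGUE()
    fugue.inputs.in_file = 'epi.nii.gz'        # hypothetical
    fugue.inputs.fmap_in_file = 'fmap.nii.gz'  # output of the step above
    fugue.inputs.dwell_time = 0.00039          # effective echo spacing (s)
    fugue.inputs.unwarp_direction = 'x'
    fugue.inputs.unwarped_file = 'example_func.nii.gz'
    print(fugue.cmdline)
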
Example #25
class PETPCAMotionDetectionStudy(PETStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('list_mode', list_mode_format),
        FieldSpec('time_offset', int),
        FieldSpec('temporal_length', float),
        FieldSpec('num_frames', int),
        DatasetSpec('ssrb_sinograms', directory_format,
                    'sinogram_unlisting_pipeline')
    ]

    def sinogram_unlisting_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='prepare_sinogram',
            inputs=[
                DatasetSpec('list_mode', list_mode_format),
                FieldSpec('time_offset', int),
                FieldSpec('temporal_length', float),
                FieldSpec('num_frames', int)
            ],
            outputs=[DatasetSpec('ssrb_sinograms', directory_format)],
            desc=('Unlist pet listmode data into several sinograms and '
                  'perform ssrb compression to prepare data for motion '
                  'detection using PCA pipeline.'),
            version=1,
            citations=[],
            **kwargs)

        prepare_inputs = pipeline.create_node(PrepareUnlistingInputs(),
                                              name='prepare_inputs')
        pipeline.connect_input('list_mode', prepare_inputs, 'list_mode')
        pipeline.connect_input('time_offset', prepare_inputs, 'time_offset')
        pipeline.connect_input('num_frames', prepare_inputs, 'num_frames')
        pipeline.connect_input('temporal_length', prepare_inputs,
                               'temporal_len')
        unlisting = pipeline.create_node(PETListModeUnlisting(),
                                         iterfield=['list_inputs'],
                                         name='unlisting')
        pipeline.connect(prepare_inputs, 'out', unlisting, 'list_inputs')

        ssrb = pipeline.create_node(SSRB(),
                                    name='ssrb',
                                    requirements=[stir_req])
        pipeline.connect(unlisting, 'pet_sinogram', ssrb, 'unlisted_sinogram')

        merge = pipeline.create_join_node(MergeUnlistingOutputs(),
                                          joinsource='unlisting',
                                          joinfield=['sinograms'],
                                          name='merge_sinograms')
        pipeline.connect(ssrb, 'ssrb_sinograms', merge, 'sinograms')
        pipeline.connect_output('ssrb_sinograms', merge, 'sinogram_folder')

        return pipeline
Example #26
class TestMatchStudy(Study, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('gre_phase', dicom_format),
        DatasetSpec('gre_mag', dicom_format)
    ]

    def dummy_pipeline1(self):
        pass

    def dummy_pipeline2(self):
        pass
Example #27
 def pipeline(self):
     pipeline = self.create_pipeline(
         name='pipeline',
         inputs=[
             DatasetSpec('mrtrix', text_format),
             DatasetSpec('nifti_gz', text_format),
             DatasetSpec('dicom', text_format),
             DatasetSpec('directory', directory_format),
             DatasetSpec('zip', directory_format)
         ],
         outputs=[
             DatasetSpec('nifti_gz_from_dicom', text_format),
             DatasetSpec('mrtrix_from_nifti_gz', text_format),
             DatasetSpec('nifti_from_mrtrix', text_format),
             DatasetSpec('directory_from_zip', directory_format),
             DatasetSpec('zip_from_directory', directory_format)
         ],
         desc=("A pipeline that tests out various data format "
               "conversions"),
         version=1,
         citations=[],
     )
     # Convert from DICOM to NIfTI.gz format on input
     nifti_gz_from_dicom = pipeline.create_node(
         IdentityInterface(fields=['file']), "nifti_gz_from_dicom")
     pipeline.connect_input('dicom', nifti_gz_from_dicom, 'file')
     pipeline.connect_output('nifti_gz_from_dicom', nifti_gz_from_dicom,
                             'file')
     # Convert from NIfTI.gz to MRtrix format on output
     mrtrix_from_nifti_gz = pipeline.create_node(
         IdentityInterface(fields=['file']), name='mrtrix_from_nifti_gz')
     pipeline.connect_input('nifti_gz', mrtrix_from_nifti_gz, 'file')
     pipeline.connect_output('mrtrix_from_nifti_gz', mrtrix_from_nifti_gz,
                             'file')
     # Convert from MRtrix to NIfTI format on output
     nifti_from_mrtrix = pipeline.create_node(
         IdentityInterface(fields=['file']), 'nifti_from_mrtrix')
     pipeline.connect_input('mrtrix', nifti_from_mrtrix, 'file')
     pipeline.connect_output('nifti_from_mrtrix', nifti_from_mrtrix, 'file')
     # Convert from zip file to directory format on input
     directory_from_zip = pipeline.create_node(
         IdentityInterface(fields=['file']), 'directory_from_zip')
     pipeline.connect_input('zip', directory_from_zip, 'file')
     pipeline.connect_output('directory_from_zip', directory_from_zip,
                             'file')
     # Convert from directory to zip file format on output
     zip_from_directory = pipeline.create_node(
         IdentityInterface(fields=['file']), 'zip_from_directory')
     pipeline.connect_input('directory', zip_from_directory, 'file')
     pipeline.connect_output('zip_from_directory', zip_from_directory,
                             'file')
     pipeline.assert_connected()
     return pipeline
Example #28
 def track_gen_pipeline(self, **kwargs):
     pipeline = self.create_pipeline(
         name='extract_b0',
         inputs=[
             DatasetSpec('bias_correct', nifti_gz_format),
             DatasetSpec('grad_dirs', fsl_bvecs_format),
             DatasetSpec('bvalues', fsl_bvals_format)
         ],
         outputs=[DatasetSpec('b0', nifti_gz_format)],
         desc="Extract b0 image from a DWI study",
         version=1,
         citations=[mrtrix_cite])
     return pipeline
Example #29
 def extract_b0_pipeline(self, **kwargs):  # @UnusedVariable
     """
     Extracts the b0 images from a DWI study and takes their mean
     """
     pipeline = self.create_pipeline(
         name='extract_b0',
         inputs=[
             DatasetSpec('bias_correct', nifti_gz_format),
             DatasetSpec('grad_dirs', fsl_bvecs_format),
             DatasetSpec('bvalues', fsl_bvals_format)
         ],
         outputs=[DatasetSpec('b0', nifti_gz_format)],
         desc="Extract b0 image from a DWI study",
         version=1,
         citations=[mrtrix_cite],
         **kwargs)
     # Gradient merge node
     fsl_grads = pipeline.create_node(MergeTuple(2), name="fsl_grads")
     # Extraction node
     extract_b0s = pipeline.create_node(ExtractDWIorB0(),
                                        name='extract_b0s',
                                        requirements=[mrtrix3_req])
     extract_b0s.inputs.bzero = True
     extract_b0s.inputs.quiet = True
     # FIXME: Need a registration step before the mean
     # Mean calculation node
     mean = pipeline.create_node(MRMath(),
                                 name="mean",
                                 requirements=[mrtrix3_req])
     mean.inputs.axis = 3
     mean.inputs.operation = 'mean'
     mean.inputs.quiet = True
     # Convert to Nifti
     mrconvert = pipeline.create_node(MRConvert(),
                                      name="output_conversion",
                                      requirements=[mrtrix3_req])
     mrconvert.inputs.out_ext = '.nii.gz'
     mrconvert.inputs.quiet = True
     # Connect inputs
     pipeline.connect_input('bias_correct', extract_b0s, 'in_file')
     pipeline.connect_input('grad_dirs', fsl_grads, 'in1')
     pipeline.connect_input('bvalues', fsl_grads, 'in2')
     # Connect between nodes
     pipeline.connect(extract_b0s, 'out_file', mean, 'in_files')
     pipeline.connect(fsl_grads, 'out', extract_b0s, 'grad_fsl')
     pipeline.connect(mean, 'out_file', mrconvert, 'in_file')
     # Connect outputs
     pipeline.connect_output('b0', mrconvert, 'out_file')
     pipeline.assert_connected()
     # Check inputs/outputs are connected
     return pipeline
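
The extraction and averaging steps map onto MRtrix3's dwiextract and mrmath.
A minimal standalone sketch, assuming Nipype and MRtrix3 are installed; paths
are hypothetical and must point at existing files:

    from nipype.interfaces import mrtrix3

    # Pull out the b=0 volumes, supplying FSL-style gradients.
    extract = mrtrix3.DWIExtract()
    extract.inputs.in_file = 'bias_correct.nii.gz'      # hypothetical
    extract.inputs.bzero = True
    extract.inputs.grad_fsl = ('dwi.bvec', 'dwi.bval')  # hypothetical
    extract.inputs.out_file = 'b0s.nii.gz'
    print(extract.cmdline)

    # Average the extracted b=0 volumes along the 4th (volume) axis.
    mean = mrtrix3.MRMath()
    mean.inputs.in_file = 'b0s.nii.gz'
    mean.inputs.operation = 'mean'
    mean.inputs.axis = 3
    mean.inputs.out_file = 'b0.nii.gz'
    print(mean.cmdline)
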
Example #30
class T1Study(MRIStudy, metaclass=StudyMetaClass):

    add_data_specs = [
        DatasetSpec('fs_recon_all', freesurfer_recon_all_format,
                    'freesurfer_pipeline'),
        DatasetSpec('brain', nifti_gz_format, 'brain_extraction_pipeline')]

    add_parameter_specs = [
        ParameterSpec('bet_robust', True),
        ParameterSpec('bet_f_threshold', 0.57),
        ParameterSpec('bet_g_threshold', -0.1)]
    
    add_switch_specs = [
        SwitchSpec('bet_method', 'optibet',
                   choices=MRIStudy.switch_spec('bet_method').choices)]

    def freesurfer_pipeline(self, **kwargs):
        """
        Segments grey matter, white matter and CSF from T1 images using
        SPM "NewSegment" function.

        NB: Default values come from the W2MHS toolbox
        """
        pipeline = self.create_pipeline(
            name='segmentation',
            inputs=[DatasetSpec('primary', nifti_gz_format)],
            outputs=[DatasetSpec('fs_recon_all',
                                 freesurfer_recon_all_format)],
            desc="Segment white/grey matter and csf",
            version=1,
            citations=copy(freesurfer_cites),
            **kwargs)
        # FS ReconAll node
        recon_all = pipeline.create_node(
            interface=ReconAll(), name='recon_all',
            requirements=[freesurfer_req], wall_time=2000)
        recon_all.inputs.directive = 'all'
        recon_all.inputs.openmp = self.runner.num_processes
        # Wrapper around os.path.join
        join = pipeline.create_node(interface=JoinPath(), name='join')
        pipeline.connect(recon_all, 'subjects_dir', join, 'dirname')
        pipeline.connect(recon_all, 'subject_id', join, 'filename')
        # Connect inputs/outputs
        pipeline.connect_input('primary', recon_all, 'T1_files')
        pipeline.connect_output('fs_recon_all', join, 'path')
        return pipeline

    def segmentation_pipeline(self, **kwargs):
        pipeline = super(T1Study, self).segmentation_pipeline(img_type=1,
                                                              **kwargs)
        return pipeline